hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 11 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 251 | max_stars_repo_name stringlengths 4 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 251 | max_issues_repo_name stringlengths 4 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 251 | max_forks_repo_name stringlengths 4 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.05M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.04M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
3d71fa9e2abe22d155154c76e5151b1d3926e5d7 | 1,410 | py | Python | validate_staging_area.py | DataBiosphere/hca-import-validation | f57710ec05e3b343bac15cc85d372b4ce2fbe15f | [
"Apache-2.0"
] | null | null | null | validate_staging_area.py | DataBiosphere/hca-import-validation | f57710ec05e3b343bac15cc85d372b4ce2fbe15f | [
"Apache-2.0"
] | 11 | 2021-02-17T21:16:36.000Z | 2022-01-14T22:49:27.000Z | validate_staging_area.py | DataBiosphere/hca-import-validation | f57710ec05e3b343bac15cc85d372b4ce2fbe15f | [
"Apache-2.0"
] | 1 | 2021-06-24T15:10:03.000Z | 2021-06-24T15:10:03.000Z | """
Runs a pre-check of a staging area to identify issues that might cause the
snapshot or indexing processes to fail.
"""
import argparse
import sys
from hca.staging_area_validator import StagingAreaValidator
if __name__ == '__main__':
args = _parse_args(sys.argv[1:])
adapter = StagingAreaValidator(
staging_area=args.staging_area,
ignore_dangling_inputs=args.ignore_dangling_inputs,
validate_json=args.validate_json
)
sys.exit(adapter.main())
| 38.108108 | 84 | 0.592199 |
3d7257323cd6a29d01231ce12bd9760e4b104696 | 6,621 | py | Python | spider_service/app/spider/selenium/webdriver.py | seniortesting/python-spider | 0b70817373e2e22267ddf3b80b9b7eb15931e41e | [
"MIT"
] | null | null | null | spider_service/app/spider/selenium/webdriver.py | seniortesting/python-spider | 0b70817373e2e22267ddf3b80b9b7eb15931e41e | [
"MIT"
] | null | null | null | spider_service/app/spider/selenium/webdriver.py | seniortesting/python-spider | 0b70817373e2e22267ddf3b80b9b7eb15931e41e | [
"MIT"
] | null | null | null | # -*- coding:utf-8 -*-
import random
import time
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.keys import Keys
from app.api.util.web_request import WebRequest, USER_AGENT_PC, USER_AGENT_MOBILE
if __name__ == '__main__':
adsenseClick()
| 37.196629 | 144 | 0.564718 |
3d7270ed2ccd3fdf53730944e85357d2c3e72251 | 2,879 | py | Python | Extended Programming Challenges Python/Mnozenie Macierzy/test_main.py | szachovy/School-and-Training | 70f07c0d077da7ba1920d28d881fff7ddcbc37d9 | [
"MIT"
] | null | null | null | Extended Programming Challenges Python/Mnozenie Macierzy/test_main.py | szachovy/School-and-Training | 70f07c0d077da7ba1920d28d881fff7ddcbc37d9 | [
"MIT"
] | null | null | null | Extended Programming Challenges Python/Mnozenie Macierzy/test_main.py | szachovy/School-and-Training | 70f07c0d077da7ba1920d28d881fff7ddcbc37d9 | [
"MIT"
] | null | null | null | import unittest
import main
import re
if __name__ == '__main__':
unittest.main()
| 38.905405 | 118 | 0.632511 |
3d73ea7a25229da399450bef857ee8338b98b235 | 1,210 | py | Python | setup.py | m45t3r/livedumper | f6441283269b4a602cafea3be5cda9446fc64005 | [
"BSD-2-Clause"
] | 17 | 2015-02-10T12:18:22.000Z | 2018-03-23T05:28:51.000Z | setup.py | m45t3r/livedumper | f6441283269b4a602cafea3be5cda9446fc64005 | [
"BSD-2-Clause"
] | 3 | 2015-01-12T17:32:20.000Z | 2016-12-13T23:55:38.000Z | setup.py | m45t3r/livedumper | f6441283269b4a602cafea3be5cda9446fc64005 | [
"BSD-2-Clause"
] | 3 | 2015-02-06T09:58:09.000Z | 2016-01-04T23:46:28.000Z | import os
from setuptools import setup
setup(
name="livedumper",
version="0.3.0",
author="Thiago Kenji Okada",
author_email="thiago.mast3r@gmail.com",
description=("Livestreamer stream dumper"),
license="Simplified BSD",
keywords="video streaming downloader dumper",
url='https://github.com/m45t3r/livedumper',
packages=["livedumper"],
package_dir={"": "src"},
scripts=["src/livedumper_cli/livedumper"],
install_requires=("appdirs", "livestreamer", "requests"),
long_description=read("README.rst"),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"Environment :: Console",
"Operating System :: POSIX",
"Operating System :: Microsoft :: Windows",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Multimedia :: Sound/Audio",
"Topic :: Multimedia :: Video",
"Topic :: Utilities",
],
)
| 31.842105 | 61 | 0.613223 |
3d75f72f3f1eb09ca962b85e8adb34487fcfe9b8 | 2,862 | py | Python | scripts/show_yolo.py | markpp/object_detectors | 8a6cac32ec2d8b578c0d301feceef19390343e85 | [
"MIT"
] | 2 | 2021-03-10T13:13:46.000Z | 2021-03-11T09:03:33.000Z | scripts/show_yolo.py | markpp/object_detectors | 8a6cac32ec2d8b578c0d301feceef19390343e85 | [
"MIT"
] | null | null | null | scripts/show_yolo.py | markpp/object_detectors | 8a6cac32ec2d8b578c0d301feceef19390343e85 | [
"MIT"
] | null | null | null | import os
import argparse
import numpy as np
import csv
import cv2
img_w = 0
img_h = 0
if __name__ == "__main__":
"""
Command:
python show_yolo.py -g
"""
# construct the argument parser and parse the arguments
ap = argparse.ArgumentParser()
ap.add_argument("-g", "--gt", type=str,
help="Path to gt bb .txt")
args = vars(ap.parse_args())
img_path = args["gt"].replace("txt", "png")
img = cv2.imread(img_path,-1)
if len(img.shape) < 3:
img = cv2.cvtColor(img, cv2.COLOR_GRAY2BGR)
# start a new yolo txt file with name of image
boxes = get_bbs_from_file(args["gt"])
img = map_bbs_to_img(img, boxes)
'''
if img.shape[0] > img.shape[1]:
img, _ = ResizeWithAspectRatio(img, height=1400)
else:
img, _ = ResizeWithAspectRatio(img, width=1400)
'''
'''
print(img.shape)
img_h, img_w = img.shape[1], img.shape[0]
boxes = []
lines = []
with open(args["gt"]) as f:
lines = f.read().splitlines()
for line in lines:
cl, c_x, c_y, w, h = line.split(' ')
boxes.append(relativ2pixel([float(c_x), float(c_y), float(w), float(h)], img_w, img_h))
for box in boxes:
print(box)
cv2.rectangle(img, (box[0],box[1]), (box[0]+box[2],box[1]+box[3]), (0,255,0), 1)
'''
cv2.putText(img, os.path.basename(img_path), (10,40), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 0, 0), 1, cv2.LINE_AA)
cv2.imshow("output",img[-400:,:])
key = cv2.waitKey()
| 28.336634 | 123 | 0.589099 |
3d770d3cc83356d38e93ea226253df080988393a | 8,687 | py | Python | xp/build/scripts/gg_post_process_xcode_project.py | vladcorneci/golden-gate | fab6e11c4df942c6a915328d805d3265f9ccc8e0 | [
"Apache-2.0"
] | 262 | 2020-05-05T21:25:17.000Z | 2022-03-22T09:11:15.000Z | xp/build/scripts/gg_post_process_xcode_project.py | vladcorneci/golden-gate | fab6e11c4df942c6a915328d805d3265f9ccc8e0 | [
"Apache-2.0"
] | 22 | 2020-05-07T21:20:42.000Z | 2022-02-25T02:44:50.000Z | xp/build/scripts/gg_post_process_xcode_project.py | vladcorneci/golden-gate | fab6e11c4df942c6a915328d805d3265f9ccc8e0 | [
"Apache-2.0"
] | 18 | 2020-05-06T07:21:43.000Z | 2022-02-08T09:49:23.000Z | #! /urs/bin/env python
# Copyright 2017-2020 Fitbit, Inc
# SPDX-License-Identifier: Apache-2.0
#####################################################################
# This script post-processes the XCode project generated
# by CMake, so that it no longer contains absolute paths.
# It also remaps UUIDs so that they are stable across invocations
# of this script, which allows the generated project to be put under
# source code control.
#####################################################################
#####################################################################
# Imports
#####################################################################
import sys
import re
import os
import shutil
#####################################################################
# Constants
#####################################################################
XCODE_PROJECT_FILE_NAME = "project.pbxproj"
#####################################################################
#####################################################################
#####################################################################
#####################################################################
# Even after making paths relative, we still have some include paths
# path point to CMake-generated directories.
# They have the form: xp/build/cmake/<platform>
# We replace them by an equivalent, pointing to the `generated` subdir
# of xp/build
#####################################################################
#####################################################################
#####################################################################
#####################################################################
# main
#####################################################################
if __name__ == '__main__':
sys.exit(main())
| 44.778351 | 138 | 0.610568 |
3d780dd389a1180a4ebe2e338ba4584066d6c9fa | 3,091 | py | Python | scripts/US-visa-early-appointment.py | atb00ker/scripts-lab | 71a5cc9c7f301c274798686db4a227e84b65926a | [
"MIT"
] | 2 | 2020-03-16T17:18:20.000Z | 2020-10-19T05:11:19.000Z | scripts/US-visa-early-appointment.py | atb00ker/scripts-lab | 71a5cc9c7f301c274798686db4a227e84b65926a | [
"MIT"
] | null | null | null | scripts/US-visa-early-appointment.py | atb00ker/scripts-lab | 71a5cc9c7f301c274798686db4a227e84b65926a | [
"MIT"
] | null | null | null | #!/bin/python3
# Application for getting early US visa interview:
# The tool will Scrape the CGI website and check
# available date before the current appointment date,
# if a date is available, the program will beep.
# NOTE: SET THESE GLOBAL VARIABLES BEFORE USE
# COOKIE: After you login, there is a `cookie`
# header send in your request, paste
# the value of that variable here.
# CURRENT_APPOINTMENT_DATE: Date you've currently have for embassy.
# CURRENT_VAC_DATE: Date you current have for VAC appointment.
import subprocess
import time
import os
# For users to change
CURRENT_APPOINTMENT_DATE = "March 22, 2019"
CURRENT_VAC_DATE = "March 11, 2019"
COOKIE = ""
# For developer usage only
AGENT = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.96 Safari/537.36"
SED_COMMAND = "'s/First Available Appointment Is \w* //p'"
if __name__ == "__main__":
reqModprobe()
checkAppointmentTime()
| 38.160494 | 413 | 0.591071 |
3d7952d5919e3aadff896edcbf8705b6c7253f29 | 3,883 | py | Python | src/misc_utils.py | wr339988/TencentAlgo19 | 6506bc47dbc301018064e96cd1e7528609b5cb6c | [
"Apache-2.0"
] | null | null | null | src/misc_utils.py | wr339988/TencentAlgo19 | 6506bc47dbc301018064e96cd1e7528609b5cb6c | [
"Apache-2.0"
] | 4 | 2021-04-08T16:38:32.000Z | 2021-04-12T08:36:59.000Z | src/misc_utils.py | wr339988/TencentAlgo19 | 6506bc47dbc301018064e96cd1e7528609b5cb6c | [
"Apache-2.0"
] | 1 | 2021-04-02T11:09:05.000Z | 2021-04-02T11:09:05.000Z | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Generally useful utility functions."""
from __future__ import print_function
import codecs
import collections
import json
import math
import os
import sys
import time
import numpy as np
import tensorflow as tf
import pandas as pd
def print_time(s, start_time):
"""Take a start time, print elapsed duration, and return a new time."""
print("%s, time %ds, %s." % (s, (time.time() - start_time), time.ctime()))
sys.stdout.flush()
return time.time()
def print_out(s, f=None, new_line=True):
"""Similar to print but with support to flush and output to a file."""
if isinstance(s, bytes):
s = s.decode("utf-8")
if f:
f.write(s.encode("utf-8"))
if new_line:
f.write(b"\n")
# stdout
out_s = s.encode("utf-8")
if not isinstance(out_s, str):
out_s = out_s.decode("utf-8")
print(out_s, end="", file=sys.stdout)
if new_line:
sys.stdout.write("\n")
sys.stdout.flush()
def print_hparams(hparams, skip_patterns=None, header=None):
"""Print hparams, can skip keys based on pattern."""
if header: print_out("%s" % header)
values = hparams.values()
for key in sorted(values.keys()):
if not skip_patterns or all(
[skip_pattern not in key for skip_pattern in skip_patterns]):
print_out(" %s=%s" % (key, str(values[key])))
def normalize(inputs, epsilon=1e-8):
'''
Applies layer normalization
Args:
inputs: A tensor with 2 or more dimensions
epsilon: A floating number to prevent Zero Division
Returns:
A tensor with the same shape and data dtype
'''
inputs_shape = inputs.get_shape()
params_shape = inputs_shape[-1:]
mean, variance = tf.nn.moments(inputs, [-1], keep_dims=True)
beta = tf.Variable(tf.zeros(params_shape))
gamma = tf.Variable(tf.ones(params_shape))
normalized = (inputs - mean) / ((variance + epsilon) ** (.5))
outputs = gamma * normalized + beta
return outputs | 33.188034 | 112 | 0.64409 |
3d7a603d1af477e68cfea29362bbe8cb1160699c | 10,713 | py | Python | custom/icds_reports/ucr/tests/test_infra_form_ucr.py | rochakchauhan/commcare-hq | aa7ab3c2d0c51fe10f2b51b08101bb4b5a376236 | [
"BSD-3-Clause"
] | 1 | 2020-07-14T13:00:23.000Z | 2020-07-14T13:00:23.000Z | custom/icds_reports/ucr/tests/test_infra_form_ucr.py | rochakchauhan/commcare-hq | aa7ab3c2d0c51fe10f2b51b08101bb4b5a376236 | [
"BSD-3-Clause"
] | 1 | 2021-06-02T04:45:16.000Z | 2021-06-02T04:45:16.000Z | custom/icds_reports/ucr/tests/test_infra_form_ucr.py | rochakchauhan/commcare-hq | aa7ab3c2d0c51fe10f2b51b08101bb4b5a376236 | [
"BSD-3-Clause"
] | null | null | null | from mock import patch
from custom.icds_reports.ucr.tests.test_base_form_ucr import BaseFormsTest
| 39.677778 | 74 | 0.493419 |
3d7ab6cf1374f5cd2e87a03c6e24173bb82d35b7 | 2,898 | py | Python | uq_benchmark_2019/imagenet/end_to_end_test.py | deepneuralmachine/google-research | d2ce2cf0f5c004f8d78bfeddf6e88e88f4840231 | [
"Apache-2.0"
] | 23,901 | 2018-10-04T19:48:53.000Z | 2022-03-31T21:27:42.000Z | uq_benchmark_2019/imagenet/end_to_end_test.py | deepneuralmachine/google-research | d2ce2cf0f5c004f8d78bfeddf6e88e88f4840231 | [
"Apache-2.0"
] | 891 | 2018-11-10T06:16:13.000Z | 2022-03-31T10:42:34.000Z | uq_benchmark_2019/imagenet/end_to_end_test.py | deepneuralmachine/google-research | d2ce2cf0f5c004f8d78bfeddf6e88e88f4840231 | [
"Apache-2.0"
] | 6,047 | 2018-10-12T06:31:02.000Z | 2022-03-31T13:59:28.000Z | # coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python2, python3
"""End-to-end test for ImageNet.
Tests for imagenet.resnet50_train, run_predict, run_temp_scaling, and
run_metrics. Real data doesn't work under blaze, so execute the test binary
directly.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tempfile
from absl import flags
from absl.testing import absltest
from absl.testing import parameterized
import tensorflow.compat.v2 as tf
from uq_benchmark_2019.imagenet import resnet50_train # pylint: disable=line-too-long
from uq_benchmark_2019.imagenet import run_metrics
from uq_benchmark_2019.imagenet import run_predict
from uq_benchmark_2019.imagenet import run_temp_scaling
gfile = tf.io.gfile
flags.DEFINE_bool('fake_data', True, 'Use dummy random data.')
flags.DEFINE_bool('fake_training', True, 'Train with trivial number of steps.')
DATA_NAMES = ['train', 'test', 'corrupt-static-gaussian_noise-2', 'celeb_a']
METHODS = ['vanilla', 'll_dropout', 'll_svi', 'dropout']
if __name__ == '__main__':
absltest.main()
| 35.341463 | 118 | 0.733954 |
3d7b2d7375396a8c241a8c99281ec5431deb5055 | 1,257 | py | Python | tests/windows/get_physicaldisk/test_getting_unique_ids_from_output.py | Abd-Elrazek/InQRy | ab9d19a737a41673e8dcc419d49ca0e96476d560 | [
"MIT"
] | 37 | 2017-05-12T02:32:26.000Z | 2019-05-03T14:43:08.000Z | tests/windows/get_physicaldisk/test_getting_unique_ids_from_output.py | Abd-Elrazek/InQRy | ab9d19a737a41673e8dcc419d49ca0e96476d560 | [
"MIT"
] | 11 | 2017-08-27T03:36:18.000Z | 2018-10-28T01:31:12.000Z | tests/windows/get_physicaldisk/test_getting_unique_ids_from_output.py | Abd-Elrazek/InQRy | ab9d19a737a41673e8dcc419d49ca0e96476d560 | [
"MIT"
] | 15 | 2019-06-13T11:29:12.000Z | 2022-02-28T06:40:14.000Z | from inqry.system_specs import win_physical_disk
UNIQUE_ID_OUTPUT = """
UniqueId
--------
{256a2559-ce63-5434-1bee-3ff629daa3a7}
{4069d186-f178-856e-cff3-ba250c28446d}
{4da19f06-2e28-2722-a0fb-33c02696abcd}
50014EE20D887D66
eui.0025384161B6798A
5000C5007A75E216
500A07510F1A545C
ATA LITEONIT LMT-256M6M mSATA 256GB TW0XXM305508532M0705
IDE\Diskpacker-virtualbox-iso-1421140659-disk1__F.R7BNPC\5&1944dbef&0&0.0.0:vagrant-2012-r2
"""
| 43.344828 | 125 | 0.638823 |
3d7ca16d1d0cb0fd5ce512de12142e0f598017a2 | 572 | py | Python | app/models/link.py | aries-zhang/flask-template | 369d77f2910f653f46668dd9bda735954b6c145e | [
"MIT"
] | null | null | null | app/models/link.py | aries-zhang/flask-template | 369d77f2910f653f46668dd9bda735954b6c145e | [
"MIT"
] | null | null | null | app/models/link.py | aries-zhang/flask-template | 369d77f2910f653f46668dd9bda735954b6c145e | [
"MIT"
] | null | null | null | import time # NOQA
from app import db
| 27.238095 | 63 | 0.63986 |
3d7e43dc6fabcfe8138a99da18574265d9a525c8 | 1,786 | py | Python | pyopenproject/business/services/command/priority/find_all.py | webu/pyopenproject | 40b2cb9fe0fa3f89bc0fe2a3be323422d9ecf966 | [
"MIT"
] | 5 | 2021-02-25T15:54:28.000Z | 2021-04-22T15:43:36.000Z | pyopenproject/business/services/command/priority/find_all.py | webu/pyopenproject | 40b2cb9fe0fa3f89bc0fe2a3be323422d9ecf966 | [
"MIT"
] | 7 | 2021-03-15T16:26:23.000Z | 2022-03-16T13:45:18.000Z | pyopenproject/business/services/command/priority/find_all.py | webu/pyopenproject | 40b2cb9fe0fa3f89bc0fe2a3be323422d9ecf966 | [
"MIT"
] | 6 | 2021-06-18T18:59:11.000Z | 2022-03-27T04:58:52.000Z | from pyopenproject.api_connection.exceptions.request_exception import RequestError
from pyopenproject.api_connection.requests.get_request import GetRequest
from pyopenproject.business.exception.business_error import BusinessError
from pyopenproject.business.services.command.find_list_command import FindListCommand
from pyopenproject.business.services.command.priority.priority_command import PriorityCommand
from pyopenproject.business.util.filters import Filters
from pyopenproject.business.util.url import URL
from pyopenproject.business.util.url_parameter import URLParameter
from pyopenproject.model.priority import Priority
| 49.611111 | 93 | 0.594625 |
3d7f09d4c114419bab9ec9c8e10674cc7fff831b | 1,745 | py | Python | photos/tests/test_views.py | AndreasMilants/django-photos | 721c2515879a424333859ac48f65d6382b7a48d4 | [
"BSD-3-Clause"
] | null | null | null | photos/tests/test_views.py | AndreasMilants/django-photos | 721c2515879a424333859ac48f65d6382b7a48d4 | [
"BSD-3-Clause"
] | null | null | null | photos/tests/test_views.py | AndreasMilants/django-photos | 721c2515879a424333859ac48f65d6382b7a48d4 | [
"BSD-3-Clause"
] | null | null | null | from django.test import TestCase
from django.urls import reverse_lazy
from ..models import PHOTO_MODEL, UploadedPhotoModel, IMAGE_SIZES
from .model_factories import get_image_file, get_zip_file
import time
from uuid import uuid4
| 39.659091 | 109 | 0.719198 |
3d81056b0a805d88fa50d75883361df24c0f7eae | 16,756 | py | Python | app.py | cherishsince/PUBG_USB | f9b06d213a0fe294afe4cf2cf6dccce4bb363062 | [
"MulanPSL-1.0"
] | 46 | 2020-07-04T13:33:40.000Z | 2022-03-29T13:42:29.000Z | app.py | kiminh/PUBG_USB | f3a1fa1aedce751fc48aeefd60699a1f02a29a70 | [
"MulanPSL-1.0"
] | 1 | 2020-09-01T01:58:29.000Z | 2020-09-06T11:45:46.000Z | app.py | kiminh/PUBG_USB | f3a1fa1aedce751fc48aeefd60699a1f02a29a70 | [
"MulanPSL-1.0"
] | 21 | 2020-07-08T07:53:56.000Z | 2022-02-02T23:43:56.000Z | import os
import time
from PIL import Image
import pyscreenshot as ImageGrab
import resource
from drive import box_drive64
from util import image_util, data_config_parser
from util.data_parser import read_data
from weapon import weapon, page_check, weapon_selection, left_right_correction
import environment
import threadpool
import threading
import pythoncom
import PyHook3
import logging
from util import common
#
_executor = environment.env.executor
#
_identifying_parts = []
# init
_lib, _handle, _init_data, _init_weapon_name_data = -1, -1, [], {}
#
_config_data = {}
# -
_current_config_data = None
_current_parts = None
#
_has_shoot = False
_shoot_task = None
# tab tab
_has_tab_open = False
#
_has_open_selection = False
#
_has_selection = False
#
_has_identification = False
#
_weapon_select = 1
# count
_shoot_count = 0
#
_shoot_correction = 0
#
_capture_image = None
def onMouseEvent(event):
"""
:param event:
:return:
"""
global _has_shoot, _executor, _shoot_task, _has_identification
# 522
#
if event.Message == 513:
logging.debug(" 513 -> {}".format(event.MessageName))
_has_shoot = True
#
# if _has_identification:
# _shoot_task = _executor.submit(handle_shoot_correction)
# _shoot_task = _executor.submit(handle_control_shoot)
#
elif event.Message == 514:
logging.debug(" 514 -> {}".format(event.MessageName))
_has_shoot = False
if _shoot_task is not None:
print('....')
#
elif event.Message == 516:
logging.debug(" 516 -> {}".format(event.MessageName))
#
elif event.Message == 517:
logging.debug(" 517 -> {}".format(event.MessageName))
else:
pass
return True
def onKeyboardEvent(event):
"""
:param event:
:return:
"""
global _has_tab_open, _executor
keyid = event.KeyID
# 1 492 503 51
if keyid == 9:
# tab
logging.debug('tab ')
#
if not _has_tab_open:
_has_tab_open = True
_executor.submit(handle_tab)
if keyid == 49 or keyid == 50 or keyid == 51:
print('123')
#
# if not _has_open_selection:
# _executor.submit(handle_weapon_select)
else:
pass
return True
def handle_capture_image():
"""
0.03366827964782715
0.03325605392456055
0.03352046012878418
0.033231496810913086
0.033119916915893555
0.034018754959106445
:return:
"""
global _capture_image
while 1:
_capture_image = image_util.capture(None)
time.sleep(0.2)
"""
/////////////
/////////////
"""
def handle_tab():
"""
tab
:return:
"""
global _identifying_parts, _lib, _handle, _init_data, _init_weapon_name_data, \
_has_tab_open, _config_data, _has_identification, _executor, _capture_image
# time.sleep(0.5)
# image = image_util.capture()
try:
#
# image = image_util.capture(None)
package_positions = page_check.package_positions()
package_position_images = page_check.package_positions_images(_capture_image, package_positions)
has_package_page = page_check.has_package_page(package_position_images)
#
# image_util.drawing_line(image, package_positions)
# image.show()
# package_position_images[0].show()
# return
print(' {}'.format(has_package_page))
if not has_package_page:
return
#
main_positions = weapon.main_weapon_parts_positions()
main_parts_images = weapon.get_weapon_parts(_capture_image, main_positions)
#
now = time.time()
identifying_parts = weapon.identifying_parts(_init_data, _init_weapon_name_data, main_parts_images)
print(identifying_parts)
if len(identifying_parts) <= 0:
print(' !')
return
_identifying_parts = identifying_parts
print(" {}".format(time.time() - now))
#
_has_identification = True
#
# _executor.submit(handle_weapon_select)
except Exception as e:
print(e)
finally:
#
_has_tab_open = False
def capture_selection():
"""
:return:
"""
#
if environment.is_debug():
# path = resource.resource_path(os.path.join('img', 'screenshot', '20190413085144_2.jpg'))
# image = Image.open(path)
image = ImageGrab.grab()
else:
image = ImageGrab.grab()
return image
def handle_weapon_select():
"""
:return:
"""
global _identifying_parts, _lib, _handle, _init_data, _init_weapon_name_data, \
_has_tab_open, _config_data, _current_config_data, _current_parts, \
_has_open_selection, _has_selection, _capture_image
weapon_positions = weapon_selection.weapon_positions()
while True:
try:
#
# image = capture_selection()
weapon_images = weapon_selection.weapon_selection_images(_capture_image, weapon_positions)
weapon_index = weapon_selection.get_selection(weapon_images)
# logging.info('! {}'.format(weapon_index))
if weapon_index is None:
# 0.1
_has_selection = False
time.sleep(0.6)
logging.debug('!')
# print('')
continue
logging.info('! {}'.format(weapon_index))
# -
index = 0
for parts_info in _identifying_parts:
index = index + 1
if weapon_index != index:
continue
if parts_info['name'] is None:
continue
weapon_config_data = _config_data[parts_info['name']]
if weapon_config_data is None:
logging.info(' {}', parts_info)
#
_current_parts = parts_info
_current_config_data = weapon_config_data
_has_open_selection = False
_has_selection = True
break
# 0.1
time.sleep(0.6)
except Exception as e:
print(e)
def handle_shoot_correction():
"""
:return:
"""
global _lib, _handle, _init_data, _init_weapon_name_data, _has_identification, \
_has_shoot, _current_config_data, _current_parts, _shoot_count, _shoot_correction
correction_positions = left_right_correction.get_positions()
# 0
_shoot_correction = 0
#
corr_first_diff = None
while True:
#
if not _has_identification:
time.sleep(0.1)
continue
# continue
if not _has_shoot:
time.sleep(0.1)
# 0
_shoot_correction = 0
#
corr_first_diff = None
continue
now1 = time.time()
overtime = None
#
has_left_right_correction = _current_config_data.left_right_correction
#
speed = _current_config_data.speed
if has_left_right_correction == 1:
overtime = now1 + speed - 0.01
if overtime is None:
logging.debug('error ')
now = time.time()
#
# image = image_util.capture(None)
image = _capture_image
if corr_first_diff is None:
position_images = left_right_correction.get_position_images(image, correction_positions)
corr_first_1, corr_first_2 = left_right_correction.correction(position_images)
corr_first_diff = corr_first_1 + corr_first_2
else:
#
position_images = left_right_correction.get_position_images(image, correction_positions)
corr_first_1, corr_first_2 = left_right_correction.correction(position_images)
corr_diff = corr_first_1 + corr_first_2
x_diff = corr_first_diff - corr_diff
#
if x_diff < 0:
_shoot_correction = abs(x_diff)
elif x_diff > 0:
_shoot_correction = -abs(x_diff)
# sleep
now2 = time.time()
while True:
time.sleep(0.005)
if overtime <= time.time():
break
logging.info(' {} {} {}'.format(now2 - now, time.time() - now, _shoot_correction))
def handle_control_shoot():
"""
:return:
"""
global _lib, _handle, _init_data, _init_weapon_name_data, _has_identification, \
_has_shoot, _current_config_data, _current_parts, _shoot_count, \
_shoot_correction, _has_selection, _capture_image
try:
while True:
#
if not _has_identification:
time.sleep(0.1)
continue
# continue
if not _has_shoot:
time.sleep(0.1)
_shoot_count = 0
continue
#
if _current_config_data is None:
time.sleep(0.1)
print('_current_config_data')
continue
if not _has_selection:
time.sleep(0.1)
print('_has_selection')
continue
y = 0
x = 0
# _shoot_count
now = time.time()
#
overtime1 = None
overtime2 = None
#
shoot_images = page_check.shoot_images(_capture_image, page_check.shoot_positions())
has_shoot = page_check.check_shoot(shoot_images)
if not has_shoot:
time.sleep(0.1)
continue
#
has_left_right_correction = _current_config_data.left_right_correction
#
speed = _current_config_data.speed
if has_left_right_correction == 1:
overtime1 = now + speed - 0.02
overtime2 = now + speed
#
stance_images = page_check.stance_images(_capture_image, page_check.stance_positions())
stance = page_check.check_stance(stance_images)
if stance is None:
stance = 'stand'
shoot_type = stance
# 1
parts5_value = _current_parts['parts5']
if parts5_value is None:
parts5_value = 1
sight = _current_config_data.sight
shoot_type_data = sight[shoot_type]
has_parts5_value = common.arr_contain(shoot_type_data.keys(), str(parts5_value))
if has_parts5_value:
shoot_type_data2 = shoot_type_data[str(parts5_value)]
y = y + mouse_calc_config_data(_shoot_count, shoot_type_data2)
#
parts1_values = _current_parts['parts1']
if parts1_values is not None:
muzzle = _current_config_data.muzzle
muzzle_type_data = muzzle[shoot_type]
has_muzzle_type_data = common.arr_contain(muzzle_type_data.keys(), str(parts1_values))
if has_muzzle_type_data:
muzzle_type_data2 = muzzle_type_data[parts1_values]
y = y + mouse_calc_config_data(_shoot_count, muzzle_type_data2)
#
parts2_values = _current_parts['parts2']
if parts2_values is not None:
grip = _current_config_data.grip
grip_type_data = grip[shoot_type]
has_grip_type_data = common.arr_contain(grip_type_data.keys(), str(parts2_values))
if has_grip_type_data:
grip_type_data2 = grip_type_data[parts2_values]
y = y + mouse_calc_config_data(_shoot_count, grip_type_data2)
#
parts4_values = _current_parts['parts4']
if parts4_values is not None:
butt = _current_config_data.butt
butt_type_data = butt[shoot_type]
has_butt_type_data = common.arr_contain(butt_type_data.keys(), str(parts2_values))
if has_butt_type_data:
butt_type_data2 = butt_type_data[parts2_values]
y = y + mouse_calc_config_data(_shoot_count, butt_type_data2)
# sleep
while 1:
# now9 = time.time()
time.sleep(0.001)
# print('{}'.format(time.time() - now9))
if overtime1 <= time.time():
break
#
x = _shoot_correction
box_drive64.mouse_move_r(_lib, _handle, x, y)
_shoot_count = _shoot_count + 1
# sleep
while 1:
# now9 = time.time()
time.sleep(0.001)
# print('{}'.format(time.time() - now9))
if overtime2 <= time.time():
break
logging.info(" {} x {} y {} {} :{}"
.format(_shoot_count - 1, x, y, shoot_type, time.time() - now))
except Exception as e:
print(e)
finally:
print('finally')
def mouse_calc_config_data(count, data_arr):
"""
data_config data
:return:
"""
for i in range(len(data_arr)):
data = data_arr[len(data_arr) - 1 - i]
max_count = data[0]
move_speed = data[1]
if count >= max_count:
# print("move_speed {}", move_speed)
return move_speed
return 0
"""
/////////////
/////////////
"""
if __name__ == '__main__':
try:
# path = resource.resource_path(os.path.join('img', 'screenshot', '20190413085144_2.jpg'))
# print('path {}'.format(path))
# Image.open(path).show()
#
if environment.is_debug():
logging.basicConfig(level=logging.INFO)
else:
logging.basicConfig(level=logging.INFO)
#
lib, handle, init_data, init_weapon_name_data = init()
#
# _executor.submit(handle_shoot_correction)
# logging.info('!')
_executor.submit(handle_control_shoot)
logging.info('-!')
logging.info('-0!')
logging.info('-!')
#
_executor.submit(handle_weapon_select)
logging.info('-!')
#
_executor.submit(handle_capture_image)
logging.info('-!')
#
hm = PyHook3.HookManager()
hm.KeyDown = onKeyboardEvent
hm.HookKeyboard()
hm.MouseAll = onMouseEvent
hm.HookMouse()
pythoncom.PumpMessages()
except Exception as e:
print(e)
finally:
os.system('pause') | 28.691781 | 107 | 0.59334 |
3d81143199d30bf1afb752289d20dfe6d3a3f506 | 16,009 | py | Python | src/dataset-dl.py | Mokuichi147/dataset-dl | e669243ccd2d64aa5ccbdd17b430e3d130bb13cd | [
"Apache-2.0",
"MIT"
] | null | null | null | src/dataset-dl.py | Mokuichi147/dataset-dl | e669243ccd2d64aa5ccbdd17b430e3d130bb13cd | [
"Apache-2.0",
"MIT"
] | 2 | 2022-01-01T16:56:58.000Z | 2022-02-27T14:32:32.000Z | src/dataset-dl.py | Mokuichi147/dataset-dl | e669243ccd2d64aa5ccbdd17b430e3d130bb13cd | [
"Apache-2.0",
"MIT"
] | null | null | null | from concurrent.futures import ALL_COMPLETED, ThreadPoolExecutor, as_completed
import csv
import dearpygui.dearpygui as dpg
from os.path import isfile, isdir, join
import pyperclip
import subprocess
import sys
from tempfile import gettempdir
from traceback import print_exc
import core
import extruct
import utilio
from pytube import YouTube, Playlist
import ffmpeg
if sys.platform == 'darwin':
from tkinter import Tk
from tkinter.filedialog import askdirectory, askopenfilename
save_dir_dialog_mac = False
load_csv_dialog_mac = False
tkinter_root = Tk()
tkinter_root.withdraw()
dpg.create_context()
APPNAME = 'dataset-dl'
TEMPDIR = join(gettempdir(), APPNAME)
MAXWOREKR = 20
TAGS = []
if sys.platform == 'darwin':
else:
with dpg.font_registry():
with dpg.font(extruct.get_fullpath(join('resources', 'fonts', 'NotoSansJP-Regular.otf')), 22) as default_font:
dpg.add_font_range_hint(dpg.mvFontRangeHint_Default)
dpg.add_font_range_hint(dpg.mvFontRangeHint_Japanese)
with open(extruct.get_fullpath(join('resources', 'fonts', 'OFL.txt')), 'r', encoding='utf-8') as f:
font_license = f.read()
with dpg.window(tag='Primary Window'):
dpg.bind_font(default_font)
with dpg.menu_bar():
with dpg.menu(label='License'):
dpg.add_text('NotoSansJP-Regular')
dpg.add_input_text(default_value=font_license, multiline=True, readonly=True)
dpg.add_text('Save Directory')
with dpg.group(horizontal=True):
dpg.add_checkbox(default_value=False, enabled=False, tag='save_dir_check')
dpg.add_input_text(callback=check_save_dir, tag='save_dir_path')
dpg.add_button(label='Select', tag='save_dir_button', callback=save_dir_dialog)
TAGS.append('save_dir_path')
TAGS.append('save_dir_button')
dpg.add_spacer(height=10)
dpg.add_text('Quality')
dpg.add_radio_button(
[quality_mode.text for quality_mode in core.QualityMode],
tag = 'quality_radio',
default_value = core.QualityMode.HIGH.text,
horizontal = True
)
TAGS.append('quality_radio')
dpg.add_spacer(height=10)
dpg.add_text('Mode')
with dpg.tab_bar():
with dpg.tab(label='Video OR Playlist URL', tag='url_tab'):
with dpg.group(horizontal=True):
dpg.add_checkbox(default_value=False, enabled=False, tag='url_check')
dpg.add_input_text(callback=check_url, tag='url')
dpg.add_button(label='Paste', tag='url_paste_button', callback=paste_url)
dpg.add_button(label='Run', tag='url_run_button', callback=run_url)
TAGS.append('url')
TAGS.append('url_paste_button')
TAGS.append('url_run_button')
with dpg.tab(label='CSV File', tag='csv_tab'):
with dpg.group(horizontal=True):
dpg.add_checkbox(default_value=False, enabled=False, tag='csv_path_check')
dpg.add_input_text(callback=check_csv_path, tag='csv_path')
dpg.add_button(label='Select', tag='csv_path_button', callback=load_csv_dialog)
dpg.add_button(label='Run', tag='csv_run_button', callback=run_csv)
TAGS.append('csv_path')
TAGS.append('csv_path_button')
TAGS.append('csv_run_button')
utilio.create_workdir(TEMPDIR)
icon = extruct.get_fullpath(join('resources', 'dataset-dl.ico')) if sys.platform == 'win32' else ''
dpg.create_viewport(title=APPNAME, width=1000, height=500, large_icon=icon)
dpg.setup_dearpygui()
dpg.show_viewport()
dpg.set_primary_window('Primary Window', True)
if not sys.platform == 'darwin':
dpg.start_dearpygui()
else:
while dpg.is_dearpygui_running():
dpg.render_dearpygui_frame()
if save_dir_dialog_mac:
save_dir = askdirectory()
if save_dir != '':
dpg.set_value('save_dir_path', save_dir)
check_save_dir()
save_dir_dialog_mac = False
elif load_csv_dialog_mac:
load_csv = askopenfilename(filetypes=[('', '.csv')])
if load_csv != '':
dpg.set_value('csv_path', load_csv)
check_csv_path()
load_csv_dialog_mac = False
tkinter_root.destroy()
dpg.destroy_context()
utilio.delete_workdir(TEMPDIR) | 37.757075 | 151 | 0.639578 |
3d82652d7d5f527c23d139f61d27dabd1f54a20e | 3,813 | py | Python | src/robot/parsing/parser/parser.py | bhirsz/robotframework | d62ee5091ed932aee8fc12ae5e340a5b19288f05 | [
"ECL-2.0",
"Apache-2.0"
] | 7,073 | 2015-01-01T17:19:16.000Z | 2022-03-31T22:01:29.000Z | src/robot/parsing/parser/parser.py | bhirsz/robotframework | d62ee5091ed932aee8fc12ae5e340a5b19288f05 | [
"ECL-2.0",
"Apache-2.0"
] | 2,412 | 2015-01-02T09:29:05.000Z | 2022-03-31T13:10:46.000Z | src/robot/parsing/parser/parser.py | bhirsz/robotframework | d62ee5091ed932aee8fc12ae5e340a5b19288f05 | [
"ECL-2.0",
"Apache-2.0"
] | 2,298 | 2015-01-03T02:47:15.000Z | 2022-03-31T02:00:16.000Z | # Copyright 2008-2015 Nokia Networks
# Copyright 2016- Robot Framework Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ..lexer import Token, get_tokens, get_resource_tokens, get_init_tokens
from ..model import Statement
from .fileparser import FileParser
def get_model(source, data_only=False, curdir=None):
"""Parses the given source to a model represented as an AST.
How to use the model is explained more thoroughly in the general
documentation of the :mod:`robot.parsing` module.
:param source: The source where to read the data. Can be a path to
a source file as a string or as ``pathlib.Path`` object, an already
opened file object, or Unicode text containing the date directly.
Source files must be UTF-8 encoded.
:param data_only: When ``False`` (default), returns all tokens. When set
to ``True``, omits separators, comments, continuation markers, and
other non-data tokens. Model like this cannot be saved back to
file system.
:param curdir: Directory where the source file exists. This path is used
to set the value of the built-in ``${CURDIR}`` variable during parsing.
When not given, the variable is left as-is. Should only be given
only if the model will be executed afterwards. If the model is saved
back to disk, resolving ``${CURDIR}`` is typically not a good idea.
Use :func:`get_resource_model` or :func:`get_init_model` when parsing
resource or suite initialization files, respectively.
"""
return _get_model(get_tokens, source, data_only, curdir)
def get_resource_model(source, data_only=False, curdir=None):
"""Parses the given source to a resource file model.
Otherwise same as :func:`get_model` but the source is considered to be
a resource file. This affects, for example, what settings are valid.
"""
return _get_model(get_resource_tokens, source, data_only, curdir)
def get_init_model(source, data_only=False, curdir=None):
"""Parses the given source to a init file model.
Otherwise same as :func:`get_model` but the source is considered to be
a suite initialization file. This affects, for example, what settings are
valid.
"""
return _get_model(get_init_tokens, source, data_only, curdir)
| 38.515152 | 79 | 0.702072 |
3d83dae1b7cb47bf096db3ece76a46efed3fa5a8 | 1,835 | py | Python | astronomy_datamodels/tags/fixed_location.py | spacetelescope/astronomy_datamodels | ca5db82d5982781ea763cef9851d4c982fd86328 | [
"BSD-3-Clause"
] | 1 | 2019-03-08T03:06:43.000Z | 2019-03-08T03:06:43.000Z | astronomy_datamodels/tags/fixed_location.py | spacetelescope/astronomy_datamodels | ca5db82d5982781ea763cef9851d4c982fd86328 | [
"BSD-3-Clause"
] | 1 | 2020-10-29T19:54:28.000Z | 2020-10-29T19:54:28.000Z | astronomy_datamodels/tags/fixed_location.py | spacetelescope/astronomy_datamodels | ca5db82d5982781ea763cef9851d4c982fd86328 | [
"BSD-3-Clause"
] | null | null | null | # Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
from asdf import yamlutil
from asdf.versioning import AsdfSpec
from ..types import AstronomyDataModelType
from ..fixed_location import FixedLocation
| 40.777778 | 96 | 0.683924 |
3d85f7e617337855186eb9a6630f328826ed38ef | 868 | py | Python | app/migrations/0003_contacts.py | Joshua-Barawa/Django-IP4 | 5665efe73cf8d2244b7bb35ed627e4e237902156 | [
"Unlicense"
] | null | null | null | app/migrations/0003_contacts.py | Joshua-Barawa/Django-IP4 | 5665efe73cf8d2244b7bb35ed627e4e237902156 | [
"Unlicense"
] | null | null | null | app/migrations/0003_contacts.py | Joshua-Barawa/Django-IP4 | 5665efe73cf8d2244b7bb35ed627e4e237902156 | [
"Unlicense"
] | null | null | null | # Generated by Django 4.0.3 on 2022-03-21 13:04
from django.db import migrations, models
import django.db.models.deletion
| 34.72 | 117 | 0.623272 |
3d8734a866fbee3cba78ae6db665c5cbc41ba2ea | 440 | py | Python | assessment/seeders/base_seeder.py | kenware/Assessment | 69f5e3fbf18dfa2c59eaf3b083ebdba7ca66c9b7 | [
"MIT"
] | null | null | null | assessment/seeders/base_seeder.py | kenware/Assessment | 69f5e3fbf18dfa2c59eaf3b083ebdba7ca66c9b7 | [
"MIT"
] | 3 | 2020-02-11T23:31:01.000Z | 2021-06-10T21:04:34.000Z | assessment/seeders/base_seeder.py | kenware/Assessment | 69f5e3fbf18dfa2c59eaf3b083ebdba7ca66c9b7 | [
"MIT"
] | null | null | null |
from .seed_assessment_type import seed_assessment
from .seed_question import seed_question
from .seed_answer import seed_answer
from .seed_user import seed_user
from .seed_score import seed_score
from .seed_assessment_name import seed_assessment_name
| 24.444444 | 54 | 0.752273 |
3d89564a5d0fa853d134b34b86a84b5003e24ceb | 328 | py | Python | contek_tusk/metric_data.py | contek-io/contek-tusk | 74dc73388367adb958848819b29fe24316c4f6f4 | [
"MIT"
] | null | null | null | contek_tusk/metric_data.py | contek-io/contek-tusk | 74dc73388367adb958848819b29fe24316c4f6f4 | [
"MIT"
] | null | null | null | contek_tusk/metric_data.py | contek-io/contek-tusk | 74dc73388367adb958848819b29fe24316c4f6f4 | [
"MIT"
] | null | null | null | from pandas import DataFrame
from contek_tusk.table import Table
| 19.294118 | 60 | 0.655488 |
3d8aee839cc7a45416c287f7da1460240d9b1dd8 | 28 | py | Python | inlinec/__init__.py | ssize-t/inlinec | 20eca6bf8556a77906ba5f420f09006d6daf4355 | [
"Apache-2.0"
] | 22 | 2020-10-10T18:25:04.000Z | 2021-11-09T18:56:34.000Z | inlinec/__init__.py | ssize-t/inlinec | 20eca6bf8556a77906ba5f420f09006d6daf4355 | [
"Apache-2.0"
] | 1 | 2020-11-10T03:50:05.000Z | 2020-11-10T03:50:05.000Z | inlinec/__init__.py | ssize-t/inlinec | 20eca6bf8556a77906ba5f420f09006d6daf4355 | [
"Apache-2.0"
] | 2 | 2020-10-10T16:09:42.000Z | 2021-03-10T16:43:11.000Z | from .inlinec import inlinec | 28 | 28 | 0.857143 |
3d90245ccc4e47d064d2a5aa4296f527b42e0ce2 | 3,360 | py | Python | mcastropi.py | martinohanlon/MinecraftInteractiveAstroPi | 0e9f30b25cad83b52553b257103b0e89a09ecc38 | [
"BSD-3-Clause"
] | null | null | null | mcastropi.py | martinohanlon/MinecraftInteractiveAstroPi | 0e9f30b25cad83b52553b257103b0e89a09ecc38 | [
"BSD-3-Clause"
] | null | null | null | mcastropi.py | martinohanlon/MinecraftInteractiveAstroPi | 0e9f30b25cad83b52553b257103b0e89a09ecc38 | [
"BSD-3-Clause"
] | null | null | null | """
SpaceCRAFT - Astro Pi competition[http://astro-pi.org/] entry
Conceived by Hannah Belshaw
Created by Martin O'Hanlon[http://www.stuffaboutcode.com]
For the Raspberry Pi Foundation[https://www.raspberrypi.org]
mcastropi.py
A movable minecraft model of a Raspberry Pi with an Astro Pi on top
"""
from minecraftstuff import MinecraftShape
from minecraftstuff import ShapeBlock
from mcpi.minecraft import Minecraft
from mcpi.minecraft import Vec3
from mcpi import block
from time import sleep
#test
if __name__ == "__main__":
mc = Minecraft.create()
pos = Vec3(0, 20, 0)
mcastropi = MCAstroPi(mc, pos)
try:
sleep(5)
finally:
mcastropi.clear()
| 37.752809 | 74 | 0.535714 |
3d9080c01f26c55604e47fcbe8181d860f113c89 | 1,444 | py | Python | utils/pack_images.py | 1mplex/segmentation_image_augmentation | bd93c1589078247c0c7aff8556afc16a7e15be39 | [
"MIT"
] | 15 | 2020-07-21T08:57:38.000Z | 2022-01-24T21:59:10.000Z | utils/pack_images.py | el-lilya/segmentation_image_augmentation | c16604274a220e00a6fbc4d653ab9c90276a8eba | [
"MIT"
] | 1 | 2021-02-15T21:24:11.000Z | 2021-02-15T21:24:11.000Z | utils/pack_images.py | el-lilya/segmentation_image_augmentation | c16604274a220e00a6fbc4d653ab9c90276a8eba | [
"MIT"
] | 9 | 2021-07-01T02:42:22.000Z | 2022-01-24T21:59:12.000Z | import copy
import math
import numpy as np
# import rpack
from rectpack import newPacker
from rectpack.maxrects import MaxRectsBssf
# def get_pack_coords(sizes):
# # list of [height, width] i.e. img.shape order
# sizes = _change_dim_order(sizes)
# positions = rpack.pack(sizes)
# return _change_dim_order(positions)
| 22.5625 | 66 | 0.629501 |
3d90bec081e48c3692736a49abca5a861a8e0892 | 626 | py | Python | scripts/modules/task_plan_types/date.py | vkostyanetsky/Organizer | b1f0a05c0b6c6e6ea7a78a6bd7a3c70f85b33eba | [
"MIT"
] | null | null | null | scripts/modules/task_plan_types/date.py | vkostyanetsky/Organizer | b1f0a05c0b6c6e6ea7a78a6bd7a3c70f85b33eba | [
"MIT"
] | null | null | null | scripts/modules/task_plan_types/date.py | vkostyanetsky/Organizer | b1f0a05c0b6c6e6ea7a78a6bd7a3c70f85b33eba | [
"MIT"
] | null | null | null | # DD.MM.YYYY (DD , MM , YYYY )
import re
import datetime | 26.083333 | 91 | 0.600639 |
3d92ede6e5d24bbbfeb9c757cc08cd7affa9cd34 | 268 | py | Python | src/pyons/setup.py | larioandr/thesis-models | ecbc8c01aaeaa69034d6fe1d8577ab655968ea5f | [
"MIT"
] | 1 | 2021-01-17T15:49:03.000Z | 2021-01-17T15:49:03.000Z | src/pyons/setup.py | larioandr/thesis-models | ecbc8c01aaeaa69034d6fe1d8577ab655968ea5f | [
"MIT"
] | null | null | null | src/pyons/setup.py | larioandr/thesis-models | ecbc8c01aaeaa69034d6fe1d8577ab655968ea5f | [
"MIT"
] | 1 | 2021-03-07T15:31:06.000Z | 2021-03-07T15:31:06.000Z | from setuptools import setup
setup(
name='pyons',
version='1.0',
author="Andrey Larionov",
author_email="larioandr@gmail.com",
license="MIT",
py_modules=['pyons'],
install_requires=[
],
tests_requires=[
'pytest',
],
)
| 15.764706 | 39 | 0.589552 |
3d95e63a148b7fb62965e71316967e479358de64 | 2,262 | py | Python | html2markdown.py | DeusFigendi/fefebot | 935338c7b082502f25f97ae4874b4e896a04972e | [
"MIT"
] | 4 | 2016-09-19T03:54:31.000Z | 2021-03-27T23:06:34.000Z | html2markdown.py | DeusFigendi/fefebot | 935338c7b082502f25f97ae4874b4e896a04972e | [
"MIT"
] | 1 | 2017-08-01T15:04:57.000Z | 2017-08-08T22:02:46.000Z | html2markdown.py | DeusFigendi/fefebot | 935338c7b082502f25f97ae4874b4e896a04972e | [
"MIT"
] | 6 | 2015-08-24T09:37:41.000Z | 2018-12-26T19:40:42.000Z | #! /usr/bin/env python3.2
import re
| 29 | 183 | 0.517683 |
3d9613c4bf3516cfc004d7af07118d7c31dd361e | 2,572 | py | Python | Uebung10/Aufgabe29.py | B0mM3L6000/EiP | f68718f95a2d3cde8ead62b6134ac1b5068881a5 | [
"MIT"
] | 1 | 2018-04-18T19:10:06.000Z | 2018-04-18T19:10:06.000Z | Uebung10/Aufgabe29.py | B0mM3L6000/EiP | f68718f95a2d3cde8ead62b6134ac1b5068881a5 | [
"MIT"
] | null | null | null | Uebung10/Aufgabe29.py | B0mM3L6000/EiP | f68718f95a2d3cde8ead62b6134ac1b5068881a5 | [
"MIT"
] | 1 | 2018-04-29T08:48:00.000Z | 2018-04-29T08:48:00.000Z |
##################################
"""
29.5:
nein es gilt nicht, wenn z.B. das Dictionary fr verschiedene schlssel gleiche
Bedeutungen hat
z.B. dict erstellt mit den strings:
"haus baum welt"
"rot blau blau"
und bersetzt werden soll:
"baum welt haus"
dann erhlt man am ende: "welt welt haus"
"""
#####################################
#sauce foooter:
from random import randint
try:
#Create an Encoder object
enc = Encoder()
# Create two strings
st1 = "Lorem ipsum dolor sit amet consetetur sadipscing elitr sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat voluptua"
st2 = "At vero eos at accusam sit justo duo dolores et ea rebum Stet clita kasd gubergren no sea takimata sanctus est Lorem ipsum"
# set the dictionary
enc.updateEncoding(st1,st2)
# create a random sentence from words of the first sentence
bagOfWords = str.split(st1)
st3 = ""
for i in range(19):
st3 += bagOfWords[randint(0,len(bagOfWords)-1)]+" "
st3 += bagOfWords[1]
# encode the random sentence
st4 = enc.encode(st3)
# decode it
st5 = enc.decode(st4)
# print the random sentence
print("#Encode String:",st3)
# print the encoded sentence
print("#Decode String:",st4)
# print the decoded sentence
print("#Result:",st5)
# in this case: if the random and the decoded sentence are equal, the test is passed
if(str.split(st3) == str.split(st5)):
print("correct")
else:
print("Encoding or Decoding incorrect")
print("Line #Encode String: and Line #Result: should be equal")
except:
print("Some names or functions do not work correctly or are wrongly named")
| 28.263736 | 154 | 0.626361 |
3d97e3a10c2e5eda50ea446fddb6d02e4af4f7fc | 543 | py | Python | p2p/adapters.py | baltimore-sun-data/p2p-python | 5f9648839d17c003104d88fd6cc6ca7a8eddd2c6 | [
"MIT"
] | 9 | 2015-07-23T06:35:59.000Z | 2020-06-01T04:33:56.000Z | p2p/adapters.py | baltimore-sun-data/p2p-python | 5f9648839d17c003104d88fd6cc6ca7a8eddd2c6 | [
"MIT"
] | 28 | 2015-10-16T19:09:58.000Z | 2019-02-28T21:09:54.000Z | p2p/adapters.py | baltimore-sun-data/p2p-python | 5f9648839d17c003104d88fd6cc6ca7a8eddd2c6 | [
"MIT"
] | 5 | 2015-10-15T22:56:10.000Z | 2018-11-13T20:44:39.000Z | from requests.adapters import HTTPAdapter, DEFAULT_POOLBLOCK
from requests.packages.urllib3.poolmanager import PoolManager
| 38.785714 | 78 | 0.664825 |
3d9a1b0edafd4fb0b37e8206295d03027352213c | 18 | py | Python | mltk/marl/algorithms/__init__.py | lqf96/mltk | 7187be5d616781695ee68674cd335fbb5a237ccc | [
"MIT"
] | null | null | null | mltk/marl/algorithms/__init__.py | lqf96/mltk | 7187be5d616781695ee68674cd335fbb5a237ccc | [
"MIT"
] | 2 | 2019-12-24T01:54:21.000Z | 2019-12-24T02:23:54.000Z | mltk/marl/algorithms/__init__.py | lqf96/mltk | 7187be5d616781695ee68674cd335fbb5a237ccc | [
"MIT"
] | null | null | null | from .phc import * | 18 | 18 | 0.722222 |
3d9ccca595c0005acda152685faed3168eed5797 | 14,006 | py | Python | src/elementary_modules.py | rmldj/random-graph-nn-paper | b04537f3312113b118878c37cb314a527c5b3a11 | [
"MIT"
] | 3 | 2020-03-23T14:00:35.000Z | 2020-09-24T13:56:18.000Z | src/elementary_modules.py | rmldj/random-graph-nn-paper | b04537f3312113b118878c37cb314a527c5b3a11 | [
"MIT"
] | null | null | null | src/elementary_modules.py | rmldj/random-graph-nn-paper | b04537f3312113b118878c37cb314a527c5b3a11 | [
"MIT"
] | null | null | null | import sympy as sym
import torch
import torch.nn as nn
import torch.nn.functional as F
| 43.228395 | 141 | 0.628873 |
3d9d90c223017d9e1ce9c0cffb8a666b613826f2 | 1,326 | py | Python | actions.py | rodrigocamposdf/MovieBot | 927ded61a201e6b5c33efd88e9e9a0271a43a4d4 | [
"MIT"
] | 1 | 2021-09-21T00:00:25.000Z | 2021-09-21T00:00:25.000Z | actions.py | rodrigocamposdf/MovieBot | 927ded61a201e6b5c33efd88e9e9a0271a43a4d4 | [
"MIT"
] | null | null | null | actions.py | rodrigocamposdf/MovieBot | 927ded61a201e6b5c33efd88e9e9a0271a43a4d4 | [
"MIT"
] | 5 | 2020-07-20T18:43:59.000Z | 2020-11-03T22:49:17.000Z | import movies
| 28.212766 | 78 | 0.630468 |
3d9dc45f332b2fb283e892734ee2a5da821f63dd | 118 | py | Python | Exercicios7/percorrendoLista.py | vinihf/Prog1_ADS_2019 | 97d2e0cddf72c00a73d0bc3070bb9731e66e19e2 | [
"CC-BY-4.0"
] | 1 | 2019-04-18T13:43:15.000Z | 2019-04-18T13:43:15.000Z | Exercicios7/percorrendoLista.py | vinihf/Prog1_ADS_2019 | 97d2e0cddf72c00a73d0bc3070bb9731e66e19e2 | [
"CC-BY-4.0"
] | null | null | null | Exercicios7/percorrendoLista.py | vinihf/Prog1_ADS_2019 | 97d2e0cddf72c00a73d0bc3070bb9731e66e19e2 | [
"CC-BY-4.0"
] | null | null | null | lista = list(range(0,10001))
for cont in range(0,10001):
print(lista[cont])
for valor in lista:
print(valor) | 16.857143 | 28 | 0.669492 |
3d9e72965d75f1eba7d57fa18ca18b2a64265bc7 | 8,282 | py | Python | core/spacy_parser.py | teodor-cotet/DiacriticsRestoration | e7b41d75b84ab2131694f16b9bd93448e83069e1 | [
"Apache-2.0"
] | 1 | 2020-12-05T15:45:48.000Z | 2020-12-05T15:45:48.000Z | core/spacy_parser.py | teodor-cotet/DiacriticsRestoration | e7b41d75b84ab2131694f16b9bd93448e83069e1 | [
"Apache-2.0"
] | 2 | 2021-03-18T07:37:28.000Z | 2021-07-27T14:45:14.000Z | core/spacy_parser.py | teodor-cotet/DiacriticsRestoration | e7b41d75b84ab2131694f16b9bd93448e83069e1 | [
"Apache-2.0"
] | null | null | null | import spacy
from spacy.lang.ro import Romanian
from typing import Dict, List, Iterable
from nltk import sent_tokenize
import re
# JSON Example localhost:8081/spacy application/json
# {
# "lang" : "en",
# "blocks" : ["Dup terminarea oficial a celui de-al doilea rzboi mondial, n conformitate cu discursul lui W. Churchill (prim ministru al Regatului Unit la acea dat), de la Fulton, s-a declanat Rzboiul rece i a aprut conceptul de cortin de fier. Urmare a politicii consecvente de aprare a sistemului economic i politic (implicit a intereslor economice ale marelui capital din lumea occidental) trupele germane, n calitate de prizonieri, aflate pe teritoriul Germaniei de Vest au fost renarmate i au constituit baza viitorului Bundeswehr - armata regulat a R.F.G."]
# }
models = {
'en': 'en_coref_lg',
'nl': 'nl',
'fr': 'fr_core_news_md',
'es': 'es',
'de': 'de',
'it': 'it',
'ro': 'models/model3'
}
normalization = {
'ro': [
(re.compile(""), ""),
(re.compile(""), ""),
(re.compile(""), ""),
(re.compile(""), ""),
(re.compile("(\w)(\w)"), "\g<1>\g<2>")
]
}
if __name__ == "__main__":
spacyInstance = SpacyParser()
sent = """
Dup terminarea oficial a celui de-al doilea rzboi mondial, n conformitate cu discursul lui W. Churchill (prim ministru al Regatului Unit la acea dat), de la Fulton, s-a declanat Rzboiul rece i a aprut conceptul de cortin de fier. Urmare a politicii consecvente de aprare a sistemului economic i politic (implicit a intereslor economice ale marelui capital din lumea occidental) trupele germane, n calitate de "prizonieri", aflate pe teritoriul Germaniei de Vest au fost renarmate i au constituit baza viitorului "Bundeswehr" - armata regulat a R.F.G.
Pe fondul evenimentelor din 1948 din Cehoslovacia (expulzri ale etnicilor germani, alegeri, reconstrucie economic) apare infiltrarea agenilor serviciilor speciale ale S.U.A. i Marii Britanii cu rol de "agitatori". Existnd cauza, trupele sovietice nu prsesc Europa Central i de Est cucerit-eliberat, staionnd pe teritoriul mai multor state. Aflate pe linia de demarcaie dintre cele dou blocuri foste aliate, armata sovietic nu a plecat din Ungaria dect dup dizolvarea Tratatului de la Varovia.
"""
# sent = """
# Dup terminarea oficial a celui de-al doilea rzboi mondial, n conformitate cu discursul lui Churchill, de la Fulton, s-a declanat Rzboiul rece i a aprut conceptul de cortin de fier."""
# print(spacyInstance.get_ner(sent))
# print(spacyInstance.get_tokens_lemmas(sent))
# doc = spacyInstance.parse("My sister has a dog. She loves him.", 'en')
doc = spacyInstance.parse("Pense des enseignants, production dcrits, ingnierie ducative, enseignement distance, traitement automatique de la langue, outils cognitifs, feedback automatique", 'fr')
for token in doc:
print(convertToPenn(token.tag_, 'fr'))
# print(spacyInstance.preprocess("cobor", 'ro'))
| 42.255102 | 584 | 0.579087 |
3da0bfcdf3a8e5f3c1aebf2e4b45b14e05c629a8 | 1,375 | py | Python | code/bot/bot3.py | josemac95/umucv | f0f8de17141f4adcb4966281c3f83539ebda5f0b | [
"BSD-3-Clause"
] | null | null | null | code/bot/bot3.py | josemac95/umucv | f0f8de17141f4adcb4966281c3f83539ebda5f0b | [
"BSD-3-Clause"
] | null | null | null | code/bot/bot3.py | josemac95/umucv | f0f8de17141f4adcb4966281c3f83539ebda5f0b | [
"BSD-3-Clause"
] | null | null | null | #! /usr/bin/env python
# comando con argumentos
# y procesamiento de una imagen
# enviada por el usuario
from telegram.ext import Updater, CommandHandler, MessageHandler, Filters
from io import BytesIO
from PIL import Image
import cv2 as cv
import skimage.io as io
updater = Updater('api token del bot')
updater.dispatcher.add_handler(CommandHandler('hello', hello))
updater.dispatcher.add_handler(CommandHandler('argu' , argu, pass_args=True))
updater.dispatcher.add_handler(MessageHandler(Filters.photo, work))
updater.start_polling()
updater.idle()
| 28.061224 | 85 | 0.722182 |
3da10758c9f1e0fdc4bba0b279e9579ff6f1b0c5 | 1,236 | py | Python | AUTOENCODERS/DataPreparing/CICIDSPreprocessor.py | pawelptak/AI-Anomaly-Detection | 0d3e6072e273d6cc59ba79d5f8c73f393d1ec4e5 | [
"MIT"
] | 1 | 2022-03-23T10:18:17.000Z | 2022-03-23T10:18:17.000Z | AUTOENCODERS/DataPreparing/CICIDSPreprocessor.py | pawelptak/AI-Anomaly-Detection | 0d3e6072e273d6cc59ba79d5f8c73f393d1ec4e5 | [
"MIT"
] | null | null | null | AUTOENCODERS/DataPreparing/CICIDSPreprocessor.py | pawelptak/AI-Anomaly-Detection | 0d3e6072e273d6cc59ba79d5f8c73f393d1ec4e5 | [
"MIT"
] | null | null | null | from sklearn.preprocessing import StandardScaler, LabelEncoder, MinMaxScaler, OneHotEncoder
import numpy as np
import pandas as pd
import tqdm
"""
Class for Preprocessing CICIDS2017 Data represented as rows
"""
| 33.405405 | 91 | 0.670712 |
3da195067ff01ae97b234bc41093431b6cebf500 | 646 | py | Python | class3/collateral/show_genie.py | twin-bridges/netmiko_course | 31943e4f6f66dbfe523d62d5a2f03285802a8c56 | [
"Apache-2.0"
] | 11 | 2020-09-16T06:53:16.000Z | 2021-08-24T21:27:37.000Z | class3/collateral/show_genie.py | twin-bridges/netmiko_course | 31943e4f6f66dbfe523d62d5a2f03285802a8c56 | [
"Apache-2.0"
] | null | null | null | class3/collateral/show_genie.py | twin-bridges/netmiko_course | 31943e4f6f66dbfe523d62d5a2f03285802a8c56 | [
"Apache-2.0"
] | 5 | 2020-10-18T20:25:59.000Z | 2021-10-20T16:27:00.000Z | import os
from netmiko import ConnectHandler
from getpass import getpass
from pprint import pprint
# Code so automated tests will run properly
# Check for environment variable, if that fails, use getpass().
password = os.getenv("NETMIKO_PASSWORD") if os.getenv("NETMIKO_PASSWORD") else getpass()
my_device = {
"device_type": "cisco_xe",
"host": "cisco3.lasthop.io",
"username": "pyclass",
"password": password,
}
with ConnectHandler(**my_device) as net_connect:
output = net_connect.send_command("show ip int brief", use_genie=True)
# output = net_connect.send_command("show ip arp", use_genie=True)
pprint(output)
| 30.761905 | 88 | 0.733746 |
3da20b359813d6186015461736f4d52256b59084 | 2,793 | py | Python | pints/tests/test_toy_hes1_michaelis_menten_model.py | lisaplag/pints | 3de6617e57ba5b395edaca48961bfc5a4b7209b3 | [
"RSA-MD"
] | null | null | null | pints/tests/test_toy_hes1_michaelis_menten_model.py | lisaplag/pints | 3de6617e57ba5b395edaca48961bfc5a4b7209b3 | [
"RSA-MD"
] | null | null | null | pints/tests/test_toy_hes1_michaelis_menten_model.py | lisaplag/pints | 3de6617e57ba5b395edaca48961bfc5a4b7209b3 | [
"RSA-MD"
] | null | null | null | #!/usr/bin/env python3
#
# Tests if the HES1 Michaelis-Menten toy model runs.
#
# This file is part of PINTS (https://github.com/pints-team/pints/) which is
# released under the BSD 3-clause license. See accompanying LICENSE.md for
# copyright notice and full license details.
#
import unittest
import numpy as np
import pints
import pints.toy
if __name__ == '__main__':
unittest.main()
| 38.260274 | 78 | 0.653419 |
3da3144e79a3871eba136a301ca02449b8340d18 | 390 | py | Python | pyctogram/instagram_client/relations/__init__.py | RuzzyRullezz/pyctogram | b811c55dc1c74d57ef489810816322e7f2909f3d | [
"MIT"
] | 1 | 2019-12-10T08:01:58.000Z | 2019-12-10T08:01:58.000Z | pyctogram/instagram_client/relations/__init__.py | RuzzyRullezz/pyctogram | b811c55dc1c74d57ef489810816322e7f2909f3d | [
"MIT"
] | null | null | null | pyctogram/instagram_client/relations/__init__.py | RuzzyRullezz/pyctogram | b811c55dc1c74d57ef489810816322e7f2909f3d | [
"MIT"
] | null | null | null | from . base import Actions, get_users
| 39 | 103 | 0.810256 |
3da323f7d830c432cc131d570a30ac74ba6392bd | 1,636 | py | Python | day-40-API-Cheapest-Flight-Multiple-Users/data_manager.py | anelshaer/Python100DaysOfCode | 012ae7dda28dc790d3bc4d26df807a4dba179ffe | [
"MIT"
] | null | null | null | day-40-API-Cheapest-Flight-Multiple-Users/data_manager.py | anelshaer/Python100DaysOfCode | 012ae7dda28dc790d3bc4d26df807a4dba179ffe | [
"MIT"
] | null | null | null | day-40-API-Cheapest-Flight-Multiple-Users/data_manager.py | anelshaer/Python100DaysOfCode | 012ae7dda28dc790d3bc4d26df807a4dba179ffe | [
"MIT"
] | null | null | null | import requests
import os
from user_data import UserData
import json
| 34.808511 | 110 | 0.630807 |
3da40761377898e0edc360572dbd5d864963e85c | 4,232 | py | Python | crime_data/resources/incidents.py | 18F/crime-data-api | 3e8cab0fad4caac1d7d8ef1b62ae7a1441752c6c | [
"CC0-1.0"
] | 51 | 2016-09-16T00:37:56.000Z | 2022-01-22T03:48:24.000Z | crime_data/resources/incidents.py | harrisj/crime-data-api | 9b49b5cc3cd8309dda888f49356ee5168c43851a | [
"CC0-1.0"
] | 605 | 2016-09-15T19:16:49.000Z | 2018-01-18T20:46:39.000Z | crime_data/resources/incidents.py | harrisj/crime-data-api | 9b49b5cc3cd8309dda888f49356ee5168c43851a | [
"CC0-1.0"
] | 12 | 2018-01-18T21:15:34.000Z | 2022-02-17T10:09:40.000Z | from webargs.flaskparser import use_args
from itertools import filterfalse
from crime_data.common import cdemodels, marshmallow_schemas, models, newmodels
from crime_data.common.base import CdeResource, tuning_page, ExplorerOffenseMapping
from crime_data.extensions import DEFAULT_MAX_AGE
from flask.ext.cachecontrol import cache
from flask import jsonify
| 41.087379 | 157 | 0.708176 |
3da4a9becaa6b35a7f34b4f9c1a6f2e59d92599e | 1,522 | py | Python | deploy_config_generator/output/kube_kong_consumer.py | ApplauseAQI/applause-deploy-config-generator | 46f957fbfe991677f920d5db74b0670385b6e505 | [
"MIT"
] | 3 | 2019-04-05T14:16:17.000Z | 2021-06-25T20:53:03.000Z | deploy_config_generator/output/kube_kong_consumer.py | ApplauseAQI/applause-deploy-config-generator | 46f957fbfe991677f920d5db74b0670385b6e505 | [
"MIT"
] | 6 | 2019-04-04T20:20:16.000Z | 2021-09-27T21:04:39.000Z | deploy_config_generator/output/kube_kong_consumer.py | ApplauseAQI/applause-deploy-config-generator | 46f957fbfe991677f920d5db74b0670385b6e505 | [
"MIT"
] | null | null | null | import copy
from deploy_config_generator.utils import yaml_dump
from deploy_config_generator.output import kube_common
| 31.061224 | 130 | 0.532194 |
3da7fc4300dabd09ec4c470043ea127780e60a3b | 2,450 | py | Python | EyePatterns/clustering_algorithms/custom_mean_shift.py | Sale1996/Pattern-detection-of-eye-tracking-scanpaths | 15c832f26dce98bb95445f9f39f454f99bbb6029 | [
"MIT"
] | 1 | 2021-12-07T08:02:30.000Z | 2021-12-07T08:02:30.000Z | EyePatterns/clustering_algorithms/custom_mean_shift.py | Sale1996/Pattern-detection-of-eye-tracking-scanpaths | 15c832f26dce98bb95445f9f39f454f99bbb6029 | [
"MIT"
] | null | null | null | EyePatterns/clustering_algorithms/custom_mean_shift.py | Sale1996/Pattern-detection-of-eye-tracking-scanpaths | 15c832f26dce98bb95445f9f39f454f99bbb6029 | [
"MIT"
] | null | null | null | import numpy as np
| 34.027778 | 87 | 0.646122 |
3da83d4179e3c0fa03b23a086938541e7c9c090e | 931 | py | Python | src/tentaclio/clients/athena_client.py | datavaluepeople/tentaclio | eb6920a0e115c6c08043063a8c1013d812ec34c8 | [
"MIT"
] | 12 | 2019-04-30T16:07:42.000Z | 2021-12-08T08:02:09.000Z | src/tentaclio/clients/athena_client.py | octoenergy/tentaclio | eb6920a0e115c6c08043063a8c1013d812ec34c8 | [
"MIT"
] | 74 | 2019-04-25T11:18:22.000Z | 2022-01-18T11:31:14.000Z | src/tentaclio/clients/athena_client.py | datavaluepeople/tentaclio | eb6920a0e115c6c08043063a8c1013d812ec34c8 | [
"MIT"
] | 4 | 2019-05-05T13:13:21.000Z | 2022-01-14T00:33:07.000Z | """AWS Athena query client.
Overrides the `get_df` convenience methods for loading a DataFrame using PandasCursor,
which is more performant than using sql alchemy functions.
"""
import pandas as pd
from pyathena.pandas_cursor import PandasCursor
from . import decorators, sqla_client
__all__ = ["AthenaClient"]
| 32.103448 | 86 | 0.736842 |
3da995d5085338f00dd3653e93f80c4fa924f8b7 | 3,592 | py | Python | tests/unit/merge/merge_test.py | singulared/conflow | f74dec63b23da9791202e99496d3baadd458c1c5 | [
"MIT"
] | 11 | 2018-03-27T17:24:35.000Z | 2021-09-21T05:49:11.000Z | tests/unit/merge/merge_test.py | singulared/conflow | f74dec63b23da9791202e99496d3baadd458c1c5 | [
"MIT"
] | 64 | 2018-01-24T16:34:42.000Z | 2020-03-23T13:34:07.000Z | tests/unit/merge/merge_test.py | singulared/conflow | f74dec63b23da9791202e99496d3baadd458c1c5 | [
"MIT"
] | null | null | null | import pytest
from conflow.merge import merge_factory
from conflow.node import Node, NodeList, NodeMap
| 26.218978 | 69 | 0.58686 |
3da9ac46abe5207f20db155757f945a1d90d40c8 | 864 | py | Python | cartopolar/antarctica_maps.py | dlilien/cartopolar | a425ef205c72e25c5d140c65c1ec99d688618f49 | [
"MIT"
] | null | null | null | cartopolar/antarctica_maps.py | dlilien/cartopolar | a425ef205c72e25c5d140c65c1ec99d688618f49 | [
"MIT"
] | null | null | null | cartopolar/antarctica_maps.py | dlilien/cartopolar | a425ef205c72e25c5d140c65c1ec99d688618f49 | [
"MIT"
] | null | null | null | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright 2020 dlilien <dlilien@hozideh>
#
# Distributed under terms of the MIT license.
"""
"""
import numpy as np
import cartopy.crs as ccrs
import matplotlib.pyplot as plt
from .cartopy_overrides import SPS
# import shapely.geometry as sgeom
USP_EXTENT = (31000, 35000, -37750, -33750)
# USP_EXTENT = (-100000, 100000, -100000, 100000)
USP_ASP = (USP_EXTENT[1] - USP_EXTENT[0]) / (USP_EXTENT[3] - USP_EXTENT[2])
| 24.685714 | 76 | 0.665509 |
3daa549e10afe7d4f29dbdbe102676caed6653f5 | 1,010 | py | Python | cpdb/toast/tests/test_serializers.py | invinst/CPDBv2_backend | b4e96d620ff7a437500f525f7e911651e4a18ef9 | [
"Apache-2.0"
] | 25 | 2018-07-20T22:31:40.000Z | 2021-07-15T16:58:41.000Z | cpdb/toast/tests/test_serializers.py | invinst/CPDBv2_backend | b4e96d620ff7a437500f525f7e911651e4a18ef9 | [
"Apache-2.0"
] | 13 | 2018-06-18T23:08:47.000Z | 2022-02-10T07:38:25.000Z | cpdb/toast/tests/test_serializers.py | invinst/CPDBv2_backend | b4e96d620ff7a437500f525f7e911651e4a18ef9 | [
"Apache-2.0"
] | 6 | 2018-05-17T21:59:43.000Z | 2020-11-17T00:30:26.000Z | from django.test import TestCase
from robber import expect
from toast.serializers import ToastDesktopSerializer, ToastMobileSerializer
from toast.factories import ToastFactory
| 31.5625 | 112 | 0.634653 |
3daa64b4b3b876de59fee4ffa1f0970c52c6d7f9 | 12,063 | py | Python | wirepas_backend_client/test/kpi_adv.py | PFigs/backend-client | e6f024d8c5b8ba3e7cd1b5c226d16ff643d4bd83 | [
"Apache-2.0"
] | null | null | null | wirepas_backend_client/test/kpi_adv.py | PFigs/backend-client | e6f024d8c5b8ba3e7cd1b5c226d16ff643d4bd83 | [
"Apache-2.0"
] | null | null | null | wirepas_backend_client/test/kpi_adv.py | PFigs/backend-client | e6f024d8c5b8ba3e7cd1b5c226d16ff643d4bd83 | [
"Apache-2.0"
] | 1 | 2021-09-30T06:38:54.000Z | 2021-09-30T06:38:54.000Z | """
KPI ADV
=======
Script to execute an inventory and otap benchmark for the
advertiser feature.
.. Copyright:
Copyright 2019 Wirepas Ltd under Apache License, Version 2.0.
See file LICENSE for full license details.
"""
import queue
import random
import datetime
import importlib
import multiprocessing
import pandas
from wirepas_backend_client.messages import AdvertiserMessage
from wirepas_backend_client.tools import ParserHelper, LoggerHelper
from wirepas_backend_client.api import MySQLSettings, MySQLObserver
from wirepas_backend_client.api import MQTTObserver, MQTTSettings
from wirepas_backend_client.management import Daemon, Inventory
from wirepas_backend_client.test import TestManager
def fetch_report(
args, rx_queue, timeout, report_output, number_of_runs, exit_signal, logger
):
""" Reporting loop executed between test runs """
reports = {}
for run in range(0, number_of_runs):
try:
report = rx_queue.get(timeout=timeout, block=True)
reports[run] = report
except queue.Empty:
report = None
logger.warning("timed out waiting for report")
if exit_signal.is_set():
raise RuntimeError
df = pandas.DataFrame.from_dict(reports)
if args.output_time:
filepath = "{}_{}".format(
datetime.datetime.now().isoformat(), args.output
)
else:
filepath = "{}".format(args.output)
df.to_json(filepath)
def main(args, logger):
""" Main loop """
# process management
daemon = Daemon(logger=logger)
mysql_settings = MySQLSettings(args)
mqtt_settings = MQTTSettings(args)
if mysql_settings.sanity():
mysql_available = True
daemon.build(
__STORAGE_ENGINE__,
MySQLObserver,
dict(mysql_settings=mysql_settings),
)
daemon.set_run(
__STORAGE_ENGINE__,
task_kwargs=dict(parallel=True),
task_as_daemon=False,
)
else:
mysql_available = False
logger.info("Skipping Storage module")
if mqtt_settings.sanity():
mqtt_process = daemon.build(
"mqtt",
MQTTObserver,
dict(
mqtt_settings=mqtt_settings,
logger=logger,
allowed_endpoints=set([AdvertiserMessage.source_endpoint]),
),
)
topic = "gw-event/received_data/{gw_id}/{sink_id}/{network_id}/{source_endpoint}/{destination_endpoint}".format(
gw_id=args.mqtt_subscribe_gateway_id,
sink_id=args.mqtt_subscribe_sink_id,
network_id=args.mqtt_subscribe_network_id,
source_endpoint=args.mqtt_subscribe_source_endpoint,
destination_endpoint=args.mqtt_subscribe_destination_endpoint,
)
mqtt_process.message_subscribe_handlers = {
topic: mqtt_process.generate_data_received_cb()
}
daemon.set_run("mqtt", task=mqtt_process.run)
# build each process and set the communication
adv_manager = daemon.build(
"adv_manager",
AdvertiserManager,
dict(
inventory_target_nodes=args.target_nodes,
inventory_target_otap=args.target_otap,
inventory_target_frequency=args.target_frequency,
logger=logger,
delay=args.delay,
duration=args.duration,
),
receive_from="mqtt",
storage=mysql_available,
storage_name=__STORAGE_ENGINE__,
)
adv_manager.execution_jitter(
_min=args.jitter_minimum, _max=args.jitter_maximum
)
adv_manager.register_task(
adv_manager.test_inventory, number_of_runs=args.number_of_runs
)
daemon.set_loop(
fetch_report,
dict(
args=args,
rx_queue=adv_manager.tx_queue,
timeout=args.delay + args.duration + 60,
report_output=args.output,
number_of_runs=args.number_of_runs,
exit_signal=daemon.exit_signal,
logger=logger,
),
)
daemon.start()
else:
print("Please check you MQTT settings")
print(mqtt_settings)
if __name__ == "__main__":
__MYSQL_ENABLED__ = importlib.util.find_spec("MySQLdb")
__STORAGE_ENGINE__ = "mysql"
__TEST_NAME__ = "test_advertiser"
PARSE = ParserHelper(description="KPI ADV arguments")
PARSE.add_mqtt()
PARSE.add_test()
PARSE.add_database()
PARSE.add_fluentd()
PARSE.add_file_settings()
SETTINGS = PARSE.settings()
LOGGER = LoggerHelper(
module_name=__TEST_NAME__, args=SETTINGS, level=SETTINGS.debug_level
).setup()
if SETTINGS.delay is None:
SETTINGS.delay = random.randrange(0, 60)
# pylint: disable=locally-disabled, no-member
try:
nodes = set({int(line) for line in open(SETTINGS.nodes, "r")})
except FileNotFoundError:
LOGGER.warning("Could not find nodes file")
nodes = set()
SETTINGS.target_nodes = nodes
if SETTINGS.jitter_minimum > SETTINGS.jitter_maximum:
SETTINGS.jitter_maximum = SETTINGS.jitter_minimum
LOGGER.info(
{
"test_suite_start": datetime.datetime.utcnow().isoformat("T"),
"run_arguments": SETTINGS.to_dict(),
}
)
# pylint: enable=no-member
main(SETTINGS, LOGGER)
PARSE.dump(
"run_information_{}.txt".format(datetime.datetime.now().isoformat())
)
| 32.340483 | 120 | 0.607643 |
3dac942409c65786150bee242bc747d471fc5414 | 1,608 | py | Python | levenshtein_func.py | Lance-Easley/Document-Similarity | c83fa406acf6308da28867611f567776fc266884 | [
"MIT"
] | null | null | null | levenshtein_func.py | Lance-Easley/Document-Similarity | c83fa406acf6308da28867611f567776fc266884 | [
"MIT"
] | null | null | null | levenshtein_func.py | Lance-Easley/Document-Similarity | c83fa406acf6308da28867611f567776fc266884 | [
"MIT"
] | null | null | null | import doctest
def leven_distance(iterable1: str or list, iterable2: str or list) -> int:
"""Takes two strings or lists and will find the Levenshtein distance
between the two.
Both iterables must be same type (str or list) for proper functionality.
If given strings, function will find distance per character. If given
lists, function will find distance per term in list.
Capitalization will be counted as a difference.
>>> leven_distance('cat', 'hat')
1
>>> leven_distance('abcdef', 'azc3uf')
3
>>> leven_distance(['hi', 'there', 'kevin'], ['hello', 'there', 'kevin'])
1
"""
iterable1_count = len(iterable1) + 1
iterable2_count = len(iterable2) + 1
mem = []
# Set memoize list length
for i in range(0, iterable1_count):
mem.append([])
for j in range(0, iterable2_count):
mem[i].append(None)
# Assign empty string numbers to memoize chart
# Row
for r in range(0, iterable1_count):
mem[r][0] = r
# Column
for c in range(0, iterable2_count):
mem[0][c] = c
# Fill in rest of chart
for r in range(iterable1_count - 1):
for c in range(iterable2_count - 1):
if iterable1[r] == iterable2[c]:
mem[r + 1][c + 1] = mem[r][c]
else:
mem[r + 1][c + 1] = min(
mem[r][c] + 1,
mem[r + 1][c] + 1,
mem[r][c + 1] + 1
)
# Get last number in chart
return mem[-1][-1]
if __name__ == "__main__":
print(doctest.testmod()) | 29.777778 | 77 | 0.559701 |
3dada60e0249d722b9efc92d356114b02e3e0c6c | 18,496 | py | Python | filters/Filter.py | Paul1298/ITMO_FS | 219537776d89e52df0c1c07de2c71ce91c679c50 | [
"MIT"
] | null | null | null | filters/Filter.py | Paul1298/ITMO_FS | 219537776d89e52df0c1c07de2c71ce91c679c50 | [
"MIT"
] | null | null | null | filters/Filter.py | Paul1298/ITMO_FS | 219537776d89e52df0c1c07de2c71ce91c679c50 | [
"MIT"
] | null | null | null | from .utils import *
GLOB_MEASURE = {"FitCriterion": _DefaultMeasures.fit_criterion_measure,
"FRatio": _DefaultMeasures.f_ratio_measure,
"GiniIndex": _DefaultMeasures.gini_index,
"InformationGain": _DefaultMeasures.ig_measure,
"MrmrDiscrete": _DefaultMeasures.mrmr_measure,
"SymmetricUncertainty": _DefaultMeasures.su_measure,
"SpearmanCorr": _DefaultMeasures.spearman_corr,
"PearsonCorr": _DefaultMeasures.pearson_corr,
"FechnerCorr": _DefaultMeasures.fechner_corr,
"ReliefF": _DefaultMeasures.reliefF_measure,
"Chi2": _DefaultMeasures.chi2_measure}
GLOB_CR = {"Best by value": _DefaultCuttingRules.select_best_by_value,
"Worst by value": _DefaultCuttingRules.select_worst_by_value,
"K best": _DefaultCuttingRules.select_k_best,
"K worst": _DefaultCuttingRules.select_k_worst}
| 37.670061 | 118 | 0.579477 |
3dae0fc03c90ecfa32dc4ecfd3dd9dd3da1ccb4d | 457 | py | Python | h3.py | alexfmsu/pyquantum | 78b09987cbfecf549e67b919bb5cb2046b21ad44 | [
"MIT"
] | null | null | null | h3.py | alexfmsu/pyquantum | 78b09987cbfecf549e67b919bb5cb2046b21ad44 | [
"MIT"
] | null | null | null | h3.py | alexfmsu/pyquantum | 78b09987cbfecf549e67b919bb5cb2046b21ad44 | [
"MIT"
] | 2 | 2020-07-28T08:40:06.000Z | 2022-02-16T23:04:58.000Z | from PyQuantum.TC3.Cavity import Cavity
from PyQuantum.TC3.Hamiltonian3 import Hamiltonian3
capacity = {
'0_1': 2,
'1_2': 2,
}
wc = {
'0_1': 0.2,
'1_2': 0.3,
}
wa = [0.2] * 3
g = {
'0_1': 1,
'1_2': 200,
}
cv = Cavity(wc=wc, wa=wa, g=g, n_atoms=3, n_levels=3)
# cv.wc_info()
# cv.wa_info()
# cv.g_info()
cv.info()
H = Hamiltonian3(capacity=capacity, cavity=cv, iprint=False)
H.print_states()
H.print_bin_states()
# H.iprint()
| 13.848485 | 60 | 0.603939 |
3daf0b7c2684b25ee98648b971b2e1076b2cf00c | 1,058 | py | Python | gamestate-changes/change_statistics/other/rectangleAnimation.py | phylib/MinecraftNDN-RAFNET19 | c7bfa7962707af367fafe9d879bc63637c06aec7 | [
"MIT"
] | 1 | 2020-05-18T15:55:09.000Z | 2020-05-18T15:55:09.000Z | gamestate-changes/change_statistics/other/rectangleAnimation.py | phylib/MinecraftNDN-RAFNET19 | c7bfa7962707af367fafe9d879bc63637c06aec7 | [
"MIT"
] | null | null | null | gamestate-changes/change_statistics/other/rectangleAnimation.py | phylib/MinecraftNDN-RAFNET19 | c7bfa7962707af367fafe9d879bc63637c06aec7 | [
"MIT"
] | null | null | null | # https://stackoverflow.com/questions/31921313/matplotlib-animation-moving-square
import matplotlib.pyplot as plt
import matplotlib.patches as patches
from matplotlib import animation
x = [0, 1, 2]
y = [0, 10, 20]
y2 = [40, 30, 20]
colors = ['r','b','g','orange']
fig = plt.figure()
plt.axis('equal')
plt.grid()
ax = fig.add_subplot(111)
ax.set_xlim(-100, 100)
ax.set_ylim(-100, 100)
patch1 = patches.Rectangle((0, 0), 0, 0, fill=False, edgecolor=colors[0])
patch1.set_width(21)
patch1.set_height(21)
patch2 = patches.Rectangle((0, 0), 0, 0, fill=False, edgecolor=colors[1])
patch2.set_width(21)
patch2.set_height(21)
anim = animation.FuncAnimation(fig, animate,
init_func=init,
frames=len(x),
interval=500,
blit=True)
plt.show() | 25.190476 | 81 | 0.614367 |
3daf498d7521399146cf380a60792cc98a71c488 | 6,145 | py | Python | MakeMytripChallenge/script/IFtrial.py | divayjindal95/DataScience | d976a5e3ac9bd36e84149642a5b93f7bfc3540cf | [
"MIT"
] | null | null | null | MakeMytripChallenge/script/IFtrial.py | divayjindal95/DataScience | d976a5e3ac9bd36e84149642a5b93f7bfc3540cf | [
"MIT"
] | null | null | null | MakeMytripChallenge/script/IFtrial.py | divayjindal95/DataScience | d976a5e3ac9bd36e84149642a5b93f7bfc3540cf | [
"MIT"
] | null | null | null | import sys
import warnings
if not sys.warnoptions:
warnings.simplefilter("ignore")
import numpy as np
import pandas as pd
import matplotlib.pylab as plt
from sklearn.naive_bayes import MultinomialNB
from sklearn.ensemble import RandomForestClassifier
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.svm import SVC
from sklearn.linear_model import LogisticRegression,LinearRegression
from sklearn.preprocessing import StandardScaler
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import LabelEncoder
from sklearn.model_selection import KFold,cross_val_score,LeaveOneOut
#from sklearn.cross_validation import KFold,train_test_split,cross_val_score
train_data = pd.read_csv("../data/train.csv")
train_data_len=len(train_data)
test_data=pd.read_csv("../data/test.csv")
test_data_len=len(test_data)
data=pd.concat([train_data,test_data])
data.A=data.A.fillna(data['A'].mode()[0])
data.D=data.D.fillna(data['D'].mode()[0])
data.E=data.E.fillna(data['E'].mode()[0])
data.G=data.G.fillna(data['G'].mode()[0])
data.F=data.F.fillna(data['F'].mode()[0])
data.B=data.A.fillna(data['B'].median())
data.N=data.N.fillna(data['N'].median())
#print len(data.dropna())
#print data.describe()
data,cls=getint(data)
# data.O=np.log(data.O+1)
# data.H=np.log(data.H+1)
# data.K=np.log(data.K+1)
# data.N=np.log(data.N+1)
# data.C=np.log(data.C+1)
# sc = StandardScaler()
# data.O=sc.fit_transform(np.reshape(data.O,(len(data.O),1)))
# sc = StandardScaler()
# data.H=sc.fit_transform(np.reshape(data.H,(len(data.H),1)))
# sc = StandardScaler()
# data.K=sc.fit_transform(np.reshape(data.K,(len(data.K),1)))
# sc = StandardScaler()
# data.N=sc.fit_transform(np.reshape(data.N,(len(data.N),1)))
# sc = StandardScaler()
# data.C=sc.fit_transform(np.reshape(data.C,(len(data.C),1)))
# sc = StandardScaler()
# data.B=sc.fit_transform(np.reshape(data.B,(len(data.B),1)))
data['H_frac']=data.H-data.H.map(lambda x:int(x))
data['H_int'] = data.H.map(lambda x:int(x))
data['C_frac']=data.C-data.C.map(lambda x:int(x))
data['C_int'] = data.C.map(lambda x:int(x))
data['N_frac']=data.N-data.N.map(lambda x:int(x))
data['N_int'] = data.N.map(lambda x:int(x))
data=pd.concat([data,pd.get_dummies(data.A,'A')],axis=1)
data=pd.concat([data,pd.get_dummies(data.F,'F')],axis=1)
print data.head()
print data.columns
trncols=[u'A', u'B','C_frac','C_int', u'D', u'E', u'F', u'G', u'H_int','H_frac', u'I', u'J', u'K',
u'L', u'M','N_frac','N_int', u'O']
trncols=[u'A', u'B', u'C', u'D', u'E', u'F', u'G', u'H', u'I', u'J', u'K', u'L', u'M', u'N', u'O', u'id', u'H_frac', u'H_int', u'C_frac', u'C_int', u'N_frac', u'N_int', u'A_0', u'A_1', u'F_0', u'F_1', u'F_2', u'F_3', u'F_4', u'F_5', u'F_6', u'F_7', u'F_8', u'F_9', u'F_10', u'F_11', u'F_12', u'F_13']
testcols=['P']
data_bin = ['A','I','J','L','F']
#trncols=data_bin
fin_train_data=data.iloc[:len(train_data)]
fin_test_data=data.iloc[len(train_data):]
#print fin_train_data[(fin_train_data.I==1) & (fin_train_data.J==0)].tostring()
print len(fin_train_data)
print len(fin_train_data[(fin_train_data.I==1) & (fin_train_data.J==1)]),len(fin_train_data[(fin_train_data.I==1) & (fin_train_data.J==1) & (fin_train_data.P==1)]),
print len(fin_train_data[(fin_train_data.I==0) & (fin_train_data.J==0)]),len(fin_train_data[(fin_train_data.I==0) & (fin_train_data.J==0) & (fin_train_data.P==0)])
print len(fin_train_data[(fin_train_data.I==0) & (fin_train_data.J==1)]),len(fin_train_data[(fin_train_data.I==0) & (fin_train_data.J==1) & (fin_train_data.P==0)])
print len(fin_test_data[(fin_test_data.I==1) & (fin_test_data.J==0)]),len(fin_test_data)
fin_train_data = fin_train_data[(fin_train_data.I==1) & (fin_train_data.J==0)]
from sklearn.utils import shuffle
fin_train_data= shuffle(fin_train_data)
X=fin_train_data[trncols]
Y=fin_train_data[testcols]
rfc=GradientBoostingClassifier(n_estimators=30)
#rfc=LogisticRegression()
rfc=LinearRegression()
#rfc=MultinomialNB()
kf=KFold(n_splits=5)
lo = LeaveOneOut()
accs=cross_val_score(rfc,X,Y,cv=kf)
accslo=cross_val_score(rfc,X,Y,cv=lo)
#print np.mean(accs),np.mean(accslo)
rfc.fit(X,Y)
#print rfc.score(X,Y)
#print rfc.predict(X)<0.5
rsss = pd.DataFrame((Y==0)==(rfc.predict(X)<0.5))
#print rsss[rsss.P==True]
# asnls=[]
#
# orans=y.P.tolist()
# x=x.reset_index(xrange(len(y)))
#
# for i in xrange(len(x)):
# if x.I.iloc[i]==0 and x.J.iloc[i]==0:
# asnls.append(1)
# if x.I.iloc[i]==1 and x.J.iloc[i]==1:
# asnls.append(1)
# if x.I.iloc[i]==0 and x.J.iloc[i]==1:
# asnls.append(1)
# if x.I.iloc[i]==1 and x.J.iloc[i]==0:
# asnls.append(orans[i])
# i+=1
#
# res=0
# for a,b in zip(asnls,orans):
# res+=np.abs(a-b)
# print res/len(orans)
fintestindex=fin_test_data.index
for e in fintestindex:
if (fin_test_data['I'][e]==1) and (fin_test_data['J'][e]==1):
fin_test_data['P'][e]=0
if (fin_test_data['I'][e]==0) and (fin_test_data['J'][e]==0):
fin_test_data['P'][e]=1
if (fin_test_data['I'][e]==0) and (fin_test_data['J'][e]==1):
fin_test_data['P'][e]=1
# if (fin_test_data['I'][e]==1) and (fin_test_data['J'][e]==0):
# fin_test_data['P']=0
print fin_test_data.P
remaining=fin_test_data[fin_test_data.P.isnull()]
remainingans =rfc.predict(remaining[trncols])>0.5
fin_test_data[fin_test_data.P.isnull()]['P'][:]=np.reshape(remainingans.astype(int),(len(remainingans)))
fin_test_data[fin_test_data.P.isnull()]['P'][:]=1
print fin_test_data[fin_test_data.P.isnull()]['P'][:]
#print fin_test_data.P
final = pd.DataFrame()
final['id']=fin_test_data.id
# #final['P']=pd.to_numeric(rfc.predict(fin_test_data[trncols]),downcast='signed')
# final['P']=rfc.predict(fin_test_data[trncols]).astype(int)
# final.to_csv('../data/final.csv',index=False) | 34.138889 | 300 | 0.682832 |
3daf789bd0a2214d01837395979045b5721435c8 | 16,895 | py | Python | qf_lib/backtesting/order/order_factory.py | webclinic017/qf-lib | 96463876719bba8a76c8269cef76addf3a2d836d | [
"Apache-2.0"
] | 198 | 2019-08-16T15:09:23.000Z | 2022-03-30T12:44:00.000Z | qf_lib/backtesting/order/order_factory.py | webclinic017/qf-lib | 96463876719bba8a76c8269cef76addf3a2d836d | [
"Apache-2.0"
] | 13 | 2021-01-07T10:15:19.000Z | 2022-03-29T13:01:47.000Z | qf_lib/backtesting/order/order_factory.py | webclinic017/qf-lib | 96463876719bba8a76c8269cef76addf3a2d836d | [
"Apache-2.0"
] | 29 | 2019-08-16T15:21:28.000Z | 2022-02-23T09:53:49.000Z | # Copyright 2016-present CERN European Organization for Nuclear Research
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
from typing import Mapping, Dict, List
from qf_lib.backtesting.broker.broker import Broker
from qf_lib.backtesting.contract.contract import Contract
from qf_lib.backtesting.contract.contract_to_ticker_conversion.base import ContractTickerMapper
from qf_lib.backtesting.order.execution_style import ExecutionStyle
from qf_lib.backtesting.order.order import Order
from qf_lib.backtesting.order.time_in_force import TimeInForce
from qf_lib.common.enums.frequency import Frequency
from qf_lib.common.utils.logging.qf_parent_logger import qf_logger
from qf_lib.common.utils.miscellaneous.function_name import get_function_name
from qf_lib.data_providers.data_provider import DataProvider
| 48.409742 | 123 | 0.674223 |
3db22ed381d2b08ee0407932f289e02567c77fca | 1,268 | py | Python | src/test_network3.py | chansonzhang/FirstDL | 41ad7def19c42882f0418fe44ce395f7b5492f36 | [
"Apache-2.0"
] | null | null | null | src/test_network3.py | chansonzhang/FirstDL | 41ad7def19c42882f0418fe44ce395f7b5492f36 | [
"Apache-2.0"
] | null | null | null | src/test_network3.py | chansonzhang/FirstDL | 41ad7def19c42882f0418fe44ce395f7b5492f36 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2018 Zhang, Chen. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# @Time : 3/12/2019 20:18
# @Author : Zhang, Chen (chansonzhang)
# @Email : ZhangChen.Shaanxi@gmail.com
# @FileName: test_network3.py
import network3
from network3 import Network
from network3 import ConvPoolLayer, FullyConnectedLayer, SoftmaxLayer
training_data, validation_data, test_data = network3.load_data_shared()
mini_batch_size = 10
net = Network([
FullyConnectedLayer(n_in=784, n_out=100),
SoftmaxLayer(n_in=100, n_out=10)], mini_batch_size)
net.SGD(training_data, 60, mini_batch_size, 0.1,
validation_data, test_data) | 42.266667 | 80 | 0.69795 |
3db26a9a64ef3907fd6d3bfdd43c6b7c844f6a0f | 303 | py | Python | mood_sense/serializers.py | D-Denysenko/health-app | 18d1e9c492fb00694e1987a6cdaa2197ff4efa11 | [
"MIT"
] | null | null | null | mood_sense/serializers.py | D-Denysenko/health-app | 18d1e9c492fb00694e1987a6cdaa2197ff4efa11 | [
"MIT"
] | 9 | 2021-03-19T08:05:00.000Z | 2022-03-12T00:15:53.000Z | mood_sense/serializers.py | D-Denysenko/health-app | 18d1e9c492fb00694e1987a6cdaa2197ff4efa11 | [
"MIT"
] | null | null | null | from rest_framework import serializers
from .models import Mood
| 23.307692 | 92 | 0.686469 |
3db6b1a2ad7d586c5f66023f21c351a35d9fd997 | 7,604 | py | Python | Appserver/Test/ApiUnitTesting/testBusquedaCandidatos.py | seguijoaquin/taller2 | f41232516de15fe045805131b09299e5c2634e5e | [
"MIT"
] | 2 | 2016-06-06T03:26:49.000Z | 2017-08-06T18:12:33.000Z | Appserver/Test/ApiUnitTesting/testBusquedaCandidatos.py | seguijoaquin/taller2 | f41232516de15fe045805131b09299e5c2634e5e | [
"MIT"
] | 60 | 2016-03-19T16:01:27.000Z | 2016-06-23T16:26:10.000Z | Appserver/Test/ApiUnitTesting/testBusquedaCandidatos.py | seguijoaquin/taller2 | f41232516de15fe045805131b09299e5c2634e5e | [
"MIT"
] | null | null | null | import json
import requests
import unittest
import Utilities
# Precondiciones:
# Intereses:
# No debe haber ningun usuario en el Shared que tenga "interesUnico"
#
Address = "http://localhost:8000"
#Tal vez mandar las URIs a sus respectivas clases
URIResgistro = "/registro"
URILogin = "/login"
URIPedirCandidato = "/perfil"
URIEliminar = "/eliminar"
| 43.451429 | 233 | 0.768017 |
3db6b5d6bbd126263b54d30034f80a8d201b13af | 3,639 | py | Python | scripts/plots/yearly_summary.py | jarad/dep | fe73982f4c70039e1a31b9e8e2d9aac31502f803 | [
"MIT"
] | 1 | 2019-11-26T17:49:19.000Z | 2019-11-26T17:49:19.000Z | scripts/plots/yearly_summary.py | jarad/dep | fe73982f4c70039e1a31b9e8e2d9aac31502f803 | [
"MIT"
] | 54 | 2018-12-12T18:02:31.000Z | 2022-03-28T19:14:25.000Z | scripts/plots/yearly_summary.py | jarad/dep | fe73982f4c70039e1a31b9e8e2d9aac31502f803 | [
"MIT"
] | 4 | 2020-03-02T22:59:38.000Z | 2021-12-09T15:49:00.000Z | import datetime
import cStringIO
import psycopg2
from shapely.wkb import loads
import numpy as np
import sys
from geopandas import read_postgis
import matplotlib
matplotlib.use("agg")
from pyiem.plot import MapPlot
import matplotlib.pyplot as plt
from matplotlib.patches import Polygon
from matplotlib.collections import PatchCollection
import matplotlib.colors as mpcolors
import cartopy.crs as ccrs
import cartopy.feature as cfeature
from pyiem.util import get_dbconn
V2NAME = {
"avg_loss": "Detachment",
"qc_precip": "Precipitation",
"avg_delivery": "Delivery",
"avg_runoff": "Runoff",
}
V2MULTI = {
"avg_loss": 4.463,
"qc_precip": 1.0 / 25.4,
"avg_delivery": 4.463,
"avg_runoff": 1.0 / 25.4,
}
V2UNITS = {
"avg_loss": "tons/acre",
"qc_precip": "inches",
"avg_delivery": "tons/acre",
"avg_runoff": "inches",
}
V2RAMP = {
"avg_loss": [0, 2.5, 5, 10, 20, 40, 60],
"qc_precip": [15, 25, 35, 45, 55],
"avg_delivery": [0, 2.5, 5, 10, 20, 40, 60],
"avg_runoff": [0, 2.5, 5, 10, 15, 30],
}
year = int(sys.argv[1])
v = sys.argv[2]
ts = datetime.date(year, 1, 1)
ts2 = datetime.date(year, 12, 31)
scenario = 0
# suggested for runoff and precip
if v in ["qc_precip", "avg_runoff"]:
c = ["#ffffa6", "#9cf26d", "#76cc94", "#6399ba", "#5558a1"]
# suggested for detachment
elif v in ["avg_loss"]:
c = ["#cbe3bb", "#c4ff4d", "#ffff4d", "#ffc44d", "#ff4d4d", "#c34dee"]
# suggested for delivery
elif v in ["avg_delivery"]:
c = ["#ffffd2", "#ffff4d", "#ffe0a5", "#eeb74d", "#ba7c57", "#96504d"]
cmap = mpcolors.ListedColormap(c, "james")
cmap.set_under("white")
cmap.set_over("black")
pgconn = get_dbconn("idep")
cursor = pgconn.cursor()
title = "for %s" % (ts.strftime("%-d %B %Y"),)
if ts != ts2:
title = "for period between %s and %s" % (
ts.strftime("%-d %b %Y"),
ts2.strftime("%-d %b %Y"),
)
m = MapPlot(
axisbg="#EEEEEE",
nologo=True,
sector="iowa",
nocaption=True,
title="DEP %s %s" % (V2NAME[v], title),
caption="Daily Erosion Project",
)
# Check that we have data for this date!
cursor.execute(
"""
SELECT value from properties where key = 'last_date_0'
"""
)
lastts = datetime.datetime.strptime(cursor.fetchone()[0], "%Y-%m-%d")
floor = datetime.date(2007, 1, 1)
df = read_postgis(
"""
WITH data as (
SELECT huc_12,
sum("""
+ v
+ """) as d from results_by_huc12
WHERE scenario = %s and valid >= %s and valid <= %s
GROUP by huc_12)
SELECT ST_Transform(simple_geom, 4326) as geo, coalesce(d.d, 0) as data
from huc12 i LEFT JOIN data d
ON (i.huc_12 = d.huc_12) WHERE i.scenario = %s and i.states ~* 'IA'
""",
pgconn,
params=(scenario, ts, ts2, scenario),
geom_col="geo",
index_col=None,
)
df["data"] = df["data"] * V2MULTI[v]
if df["data"].max() < 0.01:
bins = [0.01, 0.02, 0.03, 0.04, 0.05]
else:
bins = V2RAMP[v]
norm = mpcolors.BoundaryNorm(bins, cmap.N)
patches = []
# m.ax.add_geometries(df['geo'], ccrs.PlateCarree())
for i, row in df.iterrows():
c = cmap(norm([row["data"]]))[0]
arr = np.asarray(row["geo"].exterior)
points = m.ax.projection.transform_points(
ccrs.Geodetic(), arr[:, 0], arr[:, 1]
)
p = Polygon(points[:, :2], fc=c, ec="k", zorder=2, lw=0.1)
m.ax.add_patch(p)
# m.ax.add_collection(PatchCollection(patches, match_original=True))
m.drawcounties()
m.drawcities()
lbl = [round(_, 2) for _ in bins]
u = "%s, Avg: %.2f" % (V2UNITS[v], df["data"].mean())
m.draw_colorbar(
bins,
cmap,
norm,
clevlabels=lbl,
title="%s :: %s" % (V2NAME[v], V2UNITS[v]),
)
plt.savefig("%s_%s.png" % (year, v))
| 25.992857 | 74 | 0.622424 |
3db72a55f192a9c9ab68f0478ca0ffc316b36c78 | 1,053 | py | Python | package/diana/utils/iter_dates.py | thomasyi17/diana2 | 2167053dfe15b782d96cb1e695047433f302d4dd | [
"MIT"
] | 15 | 2019-02-12T23:26:09.000Z | 2021-12-21T08:53:58.000Z | package/diana/utils/iter_dates.py | thomasyi17/diana2 | 2167053dfe15b782d96cb1e695047433f302d4dd | [
"MIT"
] | 2 | 2019-01-23T21:13:12.000Z | 2019-06-28T15:45:51.000Z | package/diana/utils/iter_dates.py | thomasyi17/diana2 | 2167053dfe15b782d96cb1e695047433f302d4dd | [
"MIT"
] | 6 | 2019-01-23T20:22:50.000Z | 2022-02-03T03:27:04.000Z | from datetime import datetime, timedelta
| 26.325 | 79 | 0.624881 |
3db739475a32d4a4cd03afcbff8864712c35cad0 | 193 | py | Python | Exercicios Curso Em Video Mundo 2/ex067.py | JorgeTranin/Python_Curso_Em_Video | be74c9301aafc055bdf883be649cb8b7716617e3 | [
"MIT"
] | null | null | null | Exercicios Curso Em Video Mundo 2/ex067.py | JorgeTranin/Python_Curso_Em_Video | be74c9301aafc055bdf883be649cb8b7716617e3 | [
"MIT"
] | null | null | null | Exercicios Curso Em Video Mundo 2/ex067.py | JorgeTranin/Python_Curso_Em_Video | be74c9301aafc055bdf883be649cb8b7716617e3 | [
"MIT"
] | null | null | null | cont = 1
while True:
t = int(input('Quer saber a tabuada de que numero ? '))
if t < 0:
break
for c in range (1, 11):
print(f'{t} X {c} = {t * c}')
print('Obrigado!') | 24.125 | 59 | 0.507772 |
3db86f3d8bdc658afbe080624e5b8f952805ce4b | 1,172 | py | Python | src/PassGen/PassGen.py | Natthapolmnc/PasswordGenerator | 1d481de1b4773af99558c68e9570d1801c1f6e2e | [
"MIT"
] | null | null | null | src/PassGen/PassGen.py | Natthapolmnc/PasswordGenerator | 1d481de1b4773af99558c68e9570d1801c1f6e2e | [
"MIT"
] | null | null | null | src/PassGen/PassGen.py | Natthapolmnc/PasswordGenerator | 1d481de1b4773af99558c68e9570d1801c1f6e2e | [
"MIT"
] | null | null | null | import random as rd
| 35.515152 | 59 | 0.617747 |
3db8e72e1423808652d32817702cb2ec2246d0ea | 5,413 | py | Python | services/offers_service.py | martinmladenov/RankingBot | 1df4e37b4b9a68b3f553b2f55acc77663163be1b | [
"MIT"
] | 2 | 2020-06-03T20:19:33.000Z | 2021-04-29T08:05:09.000Z | services/offers_service.py | martinmladenov/RankingBot | 1df4e37b4b9a68b3f553b2f55acc77663163be1b | [
"MIT"
] | 41 | 2020-06-09T11:11:37.000Z | 2022-03-20T21:18:42.000Z | services/offers_service.py | martinmladenov/RankingBot | 1df4e37b4b9a68b3f553b2f55acc77663163be1b | [
"MIT"
] | 9 | 2020-05-27T19:04:55.000Z | 2021-11-01T12:57:55.000Z | from datetime import date, datetime, timedelta
from matplotlib import pyplot as plt, dates as mdates
from matplotlib.ticker import MaxNLocator
from helpers import programmes_helper
filename = 'offers.png'
| 41.320611 | 114 | 0.585258 |
3db9d9cd9e40d9cc018a319420be1ba7e9abac3d | 11,397 | py | Python | lib/python3.8/site-packages/ansible_collections/community/postgresql/plugins/modules/postgresql_user_obj_stat_info.py | cjsteel/python3-venv-ansible-2.10.5 | c95395c4cae844dc66fddde9b4343966f4b2ecd5 | [
"Apache-1.1"
] | null | null | null | lib/python3.8/site-packages/ansible_collections/community/postgresql/plugins/modules/postgresql_user_obj_stat_info.py | cjsteel/python3-venv-ansible-2.10.5 | c95395c4cae844dc66fddde9b4343966f4b2ecd5 | [
"Apache-1.1"
] | null | null | null | lib/python3.8/site-packages/ansible_collections/community/postgresql/plugins/modules/postgresql_user_obj_stat_info.py | cjsteel/python3-venv-ansible-2.10.5 | c95395c4cae844dc66fddde9b4343966f4b2ecd5 | [
"Apache-1.1"
] | null | null | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2020, Andrew Klychkov (@Andersson007) <aaklychkov@mail.ru>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = r'''
---
module: postgresql_user_obj_stat_info
short_description: Gather statistics about PostgreSQL user objects
description:
- Gathers statistics about PostgreSQL user objects.
version_added: '0.2.0'
options:
filter:
description:
- Limit the collected information by comma separated string or YAML list.
- Allowable values are C(functions), C(indexes), C(tables).
- By default, collects all subsets.
- Unsupported values are ignored.
type: list
elements: str
schema:
description:
- Restrict the output by certain schema.
type: str
db:
description:
- Name of database to connect.
type: str
aliases:
- login_db
session_role:
description:
- Switch to session_role after connecting. The specified session_role must
be a role that the current login_user is a member of.
- Permissions checking for SQL commands is carried out as though
the session_role were the one that had logged in originally.
type: str
trust_input:
description:
- If C(no), check the value of I(session_role) is potentially dangerous.
- It makes sense to use C(no) only when SQL injections via I(session_role) are possible.
type: bool
default: yes
version_added: '0.2.0'
notes:
- C(size) and C(total_size) returned values are presented in bytes.
- For tracking function statistics the PostgreSQL C(track_functions) parameter must be enabled.
See U(https://www.postgresql.org/docs/current/runtime-config-statistics.html) for more information.
seealso:
- module: community.postgresql.postgresql_info
- module: community.postgresql.postgresql_ping
- name: PostgreSQL statistics collector reference
description: Complete reference of the PostgreSQL statistics collector documentation.
link: https://www.postgresql.org/docs/current/monitoring-stats.html
author:
- Andrew Klychkov (@Andersson007)
- Thomas O'Donnell (@andytom)
extends_documentation_fragment:
- community.postgresql.postgres
'''
EXAMPLES = r'''
- name: Collect information about all supported user objects of the acme database
community.postgresql.postgresql_user_obj_stat_info:
db: acme
- name: Collect information about all supported user objects in the custom schema of the acme database
community.postgresql.postgresql_user_obj_stat_info:
db: acme
schema: custom
- name: Collect information about user tables and indexes in the acme database
community.postgresql.postgresql_user_obj_stat_info:
db: acme
filter: tables, indexes
'''
RETURN = r'''
indexes:
description: User index statistics
returned: always
type: dict
sample: {"public": {"test_id_idx": {"idx_scan": 0, "idx_tup_fetch": 0, "idx_tup_read": 0, "relname": "test", "size": 8192, ...}}}
tables:
description: User table statistics.
returned: always
type: dict
sample: {"public": {"test": {"analyze_count": 3, "n_dead_tup": 0, "n_live_tup": 0, "seq_scan": 2, "size": 0, "total_size": 8192, ...}}}
functions:
description: User function statistics.
returned: always
type: dict
sample: {"public": {"inc": {"calls": 1, "funcid": 26722, "self_time": 0.23, "total_time": 0.23}}}
'''
try:
from psycopg2.extras import DictCursor
except ImportError:
# psycopg2 is checked by connect_to_db()
# from ansible.module_utils.postgres
pass
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.community.postgresql.plugins.module_utils.database import (
check_input,
)
from ansible_collections.community.postgresql.plugins.module_utils.postgres import (
connect_to_db,
exec_sql,
get_conn_params,
postgres_common_argument_spec,
)
from ansible.module_utils.six import iteritems
# ===========================================
# PostgreSQL module specific support methods.
#
# ===========================================
# Module execution.
#
def main():
argument_spec = postgres_common_argument_spec()
argument_spec.update(
db=dict(type='str', aliases=['login_db']),
filter=dict(type='list', elements='str'),
session_role=dict(type='str'),
schema=dict(type='str'),
trust_input=dict(type="bool", default=True),
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
)
filter_ = module.params["filter"]
schema = module.params["schema"]
if not module.params["trust_input"]:
check_input(module, module.params['session_role'])
# Connect to DB and make cursor object:
pg_conn_params = get_conn_params(module, module.params)
# We don't need to commit anything, so, set it to False:
db_connection = connect_to_db(module, pg_conn_params, autocommit=False)
cursor = db_connection.cursor(cursor_factory=DictCursor)
############################
# Create object and do work:
pg_obj_info = PgUserObjStatInfo(module, cursor)
info_dict = pg_obj_info.collect(filter_, schema)
# Clean up:
cursor.close()
db_connection.close()
# Return information:
module.exit_json(**info_dict)
if __name__ == '__main__':
main()
| 33.919643 | 137 | 0.623761 |
3dbac19444fd45965d236a4f1e5266c9a002aefd | 1,586 | py | Python | lib/run_config.py | king/s3vdc | baa6689a6344f417758d4d8b4e6c6e966a510b32 | [
"MIT"
] | 10 | 2020-05-28T07:09:02.000Z | 2021-04-18T07:38:01.000Z | lib/run_config.py | king/s3vdc | baa6689a6344f417758d4d8b4e6c6e966a510b32 | [
"MIT"
] | 4 | 2020-11-13T18:51:09.000Z | 2022-02-10T01:58:16.000Z | lib/run_config.py | king/s3vdc | baa6689a6344f417758d4d8b4e6c6e966a510b32 | [
"MIT"
] | 4 | 2020-05-29T05:05:18.000Z | 2021-04-22T01:33:17.000Z | """
Copyright (C) king.com Ltd 2019
https://github.com/king/s3vdc
License: MIT, https://raw.github.com/king/s3vdc/LICENSE.md
"""
import tensorflow as tf
def _session_config() -> tf.ConfigProto:
"""Constructs a session config specifying gpu memory usage.
Returns:
tf.ConfigProto -- session config.
"""
gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.95, allow_growth=True)
session_config = tf.ConfigProto(allow_soft_placement=True, gpu_options=gpu_options)
return session_config
def default_run_config(
model_dir: str,
save_summary_steps: int = 100,
save_checkpoints_mins: int = 5,
keep_checkpoint_max: int = 5,
) -> tf.estimator.RunConfig:
"""Constructs a tf.contrib.learn.RunConfig instance with the specified model dir and default values.
Arguments:
model_dir {str} -- The model directory to save checkpoints, summary outputs etc.
Keyword Arguments:
save_summary_steps {int} -- save summary every x steps (default: {100})
save_checkpoints_mins {int} -- save checkpoints every x steps (default: {5})
keep_checkpoint_max {int} -- keep maximum x checkpoints (default: {5})
Returns:
tf.estimator.RunConfig -- The constructed RunConfig.
"""
return tf.estimator.RunConfig(
model_dir=model_dir,
save_summary_steps=save_summary_steps,
save_checkpoints_steps=None,
save_checkpoints_secs=save_checkpoints_mins * 60, # seconds
keep_checkpoint_max=keep_checkpoint_max,
session_config=_session_config(),
)
| 31.098039 | 104 | 0.708071 |
3dbaf6caeb51e514bda230b2abe9f5f3e8537dce | 974 | py | Python | tests/test_address_book.py | kibernick/pycontacts | 9ec7653cdea582b242a6d5f314b4d0c4bb92dd39 | [
"MIT"
] | null | null | null | tests/test_address_book.py | kibernick/pycontacts | 9ec7653cdea582b242a6d5f314b4d0c4bb92dd39 | [
"MIT"
] | null | null | null | tests/test_address_book.py | kibernick/pycontacts | 9ec7653cdea582b242a6d5f314b4d0c4bb92dd39 | [
"MIT"
] | null | null | null | from pycontacts import AddressBook
from pycontacts.models import Person
from pycontacts.managers import (
EmailAddressManager,
GroupManager,
PhoneNumberManager,
PersonManager,
StreetAddressManager,
)
| 29.515152 | 74 | 0.776181 |
3dbc71f9f330f9191f0001053d461bd694f61316 | 46,266 | py | Python | lifeloopweb/db/models.py | jaimecruz21/lifeloopweb | ba0ffe1ea94ba3323a4e9c66c9506a338cae3212 | [
"MIT"
] | null | null | null | lifeloopweb/db/models.py | jaimecruz21/lifeloopweb | ba0ffe1ea94ba3323a4e9c66c9506a338cae3212 | [
"MIT"
] | null | null | null | lifeloopweb/db/models.py | jaimecruz21/lifeloopweb | ba0ffe1ea94ba3323a4e9c66c9506a338cae3212 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# pylint: disable=no-value-for-parameter,too-many-nested-blocks
import contextlib
import datetime
import functools
import re
from abc import abstractmethod
import sqlalchemy as sa
from sqlalchemy import event, exc, func, select
from sqlalchemy.ext import declarative
from sqlalchemy.ext import hybrid
from sqlalchemy import orm
import sqlalchemy_utils
from lifeloopweb import config, constants, exception, logging, renders, subscription
from lifeloopweb.db import utils as db_utils
from lifeloopweb.webpack import webpack
from lifeloopweb.helpers.base_helper import Helper
from flask_login import UserMixin
LOG = logging.get_logger(__name__)
CONF = config.CONF
helper = Helper()
TABLE_KWARGS = {"mysql_engine": "InnoDB",
"mysql_charset": "utf8",
"mysql_collate": "utf8_general_ci"}
DB_NAME = "lifeloopweb_{}".format(CONF.get("ENVIRONMENT"))
# TODO(mdietz): when this comes from a configuration, we need to
# force the charset to utf8
ENGINE_URL = CONF.get("DB_ENGINE_URL")
if not ENGINE_URL:
ENGINE_URL = ("mysql+pymysql://root:@127.0.0.1/"
"{}?charset=utf8".format(DB_NAME))
connection_debug = CONF.get("database.connection.debug")
if connection_debug.lower() not in ["true", "false"]:
raise exception.InvalidConfigValue(value=connection_debug,
key="database.connection.debug")
connection_debug = connection_debug.lower() == "true"
connection_pool_size = int(CONF.get("database.connection.poolsize"))
connection_overflow_pool = int(CONF.get("database.connection.overflowpool"))
# NOTE: MySQL defaults to 8 hour connection timeouts. It's possible that
# docker-compose or our hosting provider will sever connections sooner.
# if we see "MySQL has gone away" tweaking this variable is the thing
# to revisit
connection_pool_recycle = int(CONF.get("database.connection.poolrecycle"))
engine_kwargs = {}
if "sqlite" not in ENGINE_URL:
engine_kwargs = {
"pool_size": connection_pool_size,
"max_overflow": connection_overflow_pool,
"pool_recycle": connection_pool_recycle}
engine = sa.create_engine(ENGINE_URL, echo=connection_debug,
**engine_kwargs)
SessionFactory = orm.sessionmaker(bind=engine, expire_on_commit=False,
autocommit=False, autoflush=True)
# TODO use of the scoped session needs to be evaluated against
# greenthreading servers like gunicorn and uwsgi. The scope
# by default is to thread local, as in threading.local
# and not the greenthread specifically. Things that use greenthreads
# have to be gt aware, so really we may just do Scoped and Unscoped
# sessions. Alternatively, we hack eventlet to attach the scope there
# http://docs.sqlalchemy.org/en/latest/orm/contextual.html#using-custom-created-scopes
ScopedSession = orm.scoped_session(SessionFactory)
Session = ScopedSession
# TODO We may only want to do this conditionally. I've used it in the past
# but I think the pool_recycling may be enough
def teardown():
ScopedSession.remove()
def can_connect():
try:
engine.connect()
return True
except Exception:
return False
class MetaBase(declarative.DeclarativeMeta):
Base = declarative.declarative_base(cls=ModelBase, bind=engine,
metaclass=MetaBase)
# pylint: disable=abstract-method,unused-argument
# TODO This parent class may not allow NULL to go into a UUID field :-|
def org_notifications(self, org_id):
return (n for n in self.notifications if
n.org_id is org_id)
# NOTE: this fails as soon as we allow a user to have more than one
# role in an organization
# NOTE: this fails as soon as we allow a user to have more than one
# role in an group
class LinkType(Base, HasId):
description = sa.Column(sa.String(200), nullable=False)
priority = sa.Column(sa.Integer(), nullable=True)
link = orm.relationship('Link', backref='link_type')
class Link(Base, HasId):
link_type_id = sa.Column(GUID(), sa.ForeignKey("link_types.id"))
icon_css_class = sa.Column(sa.String(120))
organization_id = sa.Column(GUID(), sa.ForeignKey("organizations.id"), nullable=True)
group_id = sa.Column(GUID(), sa.ForeignKey("groups.id"), nullable=True)
url = sa.Column(sa.String(250), nullable=False)
# TODO If these represent permissions, we can probably do this better, globally
def __str__(self):
return self.name
def public_groups(self):
return [g for g in self.groups
if g.privacy_settings.description.lower()
.startswith('public')]
| 37.371567 | 99 | 0.626162 |
3dbe95131f682ae91ac5d0ab7098a4da9541c391 | 267 | py | Python | gc_win1.py | danz2004/learning_python | 20cb7d33f898bcc406f33565308132dca31e11cd | [
"MIT"
] | null | null | null | gc_win1.py | danz2004/learning_python | 20cb7d33f898bcc406f33565308132dca31e11cd | [
"MIT"
] | null | null | null | gc_win1.py | danz2004/learning_python | 20cb7d33f898bcc406f33565308132dca31e11cd | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
seq = 'ACGACGCAGGAGGAGAGTTTCAGAGATCACGAATACATCCATATTACCCAGAGAGAG'
w = 11
for i in range(len(seq) - w + 1):
count = 0
for j in range(i, i + w):
if seq[j] == 'G' or seq[j] == 'C':
count += 1
print(f'{i} {seq[i:i+w]} {(count / w) : .4f}')
| 26.7 | 65 | 0.595506 |
3dbf87737162b90ca8a50c6b75c42c1a4829f712 | 6,159 | py | Python | test/test_auth.py | tjones-commits/server-client-python | b9309fb79564de9f28196b929ee77b0e77a8f504 | [
"CC0-1.0",
"MIT"
] | 470 | 2016-09-14T23:38:48.000Z | 2022-03-31T07:59:53.000Z | test/test_auth.py | jorwoods/server-client-python | fefd6f18d8a6617829c6323879d2c3ed77a4cda6 | [
"CC0-1.0",
"MIT"
] | 772 | 2016-09-09T18:15:44.000Z | 2022-03-31T22:01:08.000Z | test/test_auth.py | jorwoods/server-client-python | fefd6f18d8a6617829c6323879d2c3ed77a4cda6 | [
"CC0-1.0",
"MIT"
] | 346 | 2016-09-10T00:05:00.000Z | 2022-03-30T18:55:47.000Z | import unittest
import os.path
import requests_mock
import tableauserverclient as TSC
TEST_ASSET_DIR = os.path.join(os.path.dirname(__file__), 'assets')
SIGN_IN_XML = os.path.join(TEST_ASSET_DIR, 'auth_sign_in.xml')
SIGN_IN_IMPERSONATE_XML = os.path.join(TEST_ASSET_DIR, 'auth_sign_in_impersonate.xml')
SIGN_IN_ERROR_XML = os.path.join(TEST_ASSET_DIR, 'auth_sign_in_error.xml')
| 49.272 | 117 | 0.664069 |
3dbfa17a77ec527273235935d102cd0d8f5bcbb2 | 7,991 | py | Python | gym_flock/envs/old/flocking.py | katetolstaya/gym-flock | 3236d1dafcb1b9be0cf78b471672e8becb2d37af | [
"MIT"
] | 19 | 2019-07-29T22:19:58.000Z | 2022-01-27T04:38:38.000Z | gym_flock/envs/old/flocking.py | henghenghahei849/gym-flock | b09bdfbbe4a96fe052958d1f9e1e9dd314f58419 | [
"MIT"
] | null | null | null | gym_flock/envs/old/flocking.py | henghenghahei849/gym-flock | b09bdfbbe4a96fe052958d1f9e1e9dd314f58419 | [
"MIT"
] | 5 | 2019-10-03T14:44:49.000Z | 2021-12-09T20:39:39.000Z | import gym
from gym import spaces, error, utils
from gym.utils import seeding
import numpy as np
import configparser
from os import path
import matplotlib.pyplot as plt
from matplotlib.pyplot import gca
font = {'family': 'sans-serif',
'weight': 'bold',
'size': 14}
| 36.99537 | 134 | 0.585659 |
3dc00d2a0bc2efe282c87c91e5370202da55e278 | 3,010 | py | Python | dataPipelines/gc_scrapy/gc_scrapy/spiders/army_reserve_spider.py | ekmixon/gamechanger-crawlers | 60a0cf20338fb3dc134eec117bccd519cede9288 | [
"MIT"
] | 8 | 2021-05-20T18:39:35.000Z | 2022-02-25T23:24:21.000Z | dataPipelines/gc_scrapy/gc_scrapy/spiders/army_reserve_spider.py | dod-advana/gamechanger-crawlers | e0113111a39f78bd13f70fa4b3359a688f7dc6e8 | [
"MIT"
] | 4 | 2021-06-14T13:46:46.000Z | 2022-03-02T02:01:49.000Z | dataPipelines/gc_scrapy/gc_scrapy/spiders/army_reserve_spider.py | ekmixon/gamechanger-crawlers | 60a0cf20338fb3dc134eec117bccd519cede9288 | [
"MIT"
] | 4 | 2021-06-30T22:18:52.000Z | 2021-11-17T22:43:27.000Z | import scrapy
import re
from urllib.parse import urljoin, urlencode, parse_qs
from dataPipelines.gc_scrapy.gc_scrapy.items import DocItem
from dataPipelines.gc_scrapy.gc_scrapy.GCSpider import GCSpider
type_and_num_regex = re.compile(r"([a-zA-Z].*) (\d.*)")
| 34.597701 | 94 | 0.571429 |
3dc01664c6a8e4d90955ec90294ebb0c1cb73629 | 4,036 | py | Python | lbrynet/daemon/Publisher.py | Invariant-Change/lbry | 2ddd6b051d4457f0d747428e3d97aa37839f3c93 | [
"MIT"
] | null | null | null | lbrynet/daemon/Publisher.py | Invariant-Change/lbry | 2ddd6b051d4457f0d747428e3d97aa37839f3c93 | [
"MIT"
] | null | null | null | lbrynet/daemon/Publisher.py | Invariant-Change/lbry | 2ddd6b051d4457f0d747428e3d97aa37839f3c93 | [
"MIT"
] | null | null | null | import logging
import mimetypes
import os
from twisted.internet import defer
from lbrynet.core import file_utils
from lbrynet.file_manager.EncryptedFileCreator import create_lbry_file
log = logging.getLogger(__name__)
| 51.088608 | 117 | 0.631318 |
3dc0b7210fc8b7d9ca8c5c2087a4723a81de890a | 10,498 | py | Python | SAMPNet/train.py | bcmi/Image-Composition-Assessment-with-SAMP | 35c093bafdaaa98923d8ba093a73ddf0079ffbc9 | [
"MIT"
] | 27 | 2021-04-28T04:51:02.000Z | 2022-03-04T08:57:03.000Z | SAMPNet/train.py | bcmi/Image-Composition-Assessment-with-SAMP | 35c093bafdaaa98923d8ba093a73ddf0079ffbc9 | [
"MIT"
] | 4 | 2021-10-30T13:28:33.000Z | 2022-02-19T01:09:47.000Z | SAMPNet/train.py | bcmi/Image-Composition-Assessment-with-SAMP | 35c093bafdaaa98923d8ba093a73ddf0079ffbc9 | [
"MIT"
] | 3 | 2021-10-30T10:18:02.000Z | 2022-01-16T08:44:43.000Z | import sys,os
from torch.autograd import Variable
import torch.optim as optim
from tensorboardX import SummaryWriter
import torch
import time
import shutil
from torch.utils.data import DataLoader
import csv
from samp_net import EMDLoss, AttributeLoss, SAMPNet
from config import Config
from cadb_dataset import CADBDataset
from test import evaluation_on_cadb
if __name__ == '__main__':
cfg = Config()
cfg.create_path()
device = torch.device('cuda:{}'.format(cfg.gpu_id))
# evaluate(cfg)
for file in os.listdir('./'):
if file.endswith('.py'):
shutil.copy(file, cfg.exp_path)
print('Backup ', file)
model = SAMPNet(cfg)
model = model.train().to(device)
trainer = Trainer(model, cfg)
trainer.run() | 40.689922 | 130 | 0.561155 |
3dc12e0ce591217b149c51e1d38a5ca5547d4627 | 3,282 | py | Python | combine_layer.py | Lynton-Morgan/combine_layer | 93b83ed69b8201db69fff80e60e8cb2955b40cd1 | [
"MIT"
] | null | null | null | combine_layer.py | Lynton-Morgan/combine_layer | 93b83ed69b8201db69fff80e60e8cb2955b40cd1 | [
"MIT"
] | null | null | null | combine_layer.py | Lynton-Morgan/combine_layer | 93b83ed69b8201db69fff80e60e8cb2955b40cd1 | [
"MIT"
] | null | null | null | import keras
import keras.backend as K
| 33.151515 | 109 | 0.59415 |
3dc274928408de034cf930f3d624022d965d5166 | 4,308 | py | Python | src/pystage/core/_sound.py | pystage/pystage | 4a76e95f6de2df59736de17fe81219485fde1556 | [
"MIT"
] | 12 | 2021-05-20T12:49:52.000Z | 2022-01-12T02:15:33.000Z | src/pystage/core/_sound.py | pystage/pystage | 4a76e95f6de2df59736de17fe81219485fde1556 | [
"MIT"
] | 14 | 2021-05-25T09:28:33.000Z | 2021-09-10T07:54:45.000Z | src/pystage/core/_sound.py | pystage/pystage | 4a76e95f6de2df59736de17fe81219485fde1556 | [
"MIT"
] | 3 | 2021-05-25T12:58:36.000Z | 2022-02-18T04:19:21.000Z | import pygame
from pygame.mixer import music
from pystage.core.assets import SoundManager
from pystage.core._base_sprite import BaseSprite
import time
| 35.02439 | 92 | 0.668524 |
3dc364b351e4b86533cd7ac27b461f7ca088a0a9 | 2,126 | py | Python | tests/test_runner/test_discover_runner.py | tomleo/django | ebfb71c64a786620947c9d598fd1ebae2958acff | [
"BSD-3-Clause"
] | 1 | 2015-09-09T08:48:03.000Z | 2015-09-09T08:48:03.000Z | tests/test_runner/test_discover_runner.py | tomleo/django | ebfb71c64a786620947c9d598fd1ebae2958acff | [
"BSD-3-Clause"
] | null | null | null | tests/test_runner/test_discover_runner.py | tomleo/django | ebfb71c64a786620947c9d598fd1ebae2958acff | [
"BSD-3-Clause"
] | 1 | 2020-04-12T19:00:12.000Z | 2020-04-12T19:00:12.000Z | from django.test import TestCase
from django.test.runner import DiscoverRunner
| 30.811594 | 83 | 0.676388 |
3dc48feaabd6085099581154d9df3a8f76e956ee | 1,265 | py | Python | src/ggrc/rbac/__init__.py | Killswitchz/ggrc-core | 2460df94daf66727af248ad821462692917c97a9 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | src/ggrc/rbac/__init__.py | Killswitchz/ggrc-core | 2460df94daf66727af248ad821462692917c97a9 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | src/ggrc/rbac/__init__.py | Killswitchz/ggrc-core | 2460df94daf66727af248ad821462692917c97a9 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | # Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""Basic permissions module."""
from sqlalchemy import or_
def context_query_filter(context_column, contexts):
'''
Intended for use by `model.query.filter(...)`
If `contexts == None`, it's Admin (no filter), so return `True`
Else, return the full query
'''
if contexts is None:
# Admin context, no filter
return True
else:
filter_expr = None
# Handle `NULL` context specially
if None in contexts:
filter_expr = context_column.is_(None)
# We're modifying `contexts`, so copy
contexts = set(contexts)
contexts.remove(None)
if contexts:
filter_in_expr = context_column.in_(contexts)
if filter_expr is not None:
filter_expr = or_(filter_expr, filter_in_expr)
else:
filter_expr = filter_in_expr
if filter_expr is None:
# No valid contexts
return False
return filter_expr
| 25.816327 | 78 | 0.67747 |
3dc61360e96fb602ab782fcc77e9987334f638a2 | 2,075 | py | Python | buildingspy/examples/dymola/plotResult.py | Mathadon/BuildingsPy | 9b27c6f3c0e2c185d03b846de18ec818a1f10d95 | [
"BSD-3-Clause-LBNL"
] | null | null | null | buildingspy/examples/dymola/plotResult.py | Mathadon/BuildingsPy | 9b27c6f3c0e2c185d03b846de18ec818a1f10d95 | [
"BSD-3-Clause-LBNL"
] | null | null | null | buildingspy/examples/dymola/plotResult.py | Mathadon/BuildingsPy | 9b27c6f3c0e2c185d03b846de18ec818a1f10d95 | [
"BSD-3-Clause-LBNL"
] | 1 | 2022-02-16T14:04:15.000Z | 2022-02-16T14:04:15.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# import from future to make Python2 behave like Python3
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future import standard_library
standard_library.install_aliases()
from builtins import *
from io import open
# end of from future import
def main():
""" Main method that plots the results
"""
import os
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from buildingspy.io.outputfile import Reader
# Optionally, change fonts to use LaTeX fonts
# from matplotlib import rc
# rc('text', usetex=True)
# rc('font', family='serif')
# Read results
ofr1 = Reader(os.path.join("buildingspy", "examples", "dymola",
"case1", "PIDHysteresis.mat"), "dymola")
ofr2 = Reader(os.path.join("buildingspy", "examples", "dymola",
"case2", "PIDHysteresis.mat"), "dymola")
(time1, T1) = ofr1.values("cap.T")
(time1, y1) = ofr1.values("con.y")
(time2, T2) = ofr2.values("cap.T")
(time2, y2) = ofr2.values("con.y")
# Plot figure
fig = plt.figure()
ax = fig.add_subplot(211)
ax.plot(time1 / 3600, T1 - 273.15, 'r', label='$T_1$')
ax.plot(time2 / 3600, T2 - 273.15, 'b', label='$T_2$')
ax.set_xlabel('time [h]')
ax.set_ylabel(r'temperature [$^\circ$C]')
ax.set_xticks(list(range(25)))
ax.set_xlim([0, 24])
ax.legend()
ax.grid(True)
ax = fig.add_subplot(212)
ax.plot(time1 / 3600, y1, 'r', label='$y_1$')
ax.plot(time2 / 3600, y2, 'b', label='$y_2$')
ax.set_xlabel('time [h]')
ax.set_ylabel('y [-]')
ax.set_xticks(list(range(25)))
ax.set_xlim([0, 24])
ax.legend()
ax.grid(True)
# Save figure to file
plt.savefig('plot.pdf')
plt.savefig('plot.png')
# To show the plot on the screen, uncomment the line below
# plt.show()
# Main function
if __name__ == '__main__':
main()
| 27.302632 | 71 | 0.620723 |
3dc696f09fb0ebe8bc4f7011c19473f98ca4f506 | 335 | py | Python | tango_with_django_project/rango/admin.py | DADDYKIKI/tango_with_django_project | da2bbb0b7fd2d587c9af4c7ac14068678b2c38cf | [
"MIT"
] | null | null | null | tango_with_django_project/rango/admin.py | DADDYKIKI/tango_with_django_project | da2bbb0b7fd2d587c9af4c7ac14068678b2c38cf | [
"MIT"
] | null | null | null | tango_with_django_project/rango/admin.py | DADDYKIKI/tango_with_django_project | da2bbb0b7fd2d587c9af4c7ac14068678b2c38cf | [
"MIT"
] | null | null | null | from django.contrib import admin
from rango.models import Category, Page
admin.site.register(Page)
admin.site.register(Category)
| 22.333333 | 45 | 0.668657 |
3dc6d3255aa8efde45efdc9453d22aa71f26740f | 1,334 | py | Python | components/python/scripts/bootstrap_validate.py | cloudify-cosmo/cloudify-manager-blueprints | 1908c1a0615fb15cbb118335aa2f9e055b9e5779 | [
"Apache-2.0"
] | 35 | 2015-03-07T13:30:58.000Z | 2022-02-14T11:44:48.000Z | components/python/scripts/bootstrap_validate.py | cloudify-cosmo/cloudify-manager-blueprints | 1908c1a0615fb15cbb118335aa2f9e055b9e5779 | [
"Apache-2.0"
] | 101 | 2015-03-18T03:07:57.000Z | 2019-02-07T12:06:42.000Z | components/python/scripts/bootstrap_validate.py | cloudify-cosmo/cloudify-manager-blueprints | 1908c1a0615fb15cbb118335aa2f9e055b9e5779 | [
"Apache-2.0"
] | 76 | 2015-01-08T10:33:03.000Z | 2021-05-11T08:45:50.000Z | #!/usr/bin/env python
from os.path import join, dirname
from cloudify import ctx
ctx.download_resource(
join('components', 'utils.py'),
join(dirname(__file__), 'utils.py'))
import utils # NOQA
# Most images already ship with the following packages:
#
# python-setuptools
# python-backports
# python-backports-ssl_match_hostname
#
# - as they are dependencies of cloud-init, which is extremely popular.
#
# However, cloud-init is irrelevant for certain IaaS (such as vSphere) so
# images used there may not have these packages preinstalled.
#
# We're currently considering whether to include these libraries in the
# manager resources package. Until then, we only validate that they're
# preinstalled, and if not - instruct the user to install them.
missing_packages = set()
for pkg in ['python-setuptools',
'python-backports',
'python-backports-ssl_match_hostname']:
ctx.logger.info('Ensuring {0} is installed'.format(pkg))
is_installed = utils.RpmPackageHandler.is_package_installed(pkg)
if not is_installed:
missing_packages.add(pkg)
if missing_packages:
ctx.abort_operation('Prerequisite packages missing: {0}. '
'Please ensure these packages are installed and '
'try again'.format(', '.join(missing_packages)))
| 31.761905 | 73 | 0.709145 |
3dc72f281f6a609f6178afd5c15a1c8b5b592cd3 | 278 | py | Python | subdomains/gen_master_data.py | sjy5386/subshorts | d8170ee4a66989c3e852f86aa83bab6341e3aa10 | [
"MIT"
] | 3 | 2022-03-08T19:02:41.000Z | 2022-03-16T23:04:37.000Z | subdomains/gen_master_data.py | sjy5386/subshorts | d8170ee4a66989c3e852f86aa83bab6341e3aa10 | [
"MIT"
] | 5 | 2022-03-17T02:16:52.000Z | 2022-03-18T02:55:25.000Z | subdomains/gen_master_data.py | sjy5386/subshorts | d8170ee4a66989c3e852f86aa83bab6341e3aa10 | [
"MIT"
] | null | null | null | from .models import ReservedName
| 34.75 | 116 | 0.647482 |
3dc7b5b71b827c183978d2d97338bcdc701937fb | 5,180 | py | Python | promort_tools/converters/zarr_to_tiledb.py | mdrio/promort_tools | 26f1b96b27046b0480872dcf17b3be057660a51d | [
"MIT"
] | null | null | null | promort_tools/converters/zarr_to_tiledb.py | mdrio/promort_tools | 26f1b96b27046b0480872dcf17b3be057660a51d | [
"MIT"
] | null | null | null | promort_tools/converters/zarr_to_tiledb.py | mdrio/promort_tools | 26f1b96b27046b0480872dcf17b3be057660a51d | [
"MIT"
] | 2 | 2021-05-24T16:04:55.000Z | 2021-09-16T13:58:48.000Z | # Copyright (c) 2021, CRS4
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import argparse, sys, os
import zarr
import tiledb
import numpy as np
from math import ceil
from promort_tools.libs.utils.logger import get_logger, LOG_LEVELS
if __name__ == '__main__':
main(sys.argv[1:])
| 43.166667 | 107 | 0.663127 |
3dc7bf9b590e7454e8a84ae7d5b2f66655fcd2d8 | 9,121 | py | Python | rxmarbles/theme/pencil.py | enbandari/rx-marbles | b95813b5e24818eee272ab7ecf0f130510e60f39 | [
"MIT"
] | null | null | null | rxmarbles/theme/pencil.py | enbandari/rx-marbles | b95813b5e24818eee272ab7ecf0f130510e60f39 | [
"MIT"
] | null | null | null | rxmarbles/theme/pencil.py | enbandari/rx-marbles | b95813b5e24818eee272ab7ecf0f130510e60f39 | [
"MIT"
] | null | null | null | from numpy.random import random
import random
root = '''<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:cc="http://creativecommons.org/ns#"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:svg="http://www.w3.org/2000/svg"
xmlns="http://www.w3.org/2000/svg"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
width="%spx"
height="%spx"
viewBox="0 0 %s %s "
id="svg2"
version="1.1"
inkscape:version="0.91 r13725"
>
<defs
id="defs4">
<filter
style="color-interpolation-filters:sRGB;"
inkscape:label="Drop Shadow"
id="filter3443"
x="-25%%"
y="-25%%"
width="150%%"
height="150%%"
>
<feFlood
flood-opacity="0.498039"
flood-color="rgb(0,0,0)"
result="flood"
id="feFlood3445" />
<feComposite
in="flood"
in2="SourceGraphic"
operator="in"
result="composite1"
id="feComposite3447" />
<feGaussianBlur
in="composite1"
stdDeviation="3"
result="blur"
id="feGaussianBlur3449" />
<feOffset
dx="2"
dy="3"
result="offset"
id="feOffset3451" />
<feComposite
in="SourceGraphic"
in2="offset"
operator="over"
result="composite2"
id="feComposite3453" />
</filter>
<marker
inkscape:stockid="Arrow1Lend"
orient="auto"
refY="0.0"
refX="0.0"
id="Arrow1Lend"
style="overflow:visible;"
inkscape:isstock="true">
<path
d="M -3.0,0.0 L -3.0,-5.0 L -12.5,0.0 L -3.0,5.0 L -3.0,0.0 z "
style="fill-rule:evenodd;stroke:#003080;stroke-width:1pt;stroke-opacity:1;fill:#003080;fill-opacity:1"
transform="scale(0.8) rotate(180) translate(12.5,0)" />
</marker>
</defs>
%s
</svg>
'''
circ1 = '''
<g transform="translate(%s %s)">
<path
sodipodi:nodetypes="cccc"
inkscape:connector-curvature="0"
id="circle"
d="m 4.9388474,-19.439462 c 16.0642996,-0.12398 28.5596096,25.2132203 13.6726596,35.64262 -11.0573896,9.63907 -34.34364,12.39205 -40.14488,-4.43275 -5.99947,-18.2070397 12.2740204,-28.34201 25.6703704,-34.96158"
style="fill:#ffffff;fill-opacity:0.8627451;fill-rule:evenodd;stroke:#000000;stroke-width:1.42857146px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
inkscape:label="#path3567" />
<text
y="11"
x="0"
style="font-size:28px;font-family:purisa;text-align:center;text-anchor:middle;fill:#000000;"
xml:space="preserve">%s</text>
</g>
'''
circ2 = '''
<g transform="translate(%s %s)">
<path
sodipodi:nodetypes="ccc"
style="fill:#ffffff;fill-opacity:0.8627451;fill-rule:evenodd;stroke:#000000;stroke-width:1.42857158px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
d="M 1.5925919,21.477458 C 54.657578,22.391841 -4.4465257,-49.196211 -20.218549,-5.7426508 -25.112801,8.7120558 -15.351552,21.857363 2.9582607,24.135679"
id="circ2"
inkscape:connector-curvature="0"
inkscape:label="#path3569" />
<text
y="11"
x="0"
style="font-size:28px;font-family:purisa;text-align:center;text-anchor:middle;fill:#000000;"
xml:space="preserve">%s</text>
</g>
'''
circ3 = '''
<g transform="translate(%s %s)">
<path
sodipodi:nodetypes="ccccc"
inkscape:connector-curvature="0"
id="circ3"
d="M 4.0475415,-21.306002 C -11.703304,-26.547792 -23.641751,-7.9231854 -22.516473,6.1088129 -20.059942,26.830243 12.722358,33.867273 22.337406,14.863588 27.656584,4.0579388 23.204578,-8.3517124 15.784624,-16.859919 c -1.822,-3.127279 -5.336267,-5.723574 -9.3972065,-5.54123"
style="fill:#ffffff;fill-opacity:0.8627451;fill-rule:evenodd;stroke:#000000;stroke-width:1.42857158px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
inkscape:label="#path3571" />
<text
y="11"
x="0"
style="font-size:28px;font-family:purisa;text-align:center;text-anchor:middle;fill:#000000;"
xml:space="preserve">%s</text>
</g>
'''
circ4 = '''
<g transform="translate(%s %s)">
<path
style="fill:#ffffff;fill-opacity:0.8627451;fill-rule:evenodd;stroke:#000000;stroke-width:1.42857146px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
d="M 2.0536007,-17.942742 C -52.370629,-18.905944 8.2474086,56.504162 24.423439,10.730643 29.443049,-4.4957928 16.207176,-22.177911 -2.5716488,-24.577866"
id="circ5"
inkscape:connector-curvature="0"
inkscape:label="#path3433" />
<text
y="11"
x="0"
style="font-size:28px;font-family:purisa;text-align:center;text-anchor:middle;fill:#000000;"
xml:space="preserve">%s</text>
</g>
'''
arrow = '''
<g transform="scale(%s %s) translate(%s %s)">
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:2px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
d="M -0.67660398,1.4566587 C 51.393331,1.3820987 103.49025,-3.9934243 155.52767,1.1808467 c 33.34887,0.89417 67.21197,-1.95060293 99.84156,5.535708 44.03188,2.2890288 88.09651,1.698567 131.74849,-3.79605 21.2474,-0.841106 42.51228,0.139269 63.76647,-0.199798"
id="axisLine"
inkscape:connector-curvature="0"
inkscape:label="#path3511" />
</g>
<g transform="translate(%s %s)">
<path
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.42857146px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
d="m -13.085216,-10.419073 c 2.66757,0.133318 4.1293297,2.8477214 6.5645197,3.6415244 2.19618,1.483387 4.27915,3.129365 6.74184,4.165938 3.6572898,1.62997797 0.28555,4.903303 -1.90365,6.045673 -2.08841,1.84505 -3.80877,3.732465 -6.63704,4.785017 -1.8518597,0.870578 -3.6440197,1.8066886 -5.3976897,2.8506076"
id="arrow_end"
inkscape:connector-curvature="0"
inkscape:label="#path3528" />
</g>
'''
end = '''
<g>
<path d="m %s,%s -1,32"
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:4px;" />
</g>
'''
err = '''
<g id="error">
<path
inkscape:connector-curvature="0"
d="m %s,%s -34,36"
style="stroke:#000000;stroke-width:3px;" />
<path
style="stroke:#000000;stroke-width:3px;"
d="m %s,%s 36,36"
/>
</g>
'''
# this one is used for operator box
block = '''
<g transform="scale(%s %s) translate(%s %s)">
<path
style="fill:#ffffff;fill-rule:evenodd;stroke:#000000;stroke-width:1.42857146px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
d="M 3.6131775,2.4809559 C 7.7262916,27.136376 -4.8390181,67.388756 10.311791,81.793736 c 56.57601,-7.35809 113.842299,-2.82956 170.815959,-4.56434 48.9116,1.31804 98.12281,2.30369 146.89949,0.25237 36.73272,-6.08907 74.34343,-4.60865 110.81369,1.7655 26.17801,-6.87142 7.26874,-47.02276 10.85636,-67.94864 C 435.2653,-11.614984 389.13054,8.5049456 362.01772,0.90526594 300.94038,0.67314594 239.26649,2.7131859 178.67384,0.60705594 118.08119,-1.4990741 86.699905,6.8117156 57.753682,4.3549359 28.807462,1.8981559 17.816805,1.4648659 0.01403178,-4.669534"
id="operator_box"
inkscape:connector-curvature="0"
sodipodi:nodetypes="ccccccczzc"
inkscape:label="#path3549" />
</g>
<text
x="%s"
y="%s"
style="font-size:24px;font-family:purisa;text-align:center;text-anchor:middle;fill:#000000;"
xml:space="preserve">%s</text>
'''
# - this one is used for groupping
groupping_block = '''
<g >
<rect
ry="25px"
rx="25px"
y="%s"
x="%s"
width="%s"
height="%s"
style="opacity:1;fill:%s;fill-opacity:0;stroke:#000000;stroke-width:1px;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" />
</g>
'''
#==================================================
# this is the theme interface
#==================================================
| 36.338645 | 559 | 0.621642 |
3dc93ff9707b2d135f50553fa063389f067d2b73 | 803 | py | Python | awx/main/migrations/0082_v360_webhook_http_method.py | Avinesh/awx | 6310a2edd890d6062a9f6bcdeb2b46c4b876c2bf | [
"Apache-2.0"
] | 11,396 | 2017-09-07T04:56:02.000Z | 2022-03-31T13:56:17.000Z | awx/main/migrations/0082_v360_webhook_http_method.py | Avinesh/awx | 6310a2edd890d6062a9f6bcdeb2b46c4b876c2bf | [
"Apache-2.0"
] | 11,046 | 2017-09-07T09:30:46.000Z | 2022-03-31T20:28:01.000Z | awx/main/migrations/0082_v360_webhook_http_method.py | Avinesh/awx | 6310a2edd890d6062a9f6bcdeb2b46c4b876c2bf | [
"Apache-2.0"
] | 3,592 | 2017-09-07T04:14:31.000Z | 2022-03-31T23:53:09.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
| 30.884615 | 98 | 0.731009 |
3dca45f1cb27867b123a5f15fcfde334028fa3ca | 7,964 | py | Python | ogc_edr_lib/ogc_api_collection_metadata.py | eugenegesdisc/gmuedr | e8b3e5c7b8d18421d875f0f6f778a37a6d8ec3fd | [
"MIT"
] | null | null | null | ogc_edr_lib/ogc_api_collection_metadata.py | eugenegesdisc/gmuedr | e8b3e5c7b8d18421d875f0f6f778a37a6d8ec3fd | [
"MIT"
] | null | null | null | ogc_edr_lib/ogc_api_collection_metadata.py | eugenegesdisc/gmuedr | e8b3e5c7b8d18421d875f0f6f778a37a6d8ec3fd | [
"MIT"
] | null | null | null | from typing import Tuple, Union
from aiohttp import web
from ogc_edr_lib.ogc_api import OgcApi
import logging
from ogc_edr_lib.ogc_api_collection_metadata_get_queries import (
OgcApiCollectionMetadataGetQueries)
from ogc_edr_lib.ogc_api_collection_metadata_list_data_items import (
OgcApiCollectionMetadataListDataItems
)
from ogc_edr_lib.ogc_api_collection_metadata_list_data_locations import (
OgcApiCollectionMetadataListDataLocations
)
Logger = logging.getLogger(__name__)
| 63.206349 | 1,561 | 0.708815 |
3dca6b4523ea884f293c6a6b346cc8182bedf764 | 28 | py | Python | tunga/preprocessing/__init__.py | tahtaciburak/tunga | e71a4fa393d692779ab6d674673c5674d7287dac | [
"MIT"
] | 5 | 2020-07-31T19:26:46.000Z | 2020-10-23T11:49:06.000Z | tunga/preprocessing/__init__.py | tunga-ml/tunga | 823fd762054fd513300025cbb1fc799f7e3cf6b1 | [
"MIT"
] | null | null | null | tunga/preprocessing/__init__.py | tunga-ml/tunga | 823fd762054fd513300025cbb1fc799f7e3cf6b1 | [
"MIT"
] | 1 | 2021-09-10T08:24:13.000Z | 2021-09-10T08:24:13.000Z | from .normalization import * | 28 | 28 | 0.821429 |
3dccadbdd4f7bd09cd826b80f7957d192a7141e5 | 800 | py | Python | runtests.py | resurrexi/django-restql | 6a642a46ae597201214bdaeee5d9e92a62fa4616 | [
"MIT"
] | 545 | 2019-04-23T12:54:21.000Z | 2022-03-28T07:59:43.000Z | runtests.py | resurrexi/django-restql | 6a642a46ae597201214bdaeee5d9e92a62fa4616 | [
"MIT"
] | 109 | 2019-05-21T13:48:27.000Z | 2022-03-18T21:10:32.000Z | runtests.py | resurrexi/django-restql | 6a642a46ae597201214bdaeee5d9e92a62fa4616 | [
"MIT"
] | 44 | 2019-05-15T19:04:01.000Z | 2022-01-31T04:12:59.000Z | #!/usr/bin/env python
import os
import sys
import subprocess
from django.core.management import execute_from_command_line
FLAKE8_ARGS = ['django_restql', 'tests', 'setup.py', 'runtests.py']
WARNING_COLOR = '\033[93m'
END_COLOR = '\033[0m'
if __name__ == '__main__':
runtests()
| 22.857143 | 69 | 0.67375 |
3dccba1140ab8bafa4d46c818af6ac8d4201bac2 | 17,549 | py | Python | structured_tables/parser.py | CivicKnowledge/structured_tables | 836ff700f49be51d2a12b2daa3a5460a2fc2fc06 | [
"BSD-3-Clause"
] | null | null | null | structured_tables/parser.py | CivicKnowledge/structured_tables | 836ff700f49be51d2a12b2daa3a5460a2fc2fc06 | [
"BSD-3-Clause"
] | null | null | null | structured_tables/parser.py | CivicKnowledge/structured_tables | 836ff700f49be51d2a12b2daa3a5460a2fc2fc06 | [
"BSD-3-Clause"
] | null | null | null | # Copyright (c) 2016 Civic Knowledge. This file is licensed under the terms of the
# Revised BSD License, included in this distribution as LICENSE
"""
Parser for the Simple Data Package format. The parser consists of several iterable generator
objects.
"""
NO_TERM = '<no_term>' # No parent term -- no '.' -- in term cell
ELIDED_TERM = '<elided_term>' # A '.' in term cell, but no term before it.
def add_child(self, child):
self.children.append(child)
def __repr__(self):
return "<Term: {}{}.{} {} {} >".format(self.file_ref(), self.parent_term,
self.record_term, self.value, self.args)
def __str__(self):
if self.parent_term == NO_TERM:
return "{}{}: {}".format(self.file_ref(), self.record_term, self.value)
elif self.parent_term == ELIDED_TERM:
return "{}.{}: {}".format(self.file_ref(), self.record_term, self.value)
else:
return "{}{}.{}: {}".format(self.file_ref(), self.parent_term, self.record_term, self.value)
class CsvPathRowGenerator(object):
"""An object that generates rows. The current implementation mostly just a wrapper around
csv.reader, but it add a path property so term interperters know where the terms are coming from
"""
class CsvDataRowGenerator(object):
"""Generate rows from CSV data, as a string
"""
class RowGenerator(object):
"""An object that generates rows. The current implementation mostly just a wrapper around
csv.reader, but it add a path property so term interperters know where the terms are coming from
"""
class TermGenerator(object):
"""Generate terms from a row generator. It will produce a term for each row, and child
terms for any arguments to the row. """
def __init__(self, row_gen):
"""
:param row_gen: an interator that generates rows
:return:
"""
from os.path import dirname, basename
self._row_gen = row_gen
self._path = self._row_gen.path
class TermInterpreter(object):
"""Takes a stream of terms and sets the parameter map, valid term names, etc """
def __init__(self, term_gen, remove_special=True):
"""
:param term_gen: an an iterator that generates terms
:param remove_special: If true ( default ) remove the special terms from the stream
:return:
"""
from collections import defaultdict
self._remove_special = remove_special
self._term_gen = term_gen
self._param_map = [] # Current parameter map, the args of the last Section term
# _sections and _terms are loaded from Declare documents, in
# handle_declare and import_declare_doc. The Declare doc information
# can also be loaded before parsing, so the Declare term can be eliminated.
self._sections = {} # Declared sections and their arguments
self._terms = {} # Pre-defined terms, plus TermValueName and ChildPropertyType
self.errors = []
def as_dict(self):
"""Iterate, link terms and convert to a dict"""
return convert_to_dict(link_terms(self))
def handle_section(self, t):
self._param_map = [p.lower() if p else i for i, p in enumerate(t.args)]
def handle_declare(self, t):
"""Load the information in the file referenced by a Delare term, but don't
insert the terms in the file into the stream"""
from os.path import dirname, join
if t.value.startswith('http'):
fn = t.value.strip('/')
else:
fn = join(dirname(t.file_name), t.value.strip('/'))
ti = DeclareTermInterpreter(TermGenerator(CsvPathRowGenerator(fn)))
try:
self.import_declare_doc(ti.as_dict())
except IncludeError as e:
e.term = t
self.errors.append(e)
def import_declare_doc(self, d):
"""Import a declare cod that has been parsed and converted to a dict"""
if 'declaresection' in d:
for e in d['declaresection']:
if e:
self._sections[e['section_name'].lower()] = {
'args': [v for k, v in sorted((k, v) for k, v in e.items() if isinstance(k, int))],
'terms': list()
}
if 'declareterm' in d:
for e in d['declareterm']:
terms = self.join(*Term.split_term_lower(e['term_name']))
self._terms[terms] = e
if 'section' in e and e['section']:
if e['section'] not in self._sections:
self._sections[e['section'].lower()] = {
'args': [],
'terms': list()
}
st = self._sections[e['section'].lower()]['terms']
if e['section'] not in st:
st.append(e['term_name'])
if 'declarevalueset' in d:
for e in d['declarevalueset']:
for k,v in self._terms.items():
if 'valueset' in v and e.get('name',None) == v['valueset']:
v['valueset'] = e['value']
class DeclareTermInterpreter(TermInterpreter):
"""
A version of the TermInterpreter specifically for parsing Declare documents. These documents
require some special handling because they declare terms that are required for propertly parsing
Metatab files. These require declarations are pre-declared in this class.
"""
def link_terms(term_generator):
    """Return a hierarchy of records built from a flat stream of terms.

    Each incoming term is attached as a child of the most recently seen
    term whose record term matches the new term's parent term.

    :param term_generator: iterable yielding Term objects
    :return: a synthetic 'Root' Term owning the whole hierarchy
    """
    root = Term('Root', None)
    # Most-recent term seen for each record-term name, seeded so that
    # parentless terms attach directly to the synthetic root.
    latest = {NO_TERM: root}

    for current in term_generator:
        try:
            parent = latest[current.parent_term]
        except KeyError as e:
            raise ParserError("Failed to find parent term in last term map: {} {} \nTerm: \n{}"
                              .format(e.__class__.__name__, e, current))

        parent.add_child(current)

        # Terms synthesized from row arguments never become parents, and
        # neither do terms whose parent was elided with a leading '.'.
        if current.parent_term != ELIDED_TERM and not current.is_arg_child:
            latest[ELIDED_TERM] = current
            latest[current.record_term] = current

    return root
def convert_to_dict(term):
    """Convert a record hierarchy rooted at *term* into nested dicts.

    Leaf terms collapse to their plain value. Terms with children become a
    dict keyed by each child's record term; repeated keys accumulate into
    lists, and 'sequence'-typed children are always list-valued.

    :param term: Root term at which to start conversion
    """
    if not term.children:
        # A leaf carries no structure; just surface its value.
        return term.value

    d = {}
    for child in term.children:
        key = child.record_term
        kind = child.child_property_type

        if kind == 'scalar':
            # Scalars always overwrite; they are never collected in a list.
            d[key] = convert_to_dict(child)
        elif kind == 'sequence':
            try:
                d[key].append(convert_to_dict(child))
            except (KeyError, AttributeError):
                # Nothing list-like stored under this key yet; start a list.
                d[key] = [convert_to_dict(child)]
        else:
            # Untyped children: the first occurrence is stored as a scalar,
            # later occurrences promote the entry to a list.
            try:
                d[key].append(convert_to_dict(child))
            except KeyError:
                # First time this key is seen; store the bare value.
                d[key] = convert_to_dict(child)
            except AttributeError:
                # Existing entry is a scalar; promote it to a two-item list.
                d[key] = [d[key], convert_to_dict(child)]

    if term.value:
        d[term.term_value_name] = term.value
    return d
| 30.573171 | 108 | 0.570004 |
3dcde3d12d8ff748623472b864c1c6d69f5873ea | 1,462 | py | Python | plugins/playbook/deploy_cluster/decapod_plugin_playbook_deploy_cluster/monitor_secret.py | angry-tony/ceph-lcm-decapod | 535944d3ee384c3a7c4af82f74041b0a7792433f | [
"Apache-2.0"
] | 41 | 2016-11-03T16:40:17.000Z | 2019-05-23T08:39:17.000Z | plugins/playbook/deploy_cluster/decapod_plugin_playbook_deploy_cluster/monitor_secret.py | Mirantis/ceph-lcm | fad9bad0b94f2ef608362953583b10a54a841d24 | [
"Apache-2.0"
] | 30 | 2016-10-14T10:54:46.000Z | 2017-10-20T15:58:01.000Z | plugins/playbook/deploy_cluster/decapod_plugin_playbook_deploy_cluster/monitor_secret.py | angry-tony/ceph-lcm-decapod | 535944d3ee384c3a7c4af82f74041b0a7792433f | [
"Apache-2.0"
] | 28 | 2016-09-17T01:17:36.000Z | 2019-07-05T03:32:54.000Z | # -*- coding: utf-8 -*-
# Copyright (c) 2016 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Specified KV model for storing monitor secrets."""
import base64
import os
import struct
import time
from decapod_common.models import kv
| 25.649123 | 69 | 0.685363 |
3dce78da1f7ce43271310900e0dcc23b81e61a1a | 1,135 | py | Python | scripts/v1/03-collectAllModels.py | groppcw/CLDA | efd59d0dde38d6579366d195c3a0d4e6b1021af5 | [
"Apache-2.0"
] | 6 | 2017-01-31T19:18:59.000Z | 2020-04-21T17:20:56.000Z | scripts/v1/03-collectAllModels.py | groppcw/CLDA | efd59d0dde38d6579366d195c3a0d4e6b1021af5 | [
"Apache-2.0"
] | null | null | null | scripts/v1/03-collectAllModels.py | groppcw/CLDA | efd59d0dde38d6579366d195c3a0d4e6b1021af5 | [
"Apache-2.0"
] | 3 | 2017-09-20T21:18:36.000Z | 2020-07-29T10:00:30.000Z | # take a bunch of model_0 model_1 etc files and merge them alphabetically
from settings import *
# for each file, load the file into one giant list
# call sort on the list
# write this output somewhere else
for timestep in range(START_IDX,NUM_TIMES):
model = dict()
#Add the full vocabulary to the dictionary
fdict = open("./input_data/word_ids.dat","r")
for line in fdict:
pieces = (line.replace('\t',' ')).split(' ',1)
key = (pieces[1].strip()).replace('\"','')
value = ''
for unused in range(LOCAL_TOPICS):
value = value + '0 '
value = value.strip() + '\n'
model[key] = value
fdict.close()
#Replace words that actually appear
for num in range(PLDA_CHUNKS):
infile = open("./partial_results/time-"+str(timestep)+"-model_"+str(num),"r")
for line in infile:
pieces = (line.replace('\t',' ')).split(' ',1)
model[pieces[0]] = pieces[1]
infile.close()
outmodel = sorted(model) # gives sorted list of keys
outfile = open("./local_models/time-"+str(timestep)+".model","w")
for key in outmodel:
outfile.write(key + " " + model[key])
outfile.close()
| 26.395349 | 81 | 0.639648 |
3dd07bf478788d856c11476ddb5329b455ea6168 | 5,428 | py | Python | controller/hopfields_registration_server.py | SIDN/p4-scion | 30fc42ac3672a2d862e5537f6990c87ef3c21860 | [
"BSD-3-Clause"
] | 2 | 2021-05-25T16:17:25.000Z | 2021-07-16T06:30:27.000Z | controller/hopfields_registration_server.py | SIDN/p4-scion | 30fc42ac3672a2d862e5537f6990c87ef3c21860 | [
"BSD-3-Clause"
] | null | null | null | controller/hopfields_registration_server.py | SIDN/p4-scion | 30fc42ac3672a2d862e5537f6990c87ef3c21860 | [
"BSD-3-Clause"
] | null | null | null | # Copyright (c) 2021, SIDN Labs
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from concurrent import futures
import argparse
import grpc
import logging
from scion_grpc import hopfields_pb2_grpc, hopfields_pb2
from tofino import *
logger = logging.getLogger('scion_hopfields_registration_server')
logger.addHandler(logging.StreamHandler())
logger.setLevel(logging.INFO)
if __name__ == "__main__":
main()
| 40.507463 | 147 | 0.680545 |
3dd1773f50f2af84354e0431bf0e4276687f173e | 3,401 | py | Python | Server/Python/src/dbs/dao/Oracle/MigrationBlock/Update.py | vkuznet/DBS | 14df8bbe8ee8f874fe423399b18afef911fe78c7 | [
"Apache-2.0"
] | 8 | 2015-08-14T04:01:32.000Z | 2021-06-03T00:56:42.000Z | Server/Python/src/dbs/dao/Oracle/MigrationBlock/Update.py | yuyiguo/DBS | 14df8bbe8ee8f874fe423399b18afef911fe78c7 | [
"Apache-2.0"
] | 162 | 2015-01-07T21:34:47.000Z | 2021-10-13T09:42:41.000Z | Server/Python/src/dbs/dao/Oracle/MigrationBlock/Update.py | yuyiguo/DBS | 14df8bbe8ee8f874fe423399b18afef911fe78c7 | [
"Apache-2.0"
] | 16 | 2015-01-22T15:27:29.000Z | 2021-04-28T09:23:28.000Z | #!/usr/bin/env python
"""
This module provides Migration.Update data access object.
"""
from WMCore.Database.DBFormatter import DBFormatter
from dbs.utils.dbsExceptionHandler import dbsExceptionHandler
from dbs.utils.DBSDaoTools import create_token_generator
| 46.589041 | 165 | 0.614231 |
3dd18ca1ce7d02c28f4d50d91ff399eaea978a1f | 3,636 | py | Python | cldfbench_lapollaqiang.py | cldf-datasets/lapollaqiang | 40bcba31a65b675a15d2dcac5fae7901619162fc | [
"CC-BY-4.0"
] | null | null | null | cldfbench_lapollaqiang.py | cldf-datasets/lapollaqiang | 40bcba31a65b675a15d2dcac5fae7901619162fc | [
"CC-BY-4.0"
] | 2 | 2020-04-18T10:57:21.000Z | 2020-04-18T12:16:03.000Z | cldfbench_lapollaqiang.py | cldf-datasets/lapollaqiang | 40bcba31a65b675a15d2dcac5fae7901619162fc | [
"CC-BY-4.0"
] | null | null | null | import re
import pathlib
from clldutils.text import strip_chars
from cldfbench import Dataset as BaseDataset
from cldfbench import CLDFSpec
QUOTES = ''
| 33.054545 | 97 | 0.55033 |
3dd25490c9540bd331008a56be6c0ffa65b4b3b0 | 1,752 | py | Python | simple-zero-width-chars-encoder-and-decoder/encoder.py | MihaiAC/Other-Projects | 2ce3b4dbc0edf79124fee929c63a698efbbbf123 | [
"MIT"
] | null | null | null | simple-zero-width-chars-encoder-and-decoder/encoder.py | MihaiAC/Other-Projects | 2ce3b4dbc0edf79124fee929c63a698efbbbf123 | [
"MIT"
] | null | null | null | simple-zero-width-chars-encoder-and-decoder/encoder.py | MihaiAC/Other-Projects | 2ce3b4dbc0edf79124fee929c63a698efbbbf123 | [
"MIT"
] | null | null | null | import sys
import os
args = sys.argv
from_file_path = args[1]
to_file_path = args[2]
word_to_hide = args[3]
if(os.path.isfile(from_file_path) and len(word_to_hide) > 0):
# Read input from file.
f = open(from_file_path,'r')
content = f.read()
f.close()
# Encode the word.
ls = convert_word_to_zero_length_list(word_to_hide)
# Preamble for iteration.
step = int(len(content)/len(ls))
offset = 0
content = unicode(content)
# Save each zero-width sequence corresponding to a character to a specific place in the input.
# We can be smarter and save them semi-randomly but we'll keep it simple.
for ii in range(len(ls)):
index = ii * step + offset
content = content[:index] + ls[ii] + content[index:]
offset += len(ls[ii])
# Overwrite old file with modified input.
f = open(to_file_path,'w')
f.write(content.encode('utf-8'))
f.close()
else:
print('File could not be found or length of word to hide is 0.') | 30.206897 | 98 | 0.622717 |
3dd2a3424b490a95eadbcb0285fa8becc7dbdcc5 | 280 | py | Python | setup.py | lambdaofgod/HOTT | 74ec33dae7ba9f9d382384c6bd2c97b5557f6eea | [
"MIT"
] | null | null | null | setup.py | lambdaofgod/HOTT | 74ec33dae7ba9f9d382384c6bd2c97b5557f6eea | [
"MIT"
] | null | null | null | setup.py | lambdaofgod/HOTT | 74ec33dae7ba9f9d382384c6bd2c97b5557f6eea | [
"MIT"
] | null | null | null | from setuptools import setup, find_packages
# Read the pinned runtime dependencies, one requirement per line.
with open('requirements.txt') as req_file:
    install_reqs = req_file.read().splitlines()

setup(
    name='HOTT',
    version='0.1',
    url='https://github.com/lambdaofgod/HOTT',
    packages=find_packages(),
    install_requires=install_reqs,
)
| 20 | 46 | 0.692857 |
3dd4772c1009f05a2da5ab89f95cb164ef80a08f | 736 | py | Python | setup.py | mdmix4/pymdmix-run | 2c3fdeca39f02429ab0040491e2ad016de210795 | [
"MIT"
] | null | null | null | setup.py | mdmix4/pymdmix-run | 2c3fdeca39f02429ab0040491e2ad016de210795 | [
"MIT"
] | null | null | null | setup.py | mdmix4/pymdmix-run | 2c3fdeca39f02429ab0040491e2ad016de210795 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
from setuptools import setup
setup(
python_requires=">=3.8",
name="pymdmix-run",
version=getVersion(),
license="MIT",
description="pymdmix plugin for command interpreter",
author="ggutierrez-bio",
author_email="",
url="https://github.com/ggutierrez-bio/mdmix4/pymdmix-run",
packages=["pymdmix_run"],
install_requires=getRequirements(),
classifiers=['Development Status :: 3 - Alpha'],
scripts=["bin/mdmix-run"],
)
| 23 | 63 | 0.65625 |
3dd4a8f967bc41b59fc8f2382ab1f0506c71e247 | 4,340 | py | Python | aether/forum/forms.py | katajakasa/aetherguild4 | a7e294f0cff11e2508751f1013e6648fdc56bb94 | [
"MIT"
] | null | null | null | aether/forum/forms.py | katajakasa/aetherguild4 | a7e294f0cff11e2508751f1013e6648fdc56bb94 | [
"MIT"
] | 1 | 2021-06-10T17:36:11.000Z | 2021-06-10T17:36:11.000Z | aether/forum/forms.py | katajakasa/aetherguild4 | a7e294f0cff11e2508751f1013e6648fdc56bb94 | [
"MIT"
] | null | null | null | from django.forms import Form, ModelForm, CharField, Textarea
from django.db import transaction
from django.utils.translation import gettext_lazy as _
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from .models import ForumPost, ForumThread, ForumBoard, ForumPostEdit
| 32.631579 | 81 | 0.621429 |
3dd4b115a1efae712e7d58d8046528f7acbf782b | 1,467 | py | Python | for_straight_forward_relion/read_star_del_metadata_param.py | homurachan/Block-based-recontruction | b3fc02a0648db6aaa5d77dcc4b8e10f3361d66f4 | [
"WTFPL"
] | 11 | 2018-04-17T01:41:11.000Z | 2020-12-11T05:43:21.000Z | for_straight_forward_relion/read_star_del_metadata_param.py | homurachan/Block-based-recontruction | b3fc02a0648db6aaa5d77dcc4b8e10f3361d66f4 | [
"WTFPL"
] | null | null | null | for_straight_forward_relion/read_star_del_metadata_param.py | homurachan/Block-based-recontruction | b3fc02a0648db6aaa5d77dcc4b8e10f3361d66f4 | [
"WTFPL"
] | 3 | 2019-08-23T07:48:50.000Z | 2020-12-08T07:31:41.000Z | #!/usr/bin/env python
import math,os,sys
try:
from optparse import OptionParser
except:
from optik import OptionParser
if __name__== "__main__":
main()
| 22.227273 | 59 | 0.632584 |
3dd4c39c91d920a780223d1076fe94897deaabd0 | 2,639 | py | Python | python/GafferUI/ProgressBar.py | cwmartin/gaffer | 1f8a0f75522105c9d5efefac6d55cb61c1038909 | [
"BSD-3-Clause"
] | null | null | null | python/GafferUI/ProgressBar.py | cwmartin/gaffer | 1f8a0f75522105c9d5efefac6d55cb61c1038909 | [
"BSD-3-Clause"
] | null | null | null | python/GafferUI/ProgressBar.py | cwmartin/gaffer | 1f8a0f75522105c9d5efefac6d55cb61c1038909 | [
"BSD-3-Clause"
] | null | null | null | ##########################################################################
#
# Copyright (c) 2011-2012, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import IECore
import GafferUI
QtGui = GafferUI._qtImport( "QtGui" )
| 33.833333 | 77 | 0.675256 |
3dd4de6bb7f825300faccd73e718c78bb7dd3d78 | 18,444 | py | Python | minihack/agent/rllib/models.py | samvelyan/minihack-1 | 441eba33ba0d240b98aeabe1ff7a0c0b33cd236c | [
"Apache-2.0"
] | 1 | 2021-11-19T01:51:38.000Z | 2021-11-19T01:51:38.000Z | minihack/agent/rllib/models.py | samvelyan/minihack-1 | 441eba33ba0d240b98aeabe1ff7a0c0b33cd236c | [
"Apache-2.0"
] | null | null | null | minihack/agent/rllib/models.py | samvelyan/minihack-1 | 441eba33ba0d240b98aeabe1ff7a0c0b33cd236c | [
"Apache-2.0"
] | 1 | 2021-11-17T15:45:02.000Z | 2021-11-17T15:45:02.000Z | # Copyright (c) Facebook, Inc. and its affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
from typing import Any, Dict, Optional, Tuple
import gym
import torch
from nle import nethack
from minihack.agent.common.models.embed import GlyphEmbedding
from minihack.agent.common.models.transformer import TransformerEncoder
from omegaconf import DictConfig
from ray.rllib.models import ModelCatalog
from ray.rllib.models.torch.torch_modelv2 import TorchModelV2
from ray.rllib.utils.annotations import override
from torch import nn
from torch.nn import functional as F
# Total number of distinct NetHack glyph ids (sizes glyph embedding tables).
NUM_GLYPHS = nethack.MAX_GLYPH
# Length of the NLE "blstats" (bottom-line stats) observation vector.
NUM_FEATURES = nethack.BLSTATS_SHAPE[0]
# Character id used for padding variable-length text observations.
PAD_CHAR = 0
# Size of the character vocabulary (7-bit ASCII range).
NUM_CHARS = 128

# Register the network with RLlib so configs can select it by the custom-model
# name "rllib_nle_model".
# NOTE(review): RLLibNLENetwork is not defined in this excerpt -- it must be
# declared earlier in the file for this registration to succeed.
ModelCatalog.register_custom_model("rllib_nle_model", RLLibNLENetwork)
| 33.966851 | 79 | 0.559803 |
3dd4f4c9b22e44b3e89f6ae2ccad38e595e93b8d | 1,149 | py | Python | old/projects/6.884/hybrid_twolinkmanipulator_with_GreedyFeatures.py | ali493/pyro | 1245340077a733e2ab35765eae783b358d2f3af9 | [
"MIT"
] | null | null | null | old/projects/6.884/hybrid_twolinkmanipulator_with_GreedyFeatures.py | ali493/pyro | 1245340077a733e2ab35765eae783b358d2f3af9 | [
"MIT"
] | null | null | null | old/projects/6.884/hybrid_twolinkmanipulator_with_GreedyFeatures.py | ali493/pyro | 1245340077a733e2ab35765eae783b358d2f3af9 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Wed Mar 23 12:50:34 2016
@author: alex
"""
from AlexRobotics.dynamic import Manipulator as M
from AlexRobotics.dynamic import Hybrid_Manipulator as HM
from AlexRobotics.control import DPO_features as DPO
import numpy as np
# Define dynamic system
R = HM.HybridTwoLinkManipulator()
R.u_lb = np.array([-5,-5, 0 ])
R.u_ub = np.array([ 5, 5, 3 ])
# Define controller
cost_function = 'quadratic'
A = DPO.TD_Greedy_hybrid_2DOF_Features( R , cost_function )
A.W = np.array([ 0.2 , 0.2 , 0.4 , 0.02 ])
#A.W = np.array([ 1 , 0 , 0 , 0 ])
A.x0 = np.array([ -3, 1 , 0 , 0 ])
A.max_error = 0.5
A.eps = 0.8
A.alpha = 0.00001
#A.plot_J_hat()
A.training( 3 , random = True , show = False )
#A.W = np.array( [ 0.00596714 , 0.05787924 , 0.1246888 , -0.00158788 ] )
#Weight = [ 0.09416771 0.20230782 0.37820584 0.01672458]
#A.plot_J_hat()
A.eps = 1.0
R.plotAnimation( [-4,1,0,0] , tf = 12 , n = 241 , solver = 'euler' )#, save = True )
R.Sim.plot_CL('x')
R.Sim.plot_CL('u')
#R.Sim.plot_OL()
#R.Sim.phase_plane_trajectory() | 24.978261 | 85 | 0.598782 |