hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 11 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 251 | max_stars_repo_name stringlengths 4 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 251 | max_issues_repo_name stringlengths 4 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 251 | max_forks_repo_name stringlengths 4 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.05M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.04M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4d0e95505acc0778edf0c0cfa6593332a693d19f | 7,813 | py | Python | Common/EDACommon.py | MlGroupsWJ/Customer-Satisfication | bae0d3691613f078c88f926fee8d1d0684cb6f88 | [
"Apache-2.0"
] | null | null | null | Common/EDACommon.py | MlGroupsWJ/Customer-Satisfication | bae0d3691613f078c88f926fee8d1d0684cb6f88 | [
"Apache-2.0"
] | null | null | null | Common/EDACommon.py | MlGroupsWJ/Customer-Satisfication | bae0d3691613f078c88f926fee8d1d0684cb6f88 | [
"Apache-2.0"
] | null | null | null | # -*- coding:UTF-8 -*-
import pandas as pd
from minepy import MINE
import seaborn as sns
import matplotlib.pyplot as plt
from sklearn.ensemble import ExtraTreesClassifier
import xgboost as xgb
import operator
from sklearn.utils import shuffle
from Common.ModelCommon import ModelCV
from sklearn import svm
import numpy as np
#
# rate0rate*1
# repeat1
# train_datacvsample
# iswholeTrueTARGETFalseTARGET
| 32.419087 | 114 | 0.64879 |
4d1789b7a180d686bba726991622611824a7655a | 11,166 | py | Python | spekev2_verification_testsuite/test_drm_system_specific_system_id_elements.py | amphied/speke-reference-server | 7b589a333fb3c619c6f7e53483d43de9a588f7b7 | [
"Apache-2.0"
] | null | null | null | spekev2_verification_testsuite/test_drm_system_specific_system_id_elements.py | amphied/speke-reference-server | 7b589a333fb3c619c6f7e53483d43de9a588f7b7 | [
"Apache-2.0"
] | null | null | null | spekev2_verification_testsuite/test_drm_system_specific_system_id_elements.py | amphied/speke-reference-server | 7b589a333fb3c619c6f7e53483d43de9a588f7b7 | [
"Apache-2.0"
] | null | null | null | import xml.etree.ElementTree as ET
import pytest
from .helpers import utils
def test_widevine_pssh_cpd_no_rotation(widevine_pssh_cpd_response):
root_cpix = ET.fromstring(widevine_pssh_cpd_response)
drm_system_list_element = root_cpix.find('./{urn:dashif:org:cpix}DRMSystemList')
drm_system_elements = drm_system_list_element.findall('./{urn:dashif:org:cpix}DRMSystem')
for drm_system_element in drm_system_elements:
pssh_data_bytes = drm_system_element.find('./{urn:dashif:org:cpix}PSSH')
content_protection_data_bytes = drm_system_element.find('./{urn:dashif:org:cpix}ContentProtectionData')
content_protection_data_string = utils.decode_b64_bytes(content_protection_data_bytes.text)
pssh_in_cpd = ET.fromstring(content_protection_data_string)
# Assert pssh in cpd is same as pssh box
assert pssh_data_bytes.text == pssh_in_cpd.text, \
"Content in PSSH box and the requested content in ContentProtectionData are expected to be the same"
# Validate presence of HLSSignalingData and PSSH when those elements are present in the request
| 57.854922 | 117 | 0.732581 |
4d1842ad937506e0139c4548364f4972688cf066 | 606 | py | Python | lgtv_rs232/commands/power.py | davo22/lgtv_rs232 | 40562cddf7acdf6fa95124029595e3838dd9e7b0 | [
"MIT"
] | null | null | null | lgtv_rs232/commands/power.py | davo22/lgtv_rs232 | 40562cddf7acdf6fa95124029595e3838dd9e7b0 | [
"MIT"
] | null | null | null | lgtv_rs232/commands/power.py | davo22/lgtv_rs232 | 40562cddf7acdf6fa95124029595e3838dd9e7b0 | [
"MIT"
] | null | null | null | from enum import Enum
| 20.2 | 81 | 0.671617 |
4d188e480cc959a97285226a6ee540747e54cbfc | 3,661 | py | Python | aioruuvitag/scanner_windows.py | hulttis/ruuvigw | 914eb657e3f2792cecf6848dfa7607ad45f17ab4 | [
"MIT"
] | 7 | 2019-11-08T07:30:05.000Z | 2022-02-20T21:58:44.000Z | aioruuvitag/scanner_windows.py | hulttis/ruuvigw | 914eb657e3f2792cecf6848dfa7607ad45f17ab4 | [
"MIT"
] | null | null | null | aioruuvitag/scanner_windows.py | hulttis/ruuvigw | 914eb657e3f2792cecf6848dfa7607ad45f17ab4 | [
"MIT"
] | 1 | 2021-06-19T16:52:55.000Z | 2021-06-19T16:52:55.000Z | # -*- coding: utf-8 -*-
"""
Perform Bluetooth LE Scan.
Based on https://github.com/hbldh/bleak/blob/master/bleak/backends/dotnet/discovery.py by
Created by hbldh <henrik.blidh@nedomkull.com>
"""
import logging
logger = logging.getLogger('bleak_scanner')
import asyncio
import queue
from bleak.backends.device import BLEDevice
# Import of Bleak CLR->UWP Bridge. It is not needed here, but it enables loading of Windows.Devices
from BleakBridge import Bridge
from System import Array, Byte
from Windows.Devices.Bluetooth.Advertisement import \
BluetoothLEAdvertisementWatcher, BluetoothLEScanningMode
from Windows.Storage.Streams import DataReader, IBuffer
QUEUE_SIZE = 100
###############################################################################
| 34.537736 | 99 | 0.571429 |
4d1921977ff8cb85df1411e9a16a739fa19af1b7 | 2,354 | py | Python | GUI/app.py | YJWang94108/Real-Time-Textural-Analysis-System-with-Autonomous-Underwater-Vehicles | dbdfb1157c6448720bcf18135789c91c0940bdb4 | [
"MIT"
] | 1 | 2020-09-09T13:59:34.000Z | 2020-09-09T13:59:34.000Z | GUI/app.py | YJWang94108/Real-Time-Textural-Analysis-System-with-Autonomous-Underwater-Vehicles | dbdfb1157c6448720bcf18135789c91c0940bdb4 | [
"MIT"
] | 1 | 2020-09-10T05:28:02.000Z | 2020-09-10T05:28:02.000Z | GUI/app.py | YJWang94108/Real-Time-Textural-Analysis-System-with-Autonomous-Underwater-Vehicles | dbdfb1157c6448720bcf18135789c91c0940bdb4 | [
"MIT"
] | 1 | 2020-09-09T13:59:37.000Z | 2020-09-09T13:59:37.000Z | import tkinter as tk
from PIL import Image, ImageTk
import numpy as np
import os
import time
app=MyAPP()
| 35.134328 | 98 | 0.621071 |
4d193b99e7c955296baf206f87610d82e0c31d15 | 1,568 | py | Python | lib/surface/api_gateway/__init__.py | google-cloud-sdk-unofficial/google-cloud-sdk | 2a48a04df14be46c8745050f98768e30474a1aac | [
"Apache-2.0"
] | 2 | 2019-11-10T09:17:07.000Z | 2019-12-18T13:44:08.000Z | lib/surface/api_gateway/__init__.py | google-cloud-sdk-unofficial/google-cloud-sdk | 2a48a04df14be46c8745050f98768e30474a1aac | [
"Apache-2.0"
] | null | null | null | lib/surface/api_gateway/__init__.py | google-cloud-sdk-unofficial/google-cloud-sdk | 2a48a04df14be46c8745050f98768e30474a1aac | [
"Apache-2.0"
] | 1 | 2020-07-25T01:40:19.000Z | 2020-07-25T01:40:19.000Z | # -*- coding: utf-8 -*- #
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The command group for Cloud API Gateway CLI."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.ml_engine import flags
from googlecloudsdk.core import log
from googlecloudsdk.core import properties
from googlecloudsdk.core import resources
| 34.844444 | 78 | 0.769133 |
4d19e5d928407169ece619db02a32249f6803443 | 1,350 | py | Python | 028_dict.py | MikePolyakov/python_book | 497681e8a167918a19ae737960c9c86ebffa9e91 | [
"MIT"
] | null | null | null | 028_dict.py | MikePolyakov/python_book | 497681e8a167918a19ae737960c9c86ebffa9e91 | [
"MIT"
] | null | null | null | 028_dict.py | MikePolyakov/python_book | 497681e8a167918a19ae737960c9c86ebffa9e91 | [
"MIT"
] | null | null | null | countries = {'Russia' : 'Europe', 'Germany' : 'Europe', 'Australia' : 'Australia'}
sqrs = {}
sqrs[1] = 1
sqrs[2] = 4
sqrs[10] = 100
print(sqrs)
myDict = dict([['key1', 'value1'], ('key2', 'value2')])
print(myDict)
phones = {'police' : 102, 'ambulance' : 103, 'firefighters' : 101}
print(phones['police'])
phones = {'police' : 102, 'ambulance' : 103, 'firefighters' : 101}
del phones['police']
print(phones)
phones = {'police' : 102, 'ambulance' : 103, 'firefighters' : 101}
for service in phones:
print(service, phones[service])
phones = {'police' : 102, 'ambulance' : 103, 'firefighters' : 101}
for service, phone in phones.items():
print(service, phone)
seq = map(int, input().split())
countDict = {}
for elem in seq:
countDict[elem] = countDict.get(elem, 0) + 1
for key in sorted(countDict):
print(key, countDict[key], sep=' : ')
n = int(input())
latinEnglish = {}
for i in range(n):
line = input()
english = line[:line.find('-')].strip()
latinsStr = line[line.find('-') + 1:].strip()
latins = map(lambda s : s.strip(), latinsStr.split(','))
for latin in latins:
if latin not in latinEnglish:
latinEnglish[latin] = []
latinEnglish[latin].append(english)
print(len(latinEnglish))
for latin in sorted(latinEnglish):
print(latin, '-', ', '.join(sorted(latinEnglish[latin]))) | 28.723404 | 82 | 0.625185 |
4d1a042ce335839faa6fa2c218bbf1f71877225d | 1,203 | py | Python | solutions/10. Regular Expression Matching.py | JacopoPan/leetcode-top100-liked-questions | 03dc05f087d05805d54b7585ce740338f3128833 | [
"MIT"
] | null | null | null | solutions/10. Regular Expression Matching.py | JacopoPan/leetcode-top100-liked-questions | 03dc05f087d05805d54b7585ce740338f3128833 | [
"MIT"
] | null | null | null | solutions/10. Regular Expression Matching.py | JacopoPan/leetcode-top100-liked-questions | 03dc05f087d05805d54b7585ce740338f3128833 | [
"MIT"
] | null | null | null | """
Runtime: 47 ms, faster than 89.57% of Python3 online submissions for Regular Expression Matching.
Memory Usage: 15.2 MB, less than 6.45% of Python3 online submissions for Regular Expression Matching.
"""
from typing import List
from typing import Optional
if __name__ == "__main__":
main()
| 29.341463 | 101 | 0.502078 |
4d1aec75f55686cedd085e55848b278b516e591c | 166 | py | Python | Mundo2/Desafio019.py | Marcoakira/Desafios_Python_do_Curso_Guanabara | c49b774148a2232f8f3c21b83e3dc97610480757 | [
"MIT"
] | null | null | null | Mundo2/Desafio019.py | Marcoakira/Desafios_Python_do_Curso_Guanabara | c49b774148a2232f8f3c21b83e3dc97610480757 | [
"MIT"
] | null | null | null | Mundo2/Desafio019.py | Marcoakira/Desafios_Python_do_Curso_Guanabara | c49b774148a2232f8f3c21b83e3dc97610480757 | [
"MIT"
] | null | null | null | #Desafio019 ( aplicao randomica para determinar que aluno vai no quadro.
import random
al01 = str('joao'),('maria'),('pdro'),('paula')
print(random.choice(al01))
| 27.666667 | 74 | 0.722892 |
4d1b446c03d9bd0f9bcfdda12df328d24d3b6854 | 3,807 | py | Python | jdll-tutorial/ansible/library/book.py | Spredzy/jdll-ansible | 767ab383c9efb676c3d9923159172f42d221fd2f | [
"Apache-2.0"
] | null | null | null | jdll-tutorial/ansible/library/book.py | Spredzy/jdll-ansible | 767ab383c9efb676c3d9923159172f42d221fd2f | [
"Apache-2.0"
] | null | null | null | jdll-tutorial/ansible/library/book.py | Spredzy/jdll-ansible | 767ab383c9efb676c3d9923159172f42d221fd2f | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
from ansible.module_utils.basic import *
from jdll import API
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: book
author: "Yanis Guenane (@Spredzy)"
version_added: "2.3"
short_description: Gerer des resources books de notre API de test.
description:
- Ce module interagit avec le endpoint /books de notre API de test.
options:
state:
required: false
default: "present"
choices: [ present, absent ]
description:
- Si la resource book doit etre presente ou absente.
id:
required: false
description:
- L'identifieur de la resource book.
author:
required: false
description:
- Le nom de l'auteur de book.
title:
required: false
description:
- Titre du book.
summary:
required: true
description:
- Resume du book.
'''
EXAMPLES = '''
# Create a new book
- book:
title: A title
author: An author
summary: A summary
# Update a specific book
- book:
id: XXXX
title: Un titre alternatif
# Delete a book
- book:
id: XXX
state: absent
'''
RETURN = '''
title:
description: The title of the book
returned:
- changed
- success
type: string
sample: A title
summary:
description: The summary of the book
returned:
- changed
- success
type: string
sample: A summary
id:
description: ID of the book
returned:
- changed
- success
type: string
sample: XXXXX
'''
if __name__ == '__main__':
main()
| 25.38 | 127 | 0.561334 |
4d1d08adbb5e362ba1318dc99aee5c3a36e6c489 | 2,679 | py | Python | main.py | CyberPunk-Josh/Lab-app | aa3a5cf77a176bde3156d0f69a1f4018a503ca6f | [
"MIT"
] | null | null | null | main.py | CyberPunk-Josh/Lab-app | aa3a5cf77a176bde3156d0f69a1f4018a503ca6f | [
"MIT"
] | null | null | null | main.py | CyberPunk-Josh/Lab-app | aa3a5cf77a176bde3156d0f69a1f4018a503ca6f | [
"MIT"
] | null | null | null | from menu_design import *
from PySide6.QtWidgets import QApplication, QMainWindow
from PySide6.QtCore import Qt, QEasingCurve
# Local files
from reologicalOne.reological import RModel
from reologicalTwo.reologicalDB import RModelDB
from density.density import Density
import sys
# class for menu
if __name__ == "__main__":
app = QApplication(sys.argv)
mi_app = Global()
mi_app.show()
sys.exit(app.exec())
| 30.793103 | 95 | 0.655842 |
4d1e11fe07c9e17482225346b3bf314a5354caa6 | 360 | py | Python | create_table_from_word/table_columns.py | yongli82/CodeGenerator | 4ca9255c3c4c5392e45815fd20f605ccbbfd2325 | [
"MIT"
] | null | null | null | create_table_from_word/table_columns.py | yongli82/CodeGenerator | 4ca9255c3c4c5392e45815fd20f605ccbbfd2325 | [
"MIT"
] | null | null | null | create_table_from_word/table_columns.py | yongli82/CodeGenerator | 4ca9255c3c4c5392e45815fd20f605ccbbfd2325 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding:utf-8 -*-
import os
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
import ExcelUtil
from jinja2 import Template
import re
| 18.947368 | 83 | 0.730556 |
4d1e4f9713b73667b272d5c41483d8a89a98e904 | 1,024 | py | Python | web/detector/dog_detector.py | PaulNWms/dog-project | 4d70bcd39aa3ea0a8744efc42c53f031fbf63b64 | [
"MIT"
] | null | null | null | web/detector/dog_detector.py | PaulNWms/dog-project | 4d70bcd39aa3ea0a8744efc42c53f031fbf63b64 | [
"MIT"
] | 13 | 2020-01-28T22:15:35.000Z | 2022-03-11T23:57:35.000Z | web_app/detector/dog_detector.py | Brijesh-Chandra/Dog-Breed-Identifier | b3c918ad148b072d49b358629cba146079bf3dc3 | [
"MIT"
] | null | null | null | from keras.preprocessing import image
import keras.applications.resnet50 as resnet50
import numpy as np
app = None
| 29.257143 | 85 | 0.723633 |
4d1e62b7359e72d9ef996cfa45b2930243bf6b7d | 1,124 | py | Python | camp_real_engine/plugins/regexp.py | vassik/camp-realize | be65af18dd6deb800695988700730d2c3fb279cf | [
"MIT"
] | null | null | null | camp_real_engine/plugins/regexp.py | vassik/camp-realize | be65af18dd6deb800695988700730d2c3fb279cf | [
"MIT"
] | null | null | null | camp_real_engine/plugins/regexp.py | vassik/camp-realize | be65af18dd6deb800695988700730d2c3fb279cf | [
"MIT"
] | null | null | null | import re
from camp_real_engine.abstract.abc_subst_realizer import ABC_subst_realizer
from camp_real_engine.model.realization import RegExpFileSubstNode
from camp_real_engine.dao.daos import FileContentCommiter
from camp_real_engine.abstract.abc_real_data_model import ABCSubstitutionNode
| 34.060606 | 97 | 0.825623 |
4d20e11d53db6d88edbeea07f1facb38a4748d8a | 2,131 | py | Python | mouse_burrows/scripts/show_info.py | david-zwicker/cv-mouse-burrows | 906476f49ff9711cd672feca5f70efedaab82b01 | [
"BSD-3-Clause"
] | 1 | 2016-03-06T05:16:38.000Z | 2016-03-06T05:16:38.000Z | mouse_burrows/scripts/show_info.py | david-zwicker/cv-mouse-burrows | 906476f49ff9711cd672feca5f70efedaab82b01 | [
"BSD-3-Clause"
] | null | null | null | mouse_burrows/scripts/show_info.py | david-zwicker/cv-mouse-burrows | 906476f49ff9711cd672feca5f70efedaab82b01 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python2
'''
Created on Sep 21, 2016
@author: David Zwicker <dzwicker@seas.harvard.edu>
'''
from __future__ import division
import argparse
import sys
import os
# add the root of the video-analysis project to the path
script_path = os.path.split(os.path.realpath(__file__))[0]
package_path = os.path.abspath(os.path.join(script_path, '..', '..'))
sys.path.append(package_path)
video_analysis_path_guess = os.path.join(package_path, '..', 'video-analysis')
sys.path.append(os.path.abspath(video_analysis_path_guess))
from mouse_burrows.simple import load_result_file
def get_info(result_file, parameters=False):
""" show information about an analyzed antfarm video
`result_file` is the file where the results from the video analysis are
stored. This is usually a *.yaml file
`parameters` is a flag that indicates whether the parameters of the result
file are shown
"""
# load the respective result file
analyzer = load_result_file(result_file)
info = {}
if parameters:
info['Parameters'] = analyzer.params.to_dict()
return info
def main():
""" main routine of the script """
# setup the argument parsing
parser = argparse.ArgumentParser(
description='Program that outputs information about the analysis of '
'antfarm processing.',
formatter_class=argparse.ArgumentDefaultsHelpFormatter
)
parser.add_argument('-r', '--result_file', metavar='FILE',
type=str, required=True,
help='filename of video analysis result')
parser.add_argument('-p', '--parameters', action='store_true',
help='show all parameters')
# fetch the arguments and build the parameter list
args = parser.parse_args()
# obtain information from data
info = get_info(result_file=args.result_file, parameters=args.parameters)
# TODO: add other output methods, like json, yaml, python dict
from pprint import pprint
pprint(info)
if __name__ == '__main__':
main()
| 27.675325 | 78 | 0.6687 |
4d220c47f8915f484fcada1de144cddca671bb25 | 25,729 | py | Python | google/cloud/networkmanagement/v1beta1/networkmanagement-v1beta1-py/google/cloud/networkmanagement_v1beta1/services/reachability_service/async_client.py | googleapis/googleapis-gen | d84824c78563d59b0e58d5664bfaa430e9ad7e7a | [
"Apache-2.0"
] | 7 | 2021-02-21T10:39:41.000Z | 2021-12-07T07:31:28.000Z | google/cloud/networkmanagement/v1beta1/networkmanagement-v1beta1-py/google/cloud/networkmanagement_v1beta1/services/reachability_service/async_client.py | googleapis/googleapis-gen | d84824c78563d59b0e58d5664bfaa430e9ad7e7a | [
"Apache-2.0"
] | 6 | 2021-02-02T23:46:11.000Z | 2021-11-15T01:46:02.000Z | google/cloud/networkmanagement/v1beta1/networkmanagement-v1beta1-py/google/cloud/networkmanagement_v1beta1/services/reachability_service/async_client.py | googleapis/googleapis-gen | d84824c78563d59b0e58d5664bfaa430e9ad7e7a | [
"Apache-2.0"
] | 4 | 2021-01-28T23:25:45.000Z | 2021-08-30T01:55:16.000Z | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import functools
import re
from typing import Dict, Sequence, Tuple, Type, Union
import pkg_resources
import google.api_core.client_options as ClientOptions # type: ignore
from google.api_core import exceptions as core_exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
from google.api_core import operation # type: ignore
from google.api_core import operation_async # type: ignore
from google.cloud.networkmanagement_v1beta1.services.reachability_service import pagers
from google.cloud.networkmanagement_v1beta1.types import connectivity_test
from google.cloud.networkmanagement_v1beta1.types import reachability
from google.protobuf import empty_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
from .transports.base import ReachabilityServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import ReachabilityServiceGrpcAsyncIOTransport
from .client import ReachabilityServiceClient
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-networkmanagement",
).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = (
"ReachabilityServiceAsyncClient",
)
| 41.700162 | 171 | 0.653309 |
4d237d356a17f205a24800037a5d0a053ed6c426 | 563 | py | Python | todo/urls.py | fidele000/Ftodo-RestAPI-Django | 8c695503e04a3957920910acb9f1bb823ece4287 | [
"MIT"
] | null | null | null | todo/urls.py | fidele000/Ftodo-RestAPI-Django | 8c695503e04a3957920910acb9f1bb823ece4287 | [
"MIT"
] | null | null | null | todo/urls.py | fidele000/Ftodo-RestAPI-Django | 8c695503e04a3957920910acb9f1bb823ece4287 | [
"MIT"
] | null | null | null | from django.urls import path
from rest_framework import viewsets
from rest_framework import routers
from . import views
from django.urls import include
from rest_framework.routers import DefaultRouter
router=DefaultRouter()
router.register('hello-viewset',views.HelloViewSet,basename='hello-viewset')
router.register('profile',views.UserProfileViewSet)
router.register('login',views.LoginViewSet,basename='login')
router.register('task',views.TaskViewset)
urlpatterns = [
path('helloview/',views.HelloAPIView.as_view()),
path('',include(router.urls)),
]
| 33.117647 | 76 | 0.801066 |
4d23e4d034125a3f8c2a16ba07229fdc1c90a016 | 257 | py | Python | toontown/estate/DistributedPlantBaseAI.py | TheFamiliarScoot/open-toontown | 678313033174ea7d08e5c2823bd7b473701ff547 | [
"BSD-3-Clause"
] | 99 | 2019-11-02T22:25:00.000Z | 2022-02-03T03:48:00.000Z | toontown/estate/DistributedPlantBaseAI.py | TheFamiliarScoot/open-toontown | 678313033174ea7d08e5c2823bd7b473701ff547 | [
"BSD-3-Clause"
] | 42 | 2019-11-03T05:31:08.000Z | 2022-03-16T22:50:32.000Z | toontown/estate/DistributedPlantBaseAI.py | TheFamiliarScoot/open-toontown | 678313033174ea7d08e5c2823bd7b473701ff547 | [
"BSD-3-Clause"
] | 57 | 2019-11-03T07:47:37.000Z | 2022-03-22T00:41:49.000Z | from direct.directnotify import DirectNotifyGlobal
from direct.distributed.DistributedObjectAI import DistributedObjectAI
| 42.833333 | 82 | 0.879377 |
4d240f3eb85f0adcecd00489cbe4d3ad31ec57c5 | 27 | py | Python | test.py | justin-th/linux-pasword-protect | feba8712d5bc25c417cb7297aac9c0d23566378e | [
"MIT"
] | null | null | null | test.py | justin-th/linux-pasword-protect | feba8712d5bc25c417cb7297aac9c0d23566378e | [
"MIT"
] | null | null | null | test.py | justin-th/linux-pasword-protect | feba8712d5bc25c417cb7297aac9c0d23566378e | [
"MIT"
] | null | null | null | import os
print(os.curdir) | 9 | 16 | 0.777778 |
4d242ba823cf6de6e20e2768b1f065a06d916125 | 302 | py | Python | setup.py | samuel-spak/thermostate | 906d1e0b79289cd51cde510c797f007674b8bdcd | [
"BSD-3-Clause"
] | 6 | 2020-03-31T14:25:23.000Z | 2022-03-10T14:56:29.000Z | setup.py | samuel-spak/thermostate | 906d1e0b79289cd51cde510c797f007674b8bdcd | [
"BSD-3-Clause"
] | 35 | 2017-01-26T15:31:19.000Z | 2022-03-14T16:32:00.000Z | setup.py | samuel-spak/thermostate | 906d1e0b79289cd51cde510c797f007674b8bdcd | [
"BSD-3-Clause"
] | 15 | 2017-02-08T20:07:38.000Z | 2022-03-14T09:15:35.000Z | from setuptools import setup
from pathlib import Path
from typing import Dict
HERE = Path(__file__).parent
version: Dict[str, str] = {}
version_file = HERE / "src" / "thermostate" / "_version.py"
exec(version_file.read_text(), version)
setup(version=version["__version__"], package_dir={"": "src"})
| 25.166667 | 62 | 0.731788 |
4d242e01b427dcb6b1bf2d2cc3562c29ca378947 | 78,627 | py | Python | fps2c__ - Copy.py | GeorgeLoo/FPS | 775bf173437d2feb09bc91b7f842226a8c752980 | [
"MIT"
] | 1 | 2022-02-21T12:07:42.000Z | 2022-02-21T12:07:42.000Z | fps2c__ - Copy.py | GeorgeLoo/FPS | 775bf173437d2feb09bc91b7f842226a8c752980 | [
"MIT"
] | null | null | null | fps2c__ - Copy.py | GeorgeLoo/FPS | 775bf173437d2feb09bc91b7f842226a8c752980 | [
"MIT"
] | null | null | null |
'''
_
_._ _..._ .-', _.._(`))
'-. ` ' /-._.-' ',/
) \ '.
/ _ _ | \
| a a / |
\ .-. ;
'-('' ).-' ,' ;
'-; | .'
\ \ /
| 7 .__ _.-\ \
| | | ``/ /` /
/,_| | /,_/ /
/,_/ '`-'
-----------------------------------------
injured face like Duke Nukem
/moving hostages panic
children as terrorists! with RPGs
/taking cover
/Claymore 700 ball bearings
night shootouts
mp7 rifle with silencer
/2 images for hostages
/Barrett Browning M82 CQ
/go through walls, kill tango commandos with one shot
/see through walls with scope
?tablet version
Fellow shooters with Ai
/medical kits
shoot and shatter glass
guards and
/trigger waypoints
long distance shooting! sniper rifle!
show dead bodies in 3 positions: left, right, upside down!
assassinations
deeper missions
scenario announcement
scenario chooser
improve tango generation
background music
show next mission
give a summary of the performance
/fight against special forces who take 5 shots before dying
/weapons restriction!
/pause that pauses the bullets - important!
/campaign mode
/reset stats F1 key 13.3.2015
/prevent hero from going off screen 14.3.2015 pi day
tango random shooters
tango intelligent fire
tango intelligent movement, flanking!
game suicide bombers
/game hostages
hostage with timer shootings
/different rates of auto fire
small message window
bullets that can shoot through the walls!
blow cover away with bombs
/Hero cursor wasd movement
/Tangos will target Hero
RPG countdown to explosion
tangos
hostages
cover
sniper rifle and distances
blood?
/headshots
/leg shots
shield for storming rooms
weapon accuracy
range
rate of auto fire
Pistol
Name
Graphic
Aiming cursor
Sound firing
Sound hit
Safe, Semi
Number of rounds
Reload()
Choose()
Draw()
DrawRounds()
Fire()
Selector()
Knife - sheathed, stab/slash/throw
Silenced Pistol
Glock automatic pistol
Samurai sword
Parachute avoid 'obstacles'
Maze grid when covert missions
Rifle
Safe Semi Auto
Sniper Rifle
Safe Semi
Mini-gun
50 round a second!
4400 round box
SAW
safe Auto
Shotgun
spread shot
Stun granade
safe armed
Grenade
Safe Armed
Rocket
Safe Semi
Artillery
Coords
Confirm
auto fire
---------------
explosion objects
Grenades as well.
Grenade multiple launcher
use proper consts
better structure for
changing weapons
use hash to get to weapons instead of if-then-else
'''
import pyglet
from pyglet.window import key
from pyglet import clock
import random
#import Tkinter, tkMessageBox
gSound = Sounds()
gmw = MessageWin('Battle Report')
gBattleRep = BattleReport()
#gBattleRep.numberbodyhit += 1
#BattleReport.bullshit = 37
#print gBattleRep.numberbodyhit, BattleReport.bullshit
#gBattleRep.report()
#tkMessageBox.showinfo('Battle Report','Stuff')
#gmw.switch_to()
#gmw.show()
#gmw.setText('fuck'+' pap',0)
'''
Bystanders
Movement all over the place
stats
explosions
bullets
'''
'''
Hostages
Moving / stationary
crying sounds
statistics tracked
different looks
statistics
drawing
'''
'''
animate explosions
sound handled elsewhere
'''
def HandleModeSelect(modes,currMode):
#print Const.foo
i = 0
while i < len(modes):
if currMode == modes[i] and i < len(modes)-1:
return modes[i+1]
i += 1
return modes[0]
def withinrect( x,y,r):
x1,y1=r[0],r[1]
x2,y2=r[2],r[3]
if x>x1 and x<x2 and y>y1 and y<y2:
return True
return False
'''
man figure must go past to trigger
red to green
1 2 3
'''
'''
goes in front of the tangos to provide cover for bullets
typeofcover
'''
'''
ninja stealth
five bullets to kill one
cannot be killed by grenade
dead bodies list
'''
#class TargetBoard0():
#def __init__(self,x,y):
#self.target = SpriteLoad(Const.folder+'target.jpg')
#self.target.scale = 0.25
#self.target.set_position(x,y)
##self.target.rotation = -90.0
##self.hitlist = []
#print 'init Target'
#print self.target.width, self.target.height
#def move(self,w,h):
##print 'move',self.target.x,w,h
#d = GetDirection()
#tw = self.target.width
#th = self.target.height
#x = self.target.x
#y = self.target.y
#self.target.x,self.target.y = MoveXY(d, x,y,tw, th, w, h)
#pass
#def Hit(self,x,y):
#tx = self.target.x
#ty = self.target.y
#l = tx
#t = ty + self.target.height/4*3
#r = tx + self.target.width
#b = ty + self.target.height
#recthead = [l, t, r, b]
#l = tx
#t = ty + self.target.height/4
#r = tx + self.target.width
#b = ty + self.target.height/4*3
#rectbody = [l, t, r, b]
#l = tx
#t = ty
#r = tx + self.target.width
#b = ty + self.target.height/4
#rectlegs = [l, t, r, b]
#if withinrect( x, y, recthead):
#print 'head hit'
#return True
#elif withinrect( x, y, rectbody):
#print 'body hit'
##self.sound.Play(self.sound.pain)
#return True
#elif withinrect( x, y, rectlegs):
#print 'leg hit'
##self.sound.Play(self.sound.pain)
#return True
#else:
##print 'miss'
#return False
#def Draw(self):
#self.target.draw()
'''
appear near hero
dot moves randomly
dot moves toward hero
tries to hit hero
number
skill
speed
location of hero
add attacks
timed attacks, then end each
check hit
RPG
sound of hero hit
/graphic
'''
'''
to allow the number keys to be programmed with different weapons
as to the mission at hand.
'''
'''
Place where stuff that can be shot at are placed.
Tango
Hostages
Cover
can be distant and nearby
distant for sniper
How to account for the shot?
Scoring?
'''
#class ShootingGallery():
#gTargetBoard = TargetBoard()
gShootGallery = ShootingGallery()
gBulletHoles = BulletHoles()
#class Pistol():
#def __init__(self,
#name,
##sound,
##bulletholes,
#):
#self.name = name
#print 'pistol init'
#self.mode = Const.pistolSafe
#self.data = Const.folder
#weapondata = self.Database(name)
#self.drawing = SpriteLoad(self.data+weapondata[0])
#self.drawing.scale = weapondata[1]
#self.sound = gSound
#self.bulleth = gBulletHoles
#self.mousex = 0
#self.mousey = 0
#self.magazine = weapondata[2]
#self.ammo = self.magazine
#self.reloadweapon = False
#self.status = pyglet.text.Label('Hello, world',
#font_name='Times New Roman',
#font_size=24,
#x=220, y = 20)
#self.SetText()
#self.reticle = weapondata[3]
#pass
#def Database(self,name):
##filename,scale,magazine capacity,
#if name == Const.pistol1911:
#return 'm1911pistol.jpg',0.25,15,'reticlePistol1911.png'
#else:
#raise Exception("pistol Weapon not exist!")
#def reloadCall(self,dt):
#if self.reloadweapon:
#self.reloadtime -= 1
#if self.reloadtime < 1:
#self.ammo = self.magazine
#self.SetText()
#clock.unschedule(self.reloadCall)
#self.reloadweapon = False
#def mouse(self,x,y):
#if self.mode != Const.pistolSafe:
#self.trigger = True
#self.mousex = x
#self.mousey = y
#self.Fire()
#def mouseup(self,x,y):
#self.trigger = False
#def mousedrag(self,x,y):
##pistol got no drag
#pass
#def Fire(self):
#if self.ammo > 0:
##self.sound.Play(self.sound.sar21)
#self.sound.Play(self.sound.m1911)
#x = self.mousex
#y = self.mousey
#self.bulleth.record(x,y)
#self.ammo -= 1
#self.SetText()
##gTargetBoard.Hit(x, y)
#gShootGallery.Hit(x, y)
#def SetText(self):
#self.report = self.name + ' ' + self.mode + ' ' + str(self.ammo)
#self.status.text = self.report
#def select(self):
##print 'pistol mode'
#self.mode = HandleModeSelect(Const.pistolModes, self.mode)
##print self.mode
#self.SetText()
##print self.mode
#def draw(self):
#self.drawing.draw()
#self.bulleth.draw()
#self.status.draw()
#pass
#def Reload(self):
#self.sound.Player(self.sound.reLoad)
#self.reloadweapon = True
#self.reloadtime = 3
#clock.schedule_interval(self.reloadCall, 1.0)
#def SetSights(self,win):
#image = pyglet.image.load(Const.folder+self.reticle)
#cursor = pyglet.window.ImageMouseCursor(image, 25, 25)
#win.set_mouse_cursor( cursor)
#pass
#class CurrentGame():
if __name__ == "__main__":
#gmw = MessageWin('Messages')
#gmw2 = MessageWin('Main')
m = FPSWin()
pyglet.app.run() | 31.114761 | 118 | 0.525176 |
4d246a042f4d01726d7da3a16c2ca45068a1a3cb | 2,462 | py | Python | exercises/networking_v2/roles/ansible-network.network-engine/lib/network_engine/plugins/template/__init__.py | rcalvaga/linklight | bb6364272c167c017cb2ee0790015143df29fa19 | [
"MIT"
] | 1 | 2020-03-29T17:35:59.000Z | 2020-03-29T17:35:59.000Z | exercises/networking_v2/roles/ansible-network.network-engine/lib/network_engine/plugins/template/__init__.py | rcalvaga/linklight | bb6364272c167c017cb2ee0790015143df29fa19 | [
"MIT"
] | null | null | null | exercises/networking_v2/roles/ansible-network.network-engine/lib/network_engine/plugins/template/__init__.py | rcalvaga/linklight | bb6364272c167c017cb2ee0790015143df29fa19 | [
"MIT"
] | 1 | 2020-03-30T11:00:47.000Z | 2020-03-30T11:00:47.000Z | # (c) 2018, Ansible by Red Hat, inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import collections
from ansible.module_utils.six import iteritems, string_types
from ansible.errors import AnsibleUndefinedVariable
| 34.676056 | 92 | 0.620634 |
4d24ec942e343c870cf4c7c64a35ce9e7ec32816 | 748 | py | Python | test/try_closures.py | RemuLang/sijuiacion-lang | e3b5be79fb7afadc0790311e612ddd430b3f0b9d | [
"MIT"
] | 21 | 2019-10-13T14:11:32.000Z | 2021-12-14T02:42:12.000Z | test/try_closures.py | RemuLang/sijuiacion-lang | e3b5be79fb7afadc0790311e612ddd430b3f0b9d | [
"MIT"
] | 1 | 2020-01-07T13:14:46.000Z | 2020-01-09T16:58:07.000Z | test/try_closures.py | RemuLang/sijuiacion-lang | e3b5be79fb7afadc0790311e612ddd430b3f0b9d | [
"MIT"
] | 1 | 2020-08-13T16:17:09.000Z | 2020-08-13T16:17:09.000Z | from Redy.Opt import feature, constexpr
import timeit
print(f1(1)(2))
print(f2(1)(2))
# 3
# 3
# mk closure
print(timeit.timeit("f(1)", globals=dict(f=f1)))
print(timeit.timeit("f(1)", globals=dict(f=f2)))
# 0.15244655999958923
# 0.16590227899905585
f1_ = f1(2)
f2_ = f2(2)
print(timeit.timeit("f(1)", globals=dict(f=f1_)))
print(timeit.timeit("f(1)", globals=dict(f=f2_)))
# 0.08070355000018026
# 0.20936105600048904
# So, use builtin closures instead of making our own
| 15.914894 | 52 | 0.639037 |
4d2521ea6310ee9cc5f131827f5a83488f594d5f | 66 | py | Python | python/testData/refactoring/move/docstringTypes/before/src/b.py | jnthn/intellij-community | 8fa7c8a3ace62400c838e0d5926a7be106aa8557 | [
"Apache-2.0"
] | 2 | 2018-12-29T09:53:39.000Z | 2018-12-29T09:53:42.000Z | python/testData/refactoring/move/docstringTypes/before/src/b.py | Cyril-lamirand/intellij-community | 60ab6c61b82fc761dd68363eca7d9d69663cfa39 | [
"Apache-2.0"
] | 173 | 2018-07-05T13:59:39.000Z | 2018-08-09T01:12:03.000Z | python/testData/refactoring/move/docstringTypes/before/src/b.py | Cyril-lamirand/intellij-community | 60ab6c61b82fc761dd68363eca7d9d69663cfa39 | [
"Apache-2.0"
] | 2 | 2020-03-15T08:57:37.000Z | 2020-04-07T04:48:14.000Z | def f(x):
'''Does nothing.
:type x: a.C
'''
pass
| 9.428571 | 20 | 0.409091 |
4d2554382ea5d2316a7d1a204e1adf6165ec8877 | 195 | py | Python | datapack/validate.py | emorgan00/EasyDatapacks | 535bf30a23e4e8fa22ff827bc6c223f91a0228ed | [
"MIT"
] | 35 | 2019-06-23T22:35:56.000Z | 2022-02-23T18:09:25.000Z | datapack/validate.py | emorgan00/EasyDatapacks | 535bf30a23e4e8fa22ff827bc6c223f91a0228ed | [
"MIT"
] | 5 | 2019-07-08T04:54:21.000Z | 2022-03-24T12:44:19.000Z | datapack/validate.py | emorgan00/EasyDatapacks | 535bf30a23e4e8fa22ff827bc6c223f91a0228ed | [
"MIT"
] | 5 | 2019-06-24T04:09:15.000Z | 2022-02-22T03:50:41.000Z | # this should accept a command as a string, and raturn a string detailing the issue
# if <command> is not a valid vanilla minecraft command. None otherwise.
| 27.857143 | 83 | 0.753846 |
4d26e4038ffff6b5c711d810347580c7c6e22de3 | 44 | py | Python | Python/Tutorial - 2/strings.py | JC2295/FCC_Tutorial_Projects | 990e1221b2177acb9e4db0264adab518620404a0 | [
"MIT"
] | null | null | null | Python/Tutorial - 2/strings.py | JC2295/FCC_Tutorial_Projects | 990e1221b2177acb9e4db0264adab518620404a0 | [
"MIT"
] | null | null | null | Python/Tutorial - 2/strings.py | JC2295/FCC_Tutorial_Projects | 990e1221b2177acb9e4db0264adab518620404a0 | [
"MIT"
] | null | null | null | print("One")
print("Two")
print("Three")
| 6.285714 | 14 | 0.590909 |
4d272df6572584be280304452391a2a0947eefaa | 4,220 | py | Python | frontend/Two_Dim_System.py | Pugavkomm/NS-analyst | 698af0e94f57b431fd77c17c49d4a23f11d21d3f | [
"MIT"
] | null | null | null | frontend/Two_Dim_System.py | Pugavkomm/NS-analyst | 698af0e94f57b431fd77c17c49d4a23f11d21d3f | [
"MIT"
] | null | null | null | frontend/Two_Dim_System.py | Pugavkomm/NS-analyst | 698af0e94f57b431fd77c17c49d4a23f11d21d3f | [
"MIT"
] | null | null | null | """AI is creating summary for
"""
from frontend import main_window
from PyQt5 import QtWidgets
from frontend import input_system
from PyQt5.QtWidgets import QInputDialog, qApp
from qt_material import apply_stylesheet
style_sheets = ['dark_amber.xml',
'dark_blue.xml',
'dark_cyan.xml',
'dark_lightgreen.xml',
'dark_pink.xml',
'dark_purple.xml',
'dark_red.xml',
'dark_teal.xml',
'dark_yellow.xml',
'light_amber.xml',
'light_blue.xml',
'light_cyan.xml',
'light_cyan_500.xml',
'light_lightgreen.xml',
'light_pink.xml',
'light_purple.xml',
'light_red.xml',
'light_teal.xml',
'light_yellow.xml']
| 45.869565 | 119 | 0.667536 |
4d283228992f8ac0459bba73af7effe988be2fc1 | 1,525 | py | Python | get_board_array.py | SuperStormer/minesweeper-bot | b503752ef4f4c1650ea48609dcebf0757d5ad209 | [
"MIT"
] | null | null | null | get_board_array.py | SuperStormer/minesweeper-bot | b503752ef4f4c1650ea48609dcebf0757d5ad209 | [
"MIT"
] | 2 | 2022-01-13T00:50:34.000Z | 2022-03-11T23:26:44.000Z | get_board_array.py | SuperStormer/minesweeper-bot | b503752ef4f4c1650ea48609dcebf0757d5ad209 | [
"MIT"
] | null | null | null | import mss
import numpy as np
from PIL import Image
from config import BOARD_HEIGHT, BOARD_WIDTH
CELL_SIZE = 22
BOARD_X = 14
BOARD_Y = 111
COLOR_CODES = {
(0, 0, 255): 1,
(0, 123, 0): 2,
(255, 0, 0): 3,
(0, 0, 123): 4,
(123, 0, 0): 5,
(0, 123, 123): 6,
(0, 0, 0): 7,
(123, 123, 123): 8,
(189, 189, 189): 0 #unopened/opened blank
}
| 32.446809 | 123 | 0.662951 |
4d293734f2f02cf252d19002878c81331dcad9c7 | 963 | py | Python | Challenges/13/tests/test_stack_and_queue_brackets.py | makkahwi/data-structures-and-algorithms | 06551786258bb7dabb9b0ab07c0f80ff78abca41 | [
"MIT"
] | null | null | null | Challenges/13/tests/test_stack_and_queue_brackets.py | makkahwi/data-structures-and-algorithms | 06551786258bb7dabb9b0ab07c0f80ff78abca41 | [
"MIT"
] | null | null | null | Challenges/13/tests/test_stack_and_queue_brackets.py | makkahwi/data-structures-and-algorithms | 06551786258bb7dabb9b0ab07c0f80ff78abca41 | [
"MIT"
] | null | null | null | import pytest
from stack_and_queue_brackets.stack_and_queue_brackets import validate_brackets
| 20.0625 | 79 | 0.688474 |
4d2a55ccfddd9dd9215b0c629a81b67391bf257c | 360 | py | Python | tts/symbols.py | entn-at/tn2-wg | 00f59da91a1e23020b20210ea62d838e20c049f2 | [
"BSD-3-Clause"
] | 38 | 2019-07-03T21:40:57.000Z | 2021-11-30T15:59:20.000Z | tts/symbols.py | entn-at/tn2-wg | 00f59da91a1e23020b20210ea62d838e20c049f2 | [
"BSD-3-Clause"
] | 4 | 2019-08-20T12:19:10.000Z | 2021-07-29T11:20:59.000Z | tts/symbols.py | entn-at/tn2-wg | 00f59da91a1e23020b20210ea62d838e20c049f2 | [
"BSD-3-Clause"
] | 11 | 2019-07-04T09:17:27.000Z | 2021-11-14T21:05:04.000Z | """ from https://github.com/keithito/tacotron """
_pad = '_'
#_punctuation = '!\'(),.:;? '
_punctuation = '!",.:;? '
_special = '-'
#_letters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'
_letters = ""
symbols = [_pad] + list(_special) + list(_punctuation) + list(_letters)
| 32.727273 | 79 | 0.711111 |
4d2be9274f40cf5428dff78da1531be1cba5e3f0 | 28 | py | Python | tests/fixtures/pkg1/pkg2/pkg3/pkg4/pkg5/__init__.py | shashankrnr32/pytkdocs | bf04764f1608970643932329c9f6c8c63a0c5632 | [
"0BSD"
] | 21 | 2021-02-20T05:20:52.000Z | 2022-03-04T20:57:16.000Z | tests/fixtures/pkg1/pkg2/pkg3/pkg4/pkg5/__init__.py | shashankrnr32/pytkdocs | bf04764f1608970643932329c9f6c8c63a0c5632 | [
"0BSD"
] | 84 | 2020-03-22T15:29:56.000Z | 2021-02-09T21:47:11.000Z | tests/fixtures/pkg1/pkg2/pkg3/pkg4/pkg5/__init__.py | shashankrnr32/pytkdocs | bf04764f1608970643932329c9f6c8c63a0c5632 | [
"0BSD"
] | 21 | 2020-04-09T13:56:23.000Z | 2021-01-19T19:18:42.000Z | """Hello from the abyss."""
| 14 | 27 | 0.607143 |
4d2c6196de9f0ffebba719e30abbaf48e28d2d23 | 5,719 | py | Python | test/test_tdodbc.py | Teradata/PyTd | 5e960ed4c380c4f8ae84d582ad779a87adce5ae1 | [
"MIT"
] | 133 | 2015-07-27T22:12:58.000Z | 2021-08-31T05:26:38.000Z | test/test_tdodbc.py | Teradata/PyTd | 5e960ed4c380c4f8ae84d582ad779a87adce5ae1 | [
"MIT"
] | 121 | 2015-07-30T18:03:03.000Z | 2021-08-09T13:46:37.000Z | test/test_tdodbc.py | Teradata/PyTd | 5e960ed4c380c4f8ae84d582ad779a87adce5ae1 | [
"MIT"
] | 57 | 2015-07-27T10:41:08.000Z | 2021-04-26T08:58:57.000Z | # The MIT License (MIT)
#
# Copyright (c) 2015 by Teradata
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import unittest
import os
import teradata
from teradata import tdodbc, util
configFiles = [os.path.join(os.path.dirname(__file__), 'udaexec.ini')]
udaExec = teradata.UdaExec(configFiles=configFiles, configureLogging=False)
dsn = 'ODBC'
odbcConfig = udaExec.config.section(dsn)
system = odbcConfig['system']
super_username = odbcConfig['username']
super_password = odbcConfig['password']
if __name__ == '__main__':
unittest.main()
| 43.656489 | 79 | 0.616541 |
4d2d2acea9bb79c046b8abea693dc31ff18efd72 | 143 | py | Python | submissions/abc035/a.py | m-star18/atcoder | 08e475810516602fa088f87daf1eba590b4e07cc | [
"Unlicense"
] | 1 | 2021-05-10T01:16:28.000Z | 2021-05-10T01:16:28.000Z | submissions/abc035/a.py | m-star18/atcoder | 08e475810516602fa088f87daf1eba590b4e07cc | [
"Unlicense"
] | 3 | 2021-05-11T06:14:15.000Z | 2021-06-19T08:18:36.000Z | submissions/abc035/a.py | m-star18/atcoder | 08e475810516602fa088f87daf1eba590b4e07cc | [
"Unlicense"
] | null | null | null | import sys
input = sys.stdin.readline
w, h = map(int, input().split())
if w / h == 4 / 3:
ans = '4:3'
else:
ans = '16:9'
print(ans)
| 11.916667 | 32 | 0.538462 |
4d2da9056c6d973976290183ad18c7e824e87fbe | 1,029 | py | Python | setup.py | JosiahBradley/mod2win | f3636faea8cce041be2d9933574aa1ccd4b818ac | [
"Apache-2.0"
] | null | null | null | setup.py | JosiahBradley/mod2win | f3636faea8cce041be2d9933574aa1ccd4b818ac | [
"Apache-2.0"
] | null | null | null | setup.py | JosiahBradley/mod2win | f3636faea8cce041be2d9933574aa1ccd4b818ac | [
"Apache-2.0"
] | null | null | null | import setuptools
long_description = ""
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
author='Josiah Bradley',
author_email='JosiahBradley@gmail.com',
name="mod2win",
url="https://github.com/JosiahBradley/mod2win",
version="0.0.1",
entry_points={
'console_scripts': [
'play = mod2win.levels.level_launcher:launch',
'compile = mod2win.levels.level_launcher:_compile',
'scrub = mod2win.levels.level_launcher:scrub',
'restore = mod2win.levels.level_launcher:restore',
'spiral = mod2win.levels.spiral_test:main',
]
},
package_dir={'': 'src'},
packages=setuptools.find_packages('src'),
include_package_data=True,
long_description=long_description,
long_description_content_type="text/markdown",
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache License",
"Operating System :: OS Independent",
],
)
| 31.181818 | 63 | 0.640428 |
4d2f1c615c504ceda8bcf1bd0cf231a9e5310a56 | 4,914 | py | Python | release/stubs.min/Autodesk/Revit/UI/Plumbing.py | htlcnn/ironpython-stubs | 780d829e2104b2789d5f4d6f32b0ec9f2930ca03 | [
"MIT"
] | 182 | 2017-06-27T02:26:15.000Z | 2022-03-30T18:53:43.000Z | release/stubs.min/Autodesk/Revit/UI/Plumbing.py | htlcnn/ironpython-stubs | 780d829e2104b2789d5f4d6f32b0ec9f2930ca03 | [
"MIT"
] | 28 | 2017-06-27T13:38:23.000Z | 2022-03-15T11:19:44.000Z | release/stubs.min/Autodesk/Revit/UI/Plumbing.py | htlcnn/ironpython-stubs | 780d829e2104b2789d5f4d6f32b0ec9f2930ca03 | [
"MIT"
] | 67 | 2017-06-28T09:43:59.000Z | 2022-03-20T21:17:10.000Z | # encoding: utf-8
# module Autodesk.Revit.UI.Plumbing calls itself Plumbing
# from RevitAPIUI,Version=17.0.0.0,Culture=neutral,PublicKeyToken=null
# by generator 1.145
# no doc
# no imports
# no functions
# classes
| 28.905882 | 215 | 0.71571 |
4d2f795e5817013dda3708c8ac386c1a237e9181 | 240 | py | Python | src/base/apps.py | jhernandez18p/mobyapp | 4add7bd3b3f1e933bbb6941674bf84f4c4462685 | [
"MIT"
] | null | null | null | src/base/apps.py | jhernandez18p/mobyapp | 4add7bd3b3f1e933bbb6941674bf84f4c4462685 | [
"MIT"
] | 7 | 2020-06-05T17:31:06.000Z | 2022-03-11T23:16:34.000Z | src/base/apps.py | jhernandez18p/mobyapp | 4add7bd3b3f1e933bbb6941674bf84f4c4462685 | [
"MIT"
] | null | null | null | from django.apps import AppConfig
from django.db.models.signals import post_migrate
from django.utils.translation import gettext_lazy as _
| 26.666667 | 54 | 0.766667 |
4d31fdb12184ec34b1dcc98d224f9491db93ddd4 | 708 | py | Python | migrations/versions/010_Add_uploader_link_to_upload.py | LCBRU/lbrc_upload | be42fef97b67c1f25329db52ae3a88eb293a1203 | [
"MIT"
] | null | null | null | migrations/versions/010_Add_uploader_link_to_upload.py | LCBRU/lbrc_upload | be42fef97b67c1f25329db52ae3a88eb293a1203 | [
"MIT"
] | null | null | null | migrations/versions/010_Add_uploader_link_to_upload.py | LCBRU/lbrc_upload | be42fef97b67c1f25329db52ae3a88eb293a1203 | [
"MIT"
] | null | null | null | from sqlalchemy import MetaData, Table, Index, Column, Integer
meta = MetaData()
| 28.32 | 82 | 0.75565 |
4d32026c6758449b672d084b5b5fc4c71016f623 | 3,380 | py | Python | redbot/resource/active_check/base.py | Malvoz/redbot | 0edef8d4efefddde49d36cd97e471fc187837169 | [
"MIT"
] | null | null | null | redbot/resource/active_check/base.py | Malvoz/redbot | 0edef8d4efefddde49d36cd97e471fc187837169 | [
"MIT"
] | null | null | null | redbot/resource/active_check/base.py | Malvoz/redbot | 0edef8d4efefddde49d36cd97e471fc187837169 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
"""
Subrequests to do things like range requests, content negotiation checks,
and validation.
This is the base class for all subrequests.
"""
from abc import ABCMeta, abstractmethod
from configparser import SectionProxy
from typing import List, Tuple, Type, Union, TYPE_CHECKING
from redbot.resource.fetch import RedFetcher
from redbot.speak import Note, levels, categories
from redbot.type import StrHeaderListType
if TYPE_CHECKING:
from redbot.resource import (
HttpResource,
) # pylint: disable=cyclic-import,unused-import
| 32.190476 | 97 | 0.657988 |
4d3223ccf8b9ace60c35ba3ab835c0690408e671 | 92 | py | Python | demo/demo14.py | LXG-Shadow/SongRecogn | e02363db5dc40b6128c46f19249044c94e5ba425 | [
"Apache-2.0"
] | 22 | 2019-02-25T20:58:58.000Z | 2021-07-15T01:45:00.000Z | demo/demo14.py | aynakeya/SongRecogn | e02363db5dc40b6128c46f19249044c94e5ba425 | [
"Apache-2.0"
] | 4 | 2021-04-07T13:27:44.000Z | 2021-06-15T17:32:04.000Z | demo/demo14.py | aynakeya/SongRecogn | e02363db5dc40b6128c46f19249044c94e5ba425 | [
"Apache-2.0"
] | 4 | 2020-05-16T14:08:24.000Z | 2021-06-07T08:59:07.000Z | import getopt
a = "asdf asdf"
option,args = getopt.getopt(a,"","")
print(option,type(args))
| 18.4 | 36 | 0.684783 |
4d360ecaf65d937ea0be727ba4568099673793e8 | 41 | py | Python | eyap/utils/ghtools/__init__.py | emin63/eyap | 783bdede298e63bbafee81b50cd1e899c43f5847 | [
"BSD-3-Clause"
] | null | null | null | eyap/utils/ghtools/__init__.py | emin63/eyap | 783bdede298e63bbafee81b50cd1e899c43f5847 | [
"BSD-3-Clause"
] | 2 | 2017-07-17T03:50:32.000Z | 2017-08-05T02:39:36.000Z | eyap/utils/ghtools/__init__.py | emin63/eyap | 783bdede298e63bbafee81b50cd1e899c43f5847 | [
"BSD-3-Clause"
] | null | null | null | """Additional GitHub specific tools.
"""
| 13.666667 | 36 | 0.707317 |
4d36ab2ecf64dbe104d9ad83b84b202b59495ecf | 13,674 | py | Python | page_api.py | tomkludy/md_to_conf | 3b03f1c68eea1f8a6d788afab0add63f6d4dcf46 | [
"MIT"
] | null | null | null | page_api.py | tomkludy/md_to_conf | 3b03f1c68eea1f8a6d788afab0add63f6d4dcf46 | [
"MIT"
] | null | null | null | page_api.py | tomkludy/md_to_conf | 3b03f1c68eea1f8a6d788afab0add63f6d4dcf46 | [
"MIT"
] | null | null | null | """
# --------------------------------------------------------------------------------------------------
# Page APIs
# --------------------------------------------------------------------------------------------------
"""
import os
import tempfile
import re
import json
import collections
import mimetypes
import urllib
import urllib.parse
import common
from file_api import FILE_API
from child_pages import CHILD_PAGES
from page_cache import PAGE_CACHE
from globals import LOGGER
from globals import SPACE_KEY
from globals import CONFLUENCE_API_URL
from globals import SIMULATE
from globals import ANCESTOR
PAGE_API = _PageApi()
| 36.270557 | 114 | 0.546365 |
4d37f40cf39b5e290df6e5f9680f28b3b0ec78f5 | 5,918 | py | Python | testing/tests/data_handling/test_predict.py | JSKenyon/QuartiCal | 2113855b080cfecc4a1c77cc9dad346ef3619716 | [
"MIT"
] | null | null | null | testing/tests/data_handling/test_predict.py | JSKenyon/QuartiCal | 2113855b080cfecc4a1c77cc9dad346ef3619716 | [
"MIT"
] | null | null | null | testing/tests/data_handling/test_predict.py | JSKenyon/QuartiCal | 2113855b080cfecc4a1c77cc9dad346ef3619716 | [
"MIT"
] | 1 | 2022-03-18T14:30:04.000Z | 2022-03-18T14:30:04.000Z | from copy import deepcopy
import pytest
from quartical.data_handling.predict import (parse_sky_models,
daskify_sky_model_dict,
get_support_tables)
import dask.array as da
import numpy as np
from numpy.testing import assert_array_almost_equal
expected_clusters = {"DIE": {"point": 22, "gauss": 24},
"B290": {"point": 1, "gauss": 2},
"C242": {"point": 0, "gauss": 1},
"G195": {"point": 0, "gauss": 1},
"H194": {"point": 0, "gauss": 2},
"I215": {"point": 0, "gauss": 1},
"R283": {"point": 1, "gauss": 0},
"V317": {"point": 0, "gauss": 1}}
# -----------------------------parse_sky_models--------------------------------
# -------------------------daskify_sky_model_dict------------------------------
# ----------------------------get_support_tables-------------------------------
# ---------------------------------predict-------------------------------------
# NOTE: No coverage attempt is made for the predict internals copied from
# https://github.com/ska-sa/codex-africanus. This is because the majority
# of this functionality should be tested by codex-africanus. We do check that
# both the direction-independent predict and direction-dependent predict work
# for a number of different input values.
# -----------------------------------------------------------------------------
| 32.163043 | 79 | 0.622339 |
4d391ca815462113e85dde20f4caa4e28b604358 | 300 | py | Python | HACKERRANK_Numpy/concatenated.py | StefaniaSferragatta/ADM2020-HW1 | 8f85ac1c8dd4bff52c5c17987c9e96b209a93830 | [
"MIT"
] | null | null | null | HACKERRANK_Numpy/concatenated.py | StefaniaSferragatta/ADM2020-HW1 | 8f85ac1c8dd4bff52c5c17987c9e96b209a93830 | [
"MIT"
] | null | null | null | HACKERRANK_Numpy/concatenated.py | StefaniaSferragatta/ADM2020-HW1 | 8f85ac1c8dd4bff52c5c17987c9e96b209a93830 | [
"MIT"
] | null | null | null | import numpy
N,M,P = map(int,input().split())
p_cols1 =numpy.array([input().split() for _ in range(N)],int)
p_cols1.shape = (N,P)
p_cols2 =numpy.array([input().split() for _ in range(M)],int)
p_cols2.shape = (M,P)
concatenated = numpy.concatenate((p_cols1, p_cols2), axis = 0)
print(concatenated)
| 25 | 62 | 0.686667 |
4d3a0734d340535665b3ebc270eb897a3c7611c7 | 4,171 | py | Python | Discord Status Changer.py | vragonx/DiscordStatusChanger | 376a78e5653f99d266a0a45ac3ecc8d71159bd49 | [
"Apache-2.0"
] | null | null | null | Discord Status Changer.py | vragonx/DiscordStatusChanger | 376a78e5653f99d266a0a45ac3ecc8d71159bd49 | [
"Apache-2.0"
] | null | null | null | Discord Status Changer.py | vragonx/DiscordStatusChanger | 376a78e5653f99d266a0a45ac3ecc8d71159bd49 | [
"Apache-2.0"
] | 1 | 2021-06-06T07:24:14.000Z | 2021-06-06T07:24:14.000Z | from colorama import Fore, init, Style
import requests
import random
import ctypes
import time
import os
ctypes.windll.kernel32.SetConsoleTitleW('Discord Status Changer')
init(convert=True, autoreset=True)
SuccessCounter = 0
ErrorCounter = 0
os.system('cls')
print(Fore.RED + '\n[' + Fore.WHITE + Style.BRIGHT + '0' + Style.RESET_ALL + Fore.RED + '] ' + Fore.WHITE + Style.BRIGHT + 'Discord Status Changer by vragon')
print(Fore.GREEN + '\n[' + Fore.WHITE + Style.BRIGHT + '1' + Style.RESET_ALL + Fore.GREEN + '] ' + Fore.WHITE + Style.BRIGHT + 'Text')
print(Fore.GREEN + '[' + Fore.WHITE + Style.BRIGHT + '2' + Style.RESET_ALL + Fore.GREEN + '] ' + Fore.WHITE + Style.BRIGHT + 'Text including emoji')
try:
option = int(input(Fore.GREEN + '\n> ' + Fore.WHITE + Style.BRIGHT))
except ValueError as e:
print(' ')
print(Fore.RED + '[ERROR] ' + Fore.WHITE + Style.BRIGHT + str(e))
input()
quit()
if option == 1:
os.system('cls')
print(Fore.WHITE + Style.BRIGHT + '\nToken:')
token = str(input(Fore.GREEN + '> ' + Fore.WHITE + Style.BRIGHT))
print(' ')
while True:
ChangeStatus()
elif option == 2:
os.system('cls')
print(Fore.WHITE + Style.BRIGHT + '\nToken:')
token = str(input(Fore.GREEN + '> ' + Fore.WHITE + Style.BRIGHT))
print(Fore.WHITE + Style.BRIGHT + '\nEmoji name:')
EmojiName = str(input(Fore.GREEN + '> ' + Fore.WHITE + Style.BRIGHT))
print(Fore.WHITE + Style.BRIGHT + '\nEmoji ID:')
try:
EmojiID = int(input(Fore.GREEN + '> ' + Fore.WHITE + Style.BRIGHT))
except ValueError as e:
print(' ')
print(Fore.RED + '[ERROR] ' + Fore.WHITE + Style.BRIGHT + str(e))
input()
quit()
print(' ')
while True:
ChangeStatus()
| 45.336957 | 176 | 0.562455 |
4d3c620a15280505542a7dd73460b5056d95dccf | 1,269 | py | Python | hw1/feature_summary.py | doochi/gct634-ai613-2021 | af12a1ea9c622fca17928f8431cc0983470f97db | [
"MIT"
] | 9 | 2021-09-04T04:11:47.000Z | 2022-01-06T13:00:32.000Z | hw1/feature_summary.py | doochi/gct634-ai613-2021 | af12a1ea9c622fca17928f8431cc0983470f97db | [
"MIT"
] | null | null | null | hw1/feature_summary.py | doochi/gct634-ai613-2021 | af12a1ea9c622fca17928f8431cc0983470f97db | [
"MIT"
] | 19 | 2021-09-12T10:13:09.000Z | 2022-01-28T01:37:42.000Z | # GCT634 (2018) HW1
#
# Mar-18-2018: initial version
#
# Juhan Nam
#
import sys
import os
import numpy as np
import matplotlib.pyplot as plt
data_path = './dataset/'
mfcc_path = './mfcc/'
MFCC_DIM = 20
if __name__ == '__main__':
train_data = mean_mfcc('train')
valid_data = mean_mfcc('valid')
plt.figure(1)
plt.subplot(2,1,1)
plt.imshow(train_data, interpolation='nearest', origin='lower', aspect='auto')
plt.colorbar(format='%+2.0f dB')
plt.subplot(2,1,2)
plt.imshow(valid_data, interpolation='nearest', origin='lower', aspect='auto')
plt.colorbar(format='%+2.0f dB')
plt.show()
| 18.940299 | 82 | 0.597321 |
4d3c97fcf24ccce0fd906a45948aebe7fed32f87 | 6,685 | py | Python | package/kedro_viz/services/layers.py | pascalwhoop/kedro-viz | 5fd8dd8033da5f3d37c80a7adb51b60fd8daa64d | [
"BSD-3-Clause-Clear",
"Apache-2.0"
] | 246 | 2019-07-08T15:27:34.000Z | 2022-01-09T18:47:11.000Z | package/kedro_viz/services/layers.py | pascalwhoop/kedro-viz | 5fd8dd8033da5f3d37c80a7adb51b60fd8daa64d | [
"BSD-3-Clause-Clear",
"Apache-2.0"
] | 222 | 2019-07-08T14:07:13.000Z | 2022-01-06T19:19:27.000Z | package/kedro_viz/services/layers.py | pascalwhoop/kedro-viz | 5fd8dd8033da5f3d37c80a7adb51b60fd8daa64d | [
"BSD-3-Clause-Clear",
"Apache-2.0"
] | 53 | 2019-07-14T14:06:47.000Z | 2021-12-06T22:21:46.000Z | # Copyright 2021 QuantumBlack Visual Analytics Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, AND
# NONINFRINGEMENT. IN NO EVENT WILL THE LICENSOR OR OTHER CONTRIBUTORS
# BE LIABLE FOR ANY CLAIM, DAMAGES, OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF, OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# The QuantumBlack Visual Analytics Limited ("QuantumBlack") name and logo
# (either separately or in combination, "QuantumBlack Trademarks") are
# trademarks of QuantumBlack. The License does not grant you any right or
# license to the QuantumBlack Trademarks. You may not use the QuantumBlack
# Trademarks or any confusingly similar mark as a trademark for your product,
# or use the QuantumBlack Trademarks in any other manner that might cause
# confusion in the marketplace, including but not limited to in advertising,
# on websites, or on software.
#
# See the License for the specific language governing permissions and
# limitations under the License.
"""`kedro_viz.services.layers` defines layers-related logic."""
import logging
from collections import defaultdict
from typing import Dict, List, Set
from toposort import CircularDependencyError, toposort_flatten
from kedro_viz.models.graph import GraphNode
logger = logging.getLogger(__name__)
def sort_layers(
nodes: Dict[str, GraphNode], dependencies: Dict[str, Set[str]]
) -> List[str]:
"""Given a DAG represented by a dictionary of nodes, some of which have a `layer` attribute,
along with their dependencies, return the list of all layers sorted according to
the nodes' topological order, i.e. a layer should appear before another layer in the list
if its node is a dependency of the other layer's node, directly or indirectly.
For example, given the following graph:
node1(layer=a) -> node2 -> node4 -> node6(layer=d)
| ^
v |
node3(layer=b) -> node5(layer=c)
The layers ordering should be: [a, b, c, d]
In theory, this is a problem of finding the
[transitive closure](https://en.wikipedia.org/wiki/Transitive_closure) in a graph of layers
and then toposort them. The algorithm below follows a repeated depth-first search approach:
* For every node, find all layers that depends on it in a depth-first search.
* While traversing, build up a dictionary of {node_id -> layers} for the node
that have already been visited.
* Turn the final {node_id -> layers} into a {layer -> layers} to represent the layers'
dependencies. Note: the key is a layer and the values are the parents of that layer,
just because that's the format toposort requires.
* Feed this layers dictionary to ``toposort`` and return the sorted values.
* Raise CircularDependencyError if the layers cannot be sorted topologically,
i.e. there are cycles among the layers.
Args:
nodes: A dictionary of {node_id -> node} represents the nodes in the graph.
dependencies: A dictionary of {node_id -> set(child_ids)}
represents the direct dependencies between nodes in the graph.
Returns:
The list of layers sorted based on topological order.
Raises:
CircularDependencyError: When the layers have cyclic dependencies.
"""
node_layers: Dict[str, Set[str]] = {} # map node_id to the layers that depend on it
def find_child_layers(node_id: str) -> Set[str]:
"""For the given node_id, find all layers that depend on it in a depth-first manner.
Build up the node_layers dependency dictionary while traversing so each node is visited
only once.
Note: Python's default recursive depth limit is 1000, which means this algorithm won't
work for pipeline with more than 1000 nodes. However, we can rewrite this using stack if
we run into this limit in practice.
"""
if node_id in node_layers:
return node_layers[node_id]
node_layers[node_id] = set()
# The layer of the current node can also be considered as depending on that node.
# This is to cater for the edge case where all nodes are completely disjoint from each other
# and no dependency graph for layers can be constructed,
# yet the layers still need to be displayed.
node_layer = getattr(nodes[node_id], "layer", None)
if node_layer is not None:
node_layers[node_id].add(node_layer)
# for each child node of the given node_id,
# mark its layer and all layers that depend on it as child layers of the given node_id.
for child_node_id in dependencies[node_id]:
child_node = nodes[child_node_id]
child_layer = getattr(child_node, "layer", None)
if child_layer is not None:
node_layers[node_id].add(child_layer)
node_layers[node_id].update(find_child_layers(child_node_id))
return node_layers[node_id]
# populate node_layers dependencies
for node_id in nodes:
find_child_layers(node_id)
# compute the layer dependencies dictionary based on the node_layers dependencies,
# represented as {layer -> set(parent_layers)}
layer_dependencies = defaultdict(set)
for node_id, child_layers in node_layers.items():
node_layer = getattr(nodes[node_id], "layer", None)
# add the node's layer as a parent layer for all child layers.
# Even if a child layer is the same as the node's layer, i.e. a layer is marked
# as its own parent, toposort still works so we don't need to check for that explicitly.
if node_layer is not None:
for layer in child_layers:
layer_dependencies[layer].add(node_layer)
# toposort the layer_dependencies to find the layer order.
# Note that for string, toposort_flatten will default to alphabetical order for tie-break.
try:
return toposort_flatten(layer_dependencies)
except CircularDependencyError:
logger.warning(
"Layers visualisation is disabled as circular dependency detected among layers."
)
return []
| 48.093525 | 100 | 0.698579 |
4d3cec7b76ef0e1f26a6a2ea0b4008e98f8e6357 | 6,941 | py | Python | fixed_width_gen.py | pradnyaalc/fixed_width_file_generation | 989eb34f57a6061f89c4889ec1c3db3a45b86723 | [
"Apache-2.0"
] | null | null | null | fixed_width_gen.py | pradnyaalc/fixed_width_file_generation | 989eb34f57a6061f89c4889ec1c3db3a45b86723 | [
"Apache-2.0"
] | null | null | null | fixed_width_gen.py | pradnyaalc/fixed_width_file_generation | 989eb34f57a6061f89c4889ec1c3db3a45b86723 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
# importing libraries
import json
from copy import deepcopy
from decimal import Decimal
import time
if __name__ == '__main__':
main()
| 33.370192 | 121 | 0.53998 |
4d3cfc02ebd8ee1182122794d381b0a0a452d148 | 855 | py | Python | 52digest.py | Ferdandez/homework | 07df8e0c63e93773e7fc354bfb4e6ae301d49124 | [
"MIT"
] | null | null | null | 52digest.py | Ferdandez/homework | 07df8e0c63e93773e7fc354bfb4e6ae301d49124 | [
"MIT"
] | null | null | null | 52digest.py | Ferdandez/homework | 07df8e0c63e93773e7fc354bfb4e6ae301d49124 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# 52digest.py
import re
import sys
# Write a program that performs an EcoRI digest on the SARS-COV2 genome
# The program should have 2 arguments
# 1. The genome file
# 2. The restriction pattern
# The output should be the sizes of the restriction fragments
originseen = False
seq = ''
digest = sys.argv[2]
filename = sys.argv[1]
with open(filename) as fp:
for line in fp.readlines():
if line.startswith('ORIGIN'): originseen = True
if originseen:
words = line.split()
seq += ''.join(words[1:])
#print(len(seq))
count = 0
k = len(sys.argv[2])
match = re.search(digest, seq)
for i in range(len(seq)-k+1):
scope = seq[i:i+k]
if scope == "gaattc": print(count)
if scope == "gaattc": count = 0
count += 1
"""
python3 52digest.py ../Data/sars-cov2.gb gaattc
1160
10573
5546
448
2550
2592
3569
2112
1069
"""
| 19 | 71 | 0.679532 |
4d3d62f955634f9c834d309435153be67f95acc0 | 2,394 | py | Python | yt_dlp/extractor/willow.py | mrBliss/yt-dlp | aecd021656b672dbb617e5bae54a8986f9c4ebaf | [
"Unlicense"
] | 80 | 2021-05-25T11:33:49.000Z | 2022-03-29T20:36:53.000Z | yt_dlp/extractor/willow.py | mrBliss/yt-dlp | aecd021656b672dbb617e5bae54a8986f9c4ebaf | [
"Unlicense"
] | 53 | 2017-04-12T19:53:18.000Z | 2022-02-22T10:33:13.000Z | yt_dlp/extractor/willow.py | mrBliss/yt-dlp | aecd021656b672dbb617e5bae54a8986f9c4ebaf | [
"Unlicense"
] | 22 | 2021-05-07T05:01:27.000Z | 2022-03-26T19:10:54.000Z | # coding: utf-8
from ..utils import ExtractorError
from .common import InfoExtractor
| 40.576271 | 125 | 0.581036 |
4d3d9898f84561406d75802e9d8427f790dcd648 | 32 | py | Python | vesicashapi/vesicash.py | vesicash/vesicash-python-sdk | 0a665e302b88c4eeb316a635c5485c9c3c1fffeb | [
"Apache-2.0"
] | null | null | null | vesicashapi/vesicash.py | vesicash/vesicash-python-sdk | 0a665e302b88c4eeb316a635c5485c9c3c1fffeb | [
"Apache-2.0"
] | null | null | null | vesicashapi/vesicash.py | vesicash/vesicash-python-sdk | 0a665e302b88c4eeb316a635c5485c9c3c1fffeb | [
"Apache-2.0"
] | 1 | 2021-04-20T14:54:40.000Z | 2021-04-20T14:54:40.000Z | """ Entry point defined here """ | 32 | 32 | 0.65625 |
4d3e1930813427009e7819522279ea71f06fd637 | 4,044 | py | Python | src/dissregarded/NetArivalsDeparturesHoures.py | sebastian-konicz/WRM | e60aafada7bb85df955a7e1357f33fe5846f4e6c | [
"MIT"
] | 1 | 2019-11-02T18:24:19.000Z | 2019-11-02T18:24:19.000Z | src/dissregarded/NetArivalsDeparturesHoures.py | sebastian-konicz/WRM | e60aafada7bb85df955a7e1357f33fe5846f4e6c | [
"MIT"
] | 7 | 2020-03-24T17:48:28.000Z | 2022-03-12T00:05:13.000Z | src/dissregarded/NetArivalsDeparturesHoures.py | sebastian-konicz/WRM | e60aafada7bb85df955a7e1357f33fe5846f4e6c | [
"MIT"
] | null | null | null | import folium
from folium.plugins import MarkerCluster
import pandas as pd
import datetime
from pathlib import Path
# pd.options.display.max_columns = 50
if __name__ == "__main__":
project_dir = str(Path(__file__).resolve().parents[2])
main(project_dir) | 42.568421 | 121 | 0.638477 |
4d3ee1ccb8692f8cfb3b7d31686fa015b7d46982 | 5,470 | py | Python | bin/lineage_parser.py | brianlee99/UVP | 5b7ff26c09d84760d4220268f34fb4814848eb4a | [
"MIT"
] | null | null | null | bin/lineage_parser.py | brianlee99/UVP | 5b7ff26c09d84760d4220268f34fb4814848eb4a | [
"MIT"
] | null | null | null | bin/lineage_parser.py | brianlee99/UVP | 5b7ff26c09d84760d4220268f34fb4814848eb4a | [
"MIT"
] | null | null | null | #! /usr/bin/python
import sys
""" This script accepts the final annotation file and the lineage marker SNPs file """
""" and infers the lineage and possible sublineage classification of the isolate """
""" it requires a sample ID name (string) and an output file name(string) """
"""
Author: Matthew Ezewudo
CPTR ReSeqTB Project - Critical Path Institute
"""
input1 = sys.argv[1]
input2 = sys.argv[2]
input3 = sys.argv[3]
input4 = sys.argv[4]
fh1 = open(input1, 'r')
sublinn = ""
(lineage,position,ref,alt) = ([],[],[],[])
prevlin = []
prevsub = []
tribes = ["lineages","Indo-Oceanic","East-Asian","East-African-Indian","Euro-American","West-Africa 1","West-Africa 2","Ethiopian"]
(concord,discord,concord1,discord1,count) = (0,0,0,0,0)
discordance = False
sublinneage = False
linfour = ""
hrv37 = ""
BOV = ""
BOV_AFRI = ""
for lines in fh1:
if lines.startswith('#'):
continue
fields = lines.rstrip("\r\n").split("\t")
lineage.append(fields[0])
position.append(fields[1])
ref.append(fields[2])
alt.append(fields[3])
fh1.close()
fh2 = open(input2,'r')
for lines in fh2:
count += 1
fields = lines.rstrip("\r\n").split("\t")
if fields[2] == '931123':
linfour = fields[2]
if fields[2] == '1759252':
hrv37 = fields[2]
if fields[2] == '2831482':
BOV = fields[2]
if fields[2] == '1882180':
BOV_AFRI = '1882180'
if fields[2] in position:
ind = position.index(fields[2])
if alt[ind] == fields[4]:
if len(lineage[ind]) > 1:
sublin = lineage[ind]
prevsub.append(sublin)
sublinn = prevsub[0]
print "SNP" + " " + position[ind] + " " + "suggests sub-lineage: " + lineage[ind]
if prevsub[0] != sublin:
discord += 1
else:
concord +=1
for i in range(0,len(prevsub)):
if len(sublinn) < len(prevsub[i]) :
sublinn = prevsub[i]
else:
lin = lineage[ind]
prevlin.append(lin)
print "SNP" + " " + position[ind] + " " + "suggests lineage: " + lineage[ind]
if prevlin[0] != lin:
discord1 += 1
else:
concord1 += 1
fh2.close()
fh3 = open(input3,'w')
print >> fh3, "Sample ID" + "\t" + "Lineage" + "\t" + "Lineage Name" + "\t" + "Sublineage"
split_first = ['NA']
if len(prevsub) > 0:
split_first = sublinn.split(".")
sublinneage = True
if len(prevlin) == 0:
if len(BOV) > 0:
print "Lineage: " + "BOV"
print >> fh3, input4 + "\t" + "BOV" + "\t" + "Bovis" + "\t" + "NA"
if len(BOV) == 0 or len(BOV_AFRI) == 0:
for i in range(0,len(prevsub)):
split_lin = prevsub[i].split(".")
if split_lin[0] != split_first[0]:
discordance = True
if split_lin[1] != split_first[1]:
discordance = True
if discordance:
print "no precise lineage inferred"
print >> fh3, "no precise lineage inferred"
sys.exit(1)
else:
if len(split_first) > 1:
print "Lineage: " + split_first[0] + " : " + tribes[int(split_first[0])]
print "Sub-lineage: " + sublinn
print >> fh3, input4 + "\t" + split_first[0] + "\t" + tribes[int(split_first[0])] + "\t" + sublinn
elif len(linfour) < 2:
print "Absence of SNP 931123 suggests lineage 4"
print "Lineage: " + "4" + " : " + "Euro-American"
if len(hrv37) > 2:
print >> fh3, input4 + "\t" + "4" + "\t" + "Euro American" + "\t" + "NA"
elif len(hrv37) < 2:
print "Absence of SNP 1759252 suggests sublineage 4.9"
print >> fh3, input4 + "\t" + "4" + "\t" + "Euro American" + "\t" + "4.9"
else:
print "No Informative SNPs detected"
print >> fh3, "No Informative SNPs detected"
else:
if len(prevlin) > 1:
for j in range(0,len(prevlin)):
if prevlin[0] != prevlin[j]:
discordance = True
if discordance == True:
print "no concordance between predicted lineage and sublineage(s)"
print >> fh3, "no concordance between predicted lineage and sublineage(s)"
sys.exit(1)
else:
if len(sublinn) < 1:
print "Lineage: " + prevlin[0] + " " + tribes[int(prevlin[0])]
print >> fh3, input4 + "\t" + prevlin[0] + "\t" + tribes[int(prevlin[0])] + "\t" + "NA"
elif len(sublinn) > 1:
for i in range(0,len(prevsub)):
split_lin = prevsub[i].split(".")
if split_lin[0] != prevlin[0] and split_lin[0] != 'BOV_AFRI':
discordance = True
if split_lin[0] != split_first[0]:
discordance = True
if discordance:
print "no precise lineage inferred"
print >> fh3, "no precise lineage inferred"
sys.exit(1)
else:
print "Lineage: " + prevlin[0] + " " + tribes[int(prevlin[0])]
if sublinn.startswith('BOV_A'):
print >> fh3, input4 + "\t" + prevlin[0] + "\t" + tribes[int(prevlin[0])] + "\t" + "NA"
else:
print "Sub-lineage: " + sublinn
print >> fh3, input4 + "\t" + prevlin[0] + "\t" + tribes[int(prevlin[0])] + "\t" + sublinn
| 36.466667 | 131 | 0.518464 |
4d3ef71c75b3b75d2218b109a8f46905d02e164e | 10,038 | py | Python | rdr_service/offline/metrics_export.py | all-of-us/raw-data-repository | d28ad957557587b03ff9c63d55dd55e0508f91d8 | [
"BSD-3-Clause"
] | 39 | 2017-10-13T19:16:27.000Z | 2021-09-24T16:58:21.000Z | rdr_service/offline/metrics_export.py | all-of-us/raw-data-repository | d28ad957557587b03ff9c63d55dd55e0508f91d8 | [
"BSD-3-Clause"
] | 312 | 2017-09-08T15:42:13.000Z | 2022-03-23T18:21:40.000Z | rdr_service/offline/metrics_export.py | all-of-us/raw-data-repository | d28ad957557587b03ff9c63d55dd55e0508f91d8 | [
"BSD-3-Clause"
] | 19 | 2017-09-15T13:58:00.000Z | 2022-02-07T18:33:20.000Z | from rdr_service import clock, config
from rdr_service.code_constants import EHR_CONSENT_QUESTION_CODE, PPI_SYSTEM, RACE_QUESTION_CODE, UNMAPPED
from rdr_service.dao.code_dao import CodeDao
from rdr_service.dao.database_utils import get_sql_and_params_for_array, replace_isodate
from rdr_service.dao.hpo_dao import HPODao
from rdr_service.field_mappings import NON_EHR_QUESTIONNAIRE_MODULE_FIELD_NAMES
from rdr_service.model.base import get_column_name
from rdr_service.model.participant_summary import ParticipantSummary
from rdr_service.offline.metrics_config import ANSWER_FIELD_TO_QUESTION_CODE
from rdr_service.offline.sql_exporter import SqlExporter
# from rdr_service.offline.metrics_pipeline import MetricsPipeline
from rdr_service.participant_enums import QuestionnaireStatus, TEST_EMAIL_PATTERN, TEST_HPO_NAME
# TODO: filter out participants that have withdrawn in here
_PARTICIPANTS_CSV = "participants_%d.csv"
_HPO_IDS_CSV = "hpo_ids_%d.csv"
_ANSWERS_CSV = "answers_%d.csv"
_ALL_CSVS = [_PARTICIPANTS_CSV, _HPO_IDS_CSV, _ANSWERS_CSV]
_QUEUE_NAME = "metrics-pipeline"
_PARTICIPANT_SQL_TEMPLATE = """
SELECT p.participant_id, ps.date_of_birth date_of_birth,
(SELECT ISODATE[MIN(bo.created)] FROM biobank_order bo
WHERE bo.participant_id = p.participant_id
AND bo.order_status is null or bo.order_status <> 2) first_order_date,
(SELECT ISODATE[MIN(bs.confirmed)] FROM biobank_stored_sample bs
WHERE bs.biobank_id = p.biobank_id) first_samples_arrived_date,
(SELECT ISODATE[MIN(pm.finalized)] FROM physical_measurements pm
WHERE pm.participant_id = p.participant_id
AND pm.finalized is not null
AND pm.status is null or pm.status <> 2) first_physical_measurements_date,
(SELECT ISODATE[MIN(bss.confirmed)] FROM biobank_stored_sample bss
WHERE bss.biobank_id = p.biobank_id
AND bss.test IN {}) first_samples_to_isolate_dna_date, {}
FROM participant p, participant_summary ps
WHERE p.participant_id = ps.participant_id
AND p.participant_id % :num_shards = :shard_number
AND p.hpo_id != :test_hpo_id
AND p.withdrawal_status != 2
AND NOT ps.email LIKE :test_email_pattern
AND p.is_test_participant != TRUE
"""
# Find HPO ID changes in participant history.
_HPO_ID_QUERY = """
SELECT ph.participant_id participant_id, hpo.name hpo,
ISODATE[ph.last_modified] last_modified
FROM participant_history ph, hpo, participant p
WHERE ph.participant_id % :num_shards = :shard_number
AND ph.hpo_id = hpo.hpo_id
AND ph.participant_id = p.participant_id
AND ph.hpo_id != :test_hpo_id
AND p.hpo_id != :test_hpo_id
AND p.withdrawal_status != 2
AND p.is_test_participant != TRUE
AND NOT EXISTS
(SELECT * FROM participant_history ph_prev
WHERE ph_prev.participant_id = ph.participant_id
AND ph_prev.version = ph.version - 1
AND ph_prev.hpo_id = ph.hpo_id)
AND NOT EXISTS
(SELECT * FROM participant_summary ps
WHERE ps.participant_id = ph.participant_id
AND ps.email LIKE :test_email_pattern)
"""
_ANSWER_QUERY = """
SELECT qr.participant_id participant_id, ISODATE[qr.created] start_time,
qc.value question_code,
(SELECT CASE WHEN ac.mapped THEN ac.value ELSE :unmapped END FROM code ac
WHERE ac.code_id = qra.value_code_id) answer_code,
qra.value_string answer_string
FROM questionnaire_response_answer qra, questionnaire_response qr, questionnaire_question qq,
code qc, participant p
WHERE qra.questionnaire_response_id = qr.questionnaire_response_id
AND qra.question_id = qq.questionnaire_question_id
AND qq.code_id = qc.code_id
AND qq.code_id in ({})
AND qr.participant_id % :num_shards = :shard_number
AND qr.participant_id = p.participant_id
AND p.hpo_id != :test_hpo_id
AND p.withdrawal_status != 2
AND p.is_test_participant != TRUE
AND NOT EXISTS
(SELECT * FROM participant_summary ps
WHERE ps.participant_id = p.participant_id
AND ps.email LIKE :test_email_pattern)
ORDER BY qr.participant_id, qr.created, qc.value
"""
| 41.139344 | 117 | 0.724846 |
4d3f64220d8ff34f2c9e4737de791b02b5323b50 | 701 | py | Python | fundata/dota2/player/player_detail.py | mengyuanhumy/fundata | e4090edf967e2ad7f7efadd64a7fc2ae8dc1ed32 | [
"MIT"
] | null | null | null | fundata/dota2/player/player_detail.py | mengyuanhumy/fundata | e4090edf967e2ad7f7efadd64a7fc2ae8dc1ed32 | [
"MIT"
] | null | null | null | fundata/dota2/player/player_detail.py | mengyuanhumy/fundata | e4090edf967e2ad7f7efadd64a7fc2ae8dc1ed32 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from ...client import get_api_client
def get_player_detail_stats(player_id):
"""
player_idID int
dict
"""
if get_player_data_status(player_id)==2:
client=get_api_client()
uri="/fundata-dota2-free/v2/player/"+str(player_id)+"/detail_stats"
return client.api(uri,{})
else:
print("player_id=%i has no data"%player_id)
return 0
def get_player_data_status(player_id):
"""
player_idID int
dict: status, 21
"""
client=get_api_client()
uri="/fundata-dota2-free/v2/player/"+str(player_id)+"/data_status"
res=client.api(uri,{})
if res["retcode"]==200 and res["data"]["status"]==2:
return 2
else:
return 1 | 22.612903 | 69 | 0.71184 |
4d40d6894572ebb56bff51cbd51d17f087ba2234 | 2,454 | py | Python | ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/YARN/package/scripts/resourcemanager.py | panfeiyy/ambari | 24077510723ede93d3024784f0b04422adaf56d6 | [
"Apache-2.0"
] | 16 | 2018-05-24T10:28:24.000Z | 2021-08-05T03:13:26.000Z | ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/YARN/package/scripts/resourcemanager.py | panfeiyy/ambari | 24077510723ede93d3024784f0b04422adaf56d6 | [
"Apache-2.0"
] | 3 | 2021-05-09T12:37:16.000Z | 2022-03-02T10:13:24.000Z | ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/YARN/package/scripts/resourcemanager.py | panfeiyy/ambari | 24077510723ede93d3024784f0b04422adaf56d6 | [
"Apache-2.0"
] | 17 | 2018-07-06T08:57:00.000Z | 2021-11-04T11:00:36.000Z | """
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Ambari Agent
"""
import sys
from resource_management import *
from yarn import yarn
from service import service
if __name__ == "__main__":
Resourcemanager().execute()
| 24.058824 | 94 | 0.711899 |
4d4284d5a0b58c47616dd4e99223550ab8085447 | 166 | py | Python | src/core/command.py | cfmcdonald-78/Hexcrawler | 79ca4ab9327abf08de1743612c23eb89aa53a2b9 | [
"MIT"
] | null | null | null | src/core/command.py | cfmcdonald-78/Hexcrawler | 79ca4ab9327abf08de1743612c23eb89aa53a2b9 | [
"MIT"
] | null | null | null | src/core/command.py | cfmcdonald-78/Hexcrawler | 79ca4ab9327abf08de1743612c23eb89aa53a2b9 | [
"MIT"
] | 1 | 2021-12-01T01:38:12.000Z | 2021-12-01T01:38:12.000Z | '''
Created on Jul 19, 2012
@author: Chris
''' | 12.769231 | 29 | 0.554217 |
4d42f4da01153d9efccca4d19cc6efc9b683c41b | 8,039 | py | Python | gui/trimGui.py | lhalb/gfmanager | 449f071b3239faa672b7f06122dfc9bc23e68d79 | [
"MIT"
] | 1 | 2022-01-18T12:53:17.000Z | 2022-01-18T12:53:17.000Z | gui/trimGui.py | lhalb/gfmanager | 449f071b3239faa672b7f06122dfc9bc23e68d79 | [
"MIT"
] | null | null | null | gui/trimGui.py | lhalb/gfmanager | 449f071b3239faa672b7f06122dfc9bc23e68d79 | [
"MIT"
] | null | null | null | from PyQt5 import QtGui, QtWidgets
import seaborn as sns
from gui import trimming as tri
from gui import boxes as BOX
import matplotlib.image as mpimg
from math import floor, ceil
| 32.812245 | 105 | 0.619729 |
4d47790f7b2c6a08485b7da418683620a521d5cf | 1,178 | py | Python | __init__.py | hankangkangjim/djlog | cfdac281be811adc3fc1b91672c0230cc575722f | [
"MIT"
] | null | null | null | __init__.py | hankangkangjim/djlog | cfdac281be811adc3fc1b91672c0230cc575722f | [
"MIT"
] | null | null | null | __init__.py | hankangkangjim/djlog | cfdac281be811adc3fc1b91672c0230cc575722f | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
| 29.45 | 78 | 0.618846 |
4d48c769d2260ad0c3c0d32734d1d092109092b3 | 6,697 | py | Python | manilaclient/tests/unit/common/test_httpclient.py | mail2nsrajesh/python-manilaclient | 37bf2d9b4be277ece01e9ff782234d264ed4fd56 | [
"CNRI-Python",
"Apache-1.1"
] | null | null | null | manilaclient/tests/unit/common/test_httpclient.py | mail2nsrajesh/python-manilaclient | 37bf2d9b4be277ece01e9ff782234d264ed4fd56 | [
"CNRI-Python",
"Apache-1.1"
] | null | null | null | manilaclient/tests/unit/common/test_httpclient.py | mail2nsrajesh/python-manilaclient | 37bf2d9b4be277ece01e9ff782234d264ed4fd56 | [
"CNRI-Python",
"Apache-1.1"
] | null | null | null | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import requests
import manilaclient
from manilaclient.common import httpclient
from manilaclient import exceptions
from manilaclient.tests.unit import utils
fake_user_agent = "fake"
fake_response = utils.TestResponse({
"status_code": 200,
"text": '{"hi": "there"}',
})
mock_request = mock.Mock(return_value=(fake_response))
bad_400_response = utils.TestResponse({
"status_code": 400,
"text": '{"error": {"message": "n/a", "details": "Terrible!"}}',
})
bad_400_request = mock.Mock(return_value=(bad_400_response))
bad_401_response = utils.TestResponse({
"status_code": 401,
"text": '{"error": {"message": "FAILED!", "details": "DETAILS!"}}',
})
bad_401_request = mock.Mock(return_value=(bad_401_response))
bad_500_response = utils.TestResponse({
"status_code": 500,
"text": '{"error": {"message": "FAILED!", "details": "DETAILS!"}}',
})
bad_500_request = mock.Mock(return_value=(bad_500_response))
retry_after_response = utils.TestResponse({
"status_code": 413,
"text": '',
"headers": {
"retry-after": "5"
},
})
retry_after_mock_request = mock.Mock(return_value=retry_after_response)
retry_after_no_headers_response = utils.TestResponse({
"status_code": 413,
"text": '',
})
retry_after_no_headers_mock_request = mock.Mock(
return_value=retry_after_no_headers_response)
retry_after_non_supporting_response = utils.TestResponse({
"status_code": 403,
"text": '',
"headers": {
"retry-after": "5"
},
})
retry_after_non_supporting_mock_request = mock.Mock(
return_value=retry_after_non_supporting_response)
| 32.043062 | 78 | 0.626101 |
4d4909137a8281abf00add12e7109af6453220fd | 1,421 | py | Python | intro_to_algos_2020_mit/ps3/tests.py | venu-gopal-myneni/assignments | 871148ccaa6291539623fc7d3f9704cb497fbcb6 | [
"MIT"
] | 1 | 2022-02-26T13:52:31.000Z | 2022-02-26T13:52:31.000Z | assignments/ps3-template/tests.py | tallamjr/mit-6006 | c2aa6bb48edef5800c0779ba2eebd697d44249b5 | [
"MIT"
] | null | null | null | assignments/ps3-template/tests.py | tallamjr/mit-6006 | c2aa6bb48edef5800c0779ba2eebd697d44249b5 | [
"MIT"
] | null | null | null | import unittest
from count_anagram_substrings import count_anagram_substrings
tests = (
(
(
'esleastealaslatet',
('tesla',),
),
(3,),
),
(
(
'lrldrrrllddrrlllrddd',
('ldl', 'rld'),
),
(1, 3),
),
(
(
'kkkkkvvuvkvkkkvuuvkuukkuvvkukkvkkvuvukuk',
('vkuk', 'uvku', 'kukk'),
),
(5, 6, 1),
),
(
(
'trhtrthtrthhhrtthrtrhhhtrrrhhrthrrrttrrttrthhrrrrtrtthhhhrrrtrtthrttthrthhthrhrh',
('rrrht', 'tttrr', 'rttrr', 'rhrrr'),
),
(6, 5, 6, 1),
),
(
(
'hjjijjhhhihhjjhjjhijjihjjihijiiihhihjjjihjjiijjijjhhjijjiijhjihiijjiiiijhihihhiihhiiihhiijhhhiijhijj',
('jihjhj', 'hhjiii', 'ihjhhh', 'jjjiji'),
),
(10, 6, 2, 2),
),
)
if __name__ == '__main__':
res = unittest.main(verbosity = 3, exit = False)
| 25.375 | 115 | 0.553835 |
4d49fb464c7f3d5acfcbeab36ee17a5c9322cb65 | 9,387 | py | Python | sim2real-policies/sim2real_policies/sys_id/universal_policy_online_system_identification/osi_class.py | eugval/sim2real_dynamics_simulation | 2ed175803faa38792f6becc2dc91f44ae71ed9c2 | [
"MIT"
] | 16 | 2020-07-28T14:35:44.000Z | 2021-11-28T01:50:51.000Z | sim2real-policies/sim2real_policies/sys_id/universal_policy_online_system_identification/osi_class.py | eugval/sim2real_dynamics_simulation | 2ed175803faa38792f6becc2dc91f44ae71ed9c2 | [
"MIT"
] | 1 | 2020-11-26T07:58:30.000Z | 2020-12-01T04:40:28.000Z | sim2real-policies/sim2real_policies/sys_id/universal_policy_online_system_identification/osi_class.py | eugval/sim2real_dynamics_simulation | 2ed175803faa38792f6becc2dc91f44ae71ed9c2 | [
"MIT"
] | 2 | 2020-10-18T01:38:49.000Z | 2021-12-31T10:56:41.000Z | """
System Identification (SI)
https://arxiv.org/abs/1702.02453
Examples of two types:
1. Off-line SI: in sim2real_policies.sys_id.common.utils
2. On-line SI
"""
from sim2real_policies.sys_id.common.operations import *
from sim2real_policies.sys_id.common.utils import *
from sim2real_policies.utils.rl_utils import load, load_model
from sim2real_policies.utils.choose_env import choose_env
def stack_data(traj, length):
traj = np.array(traj)
return traj[-length:, :].reshape(-1)
if __name__ == '__main__':
ENV_NAME =['SawyerReach', 'SawyerPush', 'SawyerSlide'][0]
osi = OSI(env_name = ENV_NAME, length=3, context_dim=3, Projection=False, CAT_INTERNAL=True)
osi.osi_train() | 45.567961 | 155 | 0.629914 |
4d4c6f6195152d60976c1000937ec76667e66f99 | 2,099 | py | Python | rasp_camera.py | BrianDau/doorbell_dash | 940877c5019b39639e7de0081a616d20c8b5a0fc | [
"MIT"
] | 11 | 2017-04-12T13:27:39.000Z | 2021-05-16T16:27:15.000Z | rasp_camera.py | BrianDau/doorbell_dash | 940877c5019b39639e7de0081a616d20c8b5a0fc | [
"MIT"
] | null | null | null | rasp_camera.py | BrianDau/doorbell_dash | 940877c5019b39639e7de0081a616d20c8b5a0fc | [
"MIT"
] | 1 | 2019-01-10T18:33:42.000Z | 2019-01-10T18:33:42.000Z | import picamera
from time import sleep
IMG_WIDTH = 800
IMG_HEIGHT = 600
IMAGE_DIR = "/home/pi/Desktop/"
IMG = "snap.jpg"
# https://www.raspberrypi.org/learning/tweeting-babbage/worksheet/
######################################################
# picamera default values:
######################################################
# camera.sharpness = 0
# camera.contrast = 0
# camera.brightness = 50
# camera.saturation = 0
# camera.ISO = 0
# camera.video_stabilization = False
# camera.exposure_compensation = 0
# camera.exposure_mode = 'auto'
# camera.meter_mode = 'average'
# camera.awb_mode = 'auto'
# camera.image_effect = 'none'
# camera.color_effects = None
# camera.rotation = 180
# camera.hflip = False
# camera.vflip = False
# camera.crop = (0.0, 0.0, 1.0, 1.0)
######################################################
# video will record 5 seconds
######################################################
# camera.start_recording('video.h264')
# sleep(5)
# camera.stop_recording()
######################################################
# add text to video:
######################################################
#camera.start_preview()
#camera.annotate_text = "Doorbell pressed!"
#camera.annotate_text_size = 50
#sleep(5)
#camera.capture('/home/pi/Desktop/text.jpg')
#camera.stop_preview()
######################################################
# loop over camera effects:
######################################################
#camera = picamera.PiCamera()
#camera.vflip = True
#camera.hflip = True
#camera.start_preview()
#for effect in camera.IMAGE_EFFECTS:
# camera.image_effect = effect
# camera.annotate_text = "Effect: %s" % effect
# sleep(1)
#camera.stop_preview()
| 26.910256 | 66 | 0.549786 |
4d4d0ea614818e4dfdde9e585c36b4fdaeb09ea4 | 4,720 | py | Python | catalogue_flask/model.py | ScottWales/catalogue-flask | 4a9e659875fee6e831e6c31018c9f9d7285dc845 | [
"Apache-2.0"
] | null | null | null | catalogue_flask/model.py | ScottWales/catalogue-flask | 4a9e659875fee6e831e6c31018c9f9d7285dc845 | [
"Apache-2.0"
] | null | null | null | catalogue_flask/model.py | ScottWales/catalogue-flask | 4a9e659875fee6e831e6c31018c9f9d7285dc845 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# Copyright 2017 ARC Centre of Excellence for Climate Systems Science
# author: Scott Wales <scott.wales@unimelb.edu.au>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from flask_sqlalchemy import SQLAlchemy
import os
from datetime import datetime
db = SQLAlchemy()
netcdf_variable_association = db.Table('netcdf_variable_association', db.Model.metadata,
db.Column('netcdf_id', db.Integer, db.ForeignKey('netcdf_content.id')),
db.Column('concretevar_id', db.Integer, db.ForeignKey('concrete_variable.id'))
)
| 31.052632 | 98 | 0.661441 |
4d4daf56e54bd88f232ffc4ff205ca0bd68de320 | 1,823 | py | Python | userbot/core/vcbot/controls.py | Rewtio/Mikoo-Userbot | 418f0017241fa65bdf7f99c84381317cb4dbeb55 | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | 4 | 2022-03-03T01:31:48.000Z | 2022-03-26T00:15:41.000Z | userbot/core/vcbot/controls.py | Rewtio/Mikoo-Userbot | 418f0017241fa65bdf7f99c84381317cb4dbeb55 | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | 1 | 2022-03-16T02:54:27.000Z | 2022-03-17T09:17:12.000Z | userbot/core/vcbot/controls.py | Rewtio/Mikoo-Userbot | 418f0017241fa65bdf7f99c84381317cb4dbeb55 | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | 1 | 2022-03-16T02:41:38.000Z | 2022-03-16T02:41:38.000Z | # Mikoo - UserBot
# Copyright (c) 2022 Mikoo-Userbot
# Credits: @divarvian || https://github.com/divarvian
#
# This file is a part of < https://github.com/divarvian/Mikoo-Userbot/ >
# t.me/MikooUserbot & t.me/MikooUserbot
from pytgcalls.types.input_stream import AudioPiped, AudioVideoPiped
from pytgcalls.types.input_stream.quality import (
HighQualityAudio,
HighQualityVideo,
LowQualityVideo,
MediumQualityVideo,
)
from userbot import LOGS, call_py
from userbot.core.vcbot.queues import QUEUE, clear_queue, get_queue, pop_an_item
| 27.208955 | 80 | 0.623149 |
4d4ffc6370941aaa5377e63ac70bab0e9216c2d5 | 5,614 | py | Python | src/jaeger/jaeger.py | Novartis/JAEGER | 9f9441d97bb956d88b73e2d24edb65322420c251 | [
"Apache-2.0"
] | 9 | 2021-12-27T15:49:43.000Z | 2022-03-29T10:17:55.000Z | src/jaeger/jaeger.py | Novartis/JAEGER | 9f9441d97bb956d88b73e2d24edb65322420c251 | [
"Apache-2.0"
] | null | null | null | src/jaeger/jaeger.py | Novartis/JAEGER | 9f9441d97bb956d88b73e2d24edb65322420c251 | [
"Apache-2.0"
] | null | null | null | """
Copyright 2021 Novartis Institutes for BioMedical Research Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
NAME="JAEGER"
TITLE="**JAEGER**: JT-VAE Generative Modeling"
JAEGER_HOME="/path/to/models"
BASE_DIR=JAEGER_HOME+"/assays"
TRAINING_DIR=JAEGER_HOME+"/training_data"
AVAIL_MODELS=JAEGER_HOME+"/jaeger_avail_models.csv"
### JAEGER
import pandas as pd
import numpy as np
from sklearn.decomposition import PCA
import os
# --- RDKIT imports
import rdkit.Chem as Chem
import rdkit
# --- TORCH imports
import torch
# --- JTVAE imports
from jtnn import *
from jtnn.jtprop_vae import JTPropVAE
# --- TOXSQUAD imports
from toxsquad.data import modelling_data_from_csv
import sys
import os
PACKAGE_PARENT = '..'
SCRIPT_DIR = os.path.dirname(os.path.realpath(os.path.join(os.getcwd(), os.path.expanduser(__file__))))
sys.path.append(os.path.normpath(os.path.join(SCRIPT_DIR, PACKAGE_PARENT)))
# --- JAEGER imports
from jaeger.utils.jtvae_utils import compute_properties
from jaeger.utils.jtvae_utils import get_vocab
from jaeger.utils.jtvae_utils import get_neighbors_along_directions_tree_then_graph
from jaeger.utils.jtvae_utils import check_for_similarity
from jaeger.utils.jtvae_utils import check_for_similarity_to_collection_fp
# --- utils
import argparse
### HERE I HAVE MOSTLY STREAMLIT CACHED FUNCTIONS
#try:
import streamlit as st
#@st.cache
def load_avail_models():
avail_models_file = AVAIL_MODELS
available_models = pd.read_csv(avail_models_file, index_col='assay_id')
return available_models
#except:
# e = sys.exc_info()[0]
# print("Unexpected error")
# print(e)
| 31.539326 | 110 | 0.685073 |
4d50bed8c76e8e60cc01b8081cea63dca711f207 | 805 | py | Python | test/test_vlan_group.py | nrfta/python-netbox-client | 68ba6dd4d7306513dc1ad38f3ac59122ba4f70a8 | [
"MIT"
] | null | null | null | test/test_vlan_group.py | nrfta/python-netbox-client | 68ba6dd4d7306513dc1ad38f3ac59122ba4f70a8 | [
"MIT"
] | null | null | null | test/test_vlan_group.py | nrfta/python-netbox-client | 68ba6dd4d7306513dc1ad38f3ac59122ba4f70a8 | [
"MIT"
] | null | null | null | # coding: utf-8
"""
NetBox API
API to access NetBox # noqa: E501
OpenAPI spec version: 2.8
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import netbox_client
from netbox_client.models.vlan_group import VLANGroup # noqa: E501
from netbox_client.rest import ApiException
if __name__ == '__main__':
unittest.main()
| 19.634146 | 79 | 0.680745 |
4d52174dea07f449946604a9e3b3a0ae9531e302 | 436 | py | Python | build/lib/more/scikit_helper/common.py | ngupta23/more | c8d867d280c5c88be7d1ddfac37ff670a2dcaa29 | [
"MIT"
] | 6 | 2019-07-18T20:26:30.000Z | 2021-06-10T17:20:19.000Z | more/scikit_helper/common.py | ngupta23/more | c8d867d280c5c88be7d1ddfac37ff670a2dcaa29 | [
"MIT"
] | null | null | null | more/scikit_helper/common.py | ngupta23/more | c8d867d280c5c88be7d1ddfac37ff670a2dcaa29 | [
"MIT"
] | null | null | null | # For Time Logging
import time
from contextlib import contextmanager
import logging
| 22.947368 | 71 | 0.651376 |
4d52bec8cefe73d9a93266481e15f6cb3b5e3a2d | 416 | py | Python | preprocess/rename.py | pprp/faster-rcnn.Supernova | 583bc9f6efd80d5a7fa88189a1c817d92d6018a6 | [
"MIT"
] | 15 | 2019-04-19T12:40:09.000Z | 2020-06-03T07:56:37.000Z | preprocess/rename.py | Zxl19990529/faster-rcnn.Supernova | 583bc9f6efd80d5a7fa88189a1c817d92d6018a6 | [
"MIT"
] | 2 | 2019-04-19T13:21:44.000Z | 2020-06-03T07:49:31.000Z | preprocess/rename.py | Zxl19990529/faster-rcnn.Supernova | 583bc9f6efd80d5a7fa88189a1c817d92d6018a6 | [
"MIT"
] | 5 | 2019-04-19T13:06:22.000Z | 2021-01-19T03:31:58.000Z | import os
import shutil
# Sort files from ./ALL/ into class folders ./b/ and ./c/ based on the
# single-letter suffix after the underscore in the file stem
# (e.g. "123_b.jpg" -> ./b/, "123_c.jpg" -> ./c/).
path = './ALL/'
outpath = "./rename/"
outb = "./b/"
outc = "./c/"

for fname in os.listdir(path):
    print(fname)
    stem, ext = os.path.splitext(fname)
    # NOTE(review): assumes every stem contains exactly one underscore —
    # a name with zero or 2+ underscores raises ValueError here; confirm.
    base, suffix = stem.split('_')
    if suffix.endswith('b'):
        print(outb + fname)
        shutil.copy(path + fname, outb + fname)
    elif suffix.endswith('c'):
        print(outc + fname)
        shutil.copy(path + fname, outc + fname)
    print(base)
    # shutil.copy(path + fname, outpath + base + ext)
4d54d76c9267e5c02660af45e6cfbf3a771acab1 | 970 | py | Python | FALL/core/port_scanner.py | DevanshRaghav75/FALL | 643c19db18d76b850e427c026ff54dae547e69a5 | [
"MIT"
] | 1 | 2021-06-22T10:43:12.000Z | 2021-06-22T10:43:12.000Z | FALL/core/port_scanner.py | DevanshRaghav75/FALL | 643c19db18d76b850e427c026ff54dae547e69a5 | [
"MIT"
] | 2 | 2021-06-23T01:47:59.000Z | 2021-06-25T12:07:02.000Z | FALL/core/port_scanner.py | DevanshRaghav75/FALL | 643c19db18d76b850e427c026ff54dae547e69a5 | [
"MIT"
] | null | null | null | import socket
import threading
import concurrent.futures
from colorama import Fore, Style | 31.290323 | 159 | 0.609278 |
4d54e80804f63b1496a93c52549fb653034591c2 | 3,720 | py | Python | ui/main_window.py | Iorveth/min_surfaces_modelling | 279a091e12f98c0a7fc8054c6eadaafaec5a1258 | [
"MIT"
] | null | null | null | ui/main_window.py | Iorveth/min_surfaces_modelling | 279a091e12f98c0a7fc8054c6eadaafaec5a1258 | [
"MIT"
] | null | null | null | ui/main_window.py | Iorveth/min_surfaces_modelling | 279a091e12f98c0a7fc8054c6eadaafaec5a1258 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'untitled.ui'
#
# Created by: PyQt5 UI code generator 5.12.2
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
| 50.958904 | 190 | 0.706452 |
4d551b7eecd229efc3cad0fcf4d4e3f7b53e292a | 2,056 | py | Python | pyecharts/charts/scatter.py | chfw/pyecharts | 51de9d6f17946a356ff8b94c62a70404e4f55266 | [
"MIT"
] | 16 | 2018-01-24T00:56:15.000Z | 2022-03-18T09:00:42.000Z | pyecharts/charts/scatter.py | chfw/bryecharts | 51de9d6f17946a356ff8b94c62a70404e4f55266 | [
"MIT"
] | null | null | null | pyecharts/charts/scatter.py | chfw/bryecharts | 51de9d6f17946a356ff8b94c62a70404e4f55266 | [
"MIT"
] | 4 | 2018-01-21T17:49:49.000Z | 2021-03-13T22:08:49.000Z | #!/usr/bin/env python
# coding=utf-8
from pyecharts.chart import Chart
from pyecharts.option import get_all_options
| 27.413333 | 70 | 0.543288 |
4d556c14bf4ff80e91105975794bd70a02997423 | 1,799 | py | Python | alice.py | Tigven/benedict | c15620e7df573ad05cd25628d7cf55f6558968b8 | [
"MIT"
] | 1 | 2019-05-08T15:33:21.000Z | 2019-05-08T15:33:21.000Z | alice.py | Tigven/benedict | c15620e7df573ad05cd25628d7cf55f6558968b8 | [
"MIT"
] | null | null | null | alice.py | Tigven/benedict | c15620e7df573ad05cd25628d7cf55f6558968b8 | [
"MIT"
] | null | null | null | import json
class AliceResponse(object):
| 24.310811 | 70 | 0.591996 |
4d5681b659a285c56598406e0b757f35ff94c449 | 869 | py | Python | pythonSEC/Utils/useapi.py | hpcc-systems/EDGAR-SEC-Filings | 1727820ffa2216ed2447bb6a265a54ef6503f001 | [
"Apache-2.0"
] | 1 | 2020-08-02T13:08:14.000Z | 2020-08-02T13:08:14.000Z | pythonSEC/Utils/useapi.py | hpcc-systems/EDGAR-SEC-Filings | 1727820ffa2216ed2447bb6a265a54ef6503f001 | [
"Apache-2.0"
] | null | null | null | pythonSEC/Utils/useapi.py | hpcc-systems/EDGAR-SEC-Filings | 1727820ffa2216ed2447bb6a265a54ef6503f001 | [
"Apache-2.0"
] | null | null | null | import yfinance as yf
import pandas as pd
import Utils
from Utils import scrape_utils
| 36.208333 | 134 | 0.698504 |
4d56ddb615e06e9e1805753500a2cd6d9b81e048 | 8,014 | py | Python | src/pymor/reductors/parabolic.py | JuliaBru/pymor | 46343b527267213f4279ea36f208b542ab291c4e | [
"Unlicense"
] | null | null | null | src/pymor/reductors/parabolic.py | JuliaBru/pymor | 46343b527267213f4279ea36f208b542ab291c4e | [
"Unlicense"
] | null | null | null | src/pymor/reductors/parabolic.py | JuliaBru/pymor | 46343b527267213f4279ea36f208b542ab291c4e | [
"Unlicense"
] | null | null | null | # This file is part of the pyMOR project (http://www.pymor.org).
# Copyright 2013-2016 pyMOR developers and contributors. All rights reserved.
# License: BSD 2-Clause License (http://opensource.org/licenses/BSD-2-Clause)
import numpy as np
from pymor.core.interfaces import ImmutableInterface
from pymor.core.logger import getLogger
from pymor.reductors.basic import reduce_generic_rb
from pymor.reductors.residual import reduce_residual, reduce_implicit_euler_residual
from pymor.operators.constructions import IdentityOperator
from pymor.algorithms.timestepping import ImplicitEulerTimeStepper
def reduce_parabolic(discretization, RB, product=None, coercivity_estimator=None,
disable_caching=True, extends=None):
r"""Reductor for parabolic equations.
This reductor uses :meth:`~pymor.reductors.basic.reduce_generic_rb` for the actual
RB-projection. The only addition is the assembly of an error estimator which
bounds the discrete l2-in time / energy-in space error similar to [GP05]_, [HO08]_
as follows:
.. math::
\left[ C_a^{-1}(\mu)\|e_N(\mu)\|^2 + \sum_{n=1}^{N} \Delta t\|e_n(\mu)\|^2_e \right]^{1/2}
\leq \left[ C_a^{-1}(\mu)\Delta t \sum_{n=1}^{N}\|\mathcal{R}^n(u_n(\mu), \mu)\|^2_{e,-1}
+ C_a^{-1}(\mu)\|e_0\|^2 \right]^{1/2}
Here, :math:`\|\cdot\|` denotes the norm induced by the problem's mass matrix
(e.g. the L^2-norm) and :math:`\|\cdot\|_e` is an arbitrary energy norm w.r.t.
which the space operator :math:`A(\mu)` is coercive, and :math:`C_a(\mu)` is a
lower bound for its coercivity constant. Finally, :math:`\mathcal{R}^n` denotes
the implicit Euler timestepping residual for the (fixed) time step size :math:`\Delta t`,
.. math::
\mathcal{R}^n(u_n(\mu), \mu) :=
f - M \frac{u_{n}(\mu) - u_{n-1}(\mu)}{\Delta t} - A(u_n(\mu), \mu),
where :math:`M` denotes the mass operator and :math:`f` the source term.
The dual norm of the residual is computed using the numerically stable projection
from [BEOR14]_.
.. warning::
The reduced basis `RB` is required to be orthonormal w.r.t. the given
energy product. If not, the projection of the initial values will be
computed incorrectly.
.. [GP05] M. A. Grepl, A. T. Patera, A Posteriori Error Bounds For Reduced-Basis
Approximations Of Parametrized Parabolic Partial Differential Equations,
M2AN 39(1), 157-181, 2005.
.. [HO08] B. Haasdonk, M. Ohlberger, Reduced basis method for finite volume
approximations of parametrized evolution equations,
M2AN 42(2), 277-302, 2008.
Parameters
----------
discretization
The |InstationaryDiscretization| which is to be reduced.
RB
|VectorArray| containing the reduced basis on which to project.
product
The energy inner product |Operator| w.r.t. the reduction error is estimated.
RB must be to be orthonomrmal w.r.t. this product!
coercivity_estimator
`None` or a |Parameterfunctional| returning a lower bound :math:`C_a(\mu)`
for the coercivity constant of `discretization.operator` w.r.t. `product`.
disable_caching
If `True`, caching of solutions is disabled for the reduced |Discretization|.
extends
Set by :meth:`~pymor.algorithms.greedy.greedy` to the result of the
last reduction in case the basis extension was `hierarchic` (used to prevent
re-computation of residual range basis vectors already obtained from previous
reductions).
Returns
-------
rd
The reduced |Discretization|.
rc
The reconstructor providing a `reconstruct(U)` method which reconstructs
high-dimensional solutions from solutions `U` of the reduced |Discretization|.
reduction_data
Additional data produced by the reduction process (compare the
`extends` parameter).
"""
assert extends is None or len(extends) == 3
assert isinstance(discretization.time_stepper, ImplicitEulerTimeStepper)
logger = getLogger('pymor.reductors.parabolic.reduce_parabolic')
old_residual_data = extends[2].pop('residual') if extends else None
old_initial_resdidual_data = extends[2].pop('initial_residual') if extends else None
with logger.block('RB projection ...'):
rd, rc, data = reduce_generic_rb(discretization, RB, vector_product=product,
disable_caching=disable_caching, extends=extends)
dt = discretization.T / discretization.time_stepper.nt
with logger.block('Assembling error estimator ...'):
residual, residual_reconstructor, residual_data = reduce_implicit_euler_residual(
discretization.operator, discretization.mass, dt, discretization.rhs,
RB, product=product, extends=old_residual_data
)
initial_residual, initial_residual_reconstructor, initial_residual_data = reduce_residual(
IdentityOperator(discretization.solution_space), discretization.initial_data, RB, False,
product=discretization.l2_product, extends=old_initial_resdidual_data
)
estimator = ReduceParabolicEstimator(residual, residual_data.get('residual_range_dims', None),
initial_residual, initial_residual_data.get('residual_range_dims', None),
coercivity_estimator)
rd = rd.with_(estimator=estimator)
data.update(residual=(residual, residual_reconstructor, residual_data),
initial_residual=(initial_residual, initial_residual_reconstructor, initial_residual_data))
return rd, rc, data
| 48.569697 | 114 | 0.671949 |
4d572a2dac2905d58624d731de31416194c06588 | 3,050 | py | Python | frappe/core/doctype/scheduled_job_type/test_scheduled_job_type.py | ektai/erp2Dodock | 5ad64b01cba9b07437f9a27751101258679379e8 | [
"MIT"
] | null | null | null | frappe/core/doctype/scheduled_job_type/test_scheduled_job_type.py | ektai/erp2Dodock | 5ad64b01cba9b07437f9a27751101258679379e8 | [
"MIT"
] | null | null | null | frappe/core/doctype/scheduled_job_type/test_scheduled_job_type.py | ektai/erp2Dodock | 5ad64b01cba9b07437f9a27751101258679379e8 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest
from frappe.utils import get_datetime
from frappe.core.doctype.scheduled_job_type.scheduled_job_type import sync_jobs | 45.522388 | 138 | 0.754754 |
4d57caac42b99c0bec97612a491e63a139339081 | 1,750 | py | Python | projects/2_bike_share_data/menu_text.py | ssi112/programming-data-science-python | 6f70743a89f286f5eb4c13765098f18cf050d7a7 | [
"MIT"
] | 1 | 2020-10-20T20:34:53.000Z | 2020-10-20T20:34:53.000Z | projects/2_bike_share_data/menu_text.py | ssi112/programming-data-science-python | 6f70743a89f286f5eb4c13765098f18cf050d7a7 | [
"MIT"
] | null | null | null | projects/2_bike_share_data/menu_text.py | ssi112/programming-data-science-python | 6f70743a89f286f5eb4c13765098f18cf050d7a7 | [
"MIT"
] | null | null | null | # menu_text.py
#
# simple python menu
# https://stackoverflow.com/questions/19964603/creating-a-menu-in-python
#
# menu_text.py
#
# Simple numbered lookup menus for a bike-share CLI
# (menu idea: https://stackoverflow.com/questions/19964603/creating-a-menu-in-python)

# Every menu offers 'x' to exit.
_EXIT_KEY = 'x'

# City choices are numbered from '1'.
city_menu = {str(n): city for n, city in
             enumerate(('Chicago', 'New York', 'Washington'), start=1)}
city_menu[_EXIT_KEY] = 'Exit'

# Month and weekday menus reserve '0' for "All".
month_menu = {str(n): month for n, month in
              enumerate(('All', 'January', 'February', 'March',
                         'April', 'May', 'June'))}
month_menu[_EXIT_KEY] = 'Exit'

weekday_menu = {str(n): day for n, day in
                enumerate(('All', 'Monday', 'Tuesday', 'Wednesday',
                           'Thursday', 'Friday', 'Saturday', 'Sunday'))}
weekday_menu[_EXIT_KEY] = 'Exit'

if __name__ == "__main__":
    # NOTE(review): main() is not defined in this module as shown — confirm it
    # is provided elsewhere before running this file directly.
    main()
4d5862aa5b9a7ab6497cf927fcc9eb557781d081 | 213 | py | Python | src/books/api.py | lukecyx/lms | eff57c4026e63cd7dd7ea5ac1f061fa0a9ec1a4c | [
"MIT"
] | null | null | null | src/books/api.py | lukecyx/lms | eff57c4026e63cd7dd7ea5ac1f061fa0a9ec1a4c | [
"MIT"
] | null | null | null | src/books/api.py | lukecyx/lms | eff57c4026e63cd7dd7ea5ac1f061fa0a9ec1a4c | [
"MIT"
] | null | null | null | from src.books.models import Book
from src.books.schema import BookOut
from ninja import Router
router = Router()
| 17.75 | 40 | 0.746479 |
4d5afb3a4a38cc784e6e3fd2bab719ca1b2bdd90 | 6,545 | py | Python | loldib/getratings/models/NA/na_khazix/na_khazix_sup.py | koliupy/loldib | c9ab94deb07213cdc42b5a7c26467cdafaf81b7f | [
"Apache-2.0"
] | null | null | null | loldib/getratings/models/NA/na_khazix/na_khazix_sup.py | koliupy/loldib | c9ab94deb07213cdc42b5a7c26467cdafaf81b7f | [
"Apache-2.0"
] | null | null | null | loldib/getratings/models/NA/na_khazix/na_khazix_sup.py | koliupy/loldib | c9ab94deb07213cdc42b5a7c26467cdafaf81b7f | [
"Apache-2.0"
] | null | null | null | from getratings.models.ratings import Ratings
| 15.695444 | 46 | 0.766692 |
4d5be74386b958660be296261cac18d84c28239e | 297 | py | Python | tests/data/expected/main/main_jsonschema_ids/id.py | roadsync/datamodel-code-generator | c22ea471d46300771c15cfaf21864cbe23bfbba4 | [
"MIT"
] | null | null | null | tests/data/expected/main/main_jsonschema_ids/id.py | roadsync/datamodel-code-generator | c22ea471d46300771c15cfaf21864cbe23bfbba4 | [
"MIT"
] | null | null | null | tests/data/expected/main/main_jsonschema_ids/id.py | roadsync/datamodel-code-generator | c22ea471d46300771c15cfaf21864cbe23bfbba4 | [
"MIT"
] | null | null | null | # generated by datamodel-codegen:
# filename: Organization.schema.json
# timestamp: 1985-10-26T08:21:00+00:00
from __future__ import annotations
from pydantic import BaseModel, Field
| 24.75 | 79 | 0.750842 |
4d5d0cd3a4c4b3b0b3187dfacf67f623810fd0f8 | 776 | py | Python | tablo/migrations/0006_alter_geom_col_type.py | consbio/tablo | 8d70bd3fe3449c3298ad69cc7c85bb2d8e83fad8 | [
"BSD-3-Clause"
] | 4 | 2017-04-26T10:30:56.000Z | 2019-06-13T03:20:44.000Z | tablo/migrations/0006_alter_geom_col_type.py | nrdsdata/tablo | 628bcde8071229485438def01ff3b0da0ef9e502 | [
"BSD-3-Clause"
] | 23 | 2016-07-18T21:40:29.000Z | 2019-06-04T20:08:57.000Z | tablo/migrations/0006_alter_geom_col_type.py | nrdsdata/tablo | 628bcde8071229485438def01ff3b0da0ef9e502 | [
"BSD-3-Clause"
] | 3 | 2016-11-21T18:47:33.000Z | 2019-05-15T23:29:56.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from tablo import GEOM_FIELD_NAME, WEB_MERCATOR_SRID
# PL/pgSQL body for the forward migration: iterate over every table name
# registered in tablo_featureservicelayer and widen that table's geometry
# column to the generic 'GEOMETRY' type in the Web Mercator SRID.
# {geom_col} and {srid} are filled in by str.format below from the tablo
# package constants; the doubled quotes ('') escape a single quote inside
# the SQL string literal passed to format().
forward_sql = """
DO
$$
DECLARE
table_name name;
BEGIN
FOR table_name IN
SELECT tablo_featureservicelayer.table FROM tablo_featureservicelayer
LOOP
EXECUTE format('ALTER TABLE %I ALTER COLUMN {geom_col} TYPE geometry(''GEOMETRY'', {srid});', table_name);
END LOOP;
END;
$$
LANGUAGE plpgsql;
""".format(geom_col=GEOM_FIELD_NAME, srid=WEB_MERCATOR_SRID)
4d5d0e78c95a6644e5c54d7d327b95b2ebfd2692 | 23,755 | py | Python | tests/test_atise.py | tkg-framework/TKG-framework | 98586b7199bda0e96d74b2ea02c62226901822cc | [
"MIT",
"Unlicense"
] | null | null | null | tests/test_atise.py | tkg-framework/TKG-framework | 98586b7199bda0e96d74b2ea02c62226901822cc | [
"MIT",
"Unlicense"
] | null | null | null | tests/test_atise.py | tkg-framework/TKG-framework | 98586b7199bda0e96d74b2ea02c62226901822cc | [
"MIT",
"Unlicense"
] | null | null | null | import torch
import torch.nn as nn
from torch.autograd import Variable
import torch.functional as F
from torch.nn.init import xavier_normal_
import os
import sys
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(BASE_DIR)
import numpy as np
from numpy.random import RandomState
from collections import defaultdict
import time
from tkge.data.dataset import SplitDataset
from tkge.data.custom_dataset import ICEWS14AtiseDatasetProcessor
from tkge.eval.metrics import Evaluation
from tkge.train.sampling import NonNegativeSampler
from Dataset import KnowledgeGraph
randseed = 9999
np.random.seed(randseed)
torch.manual_seed(randseed)
model_path = "/home/gengyuan/workspace/baseline/ATISE/icews14/ATISE/timediscrete0/dim500/lr0.0000/neg_num10/3day/gamma120/cmin0.0030/params.pkl"
model = ATISE(7129, 460, 500, 64, 0, 120, 0.003, 0.3, True)
model_state_dict = torch.load(model_path)
model.load_state_dict(model_state_dict)
if __name__ == '__main__':
test()
| 39.52579 | 144 | 0.540097 |
4d5dd414ac863f8b7e031913598446b42eb6a1d4 | 1,265 | py | Python | tests/security_test.py | smarkets/bravado-core | 5d9c7eff417118462893c08accc40e27b066e2f6 | [
"BSD-3-Clause"
] | null | null | null | tests/security_test.py | smarkets/bravado-core | 5d9c7eff417118462893c08accc40e27b066e2f6 | [
"BSD-3-Clause"
] | null | null | null | tests/security_test.py | smarkets/bravado-core | 5d9c7eff417118462893c08accc40e27b066e2f6 | [
"BSD-3-Clause"
] | 1 | 2022-03-26T12:14:35.000Z | 2022-03-26T12:14:35.000Z | # -*- coding: utf-8 -*-
import pytest
from six import iteritems
| 39.53125 | 88 | 0.679842 |
4d6009ffc4ef0b627d157000658c61a9b68d4726 | 3,555 | py | Python | training/data_splits.py | ethansaxenian/RosettaDecode | 8ea1a42a5f792280b50193ad47545d14ee371fb7 | [
"MIT"
] | null | null | null | training/data_splits.py | ethansaxenian/RosettaDecode | 8ea1a42a5f792280b50193ad47545d14ee371fb7 | [
"MIT"
] | null | null | null | training/data_splits.py | ethansaxenian/RosettaDecode | 8ea1a42a5f792280b50193ad47545d14ee371fb7 | [
"MIT"
] | null | null | null | import json
from typing import Union, Optional, Tuple, List
import numpy as np
from sklearn.feature_extraction import DictVectorizer
from sklearn.feature_extraction.text import TfidfVectorizer, CountVectorizer, TfidfTransformer
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from shared import LANG_TO_INT
| 41.337209 | 211 | 0.663854 |
4d6063eae63c8ba714bea0a997b134856f495196 | 31,868 | py | Python | GAPP.py | timhartley/GAPP | ddd3b3c29b50bc5a9eb3d7f84403e425072145bb | [
"Apache-2.0"
] | null | null | null | GAPP.py | timhartley/GAPP | ddd3b3c29b50bc5a9eb3d7f84403e425072145bb | [
"Apache-2.0"
] | null | null | null | GAPP.py | timhartley/GAPP | ddd3b3c29b50bc5a9eb3d7f84403e425072145bb | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
#Sorts based on top 50 CMetric, all callPaths - CMetric
#, all call paths - call path count and all samples
from __future__ import print_function
from bcc import BPF, PerfType, PerfSWConfig
from bcc import BPF
import sys
import ctypes as ct # For mapping the 'C' structure to Python
import argparse #For parsing command line arguments
import datetime
import os
import operator
import subprocess
import re
# arg validation
# Command-line interface for the profiler.  -x is the only required flag;
# the numeric flags are validated by positive_int (defined elsewhere in
# this file) and fall back to the defaults documented in their help text.
parser = argparse.ArgumentParser(description="Generates stack traces for critical code sections")
parser.add_argument("-x", metavar="<Path to executable>", dest = "targetPath", required = True, help = "Full path to the executable file to be profiled - Required")
parser.add_argument("-t", metavar="<Threshold>", dest = "threshold", type = positive_int, required = False, help = "Number active threads to trigger stack trace. Default = total no. of threads/2" )
parser.add_argument("-f", metavar="<Sampling Frequency>", dest = "sample_freq", type = positive_int, required = False, help = "Sampling frequency in Hz. Default = 333Hz (equivalent to 3 ms)" )
parser.add_argument("-d", metavar="<Stack Depth>", dest = "stack_depth", type = positive_int, required = False, help = "Maximum Stack depth for stack unwinding. Default = 10" )
parser.add_argument("-b", metavar="<Ring buffer Size>", dest = "buffer", type = positive_int, required = False, help = "Number of pages to be allocated for the ring buffer, Default = 64" )
parser.add_argument("--threads_only", help = "Trace threads alone", action = "store_true")
parser.add_argument("--process_only", help = "Trace processes alone", action = "store_true")
# NOTE(review): --trace_lib is parsed but never referenced in the visible
# code below — confirm whether it is still used or should be removed.
parser.add_argument("--trace_lib", help = "Include library paths in tracing", action = "store_true")
parser.add_argument("--kernel_stack", help = "Get kernel stack traces", action = "store_true")
# Parsed at import time; exits with a usage message on invalid arguments.
args = parser.parse_args()
# define BPF program
bpf_text = """
#include <uapi/linux/ptrace.h>
#include <uapi/linux/bpf_perf_event.h>
#include <linux/sched.h>
#include <linux/types.h>
//Structure to pass information from the kernel probe to the user probe
struct key_t {
u32 tid; //Thread ID
u32 tgid; // Parent thread ID
u64 cm; //CMetric
int source; // 0 - sampling, 1 - critical time slice, 2 - non-critical time slice
int user_stackid;
int kernel_stackid;
u64 inst_ptr;
int store_stackTop;
};
BPF_HASH(threadList, u32, u32); //Stores threadIds of participating threads - Global
BPF_HASH(threadCount, u32, u32, 1); //Stores number of active threads - Global
BPF_HASH(tsp, u32, u64, 1); //Stores timestamp of previous event
BPF_ARRAY(count, u32, 1); //Stores the total thread count (parent not included)
BPF_HASH(global_CM, u32, u64, 1); //Keeps track of cumulative sum of CMetric - Global
BPF_PERCPU_ARRAY(local_CM, u64, 1); // To store the snapshot of global_CM when a thread is switched in
BPF_HASH(CM_hash, u32, u64); // Criticality Metric hash map for each thread
BPF_HASH(GLOBAL_WT_TC, u32, u64,1); //Stores the cumulative sum of weighted thread Count - Global
BPF_PERCPU_ARRAY(LOCAL_WT_TC, u64,1); //Stores the snapshot of GLOBAL_WT_TC - CPU Local
BPF_PERCPU_ARRAY(inTS, u64, 1); //Store the time at which a thread was switched in - CPU Local
BPF_PERF_OUTPUT(events); //Buffer to write event details
BPF_STACK_TRACE(user_stacktraces, 4086);
BPF_STACK_TRACE(kernel_stacktraces, 4086);
/*sched_switch_args {
// from /sys/kernel/debug/tracing/events/sched/sched_switch/format
u64 __unused__;
char prev_comm[16];
pid_t prev_pid;
int prev_prio;
long prev_state;
char next_comm[16];
pid_t next_pid;
int next_prio;
};
*/
TRACEPOINT_PROBE(task, task_rename){
u32 threadId, totalCount;
char comm[16];
u32 zero32 = 0, one = 1;
int len = bpf_probe_read_str(&comm, sizeof(args->newcomm), args->newcomm);
if(!len)
return 0;
//Compare the command argument with traced command
if(PGM_FILTER){
bpf_probe_read(&threadId, sizeof(threadId), &args->pid);
threadList.insert(&threadId, &zero32); //Store the thread ID in the hash startTracing.lookup_or_init(&threadId, &zero32);
u32 *countVal = count.lookup_or_init(&zero32, &zero32);
lock_xadd(countVal,1);
}
return 0;
}
TASK_NEWTASK
int do_perf_event(struct bpf_perf_event_data *ctx){
u32 zero32 = 0;
u32 threadId = bpf_get_current_pid_tgid();
u32 *val = threadList.lookup(&threadId);
if(!val)
return 0;
u32 *activeCount = threadCount.lookup(&zero32);
if(!activeCount)
{return 0;}
u32 tempCount;
bpf_probe_read(&tempCount, sizeof(tempCount), activeCount);
u32 *totalThreadCount = count.lookup(&zero32);
if(!totalThreadCount)
return 0;
u32 totalCount;
bpf_probe_read(&totalCount, sizeof(totalCount), totalThreadCount);
if( (tempCount <= STACK_FILTER) || tempCount ==1 ){
struct key_t key = {};
key.tid = bpf_get_current_pid_tgid();
key.tgid = bpf_get_current_pid_tgid()>>32;
key.cm = 0;
key.source = 0;
if(TRACE_THREADS_ONLY){
key.inst_ptr = PT_REGS_IP(&ctx->regs); //Get the instruction pointer
events.perf_submit(ctx, &key, sizeof(key)); //Write details to the ring buffer
}
}
return 0;
}
TRACEPOINT_PROBE(sched, sched_process_exit){
u32 zero32 = 0;
//Get the current tid
u32 threadId;
bpf_probe_read(&threadId, sizeof(threadId), &args->pid);
//Check if the thread ID belongs to the application
u32 *val = threadList.lookup(&threadId);
if(!val)
return 0;
//Decrement the number of threads
u32 *countVal = count.lookup(&zero32);
if(!countVal)
return 0;
//lock_xadd(countVal, -1);
countVal -= 1;
return 0;
}
TRACEPOINT_PROBE(sched, sched_wakeup){
u32 targetID, zero32 = 0, status, one32 = 1;
//Check if thread being woken up belongs to the application
bpf_probe_read(&targetID, sizeof(targetID), &args->pid);
u32 *list = threadList.lookup(&targetID);
if (!list)
return 0;
/////////////////////////////////////////////////////////////////////
if(args->success){ //If waking was successful
u32 *activeCount = threadCount.lookup(&zero32);
if(!activeCount)
{return 0;}
u32 prev_tCount; //Local variable to store thread count
bpf_probe_read(&prev_tCount, sizeof(prev_tCount), activeCount);
//Increment thread count if thread was inactive
bpf_probe_read(&status, sizeof(status), list);
if(status == 0)
lock_xadd(activeCount,1);
//Set thread as active
threadList.update(&targetID,&one32);
}
return 0;
}
//Tracepoint probe for the Sched_Switch tracepoint
TRACEPOINT_PROBE(sched, sched_switch){
u32 one32=1, arrayKey=0, zero32=0;
u32 *listVal, *listVal1; //Pointers to entries in threadList map
u32 next_pid, prev_pid;
u64 zero64 = 0;
//Copy data to BPF stack
bpf_probe_read(&next_pid, sizeof(next_pid), &args->next_pid);
bpf_probe_read(&prev_pid, sizeof(prev_pid), &args->prev_pid);
//Look up thread ids in the list created by sys_clone()
listVal1 = threadList.lookup(&next_pid);
listVal = threadList.lookup(&prev_pid);
u32 prev=0, next=0;
if(listVal){
bpf_probe_read(&prev, sizeof(prev),listVal);
prev = 1;
}
if(listVal1){
bpf_probe_read(&next, sizeof(next),listVal1);
next = 1;
}
//Return if the switching threads do not belong to the application
if( !prev && !next)
return 0;
//////////////////////////////////////////////////////////////////////
//Calculate values common for all switching events
u64 interval, intervalCM;
u64 *oldTS = tsp.lookup_or_init(&arrayKey, &zero64);
if(!oldTS)
{return 0;}
u64 tempTS;
bpf_probe_read(&tempTS, sizeof(tempTS), oldTS); //Copy Old time from bpf map to local variable
u64 newTS = bpf_ktime_get_ns();
tsp.update(&arrayKey, &newTS); //Update time stamp
//The thread count is initialized to one as the first switch in event is always missed.
u32 *ptr_threadCount = threadCount.lookup_or_init(&arrayKey, &one32);
if(!ptr_threadCount)
{return 0;}
int prev_tc; //Temporary variable to store thread count for the previous switching interval
bpf_probe_read(&prev_tc, sizeof(prev_tc),ptr_threadCount);
if(newTS < tempTS)//Very rarely, event probes are triggered out of order, which are ignored
return 0;
if(tempTS==0 || prev_tc==0){ //If first event or no active threads in during the previous interval, prev interval = 0
interval = 0;
}
else
interval = (newTS - tempTS); //Switching interval
u64 *ptr_globalCM = global_CM.lookup_or_init(&arrayKey, &zero64);
if(!ptr_globalCM)
return 0;
//Calculate the CMetric for previous interval and add it to global_CM
if (interval != 0){
intervalCM = interval/prev_tc;
lock_xadd(ptr_globalCM, intervalCM);
}
//Calculate weighted thread count for previous interval
u64 wt_threadCount = (interval) * prev_tc;
u64 *g_wt_threadCount = GLOBAL_WT_TC.lookup_or_init(&arrayKey, &zero64);
if(!g_wt_threadCount)
return 0;
lock_xadd(g_wt_threadCount, wt_threadCount); //Add to global weighted thread count
//////////////////////////////////////////////////////////////////////
//If previous thread was a peer thread
if(prev){
//Decrement active thread count only if thread switched out is not in RUNNING (0) state
if(args->prev_state != TASK_RUNNING){
if(prev_tc > 0 ){
lock_xadd(ptr_threadCount, -1);
}
//Mark the thread as inactive in the threadList hash map
threadList.update(&prev_pid,&zero32);
}
else
//Mark the thread as active as thread is switched out to TASK_RUNNING state
threadList.update(&prev_pid,&one32);
u64 temp;
//Get updated CM
bpf_probe_read(&temp, sizeof(temp),ptr_globalCM);
//Get snapshot of global_CM which was stored in local_CM when prev_pid was switched in
u64 *cpuCM = local_CM.lookup_or_init(&arrayKey, &zero64);
if(!cpuCM)
{return 0;}
//Update the CM of the thread by adding the CM for the time slice
u64 updateCM = temp - (*cpuCM);
u64 *tCM = CM_hash.lookup_or_init(&prev_pid, &zero64);
if(!tCM)
{return 0;}
*tCM = *tCM + updateCM;
//Get LOCAL_WT_TC, the thread's weighted threadCount at the time it was switched in.
u64 *t_wt_threadCount;
t_wt_threadCount = LOCAL_WT_TC.lookup_or_init(&arrayKey, &zero64);
if(!t_wt_threadCount)
{return 0;}
u64 temp_g_wt_threadCount, temp_t_wt_threadCount;
bpf_probe_read(&temp_g_wt_threadCount, sizeof(temp_g_wt_threadCount), g_wt_threadCount);
bpf_probe_read(&temp_t_wt_threadCount, sizeof(temp_t_wt_threadCount), t_wt_threadCount);
//Reset the per-CPU CMetric counter
local_CM.update(&arrayKey, &zero64);
//Reset local weighted ThreadCount counter
LOCAL_WT_TC.update(&arrayKey, &zero64);
//Get time when this thread was switched in
oldTS = inTS.lookup_or_init(&arrayKey, &zero64);
if(!oldTS)
return 0;
u64 switch_in_time, timeSlice;
bpf_probe_read(&switch_in_time, sizeof(switch_in_time), oldTS);
timeSlice = (newTS - switch_in_time);
//Reset switch in time
inTS.update(&arrayKey, &zero64);
u32 *totalThreadCount = count.lookup(&zero32);
if(!totalThreadCount)
return 0;
u32 totalCount;
bpf_probe_read(&totalCount, sizeof(totalCount), totalThreadCount);
//Calculate the average number of threads
u32 ratio = (temp_g_wt_threadCount - temp_t_wt_threadCount) / timeSlice;
struct key_t key = {};
key.tid = prev_pid;
key.tgid = bpf_get_current_pid_tgid()>>32;
key.cm = updateCM;
if( (ratio <= STACK_FILTER || ratio == 1) && TRACE_THREADS_ONLY){ //If thread_avg < threshold and not parent thread
key.user_stackid = user_stacktraces.get_stackid(args, BPF_F_USER_STACK);
if (GET_KERNEL_STACK && args->prev_state != TASK_RUNNING)
key.kernel_stackid= kernel_stacktraces.get_stackid(args, 0);
else
key.kernel_stackid = -1;
key.source = 1;
}
else{
key.user_stackid = 0;
key.source = 2;
}
key.store_stackTop = ((prev_tc <= STACK_FILTER) || prev_tc == 1)? 1:0;
if(TRACE_THREADS_ONLY)
events.perf_submit(args, &key, sizeof(key));
}
//Next thread is a peer thread
if(next){
//Get the previous state of this thread from the THREADLIST
u32 tempNext;
bpf_probe_read(&tempNext, sizeof(tempNext), listVal1);
//If the thread was not in TASK_RUNNING state
if(tempNext == 0){
lock_xadd(ptr_threadCount, 1); //Increment the number of active threads
}
threadList.update(&next_pid, &one32); //Set the thread status to RUNNING state
u64 temp;
//Get updated CM and store it to the CPU counter
bpf_probe_read(&temp, sizeof(temp),ptr_globalCM);
local_CM.update(&arrayKey,&temp);
//Store switch in time
inTS.update(&arrayKey, &newTS);
//Store the local cumulative weighted thread count
u64 temp_g_wt_threadCount;
bpf_probe_read(&temp_g_wt_threadCount, sizeof(temp_g_wt_threadCount), g_wt_threadCount);
LOCAL_WT_TC.update(&arrayKey, &temp_g_wt_threadCount);
}
return 0;
}
"""
task_newtask_pgm = """TRACEPOINT_PROBE(task, task_newtask){
u32 zero32=0;
char comm[TASK_COMM_LEN];
bpf_get_current_comm(&comm, sizeof(comm));
//We can also check for the parent id in the threadlist
//But if the parent was created before starting tracing this can fail
//So we check the command line instead
//If application is being traced
if(PGM_FILTER){
u32 threadId;
bpf_probe_read(&threadId, sizeof(threadId), &args->pid);
u32 *val = threadList.lookup_or_init(&threadId, &zero32); //Store the thread ID in the hash
u32 *countVal = count.lookup_or_init(&zero32, &zero32);
lock_xadd(countVal,1);
}
return 0;
}"""
# ---------------------------------------------------------------------------
# Build the final BPF program text from the command-line options by filling
# in the placeholder macros (TASK_NEWTASK, PGM_FILTER, ...) in bpf_text.
# ---------------------------------------------------------------------------

# Path to executable and its basename (used to filter traced comm names)
targetPath = ""
pgmName = ""

# Code fragments substituted into the BPF C source
task_newtask_probe = task_newtask_pgm   # registers new threads unless --process_only
trace_threads_only = '1'                # C condition: emit events for everything by default
get_kernel_stack = '0'                  # kernel stack capture off by default

if args.threads_only:
    # Only emit events for worker threads (tid != tgid), skipping the parent
    trace_threads_only = 'key.tgid != key.tid'
if args.process_only:
    # Drop the task_newtask probe so child threads are never registered
    task_newtask_probe = ''
if args.kernel_stack:
    get_kernel_stack = '1'

# Resolve the target executable name from -x
if args.targetPath is not None:
    targetPath = args.targetPath.rstrip(os.sep)
    pgmName = os.path.basename(targetPath)

# Build the comm-name filter on the first four characters of the executable
# name.  Use truthiness instead of `is not None`: pgmName defaults to "" and
# is never None, so the old check could not guard the indexing below.
# NOTE(review): assumes the executable name is at least 4 characters long.
if pgmName:
    pgm_filter = 'comm[0]==\'%c\' && comm[1]==\'%c\' && comm[2]==\'%c\' && comm[3]==\'%c\'' % (
        pgmName[0], pgmName[1], pgmName[2], pgmName[3])

# Active-thread threshold below which stack traces are captured; defaults to
# half the total thread count computed inside the BPF program.
if args.threshold is not None:
    stack_filter = '%d' % args.threshold
else:
    stack_filter = 'totalCount/2'

# Sampling frequency (Hz), stack-unwind depth and perf ring-buffer pages
freq = args.sample_freq if args.sample_freq is not None else 333
depth = args.stack_depth if args.stack_depth is not None else 10
buffer_size = args.buffer if args.buffer is not None else 64

# Splice the fragments into the BPF C source
bpf_text = bpf_text.replace('TASK_NEWTASK', task_newtask_probe)
bpf_text = bpf_text.replace('PGM_FILTER', pgm_filter)
bpf_text = bpf_text.replace('STACK_FILTER', stack_filter)
bpf_text = bpf_text.replace('TRACE_THREADS_ONLY', trace_threads_only)
bpf_text = bpf_text.replace('GET_KERNEL_STACK', get_kernel_stack)
# Uncomment to inspect the generated program:
# print(bpf_text)
print ("\n\n---Press Ctrl-C to start post processing---")
# load BPF program
b = BPF(text=bpf_text)
b.attach_perf_event(ev_type=PerfType.SOFTWARE,
ev_config=PerfSWConfig.CPU_CLOCK, fn_name="do_perf_event",
sample_freq=freq)
user_stack_traces = b["user_stacktraces"]
kernel_stack_traces = b["kernel_stacktraces"]
sampleAddr = dict() #Stores addresses corresponding to samples
CMetric = dict() #Dictionary to store CMetric
CM_Entry = 1 #Number of CMetric entry
CMetric_sampleAddr = dict() # Stores the sample address for each Cmetric - to get line of code
CMetric_callPath = dict() # Stores the call path for each CMetric
user_symbolMap = dict() #Store symbols corresponding addresses
kernel_symbolMap = dict()
total_switch = 0
noSample = 0
###############################################
#Function to trim the symbols of arguments
################################################
################################################
#Function to execute for each event written to the ring buffer
b["events"].open_perf_buffer(print_event, page_cnt=buffer_size)
#To print criticality metric of each thread
threadCM = b.get_table("CM_hash")
sum = 0;
criticalSwitch = dict()
criticalSwitch_allCM= dict()
criticalLine = dict()
critLineSamples = dict()
critLineSamples_all = dict()
critKernelPaths = dict()
allFunction = dict()
allLines = dict()
addrMap_fun = dict()
addrMap_line= dict()
try:
while 1:
b.kprobe_poll()
finally:
#Post Processing the stack traces
start = datetime.datetime.now()
print("Criticality Metric for each thread");
for k, v in sorted(threadCM.items(), key=lambda x:x[1].value):
print("%10u %u " % ((k.value), (v.value)))
sum += v.value
print ("Sum = %d" % sum)
print ("***************************************************")
#for key, value in sorted(CMetric.items(), key=lambda x:x[1], reverse= True): # key is CM_Entry, value is CMetric
for key, value in CMetric.items(): # key is CM_Entry, value is CMetric
user_callPath = CMetric_callPath[key][0]
kernel_callPath = CMetric_callPath[key][1]
#Combine all call paths irrespective of CMetric value and then sort as per CMetric value
if user_callPath in criticalSwitch_allCM:
criticalSwitch_allCM[user_callPath][0] += value
criticalSwitch_allCM[user_callPath][1] += 1
else:
criticalSwitch_allCM[user_callPath] = [value,1]
#Combine the sample addresses
if user_callPath not in critLineSamples_all:
critLineSamples_all[user_callPath] = dict()
lineDict = critLineSamples_all[user_callPath]
addrList = CMetric_sampleAddr[key]
for element in addrList:
if element in lineDict:
lineDict[element] += 1
else:
lineDict[element] = 1
#Combine kernel call paths
if user_callPath not in critKernelPaths:
critKernelPaths[user_callPath] = dict()
allKernelPaths = critKernelPaths[user_callPath]
if kernel_callPath in allKernelPaths:
allKernelPaths[kernel_callPath] += 1
else:
allKernelPaths[kernel_callPath] = 1
user_callPath = ""
kernel_callPath = ""
print ("Critical Call Paths, functions and Lines of Code:")
choose_path(criticalSwitch_allCM, 1)
end = datetime.datetime.now()
post_time = end - start
print ("Post Processing time in milli seconds: %u" % int(post_time.total_seconds() * 1000))
print ("Total switches: %u Critical switches: %u" % (total_switch, CM_Entry ))
print ("Stack trace with no samples: %u" % noSample)
print ("***************************************************")
sys.exit()
| 34.414687 | 197 | 0.614096 |
4d610d4a0f33ed82366186a1ab547fe19c1f9cea | 4,484 | py | Python | data/activitynet_feature_cuhk/data_process.py | NEUdeep/BSN | e987cc159976ebe54027b562d833a92a5aadf864 | [
"MIT"
] | 1 | 2021-04-21T08:09:59.000Z | 2021-04-21T08:09:59.000Z | data/activitynet_feature_cuhk/data_process.py | NEUdeep/BSN | e987cc159976ebe54027b562d833a92a5aadf864 | [
"MIT"
] | null | null | null | data/activitynet_feature_cuhk/data_process.py | NEUdeep/BSN | e987cc159976ebe54027b562d833a92a5aadf864 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import random
import numpy as np
import scipy
import pandas as pd
import pandas
import numpy
import json
videoDict=getDatasetDict()
videoNameList=videoDict.keys()
random.shuffle(videoNameList)
col_names=[]
for i in range(400):
col_names.append("f"+str(i))
for videoName in videoNameList:
videoAnno=videoDict[videoName]
data=readData(videoName)
numFrame=videoAnno['duration_frame']
featureFrame=len(data)*16
videoAnno["feature_frame"]=featureFrame
videoDict[videoName]=videoAnno
print(numFrame,featureFrame)
videoFeature_mean=poolData(data,videoAnno,num_prop=100,num_bin=1,num_sample_bin=3,pool_type="mean")
outDf=pd.DataFrame(videoFeature_mean,columns=col_names)
outDf.to_csv("./csv_mean_100/"+videoName+".csv",index=False)
outfile=open("./anet_anno_anet.json","w")
json.dump(videoDict,outfile)
outfile.close() | 32.970588 | 103 | 0.674398 |
4d616609d3a6fbee45f8cfdcce708b41d046091c | 1,622 | py | Python | examples/cloud-test.py | Ferris-Labs/ferris-cli | 6a34706ab9163471bf2e03f31ba4018143f26d51 | [
"Apache-2.0"
] | null | null | null | examples/cloud-test.py | Ferris-Labs/ferris-cli | 6a34706ab9163471bf2e03f31ba4018143f26d51 | [
"Apache-2.0"
] | 1 | 2020-12-15T13:18:40.000Z | 2020-12-15T13:18:40.000Z | examples/cloud-test.py | Integration-Alpha/ferris-cli | 6a34706ab9163471bf2e03f31ba4018143f26d51 | [
"Apache-2.0"
] | null | null | null | from cloudevents.sdk.event import v03
import json
from ferris_cli.ferris_cli import CloudEventsAPI
import uuid
import os
import consul
from ferris_cli.ferris_cli import ApplicationConfigurator
from datetime import datetime
platform_environment = ApplicationConfigurator().get('ferris.env')
broker = f"kafka://{platform_environment['KAFKA_BOOTSTRAP_SERVER']}:{platform_environment['KAFKA_PORT']}"
dateTimeObj = datetime.now()
timestampStr = dateTimeObj.strftime("%Y-%m-%dT%H:%M:%SZ")
print(timestampStr)
send_direct_loading_event('/landing/zone/abc')
send_confirmation_event('/landing/zone/abc')
| 28.45614 | 106 | 0.7127 |
4d65330a040ed957f51a1f95089f84709aa653c7 | 2,355 | py | Python | REL_dashboarSERVIDORES.py | tiagotouso/TALENTOS_HUMANOS | c391f7d7a331d5f8b186b27af6a9b61448620cc6 | [
"MIT"
] | null | null | null | REL_dashboarSERVIDORES.py | tiagotouso/TALENTOS_HUMANOS | c391f7d7a331d5f8b186b27af6a9b61448620cc6 | [
"MIT"
] | null | null | null | REL_dashboarSERVIDORES.py | tiagotouso/TALENTOS_HUMANOS | c391f7d7a331d5f8b186b27af6a9b61448620cc6 | [
"MIT"
] | null | null | null | '''
GERADOR DE RELATRIO DOS SERVIDORES (DASHBOARD SERVIDORES)
'''
from SQL import sqlpandas
from MENSAGEM import mensagemInformacao, mensagemErro
from AUXILIAR import salvarPandas
def dashboardServidores():
'''
FUNO PARA CRIAR OS DASHDOARD
ENTRA
ENTRA NULL
SAI
PLANILHA COM OS DADOS PARA DASHBOARD
'''
sql = '''SELECT
GR_MATRICULA AS SIAPE,
IT_NO_SERVIDOR AS SERVIDOR,
IDADE,
IT_CO_SEXO AS SEXO,
DES_TITULACAO AS TITULAO,
DES_ETNIA AS ETNIA,
DES_REGIME_JURIDICO AS 'REG JUR',
IT_CO_JORNADA_TRABALHO as 'CARGA HORRIA',
DES_CARREIRA AS CARREIRA,
DES_CARGO AS CARGO,
DES_GRUPO AS GRUPO,
DES_UPAG AS UPAG
FROM tb_ser_rel
where
IT_DA_OCOR_EXCLUSAO_SERV is null
and IT_DA_OCOR_INATIVIDADE_SERV is null
and DES_CARREIRA in ('TCN', 'PROF 2', 'PROF 3');'''
dados = sqlpandas(sql)
if len(dados) > 0:
dados['IDADE'] = dados['IDADE'].apply(faixa)
dados['TITULAO'] = dados['TITULAO'].replace(['10 DOUTORADO', '08 ESPECIALIZAO', '09 MESTRADO', '06 MEDIO',
'04 FUNDAMENTAL I', '05 FUNDAMENTAL', '07 SUPERIOR',
'07 ENSINO SUPERIOR', '10 PHD', '07 SUPERIOR-INCOMPLETO'],
['DOUTORADO', 'ESPECIALIZAO', 'MESTRADO', 'ENSINO MDIO',
'ENSINO FUNDAMENTAL', 'ENSINO FUNDAMENTAL', 'ENSINO SUPERIOR',
'ENSINO SUPERIOR', 'DOUTORADO', 'ENSINO MDIO'])
dados['TOTAL'] = 1
if len(dados) > 0:
salvarPandas(dados, 'DAHSBOARD - SERVIDORES')
mensagemInformacao('Relatrio DAHSBOARD - SERVIDORES criado com sucesso.')
else:
mensagemErro('Relatrio DAHSBOARD - SERVIDORES no foi criado.')
| 32.260274 | 120 | 0.525265 |
4d656673d216ce0be4fe64d21204d4348b38598e | 60 | py | Python | pyroombaadapter/__init__.py | ymollard/PyRoombaAdapter | a4b63e9b97ac2e27a8b472f596a1111eb3c254b9 | [
"MIT"
] | null | null | null | pyroombaadapter/__init__.py | ymollard/PyRoombaAdapter | a4b63e9b97ac2e27a8b472f596a1111eb3c254b9 | [
"MIT"
] | null | null | null | pyroombaadapter/__init__.py | ymollard/PyRoombaAdapter | a4b63e9b97ac2e27a8b472f596a1111eb3c254b9 | [
"MIT"
] | null | null | null | from pyroombaadapter.pyroombaadapter import PyRoombaAdapter
| 30 | 59 | 0.916667 |
4d66d1254740f9bb3086a9048334ac1404d1dea1 | 2,189 | py | Python | torchkit/head/localfc/curricularface.py | sarvex/TFace | b3d8a1392816e0d941425c30ad843d185e286431 | [
"PSF-2.0"
] | 764 | 2021-05-26T15:40:25.000Z | 2022-03-30T03:26:31.000Z | torchkit/head/localfc/curricularface.py | Charlee-du/TFace | 490cf90a1f042b86d7d03042f26d0a7cf6b1f0c0 | [
"PSF-2.0"
] | 45 | 2021-06-07T12:57:19.000Z | 2022-03-25T16:04:54.000Z | torchkit/head/localfc/curricularface.py | Charlee-du/TFace | 490cf90a1f042b86d7d03042f26d0a7cf6b1f0c0 | [
"PSF-2.0"
] | 139 | 2021-06-04T09:25:21.000Z | 2022-03-31T22:49:23.000Z | from __future__ import print_function
from __future__ import division
import torch
import torch.nn as nn
from torch.nn import Parameter
import math
from torchkit.util.utils import l2_norm
from torchkit.head.localfc.common import calc_logits
| 37.741379 | 108 | 0.634536 |
4d69852609dac731ffb1bbf364bfde4e5a34166e | 390 | py | Python | lmsapi/api_cash/views.py | orkasolutions-develop/lms-api-new | c9e5a2d98874a4a7415002397048258526a94644 | [
"MIT"
] | null | null | null | lmsapi/api_cash/views.py | orkasolutions-develop/lms-api-new | c9e5a2d98874a4a7415002397048258526a94644 | [
"MIT"
] | null | null | null | lmsapi/api_cash/views.py | orkasolutions-develop/lms-api-new | c9e5a2d98874a4a7415002397048258526a94644 | [
"MIT"
] | null | null | null | from core.views import BaseViewSet
from .models import Cash
from .serializers import CashSerializer
| 27.857143 | 66 | 0.717949 |
4d6a8be82f4b05e6fd5305e91364bc1db446eb68 | 3,730 | py | Python | front_end/migration_scripts/1_to_2.py | zacheliason/CodeBuddy | 18694771ccbb74e6966e08f1247aadda2d5d06f7 | [
"MIT"
] | null | null | null | front_end/migration_scripts/1_to_2.py | zacheliason/CodeBuddy | 18694771ccbb74e6966e08f1247aadda2d5d06f7 | [
"MIT"
] | 11 | 2020-06-12T19:13:12.000Z | 2021-08-28T23:47:57.000Z | front_end/migration_scripts/1_to_2.py | zacheliason/CodeBuddy | 18694771ccbb74e6966e08f1247aadda2d5d06f7 | [
"MIT"
] | 3 | 2020-05-12T16:54:16.000Z | 2021-04-30T16:19:46.000Z | import atexit
import sqlite3
import traceback
#################
import sys
sys.path.append('/app')
from helper import *
settings_dict = load_yaml_dict(read_file("/Settings.yaml"))
conn = sqlite3.connect(f"/database/{settings_dict['db_name']}", isolation_level=None, detect_types=sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES)
conn.row_factory = sqlite3.Row
cursor = conn.cursor()
cursor.execute("PRAGMA foreign_keys=ON")
atexit.register(conn.close)
atexit.register(cursor.close)
version = read_file("/VERSION").rstrip()
# This tells us whether the migration has already happened.
check_sql = '''SELECT COUNT(*) AS count
FROM pragma_table_info("problems")
WHERE name = "expected_text_output"'''
cursor.execute(check_sql)
check_result = cursor.fetchone()["count"]
if check_result > 0:
print("NotNeeded")
else:
alter_sql_list = ['ALTER TABLE problems RENAME COLUMN expected_output TO expected_text_output',
'ALTER TABLE problems ADD COLUMN expected_image_output text NOT NULL DEFAULT ""',
'''UPDATE problems
SET expected_image_output = expected_text_output
WHERE output_type = "jpg"''',
'''UPDATE problems
SET expected_text_output = ""
WHERE output_type = "jpg"''',
'ALTER TABLE submissions RENAME COLUMN code_output TO text_output',
'ALTER TABLE submissions ADD COLUMN image_output text NOT NULL DEFAULT ""',
'''UPDATE submissions
SET image_output = text_output
WHERE problem_id IN (SELECT problem_id FROM problems WHERE output_type = "jpg")''',
'''UPDATE submissions
SET text_output = ""
WHERE problem_id IN (SELECT problem_id FROM problems WHERE output_type = "jpg")''',
'''CREATE TABLE IF NOT EXISTS submissions2 (
course_id integer NOT NULL,
assignment_id integer NOT NULL,
problem_id integer NOT NULL,
user_id text NOT NULL,
submission_id integer NOT NULL,
code text NOT NULL,
text_output text NOT NULL,
image_output text NOT NULL,
passed integer NOT NULL,
date timestamp NOT NULL,
FOREIGN KEY (course_id) REFERENCES courses (course_id) ON DELETE CASCADE,
FOREIGN KEY (assignment_id) REFERENCES assignments (assignment_id) ON DELETE CASCADE,
FOREIGN KEY (problem_id) REFERENCES problems (problem_id) ON DELETE CASCADE,
FOREIGN KEY (user_id) REFERENCES users(user_id) ON DELETE CASCADE,
PRIMARY KEY (course_id, assignment_id, problem_id, user_id, submission_id))''',
'''INSERT INTO submissions2
SELECT course_id, assignment_id, problem_id, user_id, submission_id, code,
text_output, image_output, passed, date
FROM submissions''',
'DROP TABLE IF EXISTS submissions',
'ALTER TABLE submissions2 RENAME TO submissions'
]
error_occurred = False
for sql in alter_sql_list:
try:
cursor.execute(sql)
except:
print(sql)
print(traceback.format_exc())
error_occurred = True
if not error_occurred:
print("Success")
| 41.910112 | 146 | 0.571314 |
4d6aba53be016dfafeb618c7972f5e2504c99521 | 809 | py | Python | examples/example_tbl.py | alexmaragko/pyPAHdb | 54cbf11748185d800e06cde603e93275bdd059f4 | [
"BSD-3-Clause"
] | 4 | 2018-03-13T23:44:48.000Z | 2022-01-28T15:27:24.000Z | examples/example_tbl.py | alexmaragko/pyPAHdb | 54cbf11748185d800e06cde603e93275bdd059f4 | [
"BSD-3-Clause"
] | 35 | 2018-03-14T22:47:03.000Z | 2021-10-21T12:08:38.000Z | examples/example_tbl.py | alexmaragko/pyPAHdb | 54cbf11748185d800e06cde603e93275bdd059f4 | [
"BSD-3-Clause"
] | 6 | 2018-03-13T17:50:17.000Z | 2021-07-16T02:42:50.000Z | #!/usr/bin/env python3
"""
example.py
Example of using pypahdb to decompose an astronomical PAH spectrum.
"""
import pkg_resources
from pypahdb.decomposer import Decomposer
from pypahdb.observation import Observation
if __name__ == '__main__':
# The sample data (IPAC table).
file_path = 'resources/sample_data_NGC7023.tbl'
data_file = pkg_resources.resource_filename('pypahdb', file_path)
# Construct an Observation object.
obs = Observation(data_file)
# Pass the Observation's spectrum to Decomposer, which performs the fit.
pahdb_fit = Decomposer(obs.spectrum)
# Save the fit to disk, both as a PDF and FITS file.
pahdb_fit.save_pdf('NGC7023_pypahdb_tbl_example.pdf', domaps=False)
pahdb_fit.save_fits('NGC7023_pypahdb_tbl_example.fits', header=obs.header)
| 27.896552 | 78 | 0.754017 |
4d6b1faba00fbb5e279ded48969722c18dfa7079 | 10,448 | py | Python | homeassistant/components/zha/core/registries.py | headcode/home-assistant | ef338fa8803c9691c545cb335503723d271c652c | [
"Apache-2.0"
] | null | null | null | homeassistant/components/zha/core/registries.py | headcode/home-assistant | ef338fa8803c9691c545cb335503723d271c652c | [
"Apache-2.0"
] | null | null | null | homeassistant/components/zha/core/registries.py | headcode/home-assistant | ef338fa8803c9691c545cb335503723d271c652c | [
"Apache-2.0"
] | null | null | null | """
Mapping registries for Zigbee Home Automation.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/zha/
"""
from .const import (
DEVICE_CLASS, SINGLE_INPUT_CLUSTER_DEVICE_CLASS,
SINGLE_OUTPUT_CLUSTER_DEVICE_CLASS, COMPONENT_CLUSTERS, HUMIDITY,
TEMPERATURE, ILLUMINANCE, PRESSURE, METERING, ELECTRICAL_MEASUREMENT,
EVENT_RELAY_CLUSTERS, OPENING, ZONE,
OCCUPANCY, CLUSTER_REPORT_CONFIGS, REPORT_CONFIG_IMMEDIATE,
REPORT_CONFIG_ASAP, REPORT_CONFIG_DEFAULT, REPORT_CONFIG_MIN_INT,
REPORT_CONFIG_MAX_INT, REPORT_CONFIG_OP,
NO_SENSOR_CLUSTERS, BINDABLE_CLUSTERS, ACCELERATION, SENSOR_TYPES,
BINARY_SENSOR_TYPES, RADIO_TYPES, RadioType, RADIO, RADIO_DESCRIPTION,
CONTROLLER
)
SMARTTHINGS_HUMIDITY_CLUSTER = 64581
SMARTTHINGS_ACCELERATION_CLUSTER = 64514
def establish_device_mappings():
"""Establish mappings between ZCL objects and HA ZHA objects.
These cannot be module level, as importing bellows must be done in a
in a function.
"""
from zigpy import zcl
from zigpy.profiles import PROFILES, zha, zll
if zha.PROFILE_ID not in DEVICE_CLASS:
DEVICE_CLASS[zha.PROFILE_ID] = {}
if zll.PROFILE_ID not in DEVICE_CLASS:
DEVICE_CLASS[zll.PROFILE_ID] = {}
RADIO_TYPES[RadioType.ezsp.name] = {
RADIO: get_ezsp_radio,
RADIO_DESCRIPTION: 'EZSP'
}
RADIO_TYPES[RadioType.xbee.name] = {
RADIO: get_xbee_radio,
RADIO_DESCRIPTION: 'XBee'
}
RADIO_TYPES[RadioType.deconz.name] = {
RADIO: get_deconz_radio,
RADIO_DESCRIPTION: 'Deconz'
}
EVENT_RELAY_CLUSTERS.append(zcl.clusters.general.LevelControl.cluster_id)
EVENT_RELAY_CLUSTERS.append(zcl.clusters.general.OnOff.cluster_id)
NO_SENSOR_CLUSTERS.append(zcl.clusters.general.Basic.cluster_id)
NO_SENSOR_CLUSTERS.append(
zcl.clusters.general.PowerConfiguration.cluster_id)
NO_SENSOR_CLUSTERS.append(zcl.clusters.lightlink.LightLink.cluster_id)
BINDABLE_CLUSTERS.append(zcl.clusters.general.LevelControl.cluster_id)
BINDABLE_CLUSTERS.append(zcl.clusters.general.OnOff.cluster_id)
BINDABLE_CLUSTERS.append(zcl.clusters.lighting.Color.cluster_id)
DEVICE_CLASS[zha.PROFILE_ID].update({
zha.DeviceType.ON_OFF_SWITCH: 'binary_sensor',
zha.DeviceType.LEVEL_CONTROL_SWITCH: 'binary_sensor',
zha.DeviceType.REMOTE_CONTROL: 'binary_sensor',
zha.DeviceType.SMART_PLUG: 'switch',
zha.DeviceType.LEVEL_CONTROLLABLE_OUTPUT: 'light',
zha.DeviceType.ON_OFF_LIGHT: 'light',
zha.DeviceType.DIMMABLE_LIGHT: 'light',
zha.DeviceType.COLOR_DIMMABLE_LIGHT: 'light',
zha.DeviceType.ON_OFF_LIGHT_SWITCH: 'binary_sensor',
zha.DeviceType.DIMMER_SWITCH: 'binary_sensor',
zha.DeviceType.COLOR_DIMMER_SWITCH: 'binary_sensor',
})
DEVICE_CLASS[zll.PROFILE_ID].update({
zll.DeviceType.ON_OFF_LIGHT: 'light',
zll.DeviceType.ON_OFF_PLUGIN_UNIT: 'switch',
zll.DeviceType.DIMMABLE_LIGHT: 'light',
zll.DeviceType.DIMMABLE_PLUGIN_UNIT: 'light',
zll.DeviceType.COLOR_LIGHT: 'light',
zll.DeviceType.EXTENDED_COLOR_LIGHT: 'light',
zll.DeviceType.COLOR_TEMPERATURE_LIGHT: 'light',
zll.DeviceType.COLOR_CONTROLLER: 'binary_sensor',
zll.DeviceType.COLOR_SCENE_CONTROLLER: 'binary_sensor',
zll.DeviceType.CONTROLLER: 'binary_sensor',
zll.DeviceType.SCENE_CONTROLLER: 'binary_sensor',
zll.DeviceType.ON_OFF_SENSOR: 'binary_sensor',
})
SINGLE_INPUT_CLUSTER_DEVICE_CLASS.update({
zcl.clusters.general.OnOff: 'switch',
zcl.clusters.measurement.RelativeHumidity: 'sensor',
# this works for now but if we hit conflicts we can break it out to
# a different dict that is keyed by manufacturer
SMARTTHINGS_HUMIDITY_CLUSTER: 'sensor',
zcl.clusters.measurement.TemperatureMeasurement: 'sensor',
zcl.clusters.measurement.PressureMeasurement: 'sensor',
zcl.clusters.measurement.IlluminanceMeasurement: 'sensor',
zcl.clusters.smartenergy.Metering: 'sensor',
zcl.clusters.homeautomation.ElectricalMeasurement: 'sensor',
zcl.clusters.security.IasZone: 'binary_sensor',
zcl.clusters.measurement.OccupancySensing: 'binary_sensor',
zcl.clusters.hvac.Fan: 'fan',
SMARTTHINGS_ACCELERATION_CLUSTER: 'binary_sensor',
})
SINGLE_OUTPUT_CLUSTER_DEVICE_CLASS.update({
zcl.clusters.general.OnOff: 'binary_sensor',
})
SENSOR_TYPES.update({
zcl.clusters.measurement.RelativeHumidity.cluster_id: HUMIDITY,
SMARTTHINGS_HUMIDITY_CLUSTER: HUMIDITY,
zcl.clusters.measurement.TemperatureMeasurement.cluster_id:
TEMPERATURE,
zcl.clusters.measurement.PressureMeasurement.cluster_id: PRESSURE,
zcl.clusters.measurement.IlluminanceMeasurement.cluster_id:
ILLUMINANCE,
zcl.clusters.smartenergy.Metering.cluster_id: METERING,
zcl.clusters.homeautomation.ElectricalMeasurement.cluster_id:
ELECTRICAL_MEASUREMENT,
})
BINARY_SENSOR_TYPES.update({
zcl.clusters.measurement.OccupancySensing.cluster_id: OCCUPANCY,
zcl.clusters.security.IasZone.cluster_id: ZONE,
zcl.clusters.general.OnOff.cluster_id: OPENING,
SMARTTHINGS_ACCELERATION_CLUSTER: ACCELERATION,
})
CLUSTER_REPORT_CONFIGS.update({
zcl.clusters.general.Alarms.cluster_id: [],
zcl.clusters.general.Basic.cluster_id: [],
zcl.clusters.general.Commissioning.cluster_id: [],
zcl.clusters.general.Identify.cluster_id: [],
zcl.clusters.general.Groups.cluster_id: [],
zcl.clusters.general.Scenes.cluster_id: [],
zcl.clusters.general.Partition.cluster_id: [],
zcl.clusters.general.Ota.cluster_id: [],
zcl.clusters.general.PowerProfile.cluster_id: [],
zcl.clusters.general.ApplianceControl.cluster_id: [],
zcl.clusters.general.PollControl.cluster_id: [],
zcl.clusters.general.GreenPowerProxy.cluster_id: [],
zcl.clusters.general.OnOffConfiguration.cluster_id: [],
zcl.clusters.lightlink.LightLink.cluster_id: [],
zcl.clusters.general.OnOff.cluster_id: [{
'attr': 'on_off',
'config': REPORT_CONFIG_IMMEDIATE
}],
zcl.clusters.general.LevelControl.cluster_id: [{
'attr': 'current_level',
'config': REPORT_CONFIG_ASAP
}],
zcl.clusters.lighting.Color.cluster_id: [{
'attr': 'current_x',
'config': REPORT_CONFIG_DEFAULT
}, {
'attr': 'current_y',
'config': REPORT_CONFIG_DEFAULT
}, {
'attr': 'color_temperature',
'config': REPORT_CONFIG_DEFAULT
}],
zcl.clusters.measurement.RelativeHumidity.cluster_id: [{
'attr': 'measured_value',
'config': (
REPORT_CONFIG_MIN_INT,
REPORT_CONFIG_MAX_INT,
50
)
}],
zcl.clusters.measurement.TemperatureMeasurement.cluster_id: [{
'attr': 'measured_value',
'config': (
REPORT_CONFIG_MIN_INT,
REPORT_CONFIG_MAX_INT,
50
)
}],
SMARTTHINGS_ACCELERATION_CLUSTER: [{
'attr': 'acceleration',
'config': REPORT_CONFIG_ASAP
}, {
'attr': 'x_axis',
'config': REPORT_CONFIG_ASAP
}, {
'attr': 'y_axis',
'config': REPORT_CONFIG_ASAP
}, {
'attr': 'z_axis',
'config': REPORT_CONFIG_ASAP
}],
SMARTTHINGS_HUMIDITY_CLUSTER: [{
'attr': 'measured_value',
'config': (
REPORT_CONFIG_MIN_INT,
REPORT_CONFIG_MAX_INT,
50
)
}],
zcl.clusters.measurement.PressureMeasurement.cluster_id: [{
'attr': 'measured_value',
'config': REPORT_CONFIG_DEFAULT
}],
zcl.clusters.measurement.IlluminanceMeasurement.cluster_id: [{
'attr': 'measured_value',
'config': REPORT_CONFIG_DEFAULT
}],
zcl.clusters.smartenergy.Metering.cluster_id: [{
'attr': 'instantaneous_demand',
'config': REPORT_CONFIG_DEFAULT
}],
zcl.clusters.homeautomation.ElectricalMeasurement.cluster_id: [{
'attr': 'active_power',
'config': REPORT_CONFIG_DEFAULT
}],
zcl.clusters.general.PowerConfiguration.cluster_id: [{
'attr': 'battery_voltage',
'config': REPORT_CONFIG_DEFAULT
}, {
'attr': 'battery_percentage_remaining',
'config': REPORT_CONFIG_DEFAULT
}],
zcl.clusters.measurement.OccupancySensing.cluster_id: [{
'attr': 'occupancy',
'config': REPORT_CONFIG_IMMEDIATE
}],
zcl.clusters.hvac.Fan.cluster_id: [{
'attr': 'fan_mode',
'config': REPORT_CONFIG_OP
}],
})
# A map of hass components to all Zigbee clusters it could use
for profile_id, classes in DEVICE_CLASS.items():
profile = PROFILES[profile_id]
for device_type, component in classes.items():
if component not in COMPONENT_CLUSTERS:
COMPONENT_CLUSTERS[component] = (set(), set())
clusters = profile.CLUSTERS[device_type]
COMPONENT_CLUSTERS[component][0].update(clusters[0])
COMPONENT_CLUSTERS[component][1].update(clusters[1])
| 38.411765 | 77 | 0.656011 |
4d6b39d23d412e44e53a7dcf8c8b360f9c624422 | 3,292 | py | Python | contrib/EISeg/eiseg/data/base.py | haoyuying/PaddleSeg | 6607d88df39500330a7b6ed160b4626d9f38df66 | [
"ECL-2.0",
"Apache-2.0"
] | 1 | 2021-08-04T02:47:33.000Z | 2021-08-04T02:47:33.000Z | contrib/EISeg/eiseg/data/base.py | isshf/PaddleSeg | 6607d88df39500330a7b6ed160b4626d9f38df66 | [
"Apache-2.0"
] | null | null | null | contrib/EISeg/eiseg/data/base.py | isshf/PaddleSeg | 6607d88df39500330a7b6ed160b4626d9f38df66 | [
"Apache-2.0"
] | 2 | 2021-08-04T02:48:50.000Z | 2021-11-16T08:13:41.000Z | import random
import pickle
import cv2
import numpy as np
import paddle
import paddleseg.transforms as T
from .points_sampler import MultiPointSampler
| 33.591837 | 110 | 0.630316 |
4d6be0364dfeb7a10616e3c99f46ad7c4e3cb501 | 1,008 | py | Python | handler/createThumbnail.py | brainlife/ezbids | 886c69942d19d59dc48d5049f226bd7bb98a8cb8 | [
"MIT"
] | 7 | 2020-08-07T20:46:27.000Z | 2022-03-21T12:08:20.000Z | handler/createThumbnail.py | brainlife/ezbids | 886c69942d19d59dc48d5049f226bd7bb98a8cb8 | [
"MIT"
] | 36 | 2020-10-18T15:17:17.000Z | 2022-03-31T14:41:26.000Z | handler/createThumbnail.py | brainlife/ezbids | 886c69942d19d59dc48d5049f226bd7bb98a8cb8 | [
"MIT"
] | 4 | 2020-07-29T16:48:55.000Z | 2021-11-17T22:13:19.000Z | #!/usr/bin/env python3
"""
Created on Wed Feb 17 08:32:55 2021
Deface anatomical image(s)
@author: dlevitas
"""
import os, sys
import nibabel as nib
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
plt.style.use('dark_background')
from math import floor
os.environ[ 'MPLCONFIGDIR' ] = '/tmp/'
print("loading image to create thumbnail "+sys.argv[1])
image = nib.load(sys.argv[1])
output_image = sys.argv[2]
object_img_array = image.dataobj[:]
slice_x = object_img_array[floor(object_img_array.shape[0]/2), :, :]
slice_y = object_img_array[:, floor(object_img_array.shape[1]/2), :]
slice_z = object_img_array[:, :, floor(object_img_array.shape[2]/2)]
fig, axes = plt.subplots(1,3, figsize=(9,3))
for i, slice in enumerate([slice_x, slice_y, slice_z]):
print("creating thumbnail "+str(i))
axes[i].imshow(slice.T, cmap="gray", origin="lower", aspect='auto')
axes[i].axis('off')
plt.subplots_adjust(wspace=0, hspace=0)
plt.savefig(output_image, bbox_inches='tight')
| 24.585366 | 71 | 0.717262 |
4d6cac5c33887bd192cb3cbbad427835d929fc30 | 4,511 | py | Python | IPython/core/events.py | pyarnold/ipython | c4797f7f069d0a974ddfa1e4251c7550c809dba0 | [
"BSD-3-Clause-Clear"
] | 1 | 2020-12-18T01:07:55.000Z | 2020-12-18T01:07:55.000Z | IPython/core/events.py | pyarnold/ipython | c4797f7f069d0a974ddfa1e4251c7550c809dba0 | [
"BSD-3-Clause-Clear"
] | null | null | null | IPython/core/events.py | pyarnold/ipython | c4797f7f069d0a974ddfa1e4251c7550c809dba0 | [
"BSD-3-Clause-Clear"
] | null | null | null | """Infrastructure for registering and firing callbacks on application events.
Unlike :mod:`IPython.core.hooks`, which lets end users set single functions to
be called at specific times, or a collection of alternative methods to try,
callbacks are designed to be used by extension authors. A number of callbacks
can be registered for the same event without needing to be aware of one another.
The functions defined in this module are no-ops indicating the names of available
events and the arguments which will be passed to them.
.. note::
This API is experimental in IPython 2.0, and may be revised in future versions.
"""
from __future__ import print_function
# event_name -> prototype mapping
available_events = {}
# ------------------------------------------------------------------------
# Callback prototypes
#
# No-op functions which describe the names of available events and the
# signatures of callbacks for those events.
# ------------------------------------------------------------------------
| 30.275168 | 94 | 0.643095 |
4d6e2c5b304d30fabd691d7b3a8dc9cf98b02a31 | 290 | py | Python | api/routes/topic.py | quyenthucdoan/CUSTOMER-EMOTION-ANALYSIS-SYSTEM-BASED-ON-VIETNAMESE-COMMENTS | 445de0d169fd2b0005726f8d12b5844ea3a69ca4 | [
"Unlicense"
] | null | null | null | api/routes/topic.py | quyenthucdoan/CUSTOMER-EMOTION-ANALYSIS-SYSTEM-BASED-ON-VIETNAMESE-COMMENTS | 445de0d169fd2b0005726f8d12b5844ea3a69ca4 | [
"Unlicense"
] | null | null | null | api/routes/topic.py | quyenthucdoan/CUSTOMER-EMOTION-ANALYSIS-SYSTEM-BASED-ON-VIETNAMESE-COMMENTS | 445de0d169fd2b0005726f8d12b5844ea3a69ca4 | [
"Unlicense"
] | null | null | null | from app import app
from services import TopicServices
from flask import jsonify, request | 32.222222 | 48 | 0.737931 |
4d6eb4361eb99ba59182cf00398ec7253f4ded06 | 4,602 | py | Python | test/functional/feature_uaclient.py | syedrizwanmy/bitcoin-abc | 1241ef50e90d183421504fd783b9fd4dd6c1386a | [
"MIT"
] | 1,266 | 2017-05-02T07:02:29.000Z | 2022-03-31T17:15:44.000Z | test/functional/feature_uaclient.py | EGYVOICE/bitcoin-abc-avalanche | e0f1fe857e1fc85f01903f1c323c2d5c54aecc1c | [
"MIT"
] | 426 | 2017-05-07T12:40:52.000Z | 2022-03-29T18:12:01.000Z | test/functional/feature_uaclient.py | EGYVOICE/bitcoin-abc-avalanche | e0f1fe857e1fc85f01903f1c323c2d5c54aecc1c | [
"MIT"
] | 721 | 2017-05-07T10:36:11.000Z | 2022-03-15T09:07:48.000Z | #!/usr/bin/env python3
# Copyright (c) 2021 The Bitcoin developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the -uaclientname and -uaclientversion option."""
import re
from test_framework.test_framework import BitcoinTestFramework
from test_framework.test_node import ErrorMatch
from test_framework.util import assert_equal
if __name__ == '__main__':
UseragentTest().main()
| 48.442105 | 156 | 0.63342 |
4d6f1a664c1307c758152f80bcc30999ef1ce302 | 5,619 | py | Python | tests/test_dispersion_formulas.py | mnishida/refractiveindex.info-Pandas | 05317271ee9e550287887536ee03485164949ba2 | [
"MIT"
] | 1 | 2021-08-04T05:09:16.000Z | 2021-08-04T05:09:16.000Z | tests/test_dispersion_formulas.py | mnishida/refractiveindex.info-Pandas | 05317271ee9e550287887536ee03485164949ba2 | [
"MIT"
] | 4 | 2021-08-16T07:15:10.000Z | 2021-09-04T08:47:47.000Z | tests/test_dispersion_formulas.py | mnishida/refractiveindex.info-Pandas | 05317271ee9e550287887536ee03485164949ba2 | [
"MIT"
] | 1 | 2021-08-04T05:09:13.000Z | 2021-08-04T05:09:13.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import annotations
import unittest
import numpy as np
import numpy.testing as npt
import pandas as pd
from riip.material import RiiMaterial
if __name__ == "__main__":
unittest.main()
| 36.487013 | 88 | 0.433707 |
4d71a5d711522e37eda93e8de3bbe07019f81e95 | 3,832 | py | Python | neutron/tests/unit/services/logapi/rpc/test_server.py | mcadariu/neutron | 35494af5a25efb8b314941ab85b44923654f6acc | [
"Apache-2.0"
] | 1 | 2018-07-04T07:59:31.000Z | 2018-07-04T07:59:31.000Z | neutron/tests/unit/services/logapi/rpc/test_server.py | ljzjohnson/neutron | d78664321482c15981a09642985a540195e754e3 | [
"Apache-2.0"
] | null | null | null | neutron/tests/unit/services/logapi/rpc/test_server.py | ljzjohnson/neutron | d78664321482c15981a09642985a540195e754e3 | [
"Apache-2.0"
] | 1 | 2018-08-28T17:13:16.000Z | 2018-08-28T17:13:16.000Z | # Copyright (c) 2017 Fujitsu Limited
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_config import cfg
import oslo_messaging
from neutron.api.rpc.callbacks import events
from neutron.api.rpc.handlers import resources_rpc
from neutron.services.logapi.common import constants as log_const
from neutron.services.logapi.rpc import server as server_rpc
from neutron.tests import base
| 40.765957 | 79 | 0.686587 |