hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 248 | max_stars_repo_name stringlengths 5 125 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 248 | max_issues_repo_name stringlengths 5 125 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 248 | max_forks_repo_name stringlengths 5 125 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 5 2.06M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.03M | alphanum_fraction float64 0 1 | count_classes int64 0 1.6M | score_classes float64 0 1 | count_generators int64 0 651k | score_generators float64 0 1 | count_decorators int64 0 990k | score_decorators float64 0 1 | count_async_functions int64 0 235k | score_async_functions float64 0 1 | count_documentation int64 0 1.04M | score_documentation float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4cd5a6c75e223d3abe361d5bb54c6c56d248951a | 307 | py | Python | learning/bs/bs2.py | Nephrin/Tut | 9454be28fd37c155d0b4e97876196f8d33ccf8e5 | [
"Apache-2.0"
] | 2 | 2019-06-23T07:17:30.000Z | 2019-07-06T15:15:42.000Z | learning/bs/bs2.py | Nephrin/Tut | 9454be28fd37c155d0b4e97876196f8d33ccf8e5 | [
"Apache-2.0"
] | null | null | null | learning/bs/bs2.py | Nephrin/Tut | 9454be28fd37c155d0b4e97876196f8d33ccf8e5 | [
"Apache-2.0"
] | 1 | 2019-06-23T07:17:43.000Z | 2019-06-23T07:17:43.000Z | def main():
with open("number.txt", "r") as file:
data = file.read()
data = data.split("\n")
x = [row.split("\t") for row in data[:5]]
print(function(x))
def function(x):
    """Return the sum of the first field of every row.

    ``x`` is a sequence of rows (e.g. tab-split lines) whose first element
    is the string form of an integer.  An empty sequence sums to 0.
    """
    # The original shadowed the builtin ``sum`` and iterated over a
    # pointless copy (``x[0:]``); the builtin does the whole job.
    return sum(int(row[0]) for row in x)
# Script entry point (PEP 8 spacing; the stray trailing semicolon is gone).
if __name__ == "__main__":
    main()
| 18.058824 | 43 | 0.517915 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 33 | 0.107492 |
4cd722ff9dd1a9f34e8ee2680df4379e934645c7 | 207 | py | Python | {{cookiecutter.project_name}}/resources/admin.py | phalt/cookicutter-django-api | 01c7ffea8805a05e704819d21f20fe48cec436c0 | [
"BSD-3-Clause"
] | 16 | 2015-04-04T20:50:00.000Z | 2017-08-18T19:18:02.000Z | {{cookiecutter.project_name}}/resources/admin.py | phalt/cookicutter-django-api | 01c7ffea8805a05e704819d21f20fe48cec436c0 | [
"BSD-3-Clause"
] | null | null | null | {{cookiecutter.project_name}}/resources/admin.py | phalt/cookicutter-django-api | 01c7ffea8805a05e704819d21f20fe48cec436c0 | [
"BSD-3-Clause"
] | 4 | 2015-04-24T03:51:50.000Z | 2020-02-10T13:47:02.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.contrib import admin

from .models import BaseModel

# Models exposed through the Django admin site.
classes = [BaseModel]

for c in classes:
    admin.site.register(c)
| 13.8 | 39 | 0.705314 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 23 | 0.111111 |
4cd9e96ed0f6636dc77519153d4e814c1b90af2d | 1,118 | py | Python | moocng/contact/models.py | OpenMOOC/moocng | 1e3dafb84aa1838c881df0c9bcca069e47c7f52d | [
"Apache-2.0"
] | 36 | 2015-01-10T06:00:36.000Z | 2020-03-19T10:06:59.000Z | moocng/contact/models.py | OpenMOOC/moocng | 1e3dafb84aa1838c881df0c9bcca069e47c7f52d | [
"Apache-2.0"
] | 3 | 2015-10-01T17:59:32.000Z | 2018-09-04T03:32:17.000Z | moocng/contact/models.py | OpenMOOC/moocng | 1e3dafb84aa1838c881df0c9bcca069e47c7f52d | [
"Apache-2.0"
] | 17 | 2015-01-13T03:46:58.000Z | 2020-07-05T06:29:51.000Z | # Copyright 2013 UNED
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.db import models
from django.utils.translation import ugettext_lazy as _
class CommunicationType(models.Model):
    """A category of contact communication with a destination address."""
    # Human-readable name of this communication type.
    title = models.CharField(
        verbose_name=_(u'Title'),
        max_length=200,
    )
    # Address these communications are sent to; optional in forms
    # (blank=True) but stored as an empty string rather than NULL.
    destination = models.EmailField(
        verbose_name=_(u'Email address to send this communications'),
        null=False,
        blank=True,
    )
    class Meta:
        verbose_name = _(u'communication type')
        verbose_name_plural = _(u'communication types')
    def __unicode__(self):
        # Python 2 text representation (this codebase predates __str__).
        return self.title
| 29.421053 | 74 | 0.71288 | 461 | 0.412343 | 0 | 0 | 0 | 0 | 0 | 0 | 649 | 0.580501 |
4cda412ef162b3f72b00e281488be9d02ef6682d | 172 | py | Python | nameko_mongoengine/__init__.py | ketgo/nameko-mongoengine | 94b4a41dfa845cec8b48f874c0b64658a1c4bef6 | [
"Apache-2.0"
] | 2 | 2019-12-06T17:51:44.000Z | 2020-02-20T22:38:38.000Z | nameko_mongoengine/__init__.py | ketgo/nameko-mongoengine | 94b4a41dfa845cec8b48f874c0b64658a1c4bef6 | [
"Apache-2.0"
] | 1 | 2022-03-07T02:32:45.000Z | 2022-03-07T06:45:43.000Z | nameko_mongoengine/__init__.py | ketgo/nameko-mongoengine | 94b4a41dfa845cec8b48f874c0b64658a1c4bef6 | [
"Apache-2.0"
] | null | null | null | """
MongoEngine dependency provider
"""
from __future__ import absolute_import
from .mongo_engine import MongoEngine
__all__ = ["MongoEngine", ] # pragma: no cover
| 17.2 | 47 | 0.744186 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 74 | 0.430233 |
4cda8215848bfc1e46d422423beea22294f3245f | 1,717 | py | Python | search_for_similar_images__perceptual_hash__phash/ui/SearchForSimilarSettingsWidget.py | DazEB2/SimplePyScripts | 1dde0a42ba93fe89609855d6db8af1c63b1ab7cc | [
"CC-BY-4.0"
] | 117 | 2015-12-18T07:18:27.000Z | 2022-03-28T00:25:54.000Z | search_for_similar_images__perceptual_hash__phash/ui/SearchForSimilarSettingsWidget.py | DazEB2/SimplePyScripts | 1dde0a42ba93fe89609855d6db8af1c63b1ab7cc | [
"CC-BY-4.0"
] | 8 | 2018-10-03T09:38:46.000Z | 2021-12-13T19:51:09.000Z | search_for_similar_images__perceptual_hash__phash/ui/SearchForSimilarSettingsWidget.py | DazEB2/SimplePyScripts | 1dde0a42ba93fe89609855d6db8af1c63b1ab7cc | [
"CC-BY-4.0"
] | 28 | 2016-08-02T17:43:47.000Z | 2022-03-21T08:31:12.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'ipetrash'
from PyQt5.QtWidgets import QWidget, QFormLayout, QComboBox, QSpinBox, QCheckBox
from PyQt5.QtCore import QSettings, pyqtSignal
from common import IMAGE_HASH_ALGO, DEFAULT_IMAGE_HASH_ALGO, DEFAULT_IMAGE_HASH_MAX_SCORE
class SearchForSimilarSettingsWidget(QWidget):
    """Settings form for the similar-image search: hash algorithm,
    maximum score, and whether to mark matching images."""
    # Re-emitted with the checkbox state when "Mark matching" is clicked.
    about_mark_matching = pyqtSignal(bool)
    def __init__(self):
        super().__init__()
        self.setWindowTitle("Search for similar")
        # Image-hash algorithm selector, populated from the shared list.
        self.cb_algo = QComboBox()
        self.cb_algo.addItems(IMAGE_HASH_ALGO)
        # Maximum hash score still counted as a match.
        self.sb_max_score = QSpinBox()
        self.cb_mark_matching = QCheckBox()
        self.cb_mark_matching.clicked.connect(self.about_mark_matching)
        layout = QFormLayout()
        layout.addRow("Hash algo:", self.cb_algo)
        layout.addRow("Max score:", self.sb_max_score)
        layout.addRow("Mark matching:", self.cb_mark_matching)
        self.setLayout(layout)
    def read_settings(self, ini: QSettings):
        """Restore widget state from *ini*, falling back to module defaults."""
        # Settings live in a group named after the class.
        ini.beginGroup(self.__class__.__name__)
        self.cb_algo.setCurrentText(
            ini.value('algo', DEFAULT_IMAGE_HASH_ALGO)
        )
        self.sb_max_score.setValue(
            int(ini.value('max_score', DEFAULT_IMAGE_HASH_MAX_SCORE))
        )
        # The value is stored as the string 'true'/'false', hence the
        # string comparison rather than a bool read.
        self.cb_mark_matching.setChecked(
            ini.value('mark_matching', 'true') == 'true'
        )
        ini.endGroup()
    def write_settings(self, ini: QSettings):
        """Persist current widget state under this class's settings group."""
        ini.beginGroup(self.__class__.__name__)
        ini.setValue('algo', self.cb_algo.currentText())
        ini.setValue('max_score', self.sb_max_score.value())
        ini.setValue('mark_matching', self.cb_mark_matching.isChecked())
        ini.endGroup()
| 29.101695 | 89 | 0.675015 | 1,421 | 0.827606 | 0 | 0 | 0 | 0 | 0 | 0 | 191 | 0.111241 |
4cdadb09f1bb690b5cf67fc0d25f299913c44ad2 | 128 | py | Python | app/auth_app/blueprint.py | ganggas95/E-Wisata | fb66fc7d3d4cc5a45ad9acea42fb306140a6449f | [
"Apache-2.0"
] | null | null | null | app/auth_app/blueprint.py | ganggas95/E-Wisata | fb66fc7d3d4cc5a45ad9acea42fb306140a6449f | [
"Apache-2.0"
] | null | null | null | app/auth_app/blueprint.py | ganggas95/E-Wisata | fb66fc7d3d4cc5a45ad9acea42fb306140a6449f | [
"Apache-2.0"
] | 1 | 2020-02-12T09:21:15.000Z | 2020-02-12T09:21:15.000Z | from flask import Blueprint
# Flask blueprint grouping the auth_app routes; templates are resolved
# against this package's local ``templates`` directory.
auth_blueprint = Blueprint(
    'auth_blueprint',
    __name__,
    template_folder='templates'
)
| 14.222222 | 31 | 0.726563 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 27 | 0.210938 |
4cdc30990721503a59591c44c05564a326c709e4 | 47 | py | Python | Python/Topics/Bytes basics/Ord() application/main.py | drtierney/hyperskill-problems | b74da993f0ac7bcff1cbd5d89a3a1b06b05f33e0 | [
"MIT"
] | 5 | 2020-08-29T15:15:31.000Z | 2022-03-01T18:22:34.000Z | Python/Topics/Bytes basics/Ord() application/main.py | drtierney/hyperskill-problems | b74da993f0ac7bcff1cbd5d89a3a1b06b05f33e0 | [
"MIT"
] | null | null | null | Python/Topics/Bytes basics/Ord() application/main.py | drtierney/hyperskill-problems | b74da993f0ac7bcff1cbd5d89a3a1b06b05f33e0 | [
"MIT"
] | 1 | 2020-12-02T11:13:14.000Z | 2020-12-02T11:13:14.000Z | a = input()
# Read the second character (PEP 8 spacing fixed) and print the sum of
# both characters' Unicode code points.
b = input()
print(ord(a) + ord(b))
| 9.4 | 22 | 0.531915 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
4cdd1fd0a18fed3da4d3c58601225a03c0e5fbd6 | 1,571 | py | Python | test.py | ThomDietrich/singletonify-python | 11cc56237095544c61c4d45bb61f1a7824da19dc | [
"MIT"
] | 3 | 2018-10-08T07:01:15.000Z | 2019-12-12T03:48:53.000Z | test.py | ThomDietrich/singletonify-python | 11cc56237095544c61c4d45bb61f1a7824da19dc | [
"MIT"
] | 1 | 2021-05-19T00:04:48.000Z | 2021-06-01T17:11:05.000Z | test.py | ThomDietrich/singletonify-python | 11cc56237095544c61c4d45bb61f1a7824da19dc | [
"MIT"
] | 1 | 2021-06-01T16:35:57.000Z | 2021-06-01T16:35:57.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017~2999 - cologler <skyoflw@gmail.com>
# ----------
#
# ----------
from pytest import raises
from singletonify import singleton
def test_base():
    """Initialization is lazy: the flag flips on first instantiation."""
    @singleton()
    class Lazy:
        pass
    assert not Lazy._is_init()
    assert Lazy() is Lazy()
    assert Lazy._is_init()
def test_with_args():
    """Decorator keyword arguments are forwarded to __init__ exactly once."""
    @singleton(x='s')
    class Holder:
        def __init__(self, x):
            self.x = x
    assert Holder() is Holder()
    assert Holder().x == 's'
def test_instance_check():
    """The singleton instance still isinstance-checks against its class."""
    @singleton()
    class Only:
        pass
    instance = Only()
    assert isinstance(instance, Only)
def test_subclass_check():
    """Decorating a subclass keeps the subclass relationship intact."""
    class Base:
        pass
    @singleton()
    class Child(Base):
        pass
    assert issubclass(Child, Base)
def test_multi_apply():
    """Independently decorated classes never share an instance."""
    @singleton()
    class First:
        pass
    @singleton()
    class Second:
        pass
    assert First() is First()
    assert Second() is Second()
    assert First() is not Second()
def test_with_slots():
    """A __slots__ declaration survives the decorator."""
    @singleton()
    class WithDict:
        pass
    @singleton()
    class Slotted:
        __slots__ = ('buffer', )
    assert hasattr(WithDict(), '__dict__')
    assert not hasattr(Slotted(), '__dict__')
def test_inherit():
    """Only the decorated subclass is a singleton; its base is untouched."""
    class Plain:
        pass
    @singleton()
    class Single(Plain):
        pass
    assert Single() is Single()
    assert Plain() is not Plain()
    assert Single() is not Plain()
    assert type(Single()) is Single
    assert isinstance(Single(), Single)
def test_inherit_from_singleton():
    """Subclassing an already-singleton class is rejected with TypeError."""
    @singleton()
    class Sealed:
        pass
    # cannot inherit
    with raises(TypeError, match='cannot inherit from a singleton class'):
        @singleton()
        class Doomed(Sealed):
            pass
| 16.536842 | 74 | 0.54233 | 347 | 0.220878 | 0 | 0 | 501 | 0.318905 | 0 | 0 | 215 | 0.136856 |
4cdd5fc3021ddb28a615b5faa80ee7587dea639a | 2,166 | py | Python | vika/services/svc_crypto.py | jtprogru/vika | 6bf59dbad169cdb3a10f354e6df0c99cb5d98988 | [
"WTFPL"
] | null | null | null | vika/services/svc_crypto.py | jtprogru/vika | 6bf59dbad169cdb3a10f354e6df0c99cb5d98988 | [
"WTFPL"
] | null | null | null | vika/services/svc_crypto.py | jtprogru/vika | 6bf59dbad169cdb3a10f354e6df0c99cb5d98988 | [
"WTFPL"
] | null | null | null | from coinmarketcapapi import CoinMarketCapAPI, CoinMarketCapAPIError
from pycoingecko import CoinGeckoAPI
from vika.config import COIN_MARKET_CAP_API_KEY
class Crypto:
    """Facade over the two price providers.

    Coin Market Cap is the primary source; Gecko serves as the fallback
    whenever the primary raises ``CoinMarketCapAPIError``.
    """
    def __init__(self, api_key=None):
        self.coin_market_cap = CoinMarketCap(api_key=api_key)
        self.gecko = Gecko()

    def price(self, coin, currency):
        """Return trimmed price info for *coin* quoted in *currency*."""
        try:
            quote = self.coin_market_cap.price(coin, currency)
        except CoinMarketCapAPIError:
            quote = self.gecko.price(coin, currency)
        return quote
class CoinMarketCap:
    """Price lookups backed by the Coin Market Cap HTTP API."""
    def __init__(self, api_key=None):
        self.api = CoinMarketCapAPI(api_key or COIN_MARKET_CAP_API_KEY)

    @staticmethod
    def filter_essential_data(data, coin, currency):
        """Reduce a raw quotes payload to the fields callers care about."""
        entry = data[coin]
        quote = entry["quote"][currency]
        return {
            "source": "Coin Market Cap",
            "symbol": entry["symbol"],
            "name": entry["name"],
            "total_supply": entry["total_supply"],
            "value": f"{quote['price']}",
            "currency": currency,
            "percent_change_24h": quote["percent_change_24h"],
            "market_cap": quote["market_cap"],
        }

    def price(self, coin, currency):
        """Fetch the latest quote for *coin* and trim it to the essentials."""
        response = self.api.cryptocurrency_quotes_latest(symbol=coin, convert=currency)
        return self.filter_essential_data(response.data, coin=coin, currency=currency)
class Gecko:
    """Price lookups backed by the CoinGecko HTTP API."""
    def __init__(self):
        self.api = CoinGeckoAPI()

    @staticmethod
    def filter_essential_data(data, coin, currency):
        """Reduce a raw CoinGecko payload to the fields callers care about."""
        currency = currency.lower()
        essentials = {
            "source": "Gecko",
            "name": coin,
            "currency": currency,
        }
        essentials["value"] = float(data[currency])
        essentials["percent_change_24h"] = data[f"{currency}_24h_change"]
        essentials["market_cap"] = data[f"{currency}_market_cap"]
        return essentials

    def price(self, coin, currency):
        """Fetch the current price of *coin* and trim it to the essentials."""
        query = {
            "ids": coin,
            "vs_currencies": currency,
            "include_market_cap": "true",
            "include_24hr_change": "true",
        }
        raw = self.api.get_price(**query)
        return self.filter_essential_data(raw[coin.lower()], coin=coin, currency=currency)
| 33.323077 | 93 | 0.625115 | 2,002 | 0.924284 | 0 | 0 | 920 | 0.424746 | 0 | 0 | 349 | 0.161127 |
4cdde0fb0db22226b5857fab163db859a979f97e | 7,440 | py | Python | sf2_to_dex.py | rupa/sf2_to_dex | d7e074b0332d668385b4a955e3509dd4fbe0f55c | [
"MIT"
] | null | null | null | sf2_to_dex.py | rupa/sf2_to_dex | d7e074b0332d668385b4a955e3509dd4fbe0f55c | [
"MIT"
] | null | null | null | sf2_to_dex.py | rupa/sf2_to_dex | d7e074b0332d668385b4a955e3509dd4fbe0f55c | [
"MIT"
] | null | null | null | #!/usr/bin/env python
"""
cleanup and refactor -> pretty much a rewrite
soundfonts are messy, you gotta kind of figure out where the note names
and velocities are in sample name. usually the pitch info is wack
"""
from chunk import Chunk
import logging
import os
import re
import struct
import wave
logging.basicConfig(level=logging.INFO)
# Sample-type flag word -> channel role of a sample (high bit masked off
# before lookup elsewhere in this file).
SAMPLE_TYPES = {1: 'mono', 2: 'right', 4: 'left', 8: 'linked'}
# Chromatic note names, indexed by pitch modulo 12.
NOTE_NAMES = ['C', 'C#', 'D', 'D#', 'E', 'F', 'F#', 'G', 'G#', 'A', 'A#', 'B']
# Map flat spellings onto the sharp names used above.
ENHARMONICS = {
    'Db': 'C#',
    'Eb': 'D#',
    'Gb': 'F#',
    'Ab': 'G#',
    'Bb': 'A#',
}
def _read_dword(f):
    # Read a little-endian signed 32-bit integer from *f*.
    return struct.unpack('<i', f.read(4))[0]
def _read_word(f):
    # Read a little-endian signed 16-bit integer from *f*.
    return struct.unpack('<h', f.read(2))[0]
def _read_byte(f):
    # Read a signed 8-bit integer from *f*.
    return struct.unpack('<b', f.read(1))[0]
def _write_dword(f, v):
    # Write *v* as a little-endian signed 32-bit integer to *f*.
    f.write(struct.pack('<i', v))
def _write_word(f, v):
    # Write *v* as a little-endian signed 16-bit integer to *f*.
    f.write(struct.pack('<h', v))
class SfSample:
    """One sample-header record parsed from an SF2 'shdr' chunk.

    The parser assigns the attributes (name, start, end, startLoop,
    endLoop, sampleRate, pitch, correction, link, type) after construction.
    """
    def __init__(self):
        # Attributes are filled in externally by parse_sf2().
        pass
    def __str__(self):
        return self.name
    def __repr__(self):
        return 'SfSample(name="%s",start=%s)' % (self.name, self.start)
def parse_sf2(sf2file):
    """Parse an SF2 soundfont and collect its sample headers.

    Returns ``(samples, sample_data_start)``: the SfSample records read
    from the 'shdr' sub-chunk, and the absolute file offset where the raw
    sample data (the 'smpl' chunk payload) begins.

    NOTE(review): ``sample_data_start`` is only assigned when a 'smpl'
    chunk is encountered; a file without one would raise NameError at the
    final return — confirm all inputs contain that chunk.
    """
    samples = []
    with open(sf2file, 'rb') as f:
        chfile = Chunk(f)
        _ = chfile.getname() # riff
        _ = chfile.read(4) # WAVE
        # Walk chunks until EOF; Chunk() raises EOFError past the last one.
        while 1:
            try:
                chunk = Chunk(chfile, bigendian=0)
            except EOFError:
                break
            name = chunk.getname()
            if name == 'smpl':
                # Remember where the PCM data begins (past the 8-byte
                # chunk header), then move on.
                sample_data_start = chfile.tell() + 8
                logging.debug('samples start: {}'.format(sample_data_start))
                chunk.skip()
            elif name == 'shdr':
                # Each header record is 46 bytes; the loop deliberately
                # stops one record early and the read below consumes it.
                for i in range((chunk.chunksize / 46) - 1):
                    s = SfSample()
                    s.name = chfile.read(20).rstrip('\0')
                    s.start = _read_dword(chfile)
                    s.end = _read_dword(chfile)
                    s.startLoop = _read_dword(chfile)
                    s.endLoop = _read_dword(chfile)
                    s.sampleRate = _read_dword(chfile)
                    s.pitch = _read_byte(chfile)
                    s.correction = _read_byte(chfile)
                    s.link = _read_word(chfile)
                    s.type = _read_word(chfile)
                    samples.append(s)
                # Skip the final 46-byte record (not turned into a sample).
                chfile.read(46)
            elif name == 'LIST':
                # Descend into LIST chunks: consume only the 4-byte list
                # type so the sub-chunks are visited by the main loop.
                _ = chfile.read(4)
            else:
                chunk.skip()
    # Debug dump of everything parsed.
    for s in samples:
        type_name = SAMPLE_TYPES[s.type & 0x7fff]
        logging.debug('{} {} {} {} {} {} {} {} {} {}'.format(
            s.name,
            type_name,
            s.pitch,
            s.start,
            s.end,
            s.startLoop,
            s.endLoop,
            s.sampleRate,
            s.correction,
            s.link
        ))
    return samples, sample_data_start
def write_loop(filename, sample=None):
    """Append loop-point metadata ('cue ' plus a LIST/adtl label chunk) to
    an existing WAV file and grow the RIFF size field to match.

    ``sample`` supplies the loop points: its startLoop/endLoop offsets are
    written relative to the sample's start.  It defaults to the module
    global ``s``, which the original implementation read implicitly, so
    existing call sites keep working unchanged.
    """
    if sample is None:
        # Backward compatibility: the script's main loop leaves the
        # current sample bound to the global name ``s``.
        sample = s
    loop_start = sample.startLoop - sample.start
    loop_end = sample.endLoop - sample.start
    with open(filename, 'r+b') as f:
        f.seek(4)
        riff_size = _read_dword(f)
        f.seek(4)
        # The chunks appended below total 0x76 bytes; grow the RIFF size.
        _write_dword(f, riff_size + 0x76)
        f.seek(8 + riff_size)
        _write_dword(f, 0x20657563) # 'cue '
        _write_dword(f, 0x34) # chunk size
        _write_dword(f, 0x2) # num cues
        _write_dword(f, 0x1) # id
        _write_dword(f, loop_start) # position
        _write_dword(f, 0x61746164) # 'data'
        _write_dword(f, 0x0)
        _write_dword(f, 0x0)
        _write_dword(f, loop_start) # position
        _write_dword(f, 0x2) # id
        _write_dword(f, loop_end) # position
        _write_dword(f, 0x61746164) # 'data'
        _write_dword(f, 0x0)
        _write_dword(f, 0x0)
        _write_dword(f, loop_end) # position
        _write_dword(f, 0x5453494C) # 'LIST'
        _write_dword(f, 0x32) # list chunk size
        _write_dword(f, 0x6C746461) # 'adtl'
        _write_dword(f, 0x6C62616C) # 'labl'
        _write_dword(f, 0x10)
        _write_dword(f, 0x1) # id
        _write_dword(f, 0x706F6F4C) # 'Loop'
        _write_dword(f, 0x61745320) # ' Sta'
        _write_dword(f, 0x7472) # 'rt'
        _write_dword(f, 0x6C62616C) # 'labl'
        _write_dword(f, 0x0E)
        _write_dword(f, 0x2) # id
        _write_dword(f, 0x706F6F4C) # 'Loop'
        _write_dword(f, 0x646E4520) # ' End'
        _write_word(f, 0x0)
        # The redundant f.close() inside the ``with`` block is gone; the
        # context manager closes the file.
if __name__ == '__main__':
    import sys
    sf2file = sys.argv[1]
    samples, sample_data_start = parse_sf2(sf2file)
    # Two independent read handles into the same file: F supplies the
    # mono/left data, F2 the linked right channel, so stereo frames can be
    # interleaved below without re-seeking one handle back and forth.
    F = open(sf2file, 'rb')
    F2 = open(sf2file, 'rb')
    # make a dir for our samples
    folder_name = os.path.basename(sf2file).split('.')[0]
    folder_name = "".join(x for x in folder_name if x.isalnum() or x == ' ')
    if not os.path.exists(folder_name):
        os.mkdir(folder_name)
    os.chdir(folder_name)
    for i, s in enumerate(samples):
        # Here's where we gotta get creative, depending on the soundfont
        type_name = SAMPLE_TYPES[s.type & 0x7fff]
        # mono or L, we'll pick up R channel via s.link
        if s.type not in [1, 4]:
            # print 'skipping', type_name, s.name
            continue
        # The triple-quoted strings below are per-soundfont filename
        # recipes kept as inert string literals; one of them is meant to be
        # re-enabled (turned back into code) for the soundfont at hand.
        # os impl
        """
        filename = "".join(x for x in s.name if x.isalnum())
        filename += '_'
        filename += note_names[s.pitch % 12]
        filename += str((s.pitch/12) - 1)
        filename += '.wav'
        """
        # Steinway B-JNv2.0.sf2
        """
        n, note, end = re.split('([ABCDEFG]#?[0123456789])', s.name)
        filename = '{}_{}.wav'.format(s.name.strip().replace(' ', ''), note)
        """
        # Chateau Grand-v1.8.sf2
        """
        pre, note, end = re.split('([ABCDEFG]#?[0123456789])', s.name)
        vel_match = re.findall('([01234567])L', end)
        if not vel_match:
            continue
        filename = 'Chateau_{}_V{}.wav'.format(note, vel_match[0])
        """
        # Rhodes EPs Plus-JN1.5.sf2
        """
        if not s.name.startswith('RHODES'):
            continue
        pre, note, end = re.split('([ABCDEFG]#?[0123456789])', s.name)
        filename = '{}_{}_V{}.wav'.format(s.name.replace(' ', '-'), note, end.strip())
        filename = 'RHODES_{}_V{}.wav'.format(note, end.strip())
        """
        # Nice-Steinway-v3.8.sf2
        """
        note, lvl = re.search('([ABCDEFG][#b]?)([0123456789]+)', s.name).groups()
        note = ENHARMONICS.get(note, note)
        filename = 'Piano.ff.{}_V{}.wav'.format(note, lvl)
        """
        # NOTE(review): with every recipe above left disabled, ``filename``
        # is never bound and the next line raises NameError.
        print '[{}]\t-> [{}]'.format(s.name, filename)
        # Preview mode: this ``continue`` keeps the extraction code below
        # dead until the generated filenames look right.
        continue
        # once we're ok with filenames, write a file
        g = wave.open(filename, 'w')
        g.setsampwidth(2)
        g.setframerate(s.sampleRate)
        # Offsets are scaled by 2 to match the 16-bit sample width set above.
        F.seek(sample_data_start + 2*s.start)
        frames = s.end-s.start+1
        if s.type == 1:
            g.setnchannels(1)
            data = F.read(2*frames)
            g.writeframesraw(data)
        else:
            g.setnchannels(2)
            # Position the second handle at the linked (right) sample.
            F2.seek(sample_data_start + 2 * samples[s.link].start)
            for i in range(frames):
                data = F.read(2)
                g.writeframesraw(data)
                data = F2.read(2)
                g.writeframesraw(data)
        g.close()
        loop_length = s.endLoop - s.startLoop
        if loop_length > 1:
            write_loop(filename)
| 30.617284 | 86 | 0.510618 | 202 | 0.027151 | 0 | 0 | 0 | 0 | 0 | 0 | 2,125 | 0.285618 |
4cdfad127953dd829561e4a0404e4a6449e304d9 | 4,126 | py | Python | src/slack.py | planetrics/aws-iam-key-rotator | 890c28d80e062dfc569e6577bc48fac23dc0b1a0 | [
"MIT"
] | null | null | null | src/slack.py | planetrics/aws-iam-key-rotator | 890c28d80e062dfc569e6577bc48fac23dc0b1a0 | [
"MIT"
] | null | null | null | src/slack.py | planetrics/aws-iam-key-rotator | 890c28d80e062dfc569e6577bc48fac23dc0b1a0 | [
"MIT"
] | null | null | null | import os
import json
import logging
import requests
logger = logging.getLogger('slack')
logger.setLevel(logging.INFO)
def notify(url, account, userName, existingAccessKey, accessKey=None, secretKey=None, instruction=None, deleteAfterDays=None):
    """Post a Slack Block Kit message about an IAM access-key rotation.

    When ``accessKey`` is given, announces the newly generated key pair
    (including the grace period after which ``existingAccessKey`` is
    removed); otherwise announces that the old key pair was deleted.

    :param url: Slack incoming-webhook URL to POST to.
    :param account: mapping with ``id`` and ``name`` of the AWS account.
    :param userName: IAM user the keys belong to.
    :param existingAccessKey: the previous access key id.
    :param accessKey/secretKey: newly created credentials (creation path).
    :param instruction: extra guidance shown to the user (creation path).
    :param deleteAfterDays: grace period before the old key is deleted.
    """
    if accessKey is not None:
        # New key pair generated
        logger.info('Sending notification to %s about new access key generation via %s', userName, url)
        msg = {
            "blocks": [
                {
                    "type": "section",
                    "text": {
                        "type": "mrkdwn",
                        "text": ":mega: NEW KEY PAIR GENERATED FOR *{}* :mega:".format(userName)
                    }
                },
                {
                    "type": "section",
                    "fields": [
                        {
                            "type": "mrkdwn",
                            "text": "*Account ID:*\n{}".format(account['id'])
                        },
                        {
                            "type": "mrkdwn",
                            "text": "*Account Name:*\n{}".format(account['name'])
                        }
                    ]
                },
                {
                    "type": "section",
                    "fields": [
                        {
                            "type": "mrkdwn",
                            "text": "*Access Key:*\n{}".format(accessKey)
                        },
                        {
                            "type": "mrkdwn",
                            "text": "*Secret Key:*\n{}".format(secretKey)
                        }
                    ]
                },
                {
                    "type": "section",
                    "text": {
                        "type": "mrkdwn",
                        "text": "*Instruction:* {}".format(instruction)
                    }
                },
                {
                    "type": "divider"
                },
                {
                    "type": "section",
                    "text": {
                        "type": "mrkdwn",
                        "text": "*NOTE:* Existing key pair *{}* will be deleted after {} days so please update the new key pair wherever required".format(existingAccessKey, deleteAfterDays)
                    }
                },
            ]
        }
    else:
        # Old key pair is deleted
        logger.info('Sending notification to %s about deletion of old access key via %s', userName, url)
        msg = {
            "blocks": [
                {
                    "type": "section",
                    "text": {
                        # This header has no placeholder, so the original's
                        # no-op .format(userName) call was dropped.
                        "text": ":mega: OLD KEY PAIR DELETED :mega:",
                        "type": "mrkdwn"
                    }
                },
                {
                    "type": "section",
                    "fields": [
                        {
                            "type": "mrkdwn",
                            "text": "*Account ID:*\n{}".format(account['id'])
                        },
                        {
                            "type": "mrkdwn",
                            "text": "*Account Name:*\n{}".format(account['name'])
                        }
                    ]
                },
                {
                    "type": "section",
                    "fields": [
                        {
                            "type": "mrkdwn",
                            "text": "*User:*\n{}".format(userName)
                        },
                        {
                            "type": "mrkdwn",
                            "text": "*Old Access Key:*\n{}".format(existingAccessKey)
                        }
                    ]
                }
            ]
        }
    resp = requests.post(url=url, json=msg)
    if resp.status_code == 200:
        # The original message claimed "key deletion" even on the key
        # creation path; keep the success log path-neutral instead.
        logger.info('Notification sent to %s via %s', userName, url)
    else:
        # Typo fix: "Notificaiton" -> "Notification".
        logger.error('Notification failed with %s status code. Reason: %s', resp.status_code, resp.text)
| 36.513274 | 189 | 0.335676 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,132 | 0.274358 |
4ce0ef437d59c0cf917eb22170bf3eac536d5e80 | 1,509 | py | Python | src/core/structs.py | lasse-aagren/poezio | 38399f2cae9ffad42c4f2756802bbceaa60d0e92 | [
"Zlib"
] | null | null | null | src/core/structs.py | lasse-aagren/poezio | 38399f2cae9ffad42c4f2756802bbceaa60d0e92 | [
"Zlib"
] | null | null | null | src/core/structs.py | lasse-aagren/poezio | 38399f2cae9ffad42c4f2756802bbceaa60d0e92 | [
"Zlib"
] | null | null | null | """
Module defining structures useful to the core class and related methods
"""
import collections
# http://xmpp.org/extensions/xep-0045.html#errorstatus
ERROR_AND_STATUS_CODES = {
'401': 'A password is required',
'403': 'Permission denied',
'404': 'The room doesn’t exist',
'405': 'Your are not allowed to create a new room',
'406': 'A reserved nick must be used',
'407': 'You are not in the member list',
'409': 'This nickname is already in use or has been reserved',
'503': 'The maximum number of users has been reached',
}
# http://xmpp.org/extensions/xep-0086.html
DEPRECATED_ERRORS = {
'302': 'Redirect',
'400': 'Bad request',
'401': 'Not authorized',
'402': 'Payment required',
'403': 'Forbidden',
'404': 'Not found',
'405': 'Not allowed',
'406': 'Not acceptable',
'407': 'Registration required',
'408': 'Request timeout',
'409': 'Conflict',
'500': 'Internal server error',
'501': 'Feature not implemented',
'502': 'Remote server error',
'503': 'Service unavailable',
'504': 'Remote server timeout',
'510': 'Disconnected',
}
possible_show = {'available':None,
'chat':'chat',
'away':'away',
'afk':'away',
'dnd':'dnd',
'busy':'dnd',
'xa':'xa'
}
Status = collections.namedtuple('Status', 'show message')
Command = collections.namedtuple('Command', 'func desc comp short usage')
| 30.18 | 73 | 0.58383 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 993 | 0.657181 |
4ce10c58d708a32523b675746af1ff74ba6e03e0 | 895 | py | Python | easy-todo-backend/module/user/handler.py | hubenchang0515/EasyTodo | b3cde21090f76401c0649a760b152ebbdd1d4fbe | [
"MIT"
] | null | null | null | easy-todo-backend/module/user/handler.py | hubenchang0515/EasyTodo | b3cde21090f76401c0649a760b152ebbdd1d4fbe | [
"MIT"
] | null | null | null | easy-todo-backend/module/user/handler.py | hubenchang0515/EasyTodo | b3cde21090f76401c0649a760b152ebbdd1d4fbe | [
"MIT"
] | null | null | null | from flask import Flask, request, jsonify
from ..common import app, db, getJson
from .model import User
from .method import *
@app.route("/api/user/register", methods=["POST"])
def register():
    """Create a new user from the request JSON's username/password."""
    # NOTE: this local name shadows the stdlib ``json`` module in the view.
    json = getJson()
    username = json['username']
    password = json['password']
    # Presumably addUser returns 0 when the username is taken — confirm
    # against its implementation.
    userId = addUser(username, password)
    if userId != 0:
        return jsonify({"status": "ok", "username": username, "user id": userId})
    else:
        return jsonify({"status": "error", "username": username, "message": username + " is exist"})
@app.route("/api/user/login", methods=["GET", "POST"])
def login():
    """Validate the username/password pair from the request JSON.

    NOTE(review): the route also accepts GET; sending credentials via GET
    is questionable — confirm whether GET support is intentional.
    """
    # NOTE: this local name shadows the stdlib ``json`` module in the view.
    json = getJson()
    username = json['username']
    password = json['password']
    if checkPassword(username, password):
        return jsonify({"status": "ok", "username": username})
    else:
        return jsonify({"status": "error", "username": username, "message": "Auth failed"})
4ce1e5e3825fda71c1d79c7d91358a7fb5966bfa | 4,326 | py | Python | contrib/bin_wrapper.py | brikkho-net/windmill | 994bd992b17f3f2d6f6b276fe17391fea08f32c3 | [
"Apache-2.0"
] | 61 | 2015-03-16T18:36:06.000Z | 2021-12-02T10:08:17.000Z | contrib/bin_wrapper.py | admc/windmill | 4304ee7258eb0c2814f215d8ce90abf02b1f737f | [
"Apache-2.0"
] | 8 | 2015-03-10T10:01:26.000Z | 2020-05-18T10:51:24.000Z | contrib/bin_wrapper.py | admc/windmill | 4304ee7258eb0c2814f215d8ce90abf02b1f737f | [
"Apache-2.0"
] | 14 | 2015-01-29T16:28:33.000Z | 2021-09-04T11:19:48.000Z | # ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
# The Original Code is Mozilla Corporation Code.
#
# The Initial Developer of the Original Code is
# Mikeal Rogers.
# Portions created by the Initial Developer are Copyright (C) 2008
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mikeal Rogers <mikeal.rogers@gmail.com>
#
# Alternatively, the contents of this file may be used under the terms of
# either the GNU General Public License Version 2 or later (the "GPL"), or
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
# in which case the provisions of the GPL or the LGPL are applicable instead
# of those above. If you wish to allow use of your version of this file only
# under the terms of either the GPL or the LGPL, and not to allow others to
# use your version of this file under the terms of the MPL, indicate your
# decision by deleting the provisions above and replace them with the notice
# and other provisions required by the GPL or the LGPL. If you do not delete
# the provisions above, a recipient may use your version of this file under
# the terms of any one of the MPL, the GPL or the LGPL.
#
# ***** END LICENSE BLOCK *****
import sys, os
if sys.platform != 'win32':
import pwd
import commands
import logging
import signal
import exceptions
from StringIO import StringIO
from time import sleep
import subprocess
from datetime import datetime
from datetime import timedelta
if sys.platform != 'cygwin':
from windmill.dep import mozrunner
killableprocess = mozrunner.killableprocess
else:
import subprocess as killableprocess
logger = logging.getLogger(__name__)
stdout_wrap = StringIO()
def run_command(cmd, env=None):
    """Run the given command in killable process."""
    # -1 matches subprocess.PIPE for stdout; stderr/stdin are inherited
    # from this process.
    kwargs = {'stdout':-1 ,'stderr':sys.stderr, 'stdin':sys.stdin}
    if sys.platform != "win32":
        # Put the child in its own process group so the whole group can be
        # signalled/killed later.
        return killableprocess.Popen(cmd, preexec_fn=lambda : os.setpgid(0, 0), env=env, **kwargs)
    else:
        return killableprocess.Popen(cmd, **kwargs)
def get_pids(name, minimun_pid=0):
    """Get all the pids matching name, exclude any pids below minimum_pid.

    Also excludes this process's own pid.  (The ``minimun_pid`` parameter
    name is kept, typo and all, for backward compatibility with keyword
    callers.)
    """
    if sys.platform == 'win32':
        import win32api, win32pdhutil, win32con
        #win32pdhutil.ShowAllProcesses() #uncomment for testing
        pids = win32pdhutil.FindPerformanceAttributesByName(name)
    else:
        get_pids_cmd = ['ps', 'ax']
        h = killableprocess.runCommand(get_pids_cmd, stdout=subprocess.PIPE, universal_newlines=True)
        h.wait()
        data = h.stdout.readlines()
        # The original used ``line.find(name) is not -1`` — an identity
        # comparison on an int that only worked via CPython's small-int
        # cache.  A plain substring test is the correct idiom.
        pids = [int(line.split()[0]) for line in data if name in line]
    matching_pids = [m for m in pids if m > minimun_pid and m != os.getpid()]
    return matching_pids
def kill_process_by_name(name):
    """Find and kill all processes containing a certain name.

    POSIX: sends SIGTERM to every match, then escalates to SIGKILL for any
    survivors.  Windows: terminates each match via TerminateProcess.
    """
    pids = get_pids(name)
    if sys.platform == 'win32':
        # These names were previously imported only inside get_pids and
        # were unbound here; import them locally (Windows-only modules).
        import win32api, win32con
        for p in pids:
            handle = win32api.OpenProcess(win32con.PROCESS_TERMINATE, 0, p) #get process handle
            win32api.TerminateProcess(handle, 0) #kill by handle
            win32api.CloseHandle(handle) #close api
    else:
        for pid in pids:
            os.kill(pid, signal.SIGTERM)
        sleep(.5)
        # Escalate to SIGKILL for every survivor.  The original re-sent
        # SIGKILL only to the leftover loop variable (and raised NameError
        # when ``pids`` was empty); it also compared lengths with ``is not
        # 0``, an identity test on an int.
        survivors = get_pids(name)
        if survivors:
            for pid in survivors:
                try:
                    os.kill(pid, signal.SIGKILL)
                except OSError:
                    pass
            sleep(.5)
            if get_pids(name):
                logger.error('Could not kill process')
def main():
    """Command Line main function."""
    # Drop this wrapper's own name; the rest is the real command line.
    args = list(sys.argv)
    args.pop(0)
    name = args[0]
    # Make sure no previous instance of the target binary is running
    # before launching it.
    kill_process_by_name(name)
    print "Starting "+str(args)
    sys.exit(subprocess.call(args))
if __name__ == "__main__":
main() | 34.608 | 101 | 0.676144 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,079 | 0.480583 |
4ce2cf2e72ae8243f6c91430c43ebdb5bcdbda2f | 634 | py | Python | tsheets/models/user_permissions_set.py | eturpin/api_python | fac15d06ef2510972ed3c812bb16a675d4e30e3c | [
"MIT"
] | 6 | 2018-12-16T19:53:57.000Z | 2020-11-22T12:36:57.000Z | tsheets/models/user_permissions_set.py | eturpin/api_python | fac15d06ef2510972ed3c812bb16a675d4e30e3c | [
"MIT"
] | 6 | 2019-02-01T13:51:59.000Z | 2020-11-23T22:42:57.000Z | tsheets/models/user_permissions_set.py | eturpin/api_python | fac15d06ef2510972ed3c812bb16a675d4e30e3c | [
"MIT"
] | 8 | 2018-12-16T19:53:48.000Z | 2021-11-24T17:08:04.000Z | from tsheets.model import Model
from datetime import date, datetime
class UserPermissionsSet(Model):
    """Set of boolean permission flags attached to a TSheets user."""
    pass
# Register each permission flag as a boolean field on the model.
UserPermissionsSet.add_field("admin", bool)
UserPermissionsSet.add_field("mobile", bool)
UserPermissionsSet.add_field("status_box", bool)
UserPermissionsSet.add_field("reports", bool)
UserPermissionsSet.add_field("manage_timesheets", bool)
UserPermissionsSet.add_field("manage_authorization", bool)
UserPermissionsSet.add_field("manage_users", bool)
UserPermissionsSet.add_field("manage_my_timesheets", bool)
UserPermissionsSet.add_field("manage_jobcodes", bool)
UserPermissionsSet.add_field("approve_timesheets", bool)
4ce456f58b59cb287e63e3fc893ff6046bbcd1b1 | 474 | py | Python | backpack/extensions/secondorder/diag_ggn/conv1d.py | jabader97/backpack | 089daafa0d611e13901fd7ecf8a0d708ce7a5928 | [
"MIT"
] | 395 | 2019-10-04T09:37:52.000Z | 2022-03-29T18:00:56.000Z | backpack/extensions/secondorder/diag_ggn/conv1d.py | jabader97/backpack | 089daafa0d611e13901fd7ecf8a0d708ce7a5928 | [
"MIT"
] | 78 | 2019-10-11T18:56:43.000Z | 2022-03-23T01:49:54.000Z | backpack/extensions/secondorder/diag_ggn/conv1d.py | jabader97/backpack | 089daafa0d611e13901fd7ecf8a0d708ce7a5928 | [
"MIT"
] | 50 | 2019-10-03T16:31:10.000Z | 2022-03-15T19:36:14.000Z | from backpack.core.derivatives.conv1d import Conv1DDerivatives
from backpack.extensions.secondorder.diag_ggn.convnd import (
BatchDiagGGNConvND,
DiagGGNConvND,
)
class DiagGGNConv1d(DiagGGNConvND):
    """Diagonal GGN extension for ``Conv1d`` layers.

    Delegates to the generic N-d implementation, supplying 1-d derivatives.
    """

    def __init__(self):
        # Both the bias and weight parameters of the convolution are handled.
        super().__init__(derivatives=Conv1DDerivatives(), params=["bias", "weight"])
class BatchDiagGGNConv1d(BatchDiagGGNConvND):
    """Per-sample (batch) diagonal GGN extension for ``Conv1d`` layers.

    Same delegation pattern as :class:`DiagGGNConv1d`, but via the batch base.
    """

    def __init__(self):
        # Both the bias and weight parameters of the convolution are handled.
        super().__init__(derivatives=Conv1DDerivatives(), params=["bias", "weight"])
| 29.625 | 84 | 0.751055 | 298 | 0.628692 | 0 | 0 | 0 | 0 | 0 | 0 | 28 | 0.059072 |
4ce462d62058170799793cdc170a8f43baf76ca6 | 1,088 | py | Python | api/scripts/test/test_generate_promoter_terminator.py | IsaacLuo/webexe | aec0582b8669f7e941b8a14df1a9154993470f05 | [
"MIT"
] | null | null | null | api/scripts/test/test_generate_promoter_terminator.py | IsaacLuo/webexe | aec0582b8669f7e941b8a14df1a9154993470f05 | [
"MIT"
] | 6 | 2021-03-02T00:34:35.000Z | 2022-03-24T14:26:50.000Z | api/scripts/test/test_generate_promoter_terminator.py | IsaacLuo/webexe | aec0582b8669f7e941b8a14df1a9154993470f05 | [
"MIT"
] | null | null | null | import subprocess
import pytest
import os
import json
def test_call_generate_promoter_terminator():
    """Run generate_promoter_terminator.py on a fixture GFF and verify output.

    Invokes the script as a subprocess, parses its final JSON stdout line,
    and checks the generated GFF file it reports.
    """
    print('')
    # Arguments: fixture GFF path, then (presumably) promoter and terminator
    # lengths 500 / 200 — confirm against the script's CLI.
    process_result = subprocess.run(['python', 'generate_promoter_terminator.py', './test/1.gff.json', '500', '200'], \
        capture_output=True)
    assert process_result.returncode == 0
    # The script's last stdout line is expected to be a JSON "result" object.
    result_line = process_result.stdout.decode().splitlines()[-1]
    result_obj = json.loads(result_line)
    assert result_obj['type'] == 'result'
    file_url = result_obj['data']['files'][0]['url']
    assert file_url
    with open(os.path.join('test', '1.gff.json')) as fp:
        src_gff = json.load(fp)
    with open(os.path.join('results', file_url)) as fp:
        dst_gff = json.load(fp)
    # The output must contain additional (generated) records.
    assert len(dst_gff['records']) > len(src_gff['records'])
    #all sequence must have hash
    # NOTE(review): `tools` is never imported in this module, so the
    # comparison below raises NameError when reached — confirm the
    # intended import (e.g. a project `tools` module).
    for record in dst_gff['records']:
        assert 'sequenceHash' in record
        assert record['sequenceHash'] == tools.get_sequence_hash(dst_gff, record['chrName'], record['start'], record['end'], record['strand'])
    # Clean up the generated result file.
    os.remove(os.path.join('results', file_url))
| 32.969697 | 142 | 0.662684 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 252 | 0.231618 |
4ce5ee6ba6e39b98743bd5a01fdada0a46b8373a | 89 | py | Python | config/airflow_config_property.py | ranjeettyadav/ranjeettyadav | 13325501bc8abc27f352d2b6e3cbe9b8dd321232 | [
"MIT"
] | null | null | null | config/airflow_config_property.py | ranjeettyadav/ranjeettyadav | 13325501bc8abc27f352d2b6e3cbe9b8dd321232 | [
"MIT"
] | null | null | null | config/airflow_config_property.py | ranjeettyadav/ranjeettyadav | 13325501bc8abc27f352d2b6e3cbe9b8dd321232 | [
"MIT"
] | null | null | null | project_dm = 'dmgcp-ingestion-poc'
location = 'US'
bq_connection_id= 'my_gcp_connection'
| 22.25 | 37 | 0.786517 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 44 | 0.494382 |
4ce6266278571310e4c8afa8c6e017f931de9d26 | 1,478 | py | Python | tests/model/template/test_read_include.py | scailfin/flowserv-core | 69376f84bae71b5699688bd213c34a6bf8806319 | [
"MIT"
] | 1 | 2020-02-13T18:57:53.000Z | 2020-02-13T18:57:53.000Z | tests/model/template/test_read_include.py | scailfin/flowserv-core | 69376f84bae71b5699688bd213c34a6bf8806319 | [
"MIT"
] | 46 | 2020-02-14T22:14:33.000Z | 2021-06-10T21:17:49.000Z | tests/model/template/test_read_include.py | scailfin/rob-core | 791383085181747cf41c30f6cd13f6762e438d8a | [
"MIT"
] | 3 | 2021-05-06T15:22:29.000Z | 2021-06-01T16:19:36.000Z | # This file is part of the Reproducible and Reusable Data Analysis Workflow
# Server (flowServ).
#
# Copyright (C) 2019-2021 NYU.
#
# flowServ is free software; you can redistribute it and/or modify it under the
# terms of the MIT License; see LICENSE file for more details.
"""Unit tests for reading a workflow templates that contains references to
additional files using !include (pyyaml-include) and {{}} (in instructions
markdown).
"""
import os
from flowserv.model.workflow.manifest import read_instructions
import flowserv.util as util
DIR = os.path.dirname(os.path.realpath(__file__))
BENCHMARK_DIR = os.path.join(DIR, '../../.files/benchmark/include-test')
INSTRUCTIONS_FILE = os.path.join(BENCHMARK_DIR, 'instructions.md')
TEMPLATE_FILE = os.path.join(BENCHMARK_DIR, 'template.yaml')
def test_read_instructions_with_include():
"""Test reading a template that includes other files."""
text = read_instructions(filename=INSTRUCTIONS_FILE)
assert '# These are the main instructions.' in text
assert '* Include me, and' in text
assert '* Include me, too.' in text
def test_read_template_with_include():
"""Test reading a template that includes other files."""
doc = util.read_object(filename=TEMPLATE_FILE)
assert doc['parameters'] == [
{'name': 'names', 'label': 'Input file', 'dtype': 'file', 'target': 'data/names.txt'},
{'name': 'sleeptime', 'label': 'Sleep time (s)', 'dtype': 'int', 'defaultValue': 10}
]
| 35.190476 | 94 | 0.717185 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 835 | 0.564953 |
4ce69af3f9f9960cf7540e83c34640cd731c18f4 | 4,058 | py | Python | vf.py | polydojo/vf | 457b6f26c06f44c9adcab2ea04c48eafb677c323 | [
"MIT"
] | null | null | null | vf.py | polydojo/vf | 457b6f26c06f44c9adcab2ea04c48eafb677c323 | [
"MIT"
] | null | null | null | vf.py | polydojo/vf | 457b6f26c06f44c9adcab2ea04c48eafb677c323 | [
"MIT"
] | null | null | null | """
VF: Validation Functions (for Python dicts.)
Copyright (c) 2020 Polydojo, Inc.
SOFTWARE LICENSING
------------------
The software is released "AS IS" under the MIT License,
WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED. Kindly
see LICENSE.txt for more details.
NO TRADEMARK RIGHTS
-------------------
The above software licensing terms DO NOT grant any right in the
trademarks, service marks, brand names or logos of Polydojo, Inc.
""";
import functools;
import re;
__version__ = "0.0.2"; # Req'd by flit.
############################################################
# SIMPLE: ##################################################
############################################################
def identity(x):
    """Return `x` unchanged."""
    return x

def truthy(x):
    """Return True iff `x` is truthy."""
    return bool(x)

def falsy(x):
    """Return True iff `x` is falsy."""
    return not x

def noneIs(x):
    """Return True iff `x` is exactly None."""
    return x is None
############################################################
# CHECKER MAKERS: ##########################################
############################################################
def typeIs(typ):
    """Make `func (x)` that is True iff `type(x) is typ` (exact type match)."""
    # TODO: introduce a truthy option.
    def check(x):
        return type(x) is typ
    return check
def instanceOf(*typs):
    """Make `func (x)` that is True iff `x` is an instance of any of `typs`."""
    def check(x):
        return isinstance(x, typs)
    return check
def typeIn(*typs):
    """Make `func (x)` that is True iff `type(x)` is exactly one of `typs`."""
    def check(x):
        return type(x) in typs
    return check
def patternIs (pattern):
    """Make `func (s)` for checking `s` against `pattern`.

    `pattern` may be a regex string or a pre-compiled `re.Pattern`; anything
    else raises ValueError. The returned checker uses ``.match`` semantics
    (anchored at the start of the string) and returns a bool.
    """
    if type(pattern) is str:
        # Compile once at maker time instead of on every call; this also
        # surfaces an invalid pattern immediately rather than on first use.
        compiledPattern = re.compile(pattern);
        return lambda s: bool(compiledPattern.match(s));
    if type(pattern) is re.Pattern:
        return lambda s: bool(pattern.match(s));
    raise ValueError("Expected `pattern` to be of type "
        "`str` or `re.Pattern`, not: %r" % (pattern,)
    );
def allOf(*fns):
    """Make `func (x)` that is True iff every predicate in `fns` accepts `x`."""
    def check(x):
        return all(fn(x) for fn in fns)
    return check
def anyOf(*fns):
    """Make `func (x)` that is True iff at least one predicate in `fns` accepts `x`."""
    def check(x):
        return any(fn(x) for fn in fns)
    return check
def listOf(fn, minLen=0):
    """Make `func (li)` that is True iff `li` is a list of length >= `minLen`
    whose every element satisfies predicate `fn`."""
    def check(li):
        if not isinstance(li, list):
            return False
        if len(li) < minLen:
            return False
        return all(fn(item) for item in li)
    return check
############################################################
# DICT VALIDATION: #########################################
############################################################
class BadSchemaError(ValueError):
    """Raised when a schema itself is malformed (not a dict of callables)."""


class ValidationError(ValueError):
    """Raised when a dict fails validation against a schema."""
def _validateSchemaItself(schema):
    """Ensure `schema` is a dict mapping keys to callables.

    Returns True on success; raises BadSchemaError otherwise.
    """
    if not isinstance(schema, dict):
        raise BadSchemaError("Not an instance of `dict`.")
    for key, checker in schema.items():
        if callable(checker):
            continue
        raise BadSchemaError(
            "Non-callable value against key: %r" % (key,)
        )
    return True
def dictOf(schema, extraKeysOk=False):
    """Make `func (d)` for VALIDATING `d` against `schema`.

    The returned function raises ValidationError on any mismatch and
    returns True on success. With `extraKeysOk`, keys absent from the
    schema are tolerated (their values are not checked).
    """
    assert _validateSchemaItself(schema)

    def validateFn(d):
        if not isinstance(d, dict):
            raise ValidationError(
                "Expected dict-like object, not: %r" % (d,)
            )
        presentKeys = set(d.keys())
        requiredKeys = set(schema.keys())
        missingKeys = requiredKeys - presentKeys
        if missingKeys:
            raise ValidationError("Dict-like object is missing " +
                "required keys: {}".format(missingKeys)
            )
        excessKeys = presentKeys - requiredKeys
        if excessKeys and not extraKeysOk:
            raise ValidationError("Dict-like object has " +
                "excess keys: {}".format(excessKeys)
            )
        for key, rhsFn in schema.items():
            assert callable(rhsFn)
            if not rhsFn(d[key]):
                raise ValidationError(
                    ("Against key: %r\n" % (key,)) +
                    ("Unexpected value: %r" % (d[key],))
                )
        return True

    return validateFn
# End ######################################################
| 33.53719 | 66 | 0.506407 | 81 | 0.019961 | 0 | 0 | 0 | 0 | 0 | 0 | 1,885 | 0.464515 |
4ce7dce48f6457b8561f7776f1bda9d9903b5e23 | 792 | py | Python | application/account/forms.py | fgronqvist/votingbooth | 45d8505e2380f3f49938b8a7b8343e6a067a86d8 | [
"Unlicense"
] | null | null | null | application/account/forms.py | fgronqvist/votingbooth | 45d8505e2380f3f49938b8a7b8343e6a067a86d8 | [
"Unlicense"
] | 2 | 2018-10-03T15:59:05.000Z | 2018-10-21T19:08:07.000Z | application/account/forms.py | fgronqvist/votingbooth | 45d8505e2380f3f49938b8a7b8343e6a067a86d8 | [
"Unlicense"
] | null | null | null | from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, validators
from wtforms.fields.html5 import EmailField
class RegisterForm(FlaskForm):
    """User registration form: names, email, and a confirmed password."""

    firstname = StringField(u"Firstname", [validators.Length(min=2, max=256)])
    # Use the canonical `Length` class (was the lowercase alias `length`)
    # for consistency with the `firstname` field above; behavior is identical.
    lastname = StringField(u"Lastname", [validators.Length(min=2, max=256)])
    email = EmailField(u"Email", [validators.InputRequired(), validators.Email()])
    # EqualTo('passwordb') makes validation fail unless both password fields match.
    password = PasswordField(u"Password", [validators.InputRequired(), validators.EqualTo('passwordb')])
    passwordb = PasswordField(u"Confirm password")

    class Meta:
        # CSRF protection is explicitly disabled for this form.
        csrf = False
class LoginForm(FlaskForm):
    """Login form: email address and password, both required."""

    email = StringField(u"Email", [validators.InputRequired(), validators.Email()])
    password = PasswordField(u"Password", [validators.InputRequired()])
4ce94970f8650ca31e2796829e3d3df224319f92 | 823 | py | Python | setup.py | Bouke/mvvm | f05d4bfd2817c372f335705977bff6afd6c979be | [
"MIT"
] | 3 | 2018-06-29T07:12:54.000Z | 2021-05-30T10:24:14.000Z | setup.py | Bouke/mvvm | f05d4bfd2817c372f335705977bff6afd6c979be | [
"MIT"
] | null | null | null | setup.py | Bouke/mvvm | f05d4bfd2817c372f335705977bff6afd6c979be | [
"MIT"
] | 3 | 2017-03-01T01:23:06.000Z | 2017-10-27T02:33:48.000Z | from setuptools import setup, find_packages
# Package version, passed to setup() below.
version = '0.0.1'
setup(name='mvvm',
      version=version,
      description='Model-View-ViewModel framework for Python, based on Wx',
      # NOTE(review): the file handle from open('README.rst') is never closed
      # explicitly; harmless for a short-lived setup script.
      long_description=open('README.rst').read(),
      classifiers=[
          'Development Status :: 2 - Pre-Alpha',
          'Intended Audience :: Developers',
          'License :: OSI Approved :: MIT License',
          'Programming Language :: Python :: 2.7',
          'Topic :: Software Development :: Libraries :: Application Frameworks',
          'Topic :: Software Development :: User Interfaces',
          'Topic :: Utilities',
      ],
      author='Bouke Haarsma',
      author_email='bouke@webatoom.nl',
      url='http://github.com/Bouke/mvvm',
      license='MIT',
      packages=find_packages(),
      zip_safe=False,
      )
| 32.92 | 81 | 0.609964 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 439 | 0.533414 |
4ce975fb2a7c077d40e8b8fc8ab1568e48fcfdd5 | 603 | py | Python | mundo_2/ex037.py | tseiiti/curso_em_video | 59565ce809c1f025fb41ab69de3b8c5b53c8f7b2 | [
"MIT"
] | null | null | null | mundo_2/ex037.py | tseiiti/curso_em_video | 59565ce809c1f025fb41ab69de3b8c5b53c8f7b2 | [
"MIT"
] | null | null | null | mundo_2/ex037.py | tseiiti/curso_em_video | 59565ce809c1f025fb41ab69de3b8c5b53c8f7b2 | [
"MIT"
] | null | null | null | from os import system, name
# Clear the terminal ('cls' on Windows, 'clear' elsewhere).
system('cls' if name == 'nt' else 'clear')
# Challenge description.
# NOTE(review): `dsc` is assigned but never displayed — presumably it was
# meant to be printed before the prompts; confirm intent.
dsc = ('''DESAFIO 037:
Escreva um programa que leia um número inteiro qualquer e peça
para o usuário escolher qual será a base de conversão:
- 1 para binário
- 2 para octal
- 3 para hexadecimal
''')
n = int(input('Digite um número: '))
print('1: binário')
print('2: octal')
print('3: hexadecimal')
c = int(input('Qual base para conversão: '))
if c == 1:
    # bin() prefixes '0b'; .upper() also capitalizes the prefix to '0B'.
    print('em binário: {}'.format(bin(n).upper()))
elif c == 2:
    print('em octal: {}'.format(oct(n).upper()))
elif c == 3:
    print('em hexadecimal: {}'.format(hex(n).upper()))
else:
    # Previously an invalid choice (e.g. 4) produced no output at all.
    print('Opção inválida. Tente novamente.')
| 25.125 | 62 | 0.6534 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 352 | 0.574225 |
4cebc8d8c5709c45d465740aef28dc7747b5c871 | 4,234 | py | Python | tables.py | sadaszewski/scimd | 3f8cad382e4891cd710c8e4e9c48aa4d56130040 | [
"BSD-2-Clause"
] | null | null | null | tables.py | sadaszewski/scimd | 3f8cad382e4891cd710c8e4e9c48aa4d56130040 | [
"BSD-2-Clause"
] | null | null | null | tables.py | sadaszewski/scimd | 3f8cad382e4891cd710c8e4e9c48aa4d56130040 | [
"BSD-2-Clause"
] | null | null | null | #
# Copyright (C) 2015, Stanislaw Adaszewski
# s.adaszewski@gmail.com
# http://algoholic.eu
#
# License: 2-clause BSD
#
from markdown import Extension
from markdown.blockprocessors import BlockProcessor
from markdown.util import etree
import numpy as np
from collections import defaultdict
import numpy.core.defchararray as dca
class TableExtension(Extension):
    """Markdown extension that registers the ASCII-art table block processor."""
    def extendMarkdown(self, md, md_globals):
        # Register TableProcessor under the name 'table', placed before
        # ('<') the built-in 'hashheader' processor in the pipeline.
        md.parser.blockprocessors.add('table',
            TableProcessor(md.parser),
            '<hashheader')
def makeExtension(configs={}):
    """Entry point used by the `markdown` package to instantiate the extension."""
    # NOTE(review): mutable default argument kept as-is; the dict is only
    # forwarded, never mutated here, so it is safe in practice.
    return TableExtension(configs=configs)
class TableProcessor(BlockProcessor):
    """Block processor that parses ASCII-art tables drawn with '|' and '-'.

    NOTE(review): this is Python 2 code (`xrange`, `print` statement) and
    uses the deprecated numpy alias `np.int`; porting would need both fixed.
    """
    def test(self, parent, block):
        # A block is a table candidate when some stripped line consists
        # solely of '-' and '|' characters (a horizontal border row).
        lines = block.split('\n')
        for l in lines:
            if set(l.strip()) == set(('-', '|')):
                return True
        return False
    def run(self, parent, blocks):
        block = blocks.pop(0)
        # Explode the block into a 2-D character grid (one char per cell).
        lines = map(lambda x: list(x.strip()), block.split('\n'))
        # print 'lines:', lines
        ary = np.array(lines, dtype='|U1')
        # cstart/cend mark '|' characters that open / close a table cell.
        cstart = np.zeros(ary.shape, dtype=np.int)
        cend = np.zeros(ary.shape, dtype=np.int)
        for r in xrange(ary.shape[0]):
            for c in xrange(ary.shape[1]):
                if ary[r, c] == '|':
                    # A '|' starts a cell if it is on the top row or has a
                    # '-' diagonally up-right; it ends a cell if it is on the
                    # bottom row or has a '-' diagonally down-left.
                    if c + 1 < ary.shape[1] and (r == 0 or ary[r - 1, c + 1] == '-'):
                        cstart[r, c] = True
                    if c > 0 and (r + 1 == ary.shape[0] or ary[r + 1, c - 1] == '-'):
                        cend[r, c] = True
        cstart = zip(*np.nonzero(cstart))
        cend = zip(*np.nonzero(cend))
        # print 'cstart:', cstart
        # print 'cend:', cend
        # Row/column positions of border lines, used to compute spans.
        rpos = np.nonzero(np.max(ary == '-', axis=1))
        cpos = np.nonzero(np.max(ary == '|', axis=0))
        # print rpos
        # print cpos
        assert(len(cstart) == len(cend))
        cells = []
        # For each cell start, scan right (and down) for its matching end
        # marker, recording the rowspan/colspan implied by crossed borders.
        for k in xrange(len(cstart)):
            r, c = cstart[k][0], cstart[k][1] + 1
            while r < ary.shape[0] and c < ary.shape[1]:
                # print r, c
                if ary[r, c] == '|':
                    if (r, c) in cend:
                        rowspan = len(np.nonzero((rpos >= cstart[k][0]) * (rpos <= r))[0]) + 1
                        colspan = len(np.nonzero((cpos >= cstart[k][1]) * (cpos <= c))[0]) - 1
                        # print 'Cell', k, cstart[k], (r, c), 'rowspan:', rowspan, 'colspan:', colspan
                        # print ' %s' % ary[cstart[k][0]:r+1, cstart[k][1]:c-1].tostring()
                        cells.append((cstart[k], (r, c), rowspan, colspan))
                        break
                    else:
                        # Not our end marker: continue on the next grid row.
                        r += 1
                        c = cstart[k][1]
                c += 1
        # print cells
        # Emit the HTML <table>, grouping cells into rows by their start row.
        table = etree.SubElement(parent, 'table')
        # table.set('style', 'border: solid 1px black;')
        table.set('border', '1')
        rows = defaultdict(lambda: [])
        for k in xrange(len(cells)):
            cell = cells[k]
            r = len(np.nonzero(rpos < cells[k][0][0])[0])
            c = len(np.nonzero(cpos < cells[k][0][1])[0])
            # print 'Cell', k, 'r:', r, 'c:', c, 'rowspan:', cells[k][2], 'colspan:', cells[k][3]
            # Extract the cell's interior text, one string per grid row.
            text = ary[cells[k][0][0]:cells[k][1][0]+1, cells[k][0][1]+1:cells[k][1][1]]
            text = map(lambda x: u''.join(x).strip(), text)
            # text = list(np.ravel(text))
            # text = np
            text = u'\n'.join(text) # map(lambda x: x.tostring().strip(), text))
            # print ' %s' % text
            rows[r].append((text, cells[k][2], cells[k][3]))
        for r in xrange(len(rows)):
            # print 'Row', r
            tr = etree.SubElement(table, 'tr')
            for c in xrange(len(rows[r])):
                td = etree.SubElement(tr, 'td')
                try:
                    td.text = rows[r][c][0] # .encode('utf-8')
                except:
                    print str(type(block))
                    raise ValueError(str(rows[r][c][0]) + ' ' + str(type(rows[r][c][0])))
                td.set('rowspan', str(rows[r][c][1]))
                td.set('colspan', str(rows[r][c][2]))
        # return table
4cec6669f8861c5d6808c43e630416fb7bc66a24 | 1,538 | py | Python | agronet_be/AgronetApp/views/orderDetailView.py | lauraC4MP0/Prueba-github | 291fc266fc0a8efc80ab36dd6eb4bff3e98e7c1f | [
"MIT"
] | 1 | 2021-10-06T00:39:08.000Z | 2021-10-06T00:39:08.000Z | agronet_be/AgronetApp/views/orderDetailView.py | lauraC4MP0/Prueba-github | 291fc266fc0a8efc80ab36dd6eb4bff3e98e7c1f | [
"MIT"
] | null | null | null | agronet_be/AgronetApp/views/orderDetailView.py | lauraC4MP0/Prueba-github | 291fc266fc0a8efc80ab36dd6eb4bff3e98e7c1f | [
"MIT"
] | 1 | 2021-10-03T13:39:31.000Z | 2021-10-03T13:39:31.000Z | from django.conf import settings
from django.db.models.query import QuerySet
from rest_framework import views
from rest_framework.response import Response
from AgronetApp.serializers import orderDetailSerializer
from AgronetApp.serializers.orderDetailSerializer import OrderDetailSerializer
from AgronetApp.models.orderDetail import OrderDetail
from rest_framework.permissions import AllowAny
from rest_framework import status
from rest_framework import generics
class OrderDetailView(generics.ListCreateAPIView):
    """List all order details (GET) and create new ones (POST)."""
    queryset = OrderDetail.objects.all()
    serializer_class = OrderDetailSerializer
class OrderDetailDetail(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve, update, or delete a single order detail by primary key."""
    queryset = OrderDetail.objects.all()
    serializer_class = OrderDetailSerializer
#class OrderDetailView(views.APIView):
# permission_classes = (AllowAny,)
# def get(self, request):
# Detalle_orden = OrderDetail.objects.all()
# serializer = orderDetailSerializer.OrderDetailSerializer(Detalle_orden, many=True)
# return Response(serializer.data,status=status.HTTP_200_OK)
#def post(self, request):
# Detalle_orden = request.data.get('Detalle_orden')
# serializer = orderDetailSerializer.OrderDetailSerializer(data=Detalle_orden)
# if serializer.is_valid(raise_exception=True):
# Detail_saved = serializer.save()
#return Response(serializer.data,{"success": "Orden Detalle '{}' creada correctamente".format(Detail_saved)})
| 42.722222 | 117 | 0.751625 | 323 | 0.210013 | 0 | 0 | 0 | 0 | 0 | 0 | 672 | 0.436931 |
4ced33f2e305fc01ed18bf724293146d776b1f32 | 1,012 | py | Python | predict.py | kadn/carla-imitation | 874030f4f4d726f80e739721fb704489672da9b0 | [
"MIT"
] | null | null | null | predict.py | kadn/carla-imitation | 874030f4f4d726f80e739721fb704489672da9b0 | [
"MIT"
] | null | null | null | predict.py | kadn/carla-imitation | 874030f4f4d726f80e739721fb704489672da9b0 | [
"MIT"
] | null | null | null | import tensorflow as tf
import numpy as np
from network import make_network
from data_provider import DataProvider
from tensorflow.core.protobuf import saver_pb2
import time
import os
from IPython import embed
# Restore the trained network from a checkpoint and time inference over
# 120 validation samples, printing latency and the predicted control.
with tf.Session(config=tf.ConfigProto(log_device_placement=True)) as sess:
    network = make_network()
    sess.run(tf.global_variables_initializer())
    # Load the checkpoint saved at training step 10500.
    saver = tf.train.Saver(write_version=saver_pb2.SaverDef.V2)
    saver.restore(sess, './data/step-10500.ckpt')
    val_provider = DataProvider('val.tfrecords', sess)
    one_batch = val_provider.get_minibatch()
    for i in range(120):
        # Pick a single image / speed sample; [None] keeps a batch axis of 1.
        one_image = one_batch.images[i,...][None]
        one_speed = one_batch.data[0][i][None]
        a = time.time()
        target_control, = sess.run(network['outputs'],
                feed_dict={network['inputs'][0]: one_image,
                           network['inputs'][1]: one_speed})
        b = time.time()
        print("Inference consumes %.5f seconds" % (b-a))
        print(target_control[0])
4cee63421a6a0026a74361c99866ca8a1654719f | 494 | py | Python | App0/migrations/0019_auto_20210118_0317.py | LTSana/lost-empire | 495397345f1226b025434e37c5e1703273f475a8 | [
"CC0-1.0"
] | null | null | null | App0/migrations/0019_auto_20210118_0317.py | LTSana/lost-empire | 495397345f1226b025434e37c5e1703273f475a8 | [
"CC0-1.0"
] | null | null | null | App0/migrations/0019_auto_20210118_0317.py | LTSana/lost-empire | 495397345f1226b025434e37c5e1703273f475a8 | [
"CC0-1.0"
] | null | null | null | # Generated by Django 3.1.5 on 2021-01-18 01:17
from django.db import migrations, models
import gdstorage.storage
class Migration(migrations.Migration):
    """Auto-generated: alter `products.image_1` to use Google Drive storage."""

    dependencies = [
        ('App0', '0018_auto_20210117_1820'),
    ]
    operations = [
        migrations.AlterField(
            model_name='products',
            name='image_1',
            # Optional image field; files are stored on Google Drive under
            # the 'lost-empire/' prefix.
            field=models.ImageField(blank=True, null=True, storage=gdstorage.storage.GoogleDriveStorage(), upload_to='lost-empire/'),
        ),
    ]
| 24.7 | 133 | 0.645749 | 376 | 0.761134 | 0 | 0 | 0 | 0 | 0 | 0 | 111 | 0.224696 |
4ceea5f4cec9bb94754cc5da49b08837bb9ff83a | 119 | py | Python | baseline/exp3_2d3ds/models/__init__.py | COATZ/ugscnn | 23feb8465863aa473048ca40ede651356d977ac3 | [
"MIT"
] | null | null | null | baseline/exp3_2d3ds/models/__init__.py | COATZ/ugscnn | 23feb8465863aa473048ca40ede651356d977ac3 | [
"MIT"
] | null | null | null | baseline/exp3_2d3ds/models/__init__.py | COATZ/ugscnn | 23feb8465863aa473048ca40ede651356d977ac3 | [
"MIT"
] | null | null | null | from .duc_hdc import *
from .fcn8s import *
from .fcn8s_sphe import *
from .u_net import *
from .u_net_sphe import * | 23.8 | 26 | 0.731092 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
4cef81cfd3c231e8447459975ee732e4f8082b19 | 4,422 | py | Python | tests/test_dm2-xnat-upload.py | gabiherman/datman | dcbca4981ff7bb1be536d6c62c3b27786cabdef9 | [
"Apache-2.0"
] | null | null | null | tests/test_dm2-xnat-upload.py | gabiherman/datman | dcbca4981ff7bb1be536d6c62c3b27786cabdef9 | [
"Apache-2.0"
] | null | null | null | tests/test_dm2-xnat-upload.py | gabiherman/datman | dcbca4981ff7bb1be536d6c62c3b27786cabdef9 | [
"Apache-2.0"
] | null | null | null | import os
import unittest
import importlib
import logging
import zipfile
from nose.tools import raises
from mock import patch, MagicMock
import datman
import datman.xnat
import datman.scanid
# Disable all logging for the duration of testing
logging.disable(logging.CRITICAL)
upload = importlib.import_module('bin.dm_xnat_upload')
FIXTURE = "tests/fixture_xnat_upload/"
class CheckFilesExist(unittest.TestCase):
    """Tests for upload.check_files_exist using fixture XNAT sessions."""
    # Fixture inputs shared by every test in this case.
    ident = datman.scanid.parse("STUDY_SITE_9999_01_01")
    archive = "some_dir/STUDY_SITE_9999_01_01.zip"
    session = FIXTURE + "xnat_session.txt"
    session_no_resources = FIXTURE + "xnat_session_missing_resources.txt"
    session_missing_data = FIXTURE + "xnat_session_missing_scan_data.txt"
    # Series UIDs that the (mocked) archive headers report.
    archive_scan_uids = [
        '1.2.840.113619.2.336.4120.8413787.19465.1412083372.445',
        '1.2.840.113619.2.336.4120.8413787.19465.1412083372.444',
        '1.2.840.113619.2.336.4120.8413787.19465.1412083372.447',
        '1.2.840.113619.2.336.4120.8413787.19465.1412083372.446',
        '1.2.840.113619.2.336.4120.8413787.19465.1412083372.440',
        '1.2.840.113619.2.80.142631515.25030.1412106144.3.0.2',
        '1.2.840.113619.2.336.4120.8413787.19465.1412083372.443',
        '1.2.840.113619.2.336.4120.8413787.19465.1412083372.442',
        '1.2.840.113619.2.5.18242516414121059301412105930313000',
        '1.2.840.113619.2.80.142631515.25030.1412106138.1.0.2']
    archive_experiment_id = '1.2.840.113619.6.336.' \
                            '254801968553430904107911738210738061468'

    @raises(Exception)
    @patch('bin.dm_xnat_upload.missing_resource_data')
    @patch('datman.utils.get_archive_headers')
    def test_raises_exception_if_scan_uids_mismatch(self, mock_headers,
                                                    mock_missing_resources):
        # Set up: headers report a StudyInstanceUID that does not match the
        # experiment, which check_files_exist must treat as an error.
        mock_headers.return_value = self.__generate_mock_headers(bad_id=True)
        mock_missing_resources.return_value = False
        xnat_session = self.__get_xnat_session(self.session)
        # Run
        files_exist = upload.check_files_exist(self.archive, xnat_session,
                                               self.ident)
        # Should raise an exception, so assertion is never reached
        assert False
    ##### To do:
    # Test that false is returned when a resource is missing, or when a scan is
    # missing
    def __generate_mock_headers(self, bad_id=False):
        """Build mock dicom headers; with bad_id, corrupt the first scan's study UID."""
        headers = {}
        for num, item in enumerate(self.archive_scan_uids):
            scan = MagicMock()
            scan.SeriesInstanceUID = item
            scan.StudyInstanceUID = self.archive_experiment_id
            headers[num] = scan
        if bad_id:
            bad_scan = headers[0]
            bad_scan.StudyInstanceUID = '1.1.111.111111.1.111.111111111111111'
            headers[0] = bad_scan
        return headers
    def __get_xnat_session(self, text_file):
        """Load a fixture session dict literal from disk.

        NOTE(review): eval() on file contents — acceptable only because the
        fixtures are trusted test data.
        """
        with open(text_file, 'r') as session_data:
            xnat_session = eval(session_data.read())
        return xnat_session
class GetResources(unittest.TestCase):
    """Tests for upload.get_resources on a mocked zip archive."""
    # Simulated archive contents: a mix of DICOM files and non-DICOM
    # "resource" files (text/log).
    name_list = ['some_zipfile_name/',
                 'some_zipfile_name/dicom_file1.dcm',
                 'some_zipfile_name/dicom_file2.dcm',
                 'some_zipfile_name/bvals.txt',
                 'some_zipfile_name/gradOrs.txt',
                 'some_zipfile_name/dicom_file3.dcm',
                 'some_zipfile_name/Name_info.txt',
                 'some_zipfile_name/subjectid_EmpAcc.log']

    @patch('bin.dm_xnat_upload.is_dicom')
    @patch('io.BytesIO')
    def test_returns_all_resources(self, mock_IO, mock_isdicom):
        # Set up inputs
        archive_zip = MagicMock(spec=zipfile.ZipFile)
        archive_zip.return_value.namelist.return_value = self.name_list
        expected_resources = ['some_zipfile_name/bvals.txt',
                              'some_zipfile_name/gradOrs.txt',
                              'some_zipfile_name/Name_info.txt',
                              'some_zipfile_name/subjectid_EmpAcc.log']
        # Stop get_resources from verifying 'dicoms' in the mock zipfile
        archive_zip.return_value.read.side_effect = lambda x: x
        mock_IO.side_effect = lambda x: x
        # Classify by filename: anything ending in .dcm counts as a dicom.
        mock_isdicom.side_effect = lambda x: True if '.dcm' in x else False
        actual_resources = upload.get_resources(archive_zip.return_value)
        # Every non-dicom, non-directory entry must be reported as a resource.
        assert sorted(actual_resources) == sorted(expected_resources)
| 40.568807 | 79 | 0.661918 | 4,044 | 0.914518 | 0 | 0 | 1,619 | 0.366124 | 0 | 0 | 1,667 | 0.376979 |
4cf0924837acecb1d2d66dfcb3f08a377e503f09 | 7,513 | py | Python | digideep/environment/common/vec_env/subproc_vec_env.py | sharif1093/digideep | e42f10a58cec6cab70ac2be5ce3af6102caefd81 | [
"BSD-2-Clause"
] | 11 | 2019-03-09T23:54:02.000Z | 2020-09-05T20:47:55.000Z | log_sessions/cartpole_8_5_05_1/modules/digideep/environment/common/vec_env/subproc_vec_env.py | godnpeter/DMC_Clustering_PICA | 1b3e14dd4034f3941af1caa06c1d4b6f9d606408 | [
"BSD-2-Clause"
] | 1 | 2021-09-30T01:15:57.000Z | 2021-09-30T01:15:57.000Z | digideep/environment/common/vec_env/subproc_vec_env.py | sharif1093/digideep | e42f10a58cec6cab70ac2be5ce3af6102caefd81 | [
"BSD-2-Clause"
] | null | null | null | """
The MIT License
Copyright (c) 2017 OpenAI (http://openai.com)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import numpy as np
from multiprocessing import Process, Pipe
from . import VecEnv, CloudpickleWrapper
def worker(remote, parent_remote, env_fn_wrapper):
    """Subprocess entry point: create one env and serve commands over a pipe.

    Loops on `remote.recv()` until a 'close' command (or KeyboardInterrupt).
    Recognised commands: step / reset / render / close / get_spaces /
    get_specs / get_type / get_env_state / set_env_state / get_rng_state /
    set_rng_state. The env is always closed on exit.
    """
    # The parent's pipe end is unused in the child; close our copy of it.
    parent_remote.close()
    env = env_fn_wrapper.x()
    try:
        while True:
            cmd, data = remote.recv()
            if cmd == 'step':
                ob, reward, done, info = env.step(data)
                if done:
                    # Auto-reset: the returned observation starts a fresh episode.
                    ob = env.reset()
                remote.send((ob, reward, done, info))
            elif cmd == 'reset':
                ob = env.reset()
                remote.send(ob)
            elif cmd == 'render':
                remote.send(env.render(mode='rgb_array'))
            elif cmd == 'close':
                remote.close()
                break
            elif cmd == 'get_spaces':
                remote.send((env.observation_space, env.action_space))
            elif cmd == 'get_specs':
                remote.send(env.spec)
            elif cmd == 'get_type':
                remote.send(env.unwrapped.__module__)
            elif cmd == 'get_env_state':
                # Env state snapshotting is optional: reply None if unsupported.
                if hasattr(env.unwrapped, "get_env_state"):
                    # print("We really got the env state!")
                    remote.send(env.unwrapped.get_env_state())
                else:
                    remote.send(None)
            elif cmd == 'set_env_state':
                if hasattr(env.unwrapped, "set_env_state"):
                    # print("We really set the env state!")
                    remote.send(env.unwrapped.set_env_state(data))
                else:
                    remote.send(None)
            elif cmd == 'get_rng_state':
                remote.send(env.unwrapped.np_random.get_state())
            elif cmd == 'set_rng_state':
                remote.send(env.unwrapped.np_random.set_state(data))
            else:
                raise NotImplementedError
    except KeyboardInterrupt:
        print('SubprocVecEnv worker: got KeyboardInterrupt')
    finally:
        env.close()
class SubprocVecEnv(VecEnv):
    """
    VecEnv that runs multiple environments in parallel in subproceses and communicates with them via pipes.
    Recommended to use when num_envs > 1 and step() can be a bottleneck.
    """
    def __init__(self, env_fns, spaces=None):
        """
        Arguments:
        env_fns: iterable of callables - functions that create environments to run in subprocesses. Need to be cloud-pickleable
        """
        self.waiting = False
        self.closed = False
        nenvs = len(env_fns)
        # One Pipe per env; `remotes` stay with the parent, `work_remotes`
        # are handed to the worker subprocesses.
        self.remotes, self.work_remotes = zip(*[Pipe() for _ in range(nenvs)])
        self.ps = [Process(target=worker, args=(work_remote, remote, CloudpickleWrapper(env_fn)))
            for (work_remote, remote, env_fn) in zip(self.work_remotes, self.remotes, env_fns)]
        for p in self.ps:
            p.daemon = True # if the main process crashes, we should not cause things to hang
            p.start()
        # Close the parent's copies of the worker-side pipe ends.
        for remote in self.work_remotes:
            remote.close()
        # Query spaces from the first worker (assumed identical across envs).
        self.remotes[0].send(('get_spaces', None))
        observation_space, action_space = self.remotes[0].recv()
        self.viewer = None
        self._delayed_init_flag = False
        # Here we get the current spec of the env. But later we will update it in '_delayed_init'
        self.remotes[0].send(('get_specs', None))
        spec = self.remotes[0].recv()
        # Get the type of the environment, which is the main class that has created the environment
        self.remotes[0].send(('get_type', None))
        env_type = self.remotes[0].recv()
        VecEnv.__init__(self, len(env_fns), observation_space, action_space, spec, env_type)
    def _delayed_init(self):
        """We get the spec later, because we know that some environments are late in creating their spec's.
        """
        if self._delayed_init_flag:
            return
        self._delayed_init_flag = True
        # It will be a new spec, BUT it's too late!
        self.remotes[0].send(('get_specs', None))
        spec = self.remotes[0].recv()
        # TODO: Update the self.spec attributes with all attributes from the new spec!
        self.spec.__dict__.update(spec.__dict__)
    def step_async(self, actions):
        # Dispatch one action per env without blocking on the results.
        self._assert_not_closed()
        for remote, action in zip(self.remotes, actions):
            remote.send(('step', action))
        self.waiting = True
    def step_wait(self):
        # Block until every env finishes the step begun by step_async,
        # then stack the per-env results along a new leading axis.
        self._assert_not_closed()
        results = [remote.recv() for remote in self.remotes]
        self.waiting = False
        obs, rews, dones, infos = zip(*results)
        return np.stack(obs), np.stack(rews), np.stack(dones), infos
    def reset(self):
        """Reset every env and return the stacked initial observations."""
        self._assert_not_closed()
        for remote in self.remotes:
            remote.send(('reset', None))
        result = np.stack([remote.recv() for remote in self.remotes])
        self._delayed_init()
        return result
    def close_extras(self):
        # Drain any pending step results, then ask every worker to shut down.
        self.closed = True
        if self.waiting:
            for remote in self.remotes:
                remote.recv()
        for remote in self.remotes:
            remote.send(('close', None))
        for p in self.ps:
            p.join()
    def get_images(self):
        """Collect one rendered RGB frame from each environment."""
        self._assert_not_closed()
        for pipe in self.remotes:
            pipe.send(('render', None))
        imgs = [pipe.recv() for pipe in self.remotes]
        return imgs
    def _assert_not_closed(self):
        assert not self.closed, "Trying to operate on a SubprocVecEnv after calling close()"
    def set_rng_state(self, states):
        """Restore each env's RNG state; `states` has one entry per env."""
        for remote, state in zip(self.remotes, states):
            remote.send(('set_rng_state', state))
        results = [remote.recv() for remote in self.remotes]
        return results
    def get_rng_state(self):
        """Return each env's RNG state, one entry per env."""
        for remote in self.remotes:
            remote.send(('get_rng_state', None))
        states = [remote.recv() for remote in self.remotes]
        return states
    def state_dict(self):
        """Return per-env serialized state (None for envs without support)."""
        for remote in self.remotes:
            remote.send(('get_env_state', None))
        states = [remote.recv() for remote in self.remotes]
        return states
    def load_state_dict(self, state_dicts):
        """Restore per-env state previously produced by state_dict()."""
        for remote, state_dict in zip(self.remotes, state_dicts):
            remote.send(('set_env_state', state_dict))
        results = [remote.recv() for remote in self.remotes]
        return results
4cf0f488113cf847a420ceaef66021526af038d1 | 9,796 | py | Python | sni/api/routers/corporation.py | altaris/seat-navy-issue | 0595cdd25503e9702b847f083fbb6d5cb92b6829 | [
"MIT"
] | 1 | 2021-06-03T22:07:52.000Z | 2021-06-03T22:07:52.000Z | sni/api/routers/corporation.py | r0kym/seat-navy-issue | dfd12df1230f68de7b7b05c1004e901be5046ad5 | [
"MIT"
] | 1 | 2020-07-06T13:59:09.000Z | 2020-07-06T16:42:20.000Z | sni/api/routers/corporation.py | r0kym/seat-navy-issue | dfd12df1230f68de7b7b05c1004e901be5046ad5 | [
"MIT"
] | 1 | 2020-10-29T15:01:03.000Z | 2020-10-29T15:01:03.000Z | """
Corporation management paths
"""
from datetime import datetime
from typing import Dict, Iterator, List, Optional
from fastapi import APIRouter, Depends
import pydantic as pdt
from sni.esi.scope import EsiScope, esi_scope_set_to_hex
from sni.esi.token import tracking_status, TrackingStatus
from sni.uac.clearance import assert_has_clearance
from sni.uac.token import (
create_state_code,
from_authotization_header_nondyn,
Token,
)
from sni.user.models import Alliance, Corporation, User
from sni.user.user import ensure_corporation
from .user import GetUserShortOut
router = APIRouter()
class GetAllianceShortOut(pdt.BaseModel):
"""
Short alliance description
"""
alliance_id: int
alliance_name: str
@staticmethod
def from_record(alliance: Alliance) -> "GetAllianceShortOut":
"""
Converts an instance of :class:`sni.user.models.Alliance` to
:class:`sni.api.routers.alliance.GetAllianceShortOut`
"""
return GetAllianceShortOut(
alliance_id=alliance.alliance_id,
alliance_name=alliance.alliance_name,
)
class GetCorporationOut(pdt.BaseModel):
"""
Corporation data
"""
alliance: Optional[GetAllianceShortOut]
authorized_to_login: Optional[bool]
ceo: GetUserShortOut
corporation_id: int
corporation_name: str
cumulated_mandatory_esi_scopes: List[EsiScope]
mandatory_esi_scopes: List[EsiScope]
ticker: str
updated_on: datetime
@staticmethod
def from_record(corporation: Corporation) -> "GetCorporationOut":
"""
Converts an instance of :class:`sni.user.models.Corporation` to
:class:`sni.api.routers.corporation.GetCorporationOut`
"""
return GetCorporationOut(
alliance=GetAllianceShortOut.from_record(corporation.alliance)
if corporation.alliance is not None
else None,
authorized_to_login=corporation.authorized_to_login,
ceo=GetUserShortOut.from_record(corporation.ceo),
corporation_id=corporation.corporation_id,
corporation_name=corporation.corporation_name,
cumulated_mandatory_esi_scopes=list(
corporation.cumulated_mandatory_esi_scopes()
),
mandatory_esi_scopes=corporation.mandatory_esi_scopes,
ticker=corporation.ticker,
updated_on=corporation.updated_on,
)
class PostCorporationGuestOut(pdt.BaseModel):
"""
Model for ``POST /corporation/{corporation_id}/guest`` reponses.
"""
state_code: str
class GetCorporationShortOut(pdt.BaseModel):
"""
Short corporation description
"""
corporation_id: int
corporation_name: str
@staticmethod
def from_record(corporation: Corporation) -> "GetCorporationShortOut":
"""
Converts an instance of :class:`sni.user.models.Corporation` to
:class:`sni.api.routers.corporation.GetCorporationShortOut`
"""
return GetCorporationShortOut(
corporation_id=corporation.corporation_id,
corporation_name=corporation.corporation_name,
)
class GetTrackingOut(pdt.BaseModel):
"""
Represents a corporation tracking response.
"""
invalid_refresh_token: List[GetUserShortOut] = []
no_refresh_token: List[GetUserShortOut] = []
valid_refresh_token: List[GetUserShortOut] = []
@staticmethod
def from_user_iterator(iterator: Iterator[User]) -> "GetTrackingOut":
"""
Creates a tracking response from a user iterator. See
:meth:`sni.esi.token.tracking_status`
"""
result = GetTrackingOut()
ldict: Dict[int, List[GetUserShortOut]] = {
TrackingStatus.HAS_NO_REFRESH_TOKEN: result.no_refresh_token,
TrackingStatus.ONLY_HAS_INVALID_REFRESH_TOKEN: result.invalid_refresh_token,
TrackingStatus.HAS_A_VALID_REFRESH_TOKEN: result.valid_refresh_token,
}
for usr in iterator:
status = tracking_status(usr)
ldict[status].append(GetUserShortOut.from_record(usr))
return result
class PutCorporationIn(pdt.BaseModel):
"""
Model for ``PUT /corporation/{corporation_id}`` requests
"""
authorized_to_login: Optional[bool]
mandatory_esi_scopes: Optional[List[EsiScope]]
@router.get(
"",
response_model=List[GetCorporationShortOut],
summary="Get the list of corporations",
)
def get_corporations(tkn: Token = Depends(from_authotization_header_nondyn),):
"""
Gets the list of corporations registered in this instance. Requires a
clearance level of 0 or more.
"""
assert_has_clearance(tkn.owner, "sni.read_corporation")
return [
GetCorporationShortOut.from_record(corporation)
for corporation in Corporation.objects(
corporation_id__gte=2000000
).order_by("corporation_name")
]
@router.get(
"/{corporation_id}",
response_model=GetCorporationOut,
summary="Get informations about a corporation",
)
def get_corporation(
corporation_id: int,
tkn: Token = Depends(from_authotization_header_nondyn),
):
"""
Get informations about a corporation. Note that this corporation must be
registered on SNI
"""
assert_has_clearance(tkn.owner, "sni.read_corporation")
corporation = Corporation.objects(corporation_id=corporation_id).get()
return GetCorporationOut.from_record(corporation)
@router.post(
"/{corporation_id}",
response_model=GetCorporationOut,
summary="Manually fetch a corporation from the ESI",
)
def post_corporation(
corporation_id: int,
tkn: Token = Depends(from_authotization_header_nondyn),
):
"""
Manually fetches a corporation from the ESI. Requires a clearance level of
8 or more.
"""
assert_has_clearance(tkn.owner, "sni.fetch_corporation")
corporation = ensure_corporation(corporation_id)
return GetCorporationOut.from_record(corporation)
@router.put(
"/{corporation_id}",
response_model=GetCorporationOut,
summary="Modify a corporation registered on SNI",
)
def put_corporation(
corporation_id: int,
data: PutCorporationIn,
tkn: Token = Depends(from_authotization_header_nondyn),
):
"""
Modify a corporation registered on SNI. Note that it does not modify it on
an ESI level. Requires a clearance level of 2 or more.
"""
corporation: Corporation = Corporation.objects(
corporation_id=corporation_id
).get()
assert_has_clearance(tkn.owner, "sni.update_corporation", corporation.ceo)
corporation.authorized_to_login = data.authorized_to_login
if data.mandatory_esi_scopes is not None:
corporation.mandatory_esi_scopes = data.mandatory_esi_scopes
corporation.save()
return GetCorporationOut.from_record(corporation)
@router.delete(
"/{corporation_id}/guest/{character_id}",
summary="Deletes a corporation guest",
)
def delete_corporation_guest(
corporation_id: int,
character_id: int,
tkn: Token = Depends(from_authotization_header_nondyn),
):
"""
Deletes a corporation guest
"""
corporation: Corporation = Corporation.objects(
corporation_id=corporation_id
).get()
assert_has_clearance(
tkn.owner, "sni.delete_corporation_guest", corporation.ceo
)
guest: User = User.objects(
character_id=character_id,
clearance_level__lt=0,
corporation=corporation,
).get()
guest.delete()
@router.get(
"/{corporation_id}/guest",
response_model=List[GetUserShortOut],
summary="Corporation guests",
)
def get_corporation_guests(
corporation_id: int,
tkn: Token = Depends(from_authotization_header_nondyn),
):
"""
Returns the list of guests in this corporation.
"""
corporation: Corporation = Corporation.objects(
corporation_id=corporation_id
).get()
assert_has_clearance(
tkn.owner, "sni.read_corporation_guests", corporation.ceo
)
return [
GetUserShortOut.from_record(guest)
for guest in corporation.guest_iterator()
]
@router.post(
"/{corporation_id}/guest",
response_model=PostCorporationGuestOut,
summary="Creates a state code for a new guest to this corporation",
)
def post_corporation_guest(
corporation_id: int,
tkn: Token = Depends(from_authotization_header_nondyn),
):
"""
Creates a state code for a new guest to this corporation. The user then has
to login with this state code to be considered a guest.
"""
corporation: Corporation = Corporation.objects(
corporation_id=corporation_id
).get()
assert_has_clearance(
tkn.owner, "sni.create_corporation_guest", corporation.ceo
)
state_code = create_state_code(
tkn.parent,
inviting_corporation=corporation,
code_prefix=esi_scope_set_to_hex(
corporation.cumulated_mandatory_esi_scopes()
),
)
return PostCorporationGuestOut(state_code=str(state_code.uuid))
@router.get(
"/{corporation_id}/tracking",
response_model=GetTrackingOut,
summary="Corporation tracking",
)
def get_corporation_tracking(
corporation_id: int,
tkn: Token = Depends(from_authotization_header_nondyn),
):
"""
Reports which member (of a given corporation) have a valid refresh token
attacked to them, and which do not. Requires a clearance level of 1 and
having authority over this corporation.
"""
corporation: Corporation = Corporation.objects(
corporation_id=corporation_id
).get()
assert_has_clearance(tkn.owner, "sni.track_corporation", corporation.ceo)
return GetTrackingOut.from_user_iterator(corporation.user_iterator())
| 29.957187 | 88 | 0.703655 | 3,747 | 0.382503 | 0 | 0 | 7,861 | 0.80247 | 0 | 0 | 2,646 | 0.27011 |
4cf16cd9ea3ef1049b453f14c5dd39b3b590b2d4 | 2,454 | py | Python | LeanIX/__init__.py | JeffGebhart/leanix-python-library | 15449231b7ae0cbd96a1645192cdc57464924b4e | [
"MIT"
] | 4 | 2020-04-28T12:34:15.000Z | 2021-12-29T19:57:26.000Z | LeanIX/__init__.py | JeffGebhart/leanix-python-library | 15449231b7ae0cbd96a1645192cdc57464924b4e | [
"MIT"
] | 4 | 2020-05-06T13:26:13.000Z | 2021-04-03T21:23:36.000Z | LeanIX/__init__.py | JeffGebhart/leanix-python-library | 15449231b7ae0cbd96a1645192cdc57464924b4e | [
"MIT"
] | 2 | 2020-06-04T10:54:44.000Z | 2021-12-29T19:57:33.000Z | import requests
import json
from .Graph import Graph
from .FactSheets import FactSheets
from .Users import Users
from .Metrics import Metrics
from .Polls import Polls
class LeanIX:
def __init__(self,api_token="",workspaceid="",baseurl="https://us.leanix.net/"):
""" Authenticates to LeanIX with the given API Token and returns the Authorization header for use in future calls
Retuns a class with subclasses pointing to the other options:
.factsheets
.users
.graph
"""
self.__api_token = api_token
self.workspaceid = workspaceid
self.baseurl = baseurl
if not self.baseurl.endswith("/"):
self.baseurl += "/" # If URL is not passed in with a trailing /, add it
self.auth()
self.graph = Graph(self)
self.factsheets = FactSheets(self)
self.users = Users(self)
self.metrics = Metrics(self)
self.polls = Polls(self)
def __repr__(self):
return f"LeanIX Object for {self.workspaceid}"
def auth(self):
"""Authenticate to LeanIX using the API token in the class"""
auth_url = f"{self.baseurl}/services/mtm/v1/oauth2/token"
response = requests.post(auth_url, auth=('apitoken', self.__api_token),
data={'grant_type': 'client_credentials'})
response.raise_for_status()
self._access_token = response.json()['access_token']
self._auth_header = 'Bearer ' + self._access_token
self.header = {'Authorization': self._auth_header,"Content-Type":"application/json"}
def _sendrequest(self,method,parameters=None,data=None,verb="get"):
api_url =f'{self.baseurl}{method}'
allrows = []
if verb.lower() == "get":
response = requests.get(api_url,headers=self.header,params=parameters)
jresp = response.json()
if jresp['total'] == len(jresp['data']):
allrows = jresp['data']
else:
allrows+=jresp['data']
while jresp['total'] > len(allrows):
parameters['page']+=1
allrows += requests.get(api_url,headers=self.header,params=parameters).json()['data']
elif verb.lower() == "post":
return requests.post(api_url,headers=self.header,data=json.dumps(data),params=parameters)
a=1
return allrows
| 36.088235 | 122 | 0.602689 | 2,283 | 0.930318 | 0 | 0 | 0 | 0 | 0 | 0 | 674 | 0.274654 |
4cf2e7e17245c8fd56f6374f295774ac71d456f6 | 5,752 | py | Python | python/database.py | pm1997/othello | 0dcebdf273d6e4a6b3c6216702bfc4b9a45bfa36 | [
"MIT"
] | null | null | null | python/database.py | pm1997/othello | 0dcebdf273d6e4a6b3c6216702bfc4b9a45bfa36 | [
"MIT"
] | null | null | null | python/database.py | pm1997/othello | 0dcebdf273d6e4a6b3c6216702bfc4b9a45bfa36 | [
"MIT"
] | null | null | null | import os
import numpy as np
from constants import DATABASE_FILE_NAME, PLAYER_ONE, PLAYER_TWO, POSITION_TO_DATABASE
from Agents.random import Random
from othello import Othello
import multiprocessing as mp
class Database:
def __init__(self):
"""
load database data and store them in self._db_data
self._db_data = 3 dim array:
60 turns
9 game categories
[0] : won games of player1
[1] : won games of player2
[2] : total played games
"""
# check if database file exists
if not os.path.isfile(DATABASE_FILE_NAME):
self._create_new_database()
# load csv in self_data as 3 dim. array
csv = np.loadtxt(DATABASE_FILE_NAME, delimiter=';', dtype='int64')
self._db_data = csv.reshape((60, 9, 3))
def _create_new_database(self):
"""
Reset stored played / won games
change self._db_data to array of 0
"""
self._db_data = np.zeros(shape=(60, 9, 3), dtype='int64')
# save modified array in file
self.store_database()
def store_database(self):
"""
store database on filesystem
:return:
"""
with open(DATABASE_FILE_NAME, 'w') as outfile:
# write 3 dim. array as list of 2 dim. array's
for row in self._db_data:
# write one row (turn number) of matrix
np.savetxt(outfile, row, fmt='%d', delimiter=';')
def get_change_of_winning(self, move, turn_nr, current_player):
"""
calculate chance of winning for given move and turn_number
:param move: move is a pair <row, column> in available_moves
:param turn_nr: actual turn_number
:param current_player: use constants PLAYER_ONE and PLAYER_TWO
:return: chance of winning for given field at the given turn number
"""
# translate move to category in array
category = POSITION_TO_DATABASE[move]
# access data of one category in one turn number of the database to compute statistic
won_games_pl1, won_games_pl2, total_games_played = self._db_data[turn_nr][category]
# avoid dividing with 0
if total_games_played == 0:
return 0
# return win probability
if current_player == PLAYER_ONE:
return won_games_pl1 / total_games_played
return won_games_pl2 / total_games_played
def update_field_stat(self, turn_nr, field_type, winner):
"""
update database with new played move
:param turn_nr: turn number of move to store
:param field_type: field category of move
:param winner: winner of whole played game
:return: nothing
update self._db_data at given turn number and field type
"""
# get actual database entry
(won_games_pl1, won_games_pl2, total_games_played) = self._db_data[turn_nr][field_type]
if winner == PLAYER_ONE:
won_games_pl1 += 1
elif winner == PLAYER_TWO:
won_games_pl2 += 1
# store updated entry at same position in database
self._db_data[turn_nr][field_type] = (won_games_pl1, won_games_pl2, total_games_played + 1)
def update_fields_stats_for_single_game(self, moves, winner):
"""
update statistics of each taken move in game
:param moves: list of taken moves
:param winner: PLAYER_ONE or PLAYER_TWO
"""
for turn_nr in enumerate(moves):
# translate move 1,0 to position 8
position = POSITION_TO_DATABASE[moves[turn_nr]]
# update array at position position
self.update_field_stat(turn_nr, position, winner)
@staticmethod
def _play_n_random_games(count):
"""
play count random games
:param count: number of played games
:return: winning statistics
statistics = list of pair <taken moves, winner of this game>
"""
multi_stats = []
for i in range(count):
# print each 100 games actual game played position
if i % 100 == 0:
print(f"Game No: {i}")
g = Othello()
g.init_game()
# play whole game
while not g.game_is_over():
g.play_position(Random.get_move(g))
winner = g.get_winner()
# add winner and taken moves to statistic
multi_stats.append((g.get_taken_mv(), winner))
return multi_stats
def train_db_multi_threaded(self, count):
"""
play count random games and update database winning statistics
:param count: number of games to play
:return:
"""
# Create a pool of worker processes.
# Workload can be distributed equally on the processes when their number is known
number_of_processes = mp.cpu_count()
pool = mp.Pool()
# Use Worker processes asynchronous
# split calculation in number_of_processes parts to calculate multi threaded
list_of_stats = [pool.apply_async(self._play_n_random_games, args=(count // number_of_processes,))
for _ in range(number_of_processes)]
# Collect the result of the first worker
# update statistics of number_of_processes results sequential
for single_process_list in list_of_stats:
list_of_games = single_process_list.get()
for single_game in list_of_games:
moves, winner = single_game
self.update_fields_stats_for_single_game(moves, winner)
# Close the worker pool.
pool.close()
db = Database()
| 39.129252 | 106 | 0.619784 | 5,525 | 0.960535 | 0 | 0 | 793 | 0.137865 | 0 | 0 | 2,632 | 0.45758 |
4cf3f361510485e9fff924874f05f2ada3c665d8 | 5,114 | py | Python | servo/controller.py | MrAI-NDHU/drive_away_pigeons | e10419e9ca9035760ed5e20b26825a972fd18ec4 | [
"MIT"
] | 1 | 2019-06-01T04:06:37.000Z | 2019-06-01T04:06:37.000Z | servo/controller.py | MrAI-NDHU/servo_controller | 42344044c3248a398b834248cf331893c0fedfa2 | [
"MIT"
] | null | null | null | servo/controller.py | MrAI-NDHU/servo_controller | 42344044c3248a398b834248cf331893c0fedfa2 | [
"MIT"
] | null | null | null | import logging
import math
import time
from typing import Dict, List
from servo import Servo
class ControllerForPCA9685:
def __init__(self, servos: Dict[object, Servo], chs: Dict[object, int],
pwm_freq: float, init_angles: Dict[object, float] = None):
import Adafruit_PCA9685 as PCA9685
if list(servos.keys()).sort() != list(chs.keys()).sort():
raise ValueError
if init_angles is None:
init_angles = {k: (v.angle_min_deg + v.angle_max_deg) / 2
for k, v in servos.items()}
elif list(servos.keys()).sort() != list(init_angles.keys()).sort():
raise ValueError
self.servos = servos
self.chs = chs
self.pwm_freq = pwm_freq
self.init_angles = {k: servos[k].fix_angle(v)
for k, v in init_angles.items()}
self.current_angles = self.init_angles.copy()
PCA9685.software_reset()
self.pca9685 = PCA9685.PCA9685()
self.pca9685.set_pwm_freq(self.pwm_freq)
for k in servos:
self.pca9685.set_pwm(self.chs[k], 0, int(round(
self.servos[k].angle_to_pwm_val(self.init_angles[k]))))
time.sleep(self.servos[k].wait_time(self.servos[k].angle_max_deg))
def rotate(self, angles: Dict[object, float], is_relative: bool):
for k, angle in angles.items():
angle = self.servos[k].fix_angle(
angle + (self.current_angles[k] if is_relative else 0.0))
if math.isclose(self.current_angles[k], angle):
continue
angle_diff = abs(self.current_angles[k] - angle)
logging.info("Controller: rotating %s from %f to %f",
k, self.current_angles[k], angle)
self.pca9685.set_pwm(self.chs[k], 0, int(round(
self.servos[k].angle_to_pwm_val(angle))))
time.sleep(self.servos[k].wait_time(angle_diff))
self.current_angles[k] = angle
class ControllerForRPi:
def __init__(self, servos: Dict[object, Servo], pins: Dict[object, int],
init_angles: Dict[object, float] = None):
import RPi.GPIO as GPIO
GPIO.setmode(GPIO.BOARD)
if list(servos.keys()).sort() != list(pins.keys()).sort():
raise ValueError
if init_angles is None:
init_angles = {k: (v.angle_min_deg + v.angle_max_deg) / 2
for k, v in servos.items()}
elif list(servos.keys()).sort() != list(init_angles.keys()).sort():
raise ValueError
self.servos = servos
self.pins = pins
self.active_servos = set()
self.init_angles = {k: servos[k].fix_angle(v)
for k, v in init_angles.items()}
self.current_angles = self.init_angles.copy()
self.pwms = {}
self.gpio = GPIO
for k in servos:
self.gpio.setup(self.pins[k], GPIO.OUT)
self.pwms[k] = self.gpio.PWM(pins[k], servos[k].pwm_freq)
logging.info("Controller: initialized, set pins %s", pins)
def __del__(self):
self.gpio.cleanup()
logging.info("Controller: deleted")
def start(self, servos_key: List[object]):
started = set()
for k in servos_key:
if k in self.servos and not (k in self.active_servos):
init_duty_cycle \
= self.servos[k].angle_to_pwm_val(self.init_angles[k])
self.pwms[k].start(init_duty_cycle)
time.sleep(
self.servos[k].wait_time(self.servos[k].angle_max_deg))
self.active_servos.add(k)
started.add(k)
if len(started) > 0:
logging.info("Controller: started %s", started)
def stop(self, servos_key: List[object]):
stopped = set()
for k in servos_key:
if k in self.servos and k in self.active_servos:
self.pwms[k].stop()
self.active_servos.remove(k)
stopped.add(k)
if len(stopped) > 0:
logging.info("Controller: stopped %s", stopped)
def rotate(self, angles: Dict[object, float], is_relative: bool):
for k, angle in angles.items():
if not (k in self.active_servos):
continue
angle = self.servos[k].fix_angle(
angle + (self.current_angles[k] if is_relative else 0.0))
if math.isclose(self.current_angles[k], angle):
continue
angle_diff = abs(self.current_angles[k] - angle)
logging.info("Controller: rotating %s from %f to %f",
k, self.current_angles[k], angle)
self.pwms[k].ChangeDutyCycle(
self.servos[k].angle_to_pwm_val(angle))
time.sleep(self.servos[k].wait_time(angle_diff))
self.pwms[k].ChangeDutyCycle(0.0)
self.current_angles[k] = angle
| 39.953125 | 78 | 0.555729 | 5,014 | 0.980446 | 0 | 0 | 0 | 0 | 0 | 0 | 185 | 0.036175 |
4cf469984a513806e0e1e0c8ae654d64324c3888 | 990 | py | Python | euler-50.py | TFabijo/euler2 | 7da205ce02ae3bd12754f99c1fe69fbf20b1e3d0 | [
"MIT"
] | null | null | null | euler-50.py | TFabijo/euler2 | 7da205ce02ae3bd12754f99c1fe69fbf20b1e3d0 | [
"MIT"
] | null | null | null | euler-50.py | TFabijo/euler2 | 7da205ce02ae3bd12754f99c1fe69fbf20b1e3d0 | [
"MIT"
] | null | null | null | def prastevila_do_n(n):
pra = [2,3,5,7]
for x in range(8,n+1):
d = True
for y in range(2,int(x ** 0.5) + 1):
if d == False:
break
elif x % y == 0:
d = False
if d == True:
pra.append(x)
return pra
def euler_50():
pra = prastevila_do_n(1000000)
najvecja_vsot_ki_je_prastevilo = 0
stevilo_z_najvec_p = 0
for p in pra:
i = pra.index(p)
if sum(pra[i:i+stevilo_z_najvec_p]) > 1000000:
break
stevilo_p = 0
vsota = pra[i]
for p1 in range(i+1,len(pra)):
stevilo_p += 1
vsota += pra[p1]
if vsota > 1000000:
break
elif vsota in pra and stevilo_z_najvec_p < stevilo_p:
najvecja_vsot_ki_je_prastevilo = vsota
stevilo_z_najvec_p = stevilo_p
return najvecja_vsot_ki_je_prastevilo
euler_50() | 26.052632 | 66 | 0.493939 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
4cf728f753dc109f252e6d8c8d313c5faac370b8 | 785 | py | Python | scripts/rpc/cmd_parser.py | 5cs/spdk | edd27df26d0764a6486333a431f40efe49521af3 | [
"BSD-3-Clause"
] | 2,107 | 2015-09-23T01:53:51.000Z | 2022-03-29T09:55:13.000Z | scripts/rpc/cmd_parser.py | 5cs/spdk | edd27df26d0764a6486333a431f40efe49521af3 | [
"BSD-3-Clause"
] | 2,382 | 2015-09-24T02:36:59.000Z | 2022-03-31T22:53:45.000Z | scripts/rpc/cmd_parser.py | 5cs/spdk | edd27df26d0764a6486333a431f40efe49521af3 | [
"BSD-3-Clause"
] | 916 | 2015-09-23T03:04:41.000Z | 2022-03-31T05:45:04.000Z | args_global = ['server_addr', 'port', 'timeout', 'verbose', 'dry_run', 'conn_retries',
'is_server', 'rpc_plugin', 'called_rpc_name', 'func', 'client']
def strip_globals(kwargs):
for arg in args_global:
kwargs.pop(arg, None)
def remove_null(kwargs):
keys = []
for key, value in kwargs.items():
if value is None:
keys.append(key)
for key in keys:
kwargs.pop(key, None)
def apply_defaults(kwargs, **defaults):
for key, value in defaults.items():
if key not in kwargs:
kwargs[key] = value
def group_as(kwargs, name, values):
group = {}
for arg in values:
if arg in kwargs and kwargs[arg] is not None:
group[arg] = kwargs.pop(arg, None)
kwargs[name] = group
| 24.53125 | 86 | 0.596178 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 114 | 0.145223 |
4cf793269fc1e46f707bfa6b409a7afeda8934b0 | 606 | py | Python | neighbor/models.py | ShaviyaVictor/nyumbakumi- | 933d825844da139998867594c1e21b09ba5c8e63 | [
"MIT"
] | null | null | null | neighbor/models.py | ShaviyaVictor/nyumbakumi- | 933d825844da139998867594c1e21b09ba5c8e63 | [
"MIT"
] | null | null | null | neighbor/models.py | ShaviyaVictor/nyumbakumi- | 933d825844da139998867594c1e21b09ba5c8e63 | [
"MIT"
] | null | null | null | from django.db import models
from django.utils import timezone
from django.contrib.auth.models import User
# Create your models here.
class Neighbor(models.Model) :
n_name = models.CharField(max_length=35)
n_location = models.CharField(max_length=35)
n_image = models.ImageField(upload_to='n_posts/')
n_title = models.CharField(max_length=100)
n_post = models.TextField()
n_author = models.ForeignKey(User, on_delete=models.CASCADE)
n_date_posted = models.DateTimeField(default=timezone.now)
def __str__(self) :
return self.n_title
class Meta :
ordering = ['n_date_posted'] | 26.347826 | 62 | 0.759076 | 468 | 0.772277 | 0 | 0 | 0 | 0 | 0 | 0 | 51 | 0.084158 |
4cf7e3c7c267bf3db92b14c32b49d8243411b974 | 1,237 | py | Python | MetroX_CircuitPython/ir_sensor/code.py | albinger/Adafruit_Learning_System_Guides | 4fe2da261fe5d1ca282b86bd3b93ee1466346fa7 | [
"MIT"
] | null | null | null | MetroX_CircuitPython/ir_sensor/code.py | albinger/Adafruit_Learning_System_Guides | 4fe2da261fe5d1ca282b86bd3b93ee1466346fa7 | [
"MIT"
] | null | null | null | MetroX_CircuitPython/ir_sensor/code.py | albinger/Adafruit_Learning_System_Guides | 4fe2da261fe5d1ca282b86bd3b93ee1466346fa7 | [
"MIT"
] | null | null | null | # SPDX-FileCopyrightText: 2021 Brent Rubell for Adafruit Industries
#
# SPDX-License-Identifier: MIT
"""
'ir_sensor.py'.
=================================================
control a LED with an IR Remote
requires:
- adafruit_irremote library
"""
import adafruit_irremote
import board
import digitalio
import pulseio
led = digitalio.DigitalInOut(board.D11)
led.switch_to_output()
pulsein = pulseio.PulseIn(board.D6, maxlen=120, idle_state=True)
decoder = adafruit_irremote.GenericDecode()
# size must match what you are decoding! for NEC use 4
received_code = bytearray(4)
last_code = None
while True:
try:
pulses = decoder.read_pulses(pulsein)
except MemoryError as e:
print("Memory Error Occured: ", e)
continue
try:
code = decoder.decode_bits(pulses, debug=False)
except adafruit_irremote.IRNECRepeatException:
print("NEC Code Repeated, doing last command")
code = last_code
except adafruit_irremote.IRDecodeException as e:
print("Failed to decode: ", e)
except MemoryError as e:
print("Memory Error Occured: ", e)
print(code[2])
if code[2] == 247:
led.value = True
else:
led.value = False
last_code = code
| 23.788462 | 67 | 0.666128 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 403 | 0.325788 |
4cf8442cf94ad2853dab8f12b88955925757d43c | 71 | py | Python | pyheos/__init__.py | WebSpider/pyheos | 1a352d46d2b899a1a78d72b52771585da634f9b8 | [
"Apache-2.0"
] | 1 | 2018-08-29T08:08:03.000Z | 2018-08-29T08:08:03.000Z | pyheos/__init__.py | WebSpider/pyheos | 1a352d46d2b899a1a78d72b52771585da634f9b8 | [
"Apache-2.0"
] | 1 | 2018-09-08T07:40:19.000Z | 2018-09-08T07:50:32.000Z | pyheos/__init__.py | WebSpider/pyheos | 1a352d46d2b899a1a78d72b52771585da634f9b8 | [
"Apache-2.0"
] | null | null | null | from .heos import *
__author__ = 'Nils Vogels'
__version__ = '20180701' | 23.666667 | 26 | 0.746479 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 23 | 0.323944 |
4cfa5a64d9c761e08234b1f78daae57a9df75fb5 | 891 | py | Python | samples/CMU_MoCap_Angles.py | jutanke/pak | 6f3be954ef68804ebe622cafe46f033ccf6eb2e7 | [
"MIT"
] | 20 | 2018-09-19T06:52:01.000Z | 2020-10-02T11:18:00.000Z | samples/CMU_MoCap_Angles.py | jutanke/pak | 6f3be954ef68804ebe622cafe46f033ccf6eb2e7 | [
"MIT"
] | 2 | 2018-06-22T08:22:39.000Z | 2020-07-20T06:50:59.000Z | samples/CMU_MoCap_Angles.py | justayak/pak | 6f3be954ef68804ebe622cafe46f033ccf6eb2e7 | [
"MIT"
] | 3 | 2020-01-12T08:51:09.000Z | 2020-10-06T05:47:24.000Z | import json; from pprint import pprint
Settings = json.load(open('settings.txt'))
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import numpy as np
import sys
sys.path.insert(0,'../')
from pak.datasets.CMU_MoCap import CMU_MoCap
import pak.datasets.CMU_MoCap as cmu
data = CMU_MoCap(Settings['data_root'],
z_is_up=False,
store_binary=False)
joints = data.get('01', '01')
human = joints[0]
minv = -20
maxv = 30
joints, motions = data.get_asf_amc('01', '01')
V, joints = cmu.to_rotation_vector_representation(joints, motions)
fig = plt.figure(figsize=(10, 10))
ax = fig.add_subplot(111, projection='3d')
for t in range(0, len(V), 10):
ax.clear()
ax.set_xlim([minv, maxv])
ax.set_ylim([minv, maxv])
ax.set_zlim([minv, maxv])
cmu.plot_vector(ax, V[t], joints, plot_jid=False)
plt.pause(1/120)
plt.show()
| 24.081081 | 66 | 0.684624 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 50 | 0.056117 |
4cfcacd6574609d0c73077a10761daad4bb01dec | 6,249 | py | Python | bin/deploy.py | daniarherikurniawan/hillview | 6dca7261f160364c491c97a4723d2d8dd5950eb3 | [
"Apache-2.0"
] | 1 | 2020-07-25T03:04:23.000Z | 2020-07-25T03:04:23.000Z | bin/deploy.py | daniarherikurniawan/hillview | 6dca7261f160364c491c97a4723d2d8dd5950eb3 | [
"Apache-2.0"
] | null | null | null | bin/deploy.py | daniarherikurniawan/hillview | 6dca7261f160364c491c97a4723d2d8dd5950eb3 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
"""This python program deploys the files needed by the Hillview service
on the machines specified in the configuration file."""
# pylint: disable=invalid-name
from argparse import ArgumentParser
import tempfile
import os.path
from hillviewCommon import ClusterConfiguration, get_config, get_logger
logger = get_logger("deploy")
def generate_script(config, rh, template):
    """Generate an executable shell script from *template* for host *rh*.

    The template line containing the "REPLACE_WITH_VARIABLES" marker is
    replaced with shell variable assignments derived from the cluster
    configuration and the remote host; every other line is copied verbatim.
    The result is written next to the template with "-template" stripped
    from the file name, and is made group-executable (mode 0o770).

    :param config: ClusterConfiguration describing the deployment.
    :param rh: remote-host record; supplies ``host``, ``heapsize`` and ``user``.
    :param template: path of the template shell script to instantiate.
    """
    logger.info("Generating script for host " + rh.host + " from " + template)
    variables = ""
    variables += "SERVICE_DIRECTORY=" + config.service_folder + "\n"
    variables += "HEAPSIZE=\"" + rh.heapsize + "\"\n"
    variables += "USER=" + rh.user + "\n"
    variables += "WORKER_PORT=" + str(config.worker_port) + "\n"
    variables += "AGGREGATOR_PORT=" + str(config.aggregator_port) + "\n"
    variables += "CLEANUP=" + str(1 if config.cleanup_on_install() else 0) + "\n"
    variables += "TOMCAT=" + config.tomcat + "\n"
    # Read the template through a context manager so the handle is closed
    # deterministically (the previous `list(open(template))` leaked it).
    with open(template) as template_file:
        lines = template_file.readlines()
    filename = template.replace("-template", "")
    lines = [variables if "REPLACE_WITH_VARIABLES" in x else x for x in lines]
    with open(filename, "w") as f:
        # Trailing newline keeps the provenance comment from fusing with
        # the first line copied from the template.
        f.write("# Automatically generated from " + template + "\n")
        for l in lines:
            f.write(l)
    os.chmod(filename, 0o770)
def prepare_webserver(config):
    """Deploys files needed by the Hillview web server.

    Creates the remote service and bookmark folders, installs Apache
    Tomcat if it is not already present, deploys the web-app WAR as the
    Tomcat ROOT application, uploads the list of backend endpoints the
    web server should contact, and installs the generated web-server
    manager script.

    :param config: ClusterConfiguration describing the deployment.
    """
    logger.info("Creating web service folder")
    assert isinstance(config, ClusterConfiguration)
    rh = config.get_webserver()
    message = "Preparing web server " + str(rh)
    logger.info(message)
    # Service folder plus the subfolder that stores saved bookmarks;
    # both must be owned by the configured service user.
    rh.create_remote_folder(config.service_folder)
    rh.create_remote_folder(config.service_folder + "/bookmark")
    rh.run_remote_shell_command("chown " + config.get_user() + " " + config.service_folder)
    # Major version prefix (e.g. "9" from "9.0.x") selects the Apache
    # archive directory the tarball is fetched from.
    major = config.tomcat_version[0:config.tomcat_version.find('.')]
    # Download and unpack Tomcat only if the folder does not already
    # exist on the remote host; the tarball is removed afterwards.
    installTomcat = "cd " + config.service_folder + ";" + \
                    "if [ ! -d " + config.tomcat + " ]; then " + \
                    "wget http://archive.apache.org/dist/tomcat/tomcat-" + major + "/v" + \
                    config.tomcat_version + "/bin/" + config.tomcat + ".tar.gz;" + \
                    "tar xvfz " + config.tomcat + ".tar.gz;" + \
                    "rm -f " + config.tomcat + ".tar.gz; fi"
    tomcatFolder = config.service_folder + "/" + config.tomcat
    rh.run_remote_shell_command(installTomcat)
    # Replace any previously deployed ROOT application with our WAR.
    rh.run_remote_shell_command("rm -rf " + tomcatFolder + "/webapps/ROOT")
    rh.copy_file_to_remote(
        config.scriptFolder +
        "/../web/target/web-1.0-SNAPSHOT.war",
        tomcatFolder + "/webapps/ROOT.war", "")
    # Build the "serverlist" file locally: one host:port per line.  The
    # web server talks to aggregators when any are configured, otherwise
    # directly to the workers.
    tmp = tempfile.NamedTemporaryFile(mode="w", delete=False)
    agg = config.get_aggregators()
    if agg:
        for a in agg:
            tmp.write(a.host + ":" + str(config.aggregator_port) + "\n")
    else:
        for h in config.get_workers():
            tmp.write(h.host + ":" + str(config.worker_port) + "\n")
    tmp.close()
    rh.copy_file_to_remote(tmp.name, config.service_folder + "/serverlist", "")
    os.unlink(tmp.name)
    # Instantiate the manager script from its template, upload it, and
    # remove the local copy.
    generate_script(config, rh, "hillview-webserver-manager-template.sh")
    rh.copy_file_to_remote(
        "hillview-webserver-manager.sh", config.service_folder, "")
    os.unlink("hillview-webserver-manager.sh")
def create_service_folder(config, rh):
    """Create the Hillview service folder (and its hillview/ subfolder) on a
    remote host and hand ownership to the deployment user."""
    assert isinstance(config, ClusterConfiguration)
    folder = config.service_folder
    rh.create_remote_folder(folder)
    # Ownership must be fixed before any files are copied into the folder.
    rh.run_remote_shell_command("chown " + config.get_user() + " " + folder)
    rh.create_remote_folder(folder + "/hillview")
def prepare_worker(config, rh):
    """Copy the server jar and the manager scripts to one Hillview worker host."""
    assert isinstance(config, ClusterConfiguration)
    logger.info("Preparing worker " + str(rh))
    create_service_folder(config, rh)
    server_jar = (config.scriptFolder +
                  "/../platform/target/hillview-server-jar-with-dependencies.jar")
    rh.copy_file_to_remote(server_jar, config.service_folder, "")
    # The manager script is generated locally from a template, shipped to the
    # host, and the local copy deleted afterwards.
    generate_script(config, rh, "hillview-worker-manager-template.sh")
    rh.copy_file_to_remote("hillview-worker-manager.sh", config.service_folder, "")
    rh.copy_file_to_remote("forever.sh", config.service_folder, "")
    os.unlink("hillview-worker-manager.sh")
def prepare_aggregator(config, rh):
    """Copy the jar, the worker list and the manager script to one Hillview
    aggregator host."""
    assert isinstance(config, ClusterConfiguration)
    logger.info("Preparing aggregator " + str(rh))
    # When the aggregator shares a machine with a worker, the worker
    # deployment handles the service folder and jar, so skip that part here.
    shares_host_with_worker = any(w.host == rh.host for w in config.get_workers())
    if not shares_host_with_worker:
        create_service_folder(config, rh)
        rh.copy_file_to_remote(
            config.scriptFolder +
            "/../platform/target/hillview-server-jar-with-dependencies.jar",
            config.service_folder + "/hillview", "")
    # Write the "workers" file (host:port per line) locally, ship it, then
    # delete the temporary local copy.
    tmp = tempfile.NamedTemporaryFile(mode="w", delete=False)
    for child_host in rh.children:
        tmp.write(child_host + ":" + str(config.worker_port) + "\n")
    tmp.close()
    rh.copy_file_to_remote(tmp.name, config.service_folder + "/workers", "")
    os.unlink(tmp.name)
    generate_script(config, rh, "hillview-aggregator-manager-template.sh")
    rh.copy_file_to_remote("hillview-aggregator-manager.sh", config.service_folder, "")
    os.unlink("hillview-aggregator-manager.sh")
def prepare_workers(config):
    """Run prepare_worker() on every worker host of the cluster."""
    assert isinstance(config, ClusterConfiguration)
    config.run_on_all_workers(lambda remote: prepare_worker(config, remote))
def prepare_aggregators(config):
    """Run prepare_aggregator() on every aggregator host of the cluster."""
    assert isinstance(config, ClusterConfiguration)
    config.run_on_all_aggregators(lambda remote: prepare_aggregator(config, remote))
def main():
    """Parse the command line and deploy the Hillview web server,
    aggregators and workers described by the configuration file."""
    arg_parser = ArgumentParser()
    arg_parser.add_argument("config", help="json cluster configuration file")
    arguments = arg_parser.parse_args()
    cluster_config = get_config(arg_parser, arguments)
    prepare_webserver(cluster_config)
    prepare_aggregators(cluster_config)
    prepare_workers(cluster_config)
if __name__ == "__main__":
    main()
| 41.111842 | 91 | 0.678509 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,819 | 0.291087 |
4cfcde29c9ac5aec7a0ddf37cfbc7746d563e594 | 126 | py | Python | nbody/setup.py | libka-b/cython-playground | 8652b5af46b03d30e1c95187aa77f02f4588b0cd | [
"MIT"
] | null | null | null | nbody/setup.py | libka-b/cython-playground | 8652b5af46b03d30e1c95187aa77f02f4588b0cd | [
"MIT"
] | null | null | null | nbody/setup.py | libka-b/cython-playground | 8652b5af46b03d30e1c95187aa77f02f4588b0cd | [
"MIT"
] | null | null | null | from distutils.core import setup
from Cython.Build import cythonize
# Build the "nbody" extension by cythonizing nbody.pyx; setup() comes from
# distutils.core (imported on the line above this block).
setup(name="nbody", ext_modules=cythonize("nbody.pyx"))
| 21 | 55 | 0.793651 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 18 | 0.142857 |
9800cea79f063843dec0db495c3ac332aebe744e | 837 | py | Python | xv_leak_tools/test_components/firewall/windows/windows_firewall.py | RDTCREW/expressvpn_leak_testing | da710573ccbe6472c4e4588058d9ec887e61e0a9 | [
"MIT"
] | null | null | null | xv_leak_tools/test_components/firewall/windows/windows_firewall.py | RDTCREW/expressvpn_leak_testing | da710573ccbe6472c4e4588058d9ec887e61e0a9 | [
"MIT"
] | null | null | null | xv_leak_tools/test_components/firewall/windows/windows_firewall.py | RDTCREW/expressvpn_leak_testing | da710573ccbe6472c4e4588058d9ec887e61e0a9 | [
"MIT"
] | null | null | null | from xv_leak_tools.log import L
from xv_leak_tools.exception import XVEx
from xv_leak_tools.test_components.firewall.firewall import Firewall
class WindowsFirewall(Firewall):
    """Firewall backend that blocks a single outgoing IP through the Windows
    Advanced Firewall helper. Only one block rule can be active at a time."""

    def __init__(self, device, config):
        super().__init__(device, config)
        # Imported inside __init__ (as in the original) — presumably so this
        # module can be imported on non-Windows hosts; confirm before moving.
        from xv_leak_tools.network.windows.adv_firewall import WindowsAdvFirewall
        self._adv_firewall = WindowsAdvFirewall
        self._rule_name = None  # Name of the currently active block rule, if any

    def block_ip(self, ip):
        """Add an outgoing block rule for `ip`; raises if one is already active."""
        if self._rule_name is not None:
            raise XVEx("Already added block IP rule to firewall!")
        L.info("Adding outgoing IP block for {}".format(ip))
        self._rule_name = self._adv_firewall.block_ip(ip)

    def unblock_ip(self):
        """Remove the active block rule; a no-op when none was added."""
        if self._rule_name is None:
            return
        self._adv_firewall.delete_rule(self._rule_name)
        self._rule_name = None
| 34.875 | 81 | 0.703704 | 693 | 0.827957 | 0 | 0 | 0 | 0 | 0 | 0 | 75 | 0.089606 |
98014c1e42f4fbb2a89bc5013d0619e3883fc25d | 1,272 | py | Python | PythonTutor/session-10/stockprice.py | krishnamanchikalapudi/examples.py | 7a373d24df06b8882d07b850435b268a24317b1e | [
"MIT"
] | null | null | null | PythonTutor/session-10/stockprice.py | krishnamanchikalapudi/examples.py | 7a373d24df06b8882d07b850435b268a24317b1e | [
"MIT"
] | 1 | 2020-02-14T13:24:01.000Z | 2020-02-14T13:24:01.000Z | PythonTutor/session-10/stockprice.py | krishnamanchikalapudi/examples.py | 7a373d24df06b8882d07b850435b268a24317b1e | [
"MIT"
] | 2 | 2020-02-14T13:21:20.000Z | 2021-06-30T00:50:33.000Z | import urllib3
# Fixed imports: BeautifulSoup 4 is installed as "bs4" (there is no
# "beautifulsoup4" module), and urllib3 has no urlopen(); the stdlib
# urllib.request provides it.
import urllib.request
from bs4 import BeautifulSoup as bs

def get_historical_data(name, number_of_days):
    """Scrape the Yahoo Finance history table for ticker `name`.

    Returns up to `number_of_days` rows, newest first, as dictionaries of the
    form {'Date': <text>, 'Open': <float>}. Dividend rows are skipped.
    """
    data = []
    url = "https://finance.yahoo.com/quote/" + name + "/history/"
    rows = bs(urllib.request.urlopen(url).read()).findAll('table')[0].tbody.findAll('tr')
    for each_row in rows:
        divs = each_row.findAll('td')
        if divs[1].span.text != 'Dividend': #Ignore this row in the table
            #I'm only interested in 'Open' price; For other values, play with divs[1 - 5]
            data.append({'Date': divs[0].span.text, 'Open': float(divs[1].span.text.replace(',',''))})
    return data[:number_of_days]

#Test
# print get_historical_data('amzn', 15)
# https://query1.finance.yahoo.com/v7/finance/download/WFC?period1=1561874153&period2=1593496553&interval=1d&events=history
# https://query1.finance.yahoo.com/v7/finance/download/WFC?period1=1561874369&period2=1593496769&interval=1d&events=history
# https://query1.finance.yahoo.com/v7/finance/download/AMZN?period1=1561874338&period2=1593496738&interval=1d&events=history
# max
# https://query1.finance.yahoo.com/v7/finance/download/WFC?period1=76204800&period2=1593388800&interval=1d&events=history
# https://query1.finance.yahoo.com/v7/finance/download/VBIV?period1=1031097600&period2=1593388800&interval=1d&events=history
9801e4aa71a88a3fbe7c467138d27dadd01c0840 | 13,667 | py | Python | scripts/warp/warp.py | lfmontesanto/WoloxAndroidBootsrap | 5e910bcf75c3c730096fc2beaad0fcea8355d24d | [
"MIT"
] | 9 | 2018-10-10T14:11:48.000Z | 2021-08-25T14:11:14.000Z | scripts/warp/warp.py | lfmontesanto/WoloxAndroidBootsrap | 5e910bcf75c3c730096fc2beaad0fcea8355d24d | [
"MIT"
] | 10 | 2019-07-22T20:02:09.000Z | 2019-08-13T14:41:38.000Z | scripts/warp/warp.py | lfmontesanto/WoloxAndroidBootsrap | 5e910bcf75c3c730096fc2beaad0fcea8355d24d | [
"MIT"
] | 10 | 2019-04-11T00:23:47.000Z | 2022-03-07T18:54:32.000Z | #!/usr/bin/python
import os, glob, hashlib, pickle, argparse, shutil, ntpath
import os, glob, hashlib, pickle, argparse, shutil, multiprocessing, signal, sys
from multiprocessing import Pool
from functools import partial
######################### Classes ##############################
class AndroidDensity:
    """One Android drawable density bucket: display name, output folder and
    scale relative to the raw master asset."""
    def __init__(self, name, path, scaleFactor):
        self.name = name  # Human readable density name, e.g. "HDPI" (used in logs)
        self.path = path  # Output subfolder relative to the assets dir, e.g. "drawable-hdpi/"
        self.scaleFactor = scaleFactor  # Size relative to the XXX-HDPI (1.0) raw asset
class IosDensity:
    """One iOS asset density: display name, filename suffix and scale
    relative to the @3X master asset."""
    def __init__(self, name, suffix, scaleFactor):
        self.name = name  # Human readable density name, e.g. "@2X" (used in logs)
        self.suffix = suffix  # Appended to the file name before the extension ("" for 1x)
        self.scaleFactor = scaleFactor  # Size relative to the @3X (1.0) raw asset
class Colors:
    """ANSI escape sequences used to colorize terminal output."""
    PURPLE = '\033[95m'
    BLUE = '\033[94m'
    GREEN = '\033[92m'
    YELLOW = '\033[93m'
    RED = '\033[91m'
    ENDC = '\033[0m'  # Resets all attributes back to the terminal default
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'
################################################################
################# Directories configuration ####################
# Default input/output locations; overridable via -i / -o command line flags.
dirRoot = "./"
dirRaw = dirRoot + "raw/"
dirAssets = dirRoot + "drawables/"
# ScaleFactor with origin in XXXHDPI density. Source: http://jennift.com/dpical.html
androidDensities = [
    AndroidDensity("HDPI", "drawable-hdpi/", 0.375),
    AndroidDensity("X-HDPI", "drawable-xhdpi/", 0.5),
    AndroidDensity("XX-HDPI", "drawable-xxhdpi/", 0.75),
    AndroidDensity("XXX-HDPI", "drawable-xxxhdpi/", 1.0)
]
# ScaleFactor with origin in @3X density.
iosDensities = [
    IosDensity("@1X", "", 0.333333),
    IosDensity("@2X", "@2X", 0.666666),
    IosDensity("@3X", "@3X", 1.0)
]
################################################################
# Constants
STORAGE_FILE_NAME = ".warp_storage"  # Hash-store file kept inside the raw dir
TARGET_ANDROID = "android"
TARGET_IOS = "ios"
# Variables with default values; reassigned by parseCommandLineOptions().
# NOTE(review): shouldForceCleanProject is *not* initialized here — it is
# only created as a global inside parseCommandLineOptions().
poolThreads = multiprocessing.cpu_count() + 1
upToDateFiles = []
deletedFiles = []
newFiles = []
modifiedFiles = []
targetPlatform = ""
shouldCleanProject = False
shouldRunSilently = False
versionName = "1.0.1"
# Script entry point
def main():
    """Entry point: parse the options, then either clean the generated
    assets or classify and process the raw ones."""
    parseCommandLineOptions()
    greet()
    setUpPathVariables()
    cleaning_requested = shouldCleanProject or shouldForceCleanProject
    if cleaning_requested:
        cleanProject()
    else:
        makeRequiredDirectories()
        # Partition the raw files into the four state lists, then handle
        # each state separately.
        classifyRawFiles(upToDateFiles, deletedFiles, newFiles, modifiedFiles)
        processUpToDateAssets(upToDateFiles)
        processNewAssets(newFiles)
        processModifiedAssets(modifiedFiles)
        processDeletedAssets(deletedFiles)
    goodbye()
# Parse command line options and store them in variables
def parseCommandLineOptions():
    """Parse the command line and publish the results as module globals.

    Sets: targetPlatform, dirRaw, dirAssets, shouldCleanProject,
    shouldForceCleanProject, shouldRunSilently and poolThreads.
    """
    parser = argparse.ArgumentParser(description="Seamless mobile assets management")
    baseGroup = parser.add_argument_group('Basic usage')
    baseGroup.add_argument("-t", "--target",
        dest="target",
        required=True,
        choices=[TARGET_ANDROID, TARGET_IOS],
        help="specifies the platform where the assets will be used",
        metavar=TARGET_ANDROID +"/" + TARGET_IOS)
    baseGroup.add_argument("-i", "--input",
        dest="input",
        help="directory where the raw assets are located",
        metavar="\"raw/assets/path\"")
    baseGroup.add_argument("-o", "--output",
        dest="output",
        help="directory where the processed assets will be placed",
        metavar="\"proccesed/assets/path\"")
    baseGroup.add_argument("-v", "--version",
        action='version',
        version='%(prog)s ' + versionName)
    baseGroup.add_argument("-T", "--threads",
        dest="threads",
        help="number of threads to use while processing the assets",
        metavar="N",
        default=multiprocessing.cpu_count() + 1,
        type=int)
    buildGroup = parser.add_argument_group('Processing options')
    buildGroup.add_argument("-c", "--clean",
        action="store_true",
        default=False,
        dest="clean",
        help="remove every generated asset")
    buildGroup.add_argument("-f", "--force-clean",
        action="store_true",
        default=False,
        dest="force_clean",
        help="forces the removal of the output folder")
    uiGroup = parser.add_argument_group('UI')
    uiGroup.add_argument("-s", "--silent",
        action="store_true",
        default=False,
        dest="silent",
        help="doesn't show the welcome message")
    # Save parsed options as global variables
    global targetPlatform
    global dirRaw
    global dirAssets
    global shouldCleanProject
    global shouldForceCleanProject
    global shouldRunSilently
    global poolThreads
    args = parser.parse_args()
    targetPlatform = args.target
    # Input/output directories keep their module-level defaults unless given.
    if args.input: dirRaw = args.input
    if args.output: dirAssets = args.output
    shouldCleanProject = args.clean
    shouldForceCleanProject = args.force_clean
    shouldRunSilently = args.silent
    # Guard against nonsensical thread counts (0 or negative).
    poolThreads = args.threads if args.threads > 0 else 1
# Greet
def greet():
    """Print the WARP banner; skipped entirely when --silent was given."""
    if shouldRunSilently:
        return
    banner = [
        " ",
        " **********************************",
        " * _ _____ ____ ____ *",
        " * | | / / | / __ \/ __ \\ *",
        " * | | /| / / /| | / /_/ / /_/ / *",
        " * | |/ |/ / ___ |/ _, _/ ____/ *",
        " * |__/|__/_/ |_/_/ |_/_/ *",
        " * *",
        " * Wolox Assets Rapid Processor *",
        " **********************************",
        " v."+ versionName +" ",
        " "
    ]
    for banner_row in banner:
        print(Colors.PURPLE + banner_row + Colors.ENDC)
# Adds neccesary PATH variables. Useful when running the script from a non
# user shell (like with Gradle in Android)
def setUpPathVariables():
    """Append /usr/local/bin to PATH so ffmpeg/pngquant are found when the
    script runs from a non-user shell (e.g. under Gradle).

    Idempotent: the original appended unconditionally, so repeated calls grew
    PATH without bound and a missing PATH raised KeyError.
    """
    path = os.environ.get('PATH', '')
    if "/usr/local/bin" not in path.split(":"):
        os.environ['PATH'] = path + ":/usr/local/bin"
# Clears previously processed assets and the hash storage file
def cleanProject():
    """Delete every previously generated asset, the (now empty) density
    folders, the output folder and the hash storage file."""
    print(Colors.YELLOW + "Cleaning previously processed assets..." + Colors.ENDC)
    # Dictionary of previously hashed files: <file path, MD5 hash>
    storedHashedFiles = loadHashedFiles()
    # Delete all the stored files. Only the keys are needed; the original
    # iterated .iteritems() (Python-2 only, crashes on Python 3) and ignored
    # the hash value anyway.
    for path in storedHashedFiles:
        assetToClean = ntpath.basename(path)
        print(Colors.BLUE + "DELETING ASSET: " + assetToClean + Colors.ENDC)
        deleteAsset(assetToClean)
    # Remove generated density folders if empty (or unconditionally with -f).
    for density in androidDensities:
        densityDir = dirAssets + density.path
        if os.path.exists(densityDir) and (os.listdir(densityDir) == [] or shouldForceCleanProject) :
            print(Colors.BLUE + "DELETING ASSET DIRECTORY: " + densityDir + Colors.ENDC)
            if shouldForceCleanProject:
                shutil.rmtree(densityDir)
            else :
                os.rmdir(densityDir)
    # Remove assets output folder if empty
    if os.path.exists(dirAssets) and os.listdir(dirAssets) == [] :
        print(Colors.BLUE + "DELETING EMPTY OUTPUT DIRECTORY: " + dirAssets + Colors.ENDC)
        os.rmdir(dirAssets)
    # Remove storage file
    if os.path.exists(dirRaw + STORAGE_FILE_NAME):
        os.remove(dirRaw + STORAGE_FILE_NAME)
    print(Colors.YELLOW + "Assets cleared" + Colors.ENDC)
# Make the required directories to process asssets if they doesn't exist already
def makeRequiredDirectories():
    """Create the raw input folder and the per-platform output folders that
    the processing steps expect to exist."""
    # Make raw directory if needed
    if not os.path.exists(dirRaw):
        print("Making directory for raw assets: " + dirRaw)
        os.makedirs(dirRaw)
    # Make directories for Android processed assets (one per density bucket)
    if targetPlatform == TARGET_ANDROID:
        for density in androidDensities:
            if not os.path.exists(dirAssets + density.path):
                print("Making directory for Android assets: " + dirAssets + density.path)
                os.makedirs(dirAssets + density.path)
    # Make directories for iOS processed assets (iOS uses filename suffixes,
    # so a single flat output folder is enough)
    else:
        if not os.path.exists(dirAssets):
            print("Making directory for iOS assets:" + dirAssets)
            os.makedirs(dirAssets)
# Classify raw files into collections of up to date, deleted, new and modified files
def classifyRawFiles(upToDateFiles, deletedFiles, newFiles, modifiedFiles):
    """Compare current raw-file hashes with the stored ones and fill the four
    state lists (passed in and mutated) accordingly."""
    # Dictionary of previously hashed files: <file path, MD5 hash>
    storedHashedFiles = loadHashedFiles()
    # Dictionary of newly hashed files and ready to compare for diff: <file path, MD5 hash>
    recentlyHashedFiles = hashRawFiles()
    saveHashedFiles(recentlyHashedFiles)
    # Classify files by comparing recent hashes with previously hashed files.
    # Uses .items() (the Python-2-only .iteritems() crashed under Python 3).
    for path, md5 in recentlyHashedFiles.items():
        if path in storedHashedFiles:
            # CASE 1: present and the *stored* hash matches (file unchanged).
            # BUG FIX: the original compared md5 == recentlyHashedFiles[path],
            # i.e. the new hash against itself — always true — so modified
            # files were classified as up to date and never re-processed.
            if md5 == storedHashedFiles[path]:
                upToDateFiles.append(path)
            # CASE 2: present but the hashes differ (the file was modified)
            else:
                modifiedFiles.append(path)
            del storedHashedFiles[path] # Removed the processed entry
        # CASE 3: not in the previous hash dictionary, it must be a new file
        else:
            newFiles.append(path)
    # The leftovers in the previous hash dictionary must be the deleted files
    for path in storedHashedFiles:
        deletedFiles.append(path)
# Hash (MD5) files in the raw directory and return them as a dictionary <file path, MD5 hash>
def hashRawFiles():
    """Hash every PNG in the raw directory; returns {file path: MD5 hex digest}."""
    CHUNK_SIZE = 65536
    digests = {}
    for pngPath in glob.glob(dirRaw + "*.png"):
        md5 = hashlib.md5()
        # Feed the file to the hash in fixed-size chunks to keep memory flat.
        with open(pngPath, 'rb') as handle:
            chunk = handle.read(CHUNK_SIZE)
            while len(chunk) > 0:
                md5.update(chunk)
                chunk = handle.read(CHUNK_SIZE)
        digests[pngPath] = md5.hexdigest()
    return digests
# Store a dictionary of files to Hash
def saveHashedFiles(filesToHash):
    """Persist the {path: md5} dictionary in the storage file next to the raw assets."""
    storagePath = dirRaw + STORAGE_FILE_NAME
    with open(storagePath, "wb") as storage:
        pickle.dump(filesToHash, storage, pickle.HIGHEST_PROTOCOL)
# Retrieve a dictionary of hashed files
def loadHashedFiles():
    """Load the previously stored {path: md5} dictionary.

    Returns an empty dict when the storage file is missing, empty or
    truncated, so a first (or interrupted) run behaves like a fresh run
    instead of crashing.
    """
    try:
        with open(dirRaw + STORAGE_FILE_NAME, "rb") as hashStorage:
            return pickle.load(hashStorage)
    except (IOError, EOFError):
        # IOError: no storage file yet. EOFError: pickle.load on an
        # empty/truncated file (previously crashed the whole run).
        return {}
# Process files that we found in a previous run by the script
def processUpToDateAssets(upToDateFiles):
    """Log every asset that has not changed since the previous run."""
    for assetPath in upToDateFiles:
        assetName = os.path.basename(assetPath)
        print(Colors.BLUE + assetName + ": STATE > UP TO DATE" + Colors.ENDC)
# Execute a specific function in a pool of workers for every "argument" in mapArguments.
def mapInWorkers(function, mapArguments):
    """Apply `function` to every element of `mapArguments` in a process pool
    of `poolThreads` workers; exits the program on Ctrl-C."""
    pool = Pool(poolThreads)
    try:
        # map_async(...).get(timeout) instead of a plain map(): the blocking
        # get() with a (huge) timeout lets KeyboardInterrupt reach this
        # parent process while the workers run.
        pool.map_async(function, mapArguments).get(0xFFFF)
        pool.close()
    except KeyboardInterrupt:
        print(Colors.RED + "Interrupted" + Colors.ENDC)
        pool.terminate()
        sys.exit(1)
# Process files that are new to the project
def processNewAssets(newFiles):
    """Scale and compress every asset that is new since the previous run."""
    # partial() (not a lambda) so the callable can be pickled for the pool.
    handler = partial(processRawPngAssetWithTitle, "{}: STATE > NEW")
    mapInWorkers(handler, newFiles)
# Process files that were modified in the project
def processModifiedAssets(modifiedFiles):
    """Re-scale and re-compress every asset modified since the previous run."""
    # partial() (not a lambda) so the callable can be pickled for the pool.
    handler = partial(processRawPngAssetWithTitle, "{}: STATE > UPDATED")
    mapInWorkers(handler, modifiedFiles)
# Process files that were deleted from the project
def processDeletedAssets(deletedFiles):
    """Remove the generated copies of every asset deleted from the raw folder."""
    for deletedPath in deletedFiles:
        name = os.path.basename(deletedPath)
        print(Colors.BLUE + name + ": STATE > REMOVED" + Colors.ENDC)
        deleteAsset(name)
# Prints the title, replacing the keyword for the path basename, scale and compress the asset for every screen density
def processRawPngAssetWithTitle(title, rawAssetPath):
    """Print the state line (title with the file name substituted in), then
    scale and compress the asset for every density."""
    assetName = os.path.basename(rawAssetPath)
    print(Colors.BLUE + title.format(assetName) + Colors.ENDC)
    processRawPngAsset(rawAssetPath)
# Scale and compress the asset for every screen density
def processRawPngAsset(rawAssetPath):
    """Produce one scaled + compressed copy of the raw asset per target
    density (Android: one file per density folder; iOS: one suffixed file)."""
    filename = os.path.basename(rawAssetPath)
    filenameAndExtension = os.path.splitext(filename) # "example.png" -> ["example", ".png"]
    # Process assets for Android (e.g: /drawable-xxhdpi/...)
    if targetPlatform == TARGET_ANDROID:
        for density in androidDensities:
            processedAssetPath = dirAssets + density.path + filename
            sendAssetToPngPipeline(rawAssetPath, density, processedAssetPath)
    # Process assets for iOS (e.g: ...@3X) — densities are encoded in the
    # file name suffix rather than in per-density folders.
    else:
        for density in iosDensities:
            processedAssetPath = dirAssets + filenameAndExtension[0] + density.suffix + filenameAndExtension[1]
            sendAssetToPngPipeline(rawAssetPath, density, processedAssetPath)
    print(filename + ": Processed the asset for every screen density")
def sendAssetToPngPipeline(rawAssetPath, density, processedAssetPath):
    """Scale the raw asset down to the given density, then compress the result."""
    assetName = os.path.basename(rawAssetPath)
    print("{0}: SCALING to {1}".format(assetName, density.name))
    scaleImage(rawAssetPath, density.scaleFactor, processedAssetPath)
    print(assetName + ": COMPRESSING for " + density.name)
    compressPNG(processedAssetPath)
# Scale the asset for a given screen density using FFMPEG
def scaleImage(inputPath, scaleFactor, outputPath):
    """Scale the input image by `scaleFactor` (width*factor, height kept
    proportional via -1) into outputPath, using ffmpeg; -y overwrites.
    NOTE(review): paths are interpolated into a shell command, so file names
    containing quotes would break the command — confirm inputs are trusted."""
    os.system("ffmpeg -loglevel error -y -i \"{0}\" -vf scale=iw*{1}:-1 \"{2}\"".format(inputPath, scaleFactor, outputPath))
# Compress a PNG asset using PNGQuant
def compressPNG(inputPath):
    """Compress a PNG in place with pngquant (--force --ext .png overwrites
    the original file). NOTE(review): path goes through the shell unescaped."""
    os.system("pngquant \"{0}\" --force --ext .png".format(inputPath))
# Remove asset in every screen density
def deleteAsset(assetName):
    """Delete the processed copies of `assetName` from every Android density folder."""
    for density in androidDensities:
        processedPath = dirAssets + density.path + assetName
        if os.path.exists(processedPath):
            os.remove(processedPath)
            print(assetName + ": DELETED asset for " + density.name)
# Goodbye
def goodbye():
    """Print the final success message in green."""
    print(Colors.GREEN + "WARP complete!" + Colors.ENDC)
# Main call
main()
| 36.640751 | 124 | 0.655813 | 515 | 0.037682 | 0 | 0 | 0 | 0 | 0 | 0 | 4,781 | 0.349821 |
9806a9bda8cab4a2b412b1e85490eb2a071b19ed | 888 | py | Python | moviecritic/models.py | mdameenh/elysia | ff173f036d13c179191a75c3d54e47314435bc28 | [
"BSD-3-Clause"
] | null | null | null | moviecritic/models.py | mdameenh/elysia | ff173f036d13c179191a75c3d54e47314435bc28 | [
"BSD-3-Clause"
] | 3 | 2020-02-11T23:32:55.000Z | 2021-06-10T19:02:19.000Z | moviecritic/models.py | mdameenh/elysia | ff173f036d13c179191a75c3d54e47314435bc28 | [
"BSD-3-Clause"
] | null | null | null | from django.db import models
from django.contrib.postgres.fields import ArrayField
# Create your models here.
class Movie_Details(models.Model):
    """A movie record: basic facts, review scores and Postgres array fields
    for the multi-valued attributes."""
    name = models.CharField(max_length=100)
    year = models.IntegerField(default=0)
    boxoffice = models.BigIntegerField(default=0)  # Gross earnings; units not specified here
    # Review scores stored as integers. NOTE(review): IMDb ratings are
    # decimals, so this presumably holds a pre-scaled value — confirm.
    imdb = models.IntegerField(default=0)
    metacritic = models.IntegerField(default=0)
    rottentomatoes = models.IntegerField(default=0)
    # Multi-valued attributes kept as Postgres arrays (up to 50 short strings).
    genre = ArrayField(models.CharField(max_length=40), default=list, size=50)
    director = ArrayField(models.CharField(max_length=40), default=list, size=50)
    lang = ArrayField(models.CharField(max_length=40), default=list, size=50)
    country = ArrayField(models.CharField(max_length=40), default=list, size=50)
    prod = ArrayField(models.CharField(max_length=40), default=list, size=50)
def __str__(self):
return self.name | 42.285714 | 81 | 0.734234 | 777 | 0.875 | 0 | 0 | 0 | 0 | 0 | 0 | 26 | 0.029279 |
9806d568292fc34f46e2f6473bf682841aa7e86b | 399 | py | Python | djangular/tests/utils.py | jianglb-alibaba/djangular-0.2.7 | d1e2d188cf4ab8ae757bd9bc3069ffef8f0fc753 | [
"Apache-2.0"
] | 145 | 2015-01-01T12:09:30.000Z | 2022-01-28T13:59:50.000Z | djangular/tests/utils.py | jianglb-alibaba/djangular-0.2.7 | d1e2d188cf4ab8ae757bd9bc3069ffef8f0fc753 | [
"Apache-2.0"
] | 25 | 2015-01-07T11:42:21.000Z | 2016-12-14T19:23:45.000Z | djangular/tests/utils.py | jianglb-alibaba/djangular-0.2.7 | d1e2d188cf4ab8ae757bd9bc3069ffef8f0fc753 | [
"Apache-2.0"
] | 40 | 2015-02-07T13:23:09.000Z | 2022-01-28T13:59:53.000Z | import os
from djangular import utils
from django.test import SimpleTestCase
class SiteAndPathUtilsTest(SimpleTestCase):
    """Checks SiteAndPathUtils.get_djangular_root() against this file's location."""
    site_utils = utils.SiteAndPathUtils()
    def test_djangular_root(self):
        # This test module lives one directory below the djangular package
        # root, so the parent of this file's directory must equal the root
        # reported by SiteAndPathUtils.
        current_dir = os.path.dirname(os.path.abspath(__file__))
        djangular_dir = os.path.dirname(current_dir)
        self.assertEqual(djangular_dir, self.site_utils.get_djangular_root())
| 26.6 | 77 | 0.761905 | 318 | 0.796992 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
980702341198aa7e69a66076a811f979a1f20f3d | 816 | py | Python | BA1E.py | tyleryasaka/rosalind-solutions | bfd6178450523587c75a5fa42c8af617a2c0cae0 | [
"MIT"
] | null | null | null | BA1E.py | tyleryasaka/rosalind-solutions | bfd6178450523587c75a5fa42c8af617a2c0cae0 | [
"MIT"
] | null | null | null | BA1E.py | tyleryasaka/rosalind-solutions | bfd6178450523587c75a5fa42c8af617a2c0cae0 | [
"MIT"
] | null | null | null | datasetFile = open("datasets/rosalind_ba1e.txt", "r")
# Dataset layout: line 1 is the genome string, line 2 holds "k L t".
genome = datasetFile.readline().strip()
otherArgs = datasetFile.readline().strip()
# k: k-mer length, L: window length, t: minimum occurrences to form a clump.
k, L, t = map(lambda x: int(x), otherArgs.split(" "))
def findClumps(genome, k, L, t):
    """Return the set of k-mers forming (L, t)-clumps in genome.

    A k-mer forms an (L, t)-clump when some window of length L contains at
    least t (possibly overlapping) complete occurrences of it.
    Single pass: for each occurrence, check whether the t most recent
    occurrences all fit inside one window of length L.
    """
    kmerIndex = {}
    clumpedKmers = set()
    for i in range(len(genome) - k + 1):
        kmer = genome[i:i+k]
        positions = kmerIndex.setdefault(kmer, [])
        positions.append(i)
        if len(positions) >= t:
            clumpStart = positions[-t]
            # The t-th most recent occurrence starts at clumpStart and the
            # current one ends at i + k, so all t occurrences fit in a window
            # of length L exactly when (i + k) - clumpStart <= L.
            # BUG FIX: the original tested i - clumpStart <= L, ignoring the
            # k characters of the final occurrence, so it accepted clumps
            # whose last k-mer did not fully fit inside the window.
            if i + k - clumpStart <= L:
                clumpedKmers.add(kmer)
    return clumpedKmers
# Rosalind expects the clumped k-mers space-separated on one line.
solution = " ".join(findClumps(genome, k, L, t))
# NOTE(review): opened in append mode, so repeated runs accumulate output in
# the same file ("w" may be the intent); the handle is also never closed, so
# the write is only flushed at interpreter exit.
outputFile = open("output/rosalind_ba1e.txt", "a")
outputFile.write(solution)
| 31.384615 | 53 | 0.584559 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 66 | 0.080882 |
98070a04422061ac22173ccd227116ef553e0ba2 | 1,790 | py | Python | src/wee/urls.py | dipkakwani/wee_app | a0f15053ec64a49611d759eaae6d780d608bea46 | [
"MIT"
] | 2 | 2016-11-18T18:43:10.000Z | 2018-10-17T18:31:52.000Z | src/wee/urls.py | dipkakwani/wee_app | a0f15053ec64a49611d759eaae6d780d608bea46 | [
"MIT"
] | null | null | null | src/wee/urls.py | dipkakwani/wee_app | a0f15053ec64a49611d759eaae6d780d608bea46 | [
"MIT"
] | null | null | null | from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.conf.urls.static import static
from userModule.views import home
from userModule.views import userSettings
from userModule.views import logout
from groupModule.views import createGroup
from groupModule.views import group
from groupModule.views import selectgroup
from groupModule.views import groupSettings
from wee.views import *
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
import settings
# URL routing for the "wee" social app. Uses the pre-Django-1.8 patterns()
# API; view callables (home, newsfeed, ...) come from the star/module
# imports above.
urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'wee.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),
    url(r'^admin/', include(admin.site.urls)),
    url(r'^home/$', home),
    url(r'^newsfeed/$', newsfeed),
    url(r'^logout/$', logout),
    url(r'^post/$', newPost),
    url(r'^newgroup/$', createGroup),
    url(r'^settings/$', userSettings),
    url(r'^group/(?P<groupId>\d+)/$', group),
    url(r'^groups/$' , selectgroup),
    url(r'^group/(?P<groupId>\d+)/settings/$', groupSettings),
    url(r'^friends/$' , friends) ,
    url(r'^timeline/(?P<profileUserId>\d+)/(?P<change>\w)/friend/$', updateFriend),
    url(r'^timeline/(?P<profileUserId>\d+)/follow/$', updateFollow),
    url(r'^timeline/(?P<profileUserId>\d+)/$', timeline),
    url(r'^search/$', search),
    url(r'^like/(?P<postId>\d+)/$', like),
    url(r'^getlike/(?P<postId>\d+)/$', getLike),
    url(r'^comment/(?P<postId>\d+)/$', comment),
    url(r'^getcomment/(?P<postId>\d+)/$', getComment),
    url(r'^share/(?P<postId>\d+)/$', share),
    url(r'^getshare/(?P<postId>\d+)/$', getShare),
)
# Serve collected static files and user-uploaded media (dev-style setup).
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
# Catch-all: anything not matched above is routed to the notfound view.
urlpatterns += patterns('', url(r'^.*/$', notfound), )
| 37.291667 | 83 | 0.663128 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 605 | 0.337989 |
98084799ad8be61ab260536f6d3ffcf5c84ac7c4 | 858 | py | Python | src/encoded/searches/responses.py | procha2/encoded | e9f122362b71f3b8641023b8d2d5ad531d3484b7 | [
"MIT"
] | 102 | 2015-05-20T01:17:43.000Z | 2022-03-07T06:03:55.000Z | src/encoded/searches/responses.py | procha2/encoded | e9f122362b71f3b8641023b8d2d5ad531d3484b7 | [
"MIT"
] | 901 | 2015-01-07T23:11:57.000Z | 2022-03-18T13:56:12.000Z | src/encoded/searches/responses.py | procha2/encoded | e9f122362b71f3b8641023b8d2d5ad531d3484b7 | [
"MIT"
] | 65 | 2015-02-06T23:00:26.000Z | 2022-01-22T07:58:44.000Z | from encoded.searches.mixins import CartAggsToFacetsMixin
from snosearch.responses import BasicQueryResponseWithFacets
from snosearch.responses import BasicMatrixResponseWithFacets
class CartQueryResponseWithFacets(CartAggsToFacetsMixin, BasicQueryResponseWithFacets):
    '''
    Like BasicQueryResponseWithFacets but uses CartAggsToFacetsMixin instead of AggsToFacetsMixin.

    Constructed exactly like the base class: (results, query_builder, *args, **kwargs).
    '''
    # The previous __init__ only forwarded all arguments to super().__init__()
    # (useless super delegation); the inherited constructor already does that.
class CartMatrixResponseWithFacets(CartAggsToFacetsMixin, BasicMatrixResponseWithFacets):
    '''
    Like BasicMatrixResponseWithFacets but uses CartAggsToFacetsMixin instead of AggsToFacetsMixin.

    Constructed exactly like the base class: (results, query_builder, *args, **kwargs).
    '''
    # The previous __init__ only forwarded all arguments to super().__init__()
    # (useless super delegation); the inherited constructor already does that.
| 39 | 99 | 0.79021 | 671 | 0.782051 | 0 | 0 | 0 | 0 | 0 | 0 | 221 | 0.257576 |
98091677a4abdf5ae7609e479582ef5c7e59c7ee | 4,215 | py | Python | unit-tests/live/frames/test-t2ff-sensor.py | ksvbka/librealsense | 72ada08ab2f6e9876d47135eea12325963205a04 | [
"Apache-2.0"
] | 6,457 | 2016-01-21T03:56:07.000Z | 2022-03-31T11:57:15.000Z | unit-tests/live/frames/test-t2ff-sensor.py | ksvbka/librealsense | 72ada08ab2f6e9876d47135eea12325963205a04 | [
"Apache-2.0"
] | 8,393 | 2016-01-21T09:47:28.000Z | 2022-03-31T22:21:42.000Z | unit-tests/live/frames/test-t2ff-sensor.py | ksvbka/librealsense | 72ada08ab2f6e9876d47135eea12325963205a04 | [
"Apache-2.0"
] | 4,874 | 2016-01-21T09:20:08.000Z | 2022-03-31T15:18:00.000Z | # License: Apache 2.0. See LICENSE file in root directory.
# Copyright(c) 2021 Intel Corporation. All Rights Reserved.
# test:device L500*
# test:device D400*
import pyrealsense2 as rs
from rspy.stopwatch import Stopwatch
from rspy import test, log
import time
import platform
# Start depth + color streams and measure the time from stream opened until first frame arrived using sensor API.
# Verify that the time do not exceeds the maximum time allowed
# Note - Using Windows Media Foundation to handle power management between USB actions take time (~27 ms)
def time_to_first_frame(sensor, profile, max_delay_allowed):
    """
    Open the given profile on the sensor and wait up to 'max_delay_allowed'
    (plus one extra second of grace) for the first frame.
    Returns the seconds elapsed since open() when a frame arrived, or
    'max_delay_allowed' when no frame showed up in time.
    """
    arrival_time = max_delay_allowed  # Sentinel: unchanged until a frame lands
    stopwatch = Stopwatch()

    def on_frame(frame):
        nonlocal arrival_time
        # Record only the very first frame; later callbacks are ignored.
        if arrival_time == max_delay_allowed:
            arrival_time = stopwatch.get_elapsed()

    stopwatch.reset()
    sensor.open(profile)
    sensor.start(on_frame)
    # Poll until either the callback fired or the grace period (+1s) ran out.
    while arrival_time == max_delay_allowed and stopwatch.get_elapsed() < max_delay_allowed + 1:
        time.sleep(0.05)
    sensor.stop()
    sensor.close()
    return arrival_time
# The device starts at D0 (Operational) state, allow time for it to get into idle state
time.sleep(3)
#####################################################################################################
test.start("Testing device creation time on " + platform.system() + " OS")
device_creation_stopwatch = Stopwatch()
dev = test.find_first_device_or_exit()
device_creation_time = device_creation_stopwatch.get_elapsed()
max_time_for_device_creation = 1.5
print("Device creation time is: {:.3f} [sec] max allowed is: {:.1f} [sec] ".format(device_creation_time, max_time_for_device_creation))
test.check(device_creation_time < max_time_for_device_creation)
test.finish()
# Set maximum delay for first frame according to product line
product_line = dev.get_info(rs.camera_info.product_line)
if product_line == "D400":
    max_delay_for_depth_frame = 1.5
    max_delay_for_color_frame = 1.5
elif product_line == "L500":
    max_delay_for_depth_frame = 2.5 # L515 depth frame has a 1.5 seconds built in delay at the FW side + 1.0 second for LRS
    max_delay_for_color_frame = 1.5
else:
    log.f( "This test support only D400 + L515 devices" )
ds = dev.first_depth_sensor()
cs = dev.first_color_sensor()
# Pick one 30 fps depth (z16) and one 30 fps color (rgb8) profile to test.
dp = next(p for p in
          ds.profiles if p.fps() == 30
          and p.stream_type() == rs.stream.depth
          and p.format() == rs.format.z16)
cp = next(p for p in
          cs.profiles if p.fps() == 30
          and p.stream_type() == rs.stream.color
          and p.format() == rs.format.rgb8)
#####################################################################################################
test.start("Testing first depth frame delay on " + product_line + " device - "+ platform.system() + " OS")
first_depth_frame_delay = time_to_first_frame(ds, dp, max_delay_for_depth_frame)
print("Time until first depth frame is: {:.3f} [sec] max allowed is: {:.1f} [sec] ".format(first_depth_frame_delay, max_delay_for_depth_frame))
test.check(first_depth_frame_delay < max_delay_for_depth_frame)
test.finish()
#####################################################################################################
test.start("Testing first color frame delay on " + product_line + " device - "+ platform.system() + " OS")
first_color_frame_delay = time_to_first_frame(cs, cp, max_delay_for_color_frame)
print("Time until first color frame is: {:.3f} [sec] max allowed is: {:.1f} [sec] ".format(first_color_frame_delay, max_delay_for_color_frame))
test.check(first_color_frame_delay < max_delay_for_color_frame)
test.finish()
#####################################################################################################
test.print_results_and_exit()
| 39.392523 | 143 | 0.66382 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,823 | 0.432503 |
9809d378624d6c3ff7910c3c40281d8828a87c73 | 379 | py | Python | test_apps/python_app/tests/_shared/for_proc.py | Origen-SDK/o2 | 5b0f9a6d113ddebc73c7ee224931e8b2d0301794 | [
"MIT"
] | null | null | null | test_apps/python_app/tests/_shared/for_proc.py | Origen-SDK/o2 | 5b0f9a6d113ddebc73c7ee224931e8b2d0301794 | [
"MIT"
] | 127 | 2019-11-23T17:09:35.000Z | 2021-09-02T11:06:20.000Z | test_apps/python_app/tests/_shared/for_proc.py | Origen-SDK/o2 | 5b0f9a6d113ddebc73c7ee224931e8b2d0301794 | [
"MIT"
def setenv(config_root, config_name=None, bypass_config_lookup=None):
    """Point Origen at a test-specific TOML config via environment variables.

    Sets ``origen_config_paths`` to ``<config_root>/<config_name>.toml``
    (absolute path). When *config_name* is omitted it defaults to the name
    of the calling function, so each test picks up a config file named
    after itself.

    :param config_root: a ``pathlib.Path``-like directory containing the
        ``.toml`` config files (must support ``joinpath``).
    :param config_name: basename of the config file without extension;
        defaults to the caller's function name.
    :param bypass_config_lookup: when truthy, also sets
        ``origen_bypass_config_lookup=1`` to disable the normal config
        search.
    """
    # Unused ``pathlib`` and ``sys`` imports removed; only these two are needed.
    import os, inspect
    if bypass_config_lookup:
        os.environ['origen_bypass_config_lookup'] = "1"
    if config_name is None:
        # stack()[1] is the immediate caller of setenv().
        config_name = inspect.stack()[1].function
    os.environ['origen_config_paths'] = str(
        config_root.joinpath(f"{config_name}.toml").absolute())
980de4c0b0b2bdc8da8fd8f2177a3dbda0b77669 | 525 | py | Python | example/serializers.py | divipayhq/drf_model_pusher | 37f2d7c61839f641675da8db2bd4ec4932d4bd47 | [
"MIT"
] | 5 | 2018-07-23T00:21:29.000Z | 2020-01-16T15:42:10.000Z | example/serializers.py | divipayhq/drf_model_pusher | 37f2d7c61839f641675da8db2bd4ec4932d4bd47 | [
"MIT"
] | 39 | 2018-07-25T03:42:25.000Z | 2021-12-20T03:51:13.000Z | example/serializers.py | divipayhq/drf_model_pusher | 37f2d7c61839f641675da8db2bd4ec4932d4bd47 | [
"MIT"
] | 3 | 2018-10-02T16:29:34.000Z | 2022-02-22T03:04:50.000Z | from rest_framework import serializers
from example.models import MyPublicModel, MyPrivateModel, MyPresenceModel
class MyPublicModelSerializer(serializers.ModelSerializer):
    """Serializer for MyPublicModel; exposes only the ``name`` field."""
    class Meta:
        model = MyPublicModel
        fields = ("name",)
class MyPrivateModelSerializer(serializers.ModelSerializer):
    """Serializer for MyPrivateModel; exposes only the ``name`` field."""
    class Meta:
        model = MyPrivateModel
        fields = ("name",)
class MyPresenceModelSerializer(serializers.ModelSerializer):
    """Serializer for MyPresenceModel; exposes only the ``name`` field."""
    class Meta:
        model = MyPresenceModel
        fields = ("name",)
| 23.863636 | 73 | 0.721905 | 402 | 0.765714 | 0 | 0 | 0 | 0 | 0 | 0 | 18 | 0.034286 |
980e6c10d5ee3bf2bd644b6f6b236ff8581231c5 | 1,164 | py | Python | src/analyzer/token.py | betoSolares/minicomp | 4fb05b94acdf085135924c717a470fe518e462ff | [
"MIT"
] | 1 | 2020-08-18T16:02:38.000Z | 2020-08-18T16:02:38.000Z | src/analyzer/token.py | betoSolares/minicomp | 4fb05b94acdf085135924c717a470fe518e462ff | [
"MIT"
] | 27 | 2020-08-18T17:06:50.000Z | 2020-11-21T19:07:22.000Z | src/analyzer/token.py | betoSolares/minicomp | 4fb05b94acdf085135924c717a470fe518e462ff | [
"MIT"
class Token:
    """A lexical token produced by the analyzer.

    This is a plain data holder. The original implementation wrapped every
    field in a property that delegated to a dunder-suffixed attribute
    (``self.__word__`` etc.) without adding any validation or computation,
    so the fields are now exposed as plain read/write attributes. The
    public interface is unchanged: ``token.word`` (and the other fields)
    can still be read and assigned exactly as before.
    """

    def __init__(self, word, line, start, finish, category, reason=None):
        self.word = word          # the matched lexeme text
        self.line = line          # source line the lexeme was found on
        self.start = start        # offset where the lexeme begins -- presumably a column index; confirm against the scanner
        self.finish = finish      # offset where the lexeme ends
        self.category = category  # token class (identifier, keyword, error, ...)
        self.reason = reason      # optional explanation (e.g. why a token was rejected)

    def __repr__(self):
        # Debug-friendly representation; added for convenience, not part of
        # the original interface.
        return (f"{type(self).__name__}(word={self.word!r}, line={self.line!r}, "
                f"start={self.start!r}, finish={self.finish!r}, "
                f"category={self.category!r}, reason={self.reason!r})")
| 20.421053 | 73 | 0.607388 | 1,163 | 0.999141 | 0 | 0 | 813 | 0.698454 | 0 | 0 | 0 | 0 |
980e80bc65953c70a1323e043e388be210aa7e04 | 5,033 | py | Python | spektral/layers/convolutional/graph_conv.py | msaroufim/spektral | 6881e6650602b2f98b09516f490c185678075bc8 | [
"MIT"
] | 1 | 2020-06-25T02:08:40.000Z | 2020-06-25T02:08:40.000Z | spektral/layers/convolutional/graph_conv.py | msaroufim/spektral | 6881e6650602b2f98b09516f490c185678075bc8 | [
"MIT"
] | null | null | null | spektral/layers/convolutional/graph_conv.py | msaroufim/spektral | 6881e6650602b2f98b09516f490c185678075bc8 | [
"MIT"
] | null | null | null | from tensorflow.keras import activations, initializers, regularizers, constraints
from tensorflow.keras import backend as K
from tensorflow.keras.layers import Layer
from spektral.layers import ops
from spektral.utils import localpooling_filter
class GraphConv(Layer):
    r"""
    A graph convolutional layer (GCN) as presented by
    [Kipf & Welling (2016)](https://arxiv.org/abs/1609.02907).

    **Mode**: single, disjoint, mixed, batch.

    This layer computes:
    $$
        \Z = \hat \D^{-1/2} \hat \A \hat \D^{-1/2} \X \W + \b
    $$
    where \( \hat \A = \A + \I \) is the adjacency matrix with added self-loops
    and \(\hat\D\) is its degree matrix.

    **Input**

    - Node features of shape `([batch], N, F)`;
    - Modified Laplacian of shape `([batch], N, N)`; can be computed with
    `spektral.utils.convolution.localpooling_filter`.

    **Output**

    - Node features with the same shape as the input, but with the last
    dimension changed to `channels`.

    **Arguments**

    - `channels`: number of output channels;
    - `activation`: activation function to use;
    - `use_bias`: bool, add a bias vector to the output;
    - `kernel_initializer`: initializer for the weights;
    - `bias_initializer`: initializer for the bias vector;
    - `kernel_regularizer`: regularization applied to the weights;
    - `bias_regularizer`: regularization applied to the bias vector;
    - `activity_regularizer`: regularization applied to the output;
    - `kernel_constraint`: constraint applied to the weights;
    - `bias_constraint`: constraint applied to the bias vector.
    """

    def __init__(self,
                 channels,
                 activation=None,
                 use_bias=True,
                 kernel_initializer='glorot_uniform',
                 bias_initializer='zeros',
                 kernel_regularizer=None,
                 bias_regularizer=None,
                 activity_regularizer=None,
                 kernel_constraint=None,
                 bias_constraint=None,
                 **kwargs):
        super().__init__(activity_regularizer=activity_regularizer, **kwargs)
        self.channels = channels
        self.activation = activations.get(activation)
        self.use_bias = use_bias
        self.kernel_initializer = initializers.get(kernel_initializer)
        self.bias_initializer = initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.bias_regularizer = regularizers.get(bias_regularizer)
        self.kernel_constraint = constraints.get(kernel_constraint)
        self.bias_constraint = constraints.get(bias_constraint)
        self.supports_masking = False

    def build(self, input_shape):
        # input_shape is a list: [features_shape, laplacian_shape].
        assert len(input_shape) >= 2
        input_dim = input_shape[0][-1]
        self.kernel = self.add_weight(shape=(input_dim, self.channels),
                                      initializer=self.kernel_initializer,
                                      name='kernel',
                                      regularizer=self.kernel_regularizer,
                                      constraint=self.kernel_constraint)
        if self.use_bias:
            self.bias = self.add_weight(shape=(self.channels,),
                                        initializer=self.bias_initializer,
                                        name='bias',
                                        regularizer=self.bias_regularizer,
                                        constraint=self.bias_constraint)
        else:
            self.bias = None
        self.built = True

    def call(self, inputs):
        features = inputs[0]
        fltr = inputs[1]

        # Convolution: X W, then propagate with the (normalized) filter.
        output = ops.dot(features, self.kernel)
        output = ops.filter_dot(fltr, output)

        if self.use_bias:
            output = K.bias_add(output, self.bias)
        if self.activation is not None:
            output = self.activation(output)
        return output

    def compute_output_shape(self, input_shape):
        features_shape = input_shape[0]
        # Same shape as the node features, last dim replaced by `channels`.
        output_shape = features_shape[:-1] + (self.channels,)
        return output_shape

    def get_config(self):
        config = {
            'channels': self.channels,
            'activation': activations.serialize(self.activation),
            'use_bias': self.use_bias,
            'kernel_initializer': initializers.serialize(self.kernel_initializer),
            'bias_initializer': initializers.serialize(self.bias_initializer),
            'kernel_regularizer': regularizers.serialize(self.kernel_regularizer),
            'bias_regularizer': regularizers.serialize(self.bias_regularizer),
            'kernel_constraint': constraints.serialize(self.kernel_constraint),
            'bias_constraint': constraints.serialize(self.bias_constraint)
        }
        base_config = super().get_config()
        return dict(list(base_config.items()) + list(config.items()))

    @staticmethod
    def preprocess(A):
        # Compute the normalized Laplacian filter expected by call().
        return localpooling_filter(A)
980f19d193d1b32136b75cbaadce101f548347ae | 31 | py | Python | test.py | s-rohil/infrastructure-as-a-code | 9919a9ea4a0a4f92cf7859cdc1919962ed6ba345 | [
"MIT"
] | null | null | null | test.py | s-rohil/infrastructure-as-a-code | 9919a9ea4a0a4f92cf7859cdc1919962ed6ba345 | [
"MIT"
] | null | null | null | test.py | s-rohil/infrastructure-as-a-code | 9919a9ea4a0a4f92cf7859cdc1919962ed6ba345 | [
"MIT"
] | null | null | null | Test
Dummy code to make commit
| 10.333333 | 25 | 0.806452 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
980f957ee23d0d452e8c9e89e704668d40c8799c | 28,488 | py | Python | pyBirdcagebuilder.py | umcu7tcoillab/pyBirdcagebuilder | ae4ee3c6f41888e358cd390e9ec996f3ad9b07b7 | [
"MIT"
] | null | null | null | pyBirdcagebuilder.py | umcu7tcoillab/pyBirdcagebuilder | ae4ee3c6f41888e358cd390e9ec996f3ad9b07b7 | [
"MIT"
] | null | null | null | pyBirdcagebuilder.py | umcu7tcoillab/pyBirdcagebuilder | ae4ee3c6f41888e358cd390e9ec996f3ad9b07b7 | [
"MIT"
] | 2 | 2020-09-01T15:51:18.000Z | 2022-03-08T03:15:41.000Z | #!/usr/bin/env python3
"""
Description: pyBirdcagebuilder is a program that calculates ideal capacitor values for birdcage coil designs.
This program is based on the original Birdcage Builder program provided by PennState Health.
(https://research.med.psu.edu/departments/center-for-nmr-research/software/birdcage-builder-web-app/)
Author: Dimitri Welting
Website: http://github.com/dwelting/pyBirdcagebuilder
License: Copyright (c) 2020 Dimitri Welting. All rights reserved.
Distributed under the MIT license. The full text of the license can be found in the LICENSE file or on the above-mentioned website.
This code is free to download and use. Any paid service providing this code is not endorsed by the author.
"""
import tkinter as tk
from tkinter import ttk, font
from tkinter import messagebox as mb
import math
import os
import lib.my_tk as my_tk
from lib.logging import logger
from lib.birdcage_math import CalculateBirdcage
from lib.config import MyConfig
# Application-wide constants.
PROGRAM_NAME = "pyBirdcagebuilder"  # shown in window titles and the about dialog
VERSION = "v0.1"
WEBSITE = "github.com/dwelting/pyBirdcagebuilder"
# NOTE(review): resolved relative to the *current working directory*, not the
# script location -- icons will not be found if launched from elsewhere; the
# trailing "" keeps a path separator at the end. TODO confirm intended.
ICON_FOLDER = os.path.join(os.getcwd(), "icon", "")
MAX_PRECISION = 2  # decimal places displayed in read-only result fields
class MainApplication:
	"""Top-level application controller.

	Builds the menu bar, the three notebook tabs (settings / results /
	more-info) and the calculation engine, and wires them together.
	The integer class constants below are shared enum-like codes that the
	tab classes compare against via ``self.parent.<NAME>``.
	"""

	# Fixed main-window size in pixels.
	WINDOWSIZEx = 500
	WINDOWSIZEy = 550

	# Birdcage configuration codes.
	HIGHPASS = 1
	LOWPASS = 2
	BANDPASS = 3
	# Conductor cross-section codes (legs and end ring).
	RECT = 1
	TUBE = 2
	# Where the predetermined band-pass capacitor sits.
	LEG = 1
	ER = 2
	# Coil outline codes.
	ELLIPSE = 0
	CIRCLE = 1
	# Ellipse axis codes.
	SHORT = 1
	LONG = 0

	def __init__(self, window):
		"""Assemble all GUI parts inside the given Tk root *window*."""
		self._setWindow(window)
		self.window = window

		self.Config = MyConfig(self)

		# menubar
		self.menuBar = MyMenuBar(self)
		window.config(menu=self.menuBar.menu)

		self.tab_control = ttk.Notebook(window)  # tabs

		self.calcCapacitance = CalculateBirdcage(self)
		self.guiTabSettings = MySettingsTab(self)
		self.guiTabResults = MyResultsTab(self)
		self.guiTabMoreInfo = MyMoreInfoTab(self)

		self.tab_control.add(self.guiTabSettings.tab, text=' Settings ')
		self.tab_control.add(self.guiTabResults.tab, text=' Results ')
		self.tab_control.add(self.guiTabMoreInfo.tab, text=' More Information ')
		self.tab_control.pack(expand=1, fill='both', padx=(5, 5), pady=(5, 5))

		if os.name == 'posix':
			bg = ttk.Style().lookup('TFrame', 'background')
			self.guiTabSettings.tab.tk_setPalette(background=bg)  # only useful in linux. Hides color inconsistencies between widget bg and frame bg

	def _setWindow(self, window):
		"""Set title, fix the window size, and install the app icon."""
		window.title(f"{PROGRAM_NAME}")  # {VERSION}")
		my_tk.centerWindow(window, self.WINDOWSIZEx, self.WINDOWSIZEy)
		# max == min and resizable(0, 0): the window size is completely fixed.
		window.maxsize(self.WINDOWSIZEx, self.WINDOWSIZEy)
		window.minsize(self.WINDOWSIZEx, self.WINDOWSIZEy)
		window.resizable(0, 0)

		try:
			if os.name == 'posix':  # windows doesn't like 32px icons
				icon = "icon32.png"
			else:
				icon = "icon16.png"
			window.iconphoto(True, tk.PhotoImage(file=ICON_FOLDER + icon))
		except tk.TclError:
			# Missing icon is non-fatal; just log it.
			# NOTE(review): ``warn`` -- presumably the custom lib.logging logger
			# provides it (stdlib logging deprecates warn in favor of warning).
			logger.warn("Icon error: no application icon found?")

	def startCalculation(self):
		"""Validate inputs, run the capacitance calculation, and show results.

		Aborts silently if validateInputs() reported a problem (it already
		showed a warning dialog to the user).
		"""
		inputs_ = self.guiTabSettings.validateInputs()
		if not inputs_:
			return
		logger.info("Calculation started with values:\n\t\t\t" + "\n\t\t\t".join("{}: {}".format(k, v) for k, v in inputs_.items()))

		self.calcCapacitance.calculate()
		self.guiTabResults.drawCapacitors()
		self.guiTabResults.drawGraph()

		self.tab_control.select(1)  # At end switch tab to results
class MyMenuBar:
	"""Main-window menu bar (File / Help; Options is currently disabled)."""

	def __init__(self, parent):
		self.parent = parent

		self.coil_shape = tk.BooleanVar()  # selects which coil type
		self.coil_shape.set(True)

		self.menu = tk.Menu(parent.window)
		self._fileMenu()
		# self._optionMenu() #todo add when elliptical is fixed
		self._helpMenu()

	def _optionMenu(self):
		"""Build the (disabled) circular/elliptical coil-shape menu.

		NOTE(review): Menu checkbutton commands are invoked without
		arguments, so ``lambda e: ...`` would raise if this menu were
		enabled -- confirm before re-enabling.
		"""
		optionmenu = tk.Menu(self.menu, tearoff=0)
		self.menu.add_cascade(label="Options", menu=optionmenu)
		optionmenu.add_checkbutton(label="Circular", onvalue=self.parent.CIRCLE, offvalue=self.parent.ELLIPSE, command=lambda e: print("Not yet implemented"), variable=self.coil_shape)
		optionmenu.add_checkbutton(label="Eliptical", onvalue=self.parent.ELLIPSE, offvalue=self.parent.CIRCLE, command=lambda e: print("Not yet implemented"), variable=self.coil_shape)

	def _helpMenu(self):
		"""Build the Help menu with the About dialog entry."""
		helpmenu = tk.Menu(self.menu, tearoff=0)
		self.menu.add_cascade(label="Help", menu=helpmenu)
		# Instantiating MyAboutWindow opens the dialog, so the class itself
		# serves as the command callback.
		helpmenu.add_command(label=f"About {PROGRAM_NAME}", command=MyAboutWindow)

	def _fileMenu(self):
		"""Build the File menu (config save/load entries are disabled)."""
		filemenu = tk.Menu(self.menu, tearoff=0)
		self.menu.add_cascade(label="File", menu=filemenu)
		# filemenu.add_command(label="Save config", command=self.parent.Config.save)
		# filemenu.add_command(label="Save config as...", command=self.parent.Config.saveAs)
		# filemenu.add_command(label="Load config", command=self.parent.Config.load)
		# filemenu.add_separator()
		filemenu.add_command(label="Exit", command=self.parent.window.quit)
class MyMoreInfoTab:
	"""'More Information' tab: read-only display of calculated inductances.

	The four DoubleVars are written by the calculation engine
	(CalculateBirdcage) and shown here in read-only entries.
	"""

	def __init__(self, parent):
		self.parent = parent
		self.tab = ttk.Frame(parent.tab_control)

		self.v_ind_self_legs = tk.DoubleVar()  # calculated self inductance of legs
		self.v_ind_self_er = tk.DoubleVar()  # calculated self inductance of end ring
		self.v_ind_eff_legs = tk.DoubleVar()  # calculated effective inductance of legs
		self.v_ind_eff_er = tk.DoubleVar()  # calculated effective inductance of end ring

		# 2x2 grid of values: rows = legs / end-ring segments,
		# columns = self / effective inductance (nH).
		lf_ind_calc = tk.LabelFrame(self.tab, text="Inductance Calculations", font=myfont_bold)
		lbl_self = tk.Label(lf_ind_calc, text="Self (nH)", font=myfont_bold, foreground="blue")
		lbl_eff = tk.Label(lf_ind_calc, text="Effective (nH)", font=myfont_bold, foreground="blue")
		lb_legs = tk.Label(lf_ind_calc, text="Legs", font=myfont_bold)
		lb_er = tk.Label(lf_ind_calc, text="ER Seg.", font=myfont_bold)
		txt_self_legs = my_tk.MyEntry(lf_ind_calc, text=self.v_ind_self_legs, read_only=True, decimals=MAX_PRECISION)
		txt_self_er = my_tk.MyEntry(lf_ind_calc, text=self.v_ind_self_er, read_only=True, decimals=MAX_PRECISION)
		txt_eff_legs = my_tk.MyEntry(lf_ind_calc, text=self.v_ind_eff_legs, read_only=True, decimals=MAX_PRECISION)
		txt_eff_er = my_tk.MyEntry(lf_ind_calc, text=self.v_ind_eff_er, read_only=True, decimals=MAX_PRECISION)

		lbl_self.grid(column=1, row=0, sticky=tk.NSEW, padx=(0, 20), pady=(0, 5))
		lbl_eff.grid(column=2, row=0, sticky=tk.NSEW, padx=(0, 5), pady=(0, 5))
		lb_legs.grid(column=0, row=1, sticky=tk.NSEW, pady=(0, 10))
		lb_er.grid(column=0, row=2, sticky=tk.NSEW, pady=(0, 10))
		txt_self_legs.grid(column=1, row=1, sticky=tk.NW)
		txt_self_er.grid(column=1, row=2, sticky=tk.NW)
		txt_eff_legs.grid(column=2, row=1, sticky=tk.NW)
		txt_eff_er.grid(column=2, row=2, sticky=tk.NW)

		tk.Grid.columnconfigure(self.tab, 0, weight=0)
		tk.Grid.columnconfigure(self.tab, 3, weight=1)
		lf_ind_calc.grid(column=0, row=0, sticky=tk.NSEW, pady=(5, 5), padx=(5, 0))
class MyResultsTab:
	"""'Results' tab: calculated capacitance plus two canvas plots.

	Left canvas: normalized current in each leg versus its angular
	position. Right canvas: top-down view of the coil with leg positions
	(red dots) and capacitor placements ('c' markers).
	"""

	def __init__(self, parent):
		self.parent = parent
		self.tab = ttk.Frame(parent.tab_control)

		self.v_cap_res = tk.DoubleVar()  # calculated cap value

		lbl_cap = tk.Label(self.tab, text="Calculated Capacitance (pF)", font=myfont_bold, foreground="blue")
		txt_cap_res = my_tk.MyEntry(self.tab, text=self.v_cap_res, read_only=True, decimals=MAX_PRECISION)

		tk.Grid.columnconfigure(self.tab, 1, weight=0)
		tk.Grid.columnconfigure(self.tab, 0, weight=1)
		tk.Grid.columnconfigure(self.tab, 3, weight=1)

		lbl_cap.grid(column=1, row=0, columnspan=2, sticky=tk.NW, pady=(5, 5), padx=(5, 0))
		txt_cap_res.grid(column=1, row=1, sticky=tk.NW, pady=(0, 50), padx=(5, 0))

		self._initializeGraphs()

	def _initializeGraphs(self):
		"""Create both canvases with their labels and draw the empty axes."""
		# current graph
		self.canvas_size = 200  # both canvases are square, in pixels

		frm_curr_plot = tk.LabelFrame(self.tab)
		lbl_curr_plot = tk.Label(frm_curr_plot, text="Current distribution in the legs", font=myfont_small_bold, foreground="blue")
		lbl_curr_plot.pack(anchor='nw')
		lbl_curr_plot2 = tk.Label(frm_curr_plot, text="Normalized current intensity", font=myfont_small, foreground="black")
		lbl_curr_plot2.pack(anchor='nw')
		self.canvas_curr = tk.Canvas(frm_curr_plot, width=self.canvas_size, height=self.canvas_size, borderwidth=0, highlightbackground="grey")
		self._drawGraphAxis()
		self.canvas_curr.pack(pady=(10, 10))
		lbl_curr_plot2 = tk.Label(frm_curr_plot, text="Angular position of leg (degree)\nZero beginning at +X direction", font=myfont_small, foreground="black")
		lbl_curr_plot2.pack(anchor='nw')

		# capacitor graph
		frm_cap_pos = tk.LabelFrame(self.tab)
		lbl_cap_pos = tk.Label(frm_cap_pos, text="Position of legs and capacitors", font=myfont_small_bold, foreground="blue")
		lbl_cap_pos.pack(anchor='nw')
		lbl_curr_plot2 = tk.Label(frm_cap_pos, text="C: Capacitor", font=myfont_small, foreground="black")
		lbl_curr_plot2.pack(anchor='nw')
		self.canvas_cap = tk.Canvas(frm_cap_pos, width=self.canvas_size, height=self.canvas_size, borderwidth=1, highlightbackground="grey")
		self._drawCapacitorAxis()
		self.canvas_cap.pack(pady=(10, 10))

		frm_curr_plot.grid(column=1, row=2, sticky=tk.NW, padx=(5, 10))
		frm_cap_pos.grid(column=2, row=2, sticky=tk.NW)

	def _drawCapacitorAxis(self):
		"""Draw the coil outline (circle or ellipse) and the X/Y axes."""
		if self.parent.menuBar.coil_shape.get() == self.parent.CIRCLE:
			self.canvas_cap.create_oval(20, 20, self.canvas_size-20, self.canvas_size-20, outline="green", width=1)
		else:
			# Elliptical coil: squash the vertical axis by the short/long
			# diameter ratio, keeping a 20 px margin on the long axis.
			ratio = self.parent.guiTabSettings.v_coil_short_diameter.get() / self.parent.guiTabSettings.v_coil_long_diameter.get()
			x_min = 20
			x_max = self.canvas_size - 20
			y_min = self.canvas_size/2 - ((self.canvas_size/2 - 20) * ratio)
			y_max = self.canvas_size/2 + ((self.canvas_size/2 - 20) * ratio)
			self.canvas_cap.create_oval(x_min, y_min, x_max, y_max, outline="green", width=1)
		self.canvas_cap.create_line(self.canvas_size/2, 0, self.canvas_size/2, self.canvas_size, fill="blue", width=1)
		self.canvas_cap.create_line(0, self.canvas_size/2, self.canvas_size, self.canvas_size/2, fill="blue", width=1)
		self.canvas_cap.create_text(self.canvas_size/2+((self.canvas_size-40)/2)-20, self.canvas_size/2+10, text="+X", font='freemono 9', fill='black')

	def drawCapacitors(self):
		"""Redraw leg positions (red dots) and capacitor markers ('c').

		Uses the leg angles computed by CalculateBirdcage. High-pass puts
		capacitors on the end ring (between legs), low-pass on the legs,
		band-pass on both.
		"""
		self.canvas_cap.delete("all")
		self._drawCapacitorAxis()

		thetas = self.parent.calcCapacitance.thetas
		bc_mode = self.parent.guiTabSettings.v_rb_config_selected.get()

		# if self.parent.menuBar.coil_shape.get() == ELLIPSE:
		# 	ratio = self.parent.guiTabSettings.v_coil_short_diameter.get() / self.parent.guiTabSettings.v_coil_long_diameter.get()
		# else:
		# 	ratio = 1

		r = ((self.canvas_size-40)/2)
		for theta in thetas:
			# Polar leg angle to canvas x/y (canvas y grows downward).
			x = r * math.sin(theta) + self.canvas_size/2
			y = r * math.cos(theta) + self.canvas_size/2
			self.canvas_cap.create_oval(x - 4, y - 4, x + 4, y + 4, fill='red')

		r_hp = ((self.canvas_size-40)/2) + 6  # radius
		r_lp = r_hp + 4  # radius
		offset = thetas[0]  # rotate the c's to be in between the dots
		for theta in thetas:  # angle and radius to x/y coords
			if bc_mode != self.parent.LOWPASS:  # if highpass or bandpass
				x = r_hp * math.sin(theta - offset) + (self.canvas_size/2) + 1
				y = r_hp * math.cos(theta - offset) + (self.canvas_size/2) - 1
				self.canvas_cap.create_text(x, y, text="c", fill='blue')
			if bc_mode != self.parent.HIGHPASS:  # if lowpass or bandpass
				x = r_lp * math.sin(theta) + (self.canvas_size/2) + 1
				y = r_lp * math.cos(theta) + (self.canvas_size/2) - 1
				self.canvas_cap.create_text(x, y, text="c", fill='blue')

	def _drawGraphAxis(self):
		"""Draw the horizontal axis with ticks at 0/90/180/270/360 degrees."""
		from_edge_l = 10
		from_edge_r = self.canvas_size-10
		self.canvas_curr.create_line(from_edge_l, self.canvas_size/2, from_edge_r, self.canvas_size/2, fill="blue", width=2)  # middle line
		self.canvas_curr.create_line(from_edge_l, self.canvas_size/2-5, from_edge_l, self.canvas_size/2+5, fill="blue", width=2)  # left
		self.canvas_curr.create_line((from_edge_r-from_edge_l)/4+from_edge_l, self.canvas_size/2-5, (from_edge_r-from_edge_l)/4+from_edge_l, self.canvas_size/2+5, fill="blue", width=2)  # leftmiddle
		self.canvas_curr.create_line((from_edge_r-from_edge_l)/2+from_edge_l, self.canvas_size/2-5, (from_edge_r-from_edge_l)/2+from_edge_l, self.canvas_size/2+5, fill="blue", width=2)  # middle
		self.canvas_curr.create_line(from_edge_r-(from_edge_r-from_edge_l)/4, self.canvas_size/2-5, from_edge_r-(from_edge_r-from_edge_l)/4, self.canvas_size/2+5, fill="blue", width=2)  # rightmiddle
		self.canvas_curr.create_line(from_edge_r, self.canvas_size/2-5, from_edge_r, self.canvas_size/2+5, fill="blue", width=2)  # right
		self.canvas_curr.create_text(from_edge_l+2, self.canvas_size/2+15, text="0", font='freemono 8')
		self.canvas_curr.create_text((from_edge_r-from_edge_l)/2+from_edge_l, self.canvas_size/2+15, text="180", font='freemono 9')
		self.canvas_curr.create_text(self.canvas_size-10, self.canvas_size/2+15, text="360", font='freemono 9')

	def drawGraph(self):
		"""Plot the leg currents as a connected line with dots per leg."""
		self.canvas_curr.delete("all")
		self._drawGraphAxis()

		legcurrs = self.parent.calcCapacitance.legcurrs
		nr_of_legs = self.parent.calcCapacitance.nr_of_legs

		offset = 10
		# Scale so the largest |current| uses ~77% of the half-height.
		# NOTE(review): if all currents were zero this would divide by
		# zero -- presumably the calculation never produces that; confirm.
		highest_curr = 0
		for curr in legcurrs:
			if abs(curr) > highest_curr:
				highest_curr = abs(curr)
		scale = self.canvas_size/2 / highest_curr / 1.3

		old_x = 0
		old_y = 0
		for i, curr in enumerate(legcurrs):  # create the lines
			x = ((self.canvas_size-20) / (nr_of_legs-1)) * i + offset
			y = -curr * scale + self.canvas_size/2
			if i > 0:
				self.canvas_curr.create_line(x, y, old_x, old_y, width=1, fill='green')
			old_x = x
			old_y = y

		for i, curr in enumerate(legcurrs):  # create the dots
			x = ((self.canvas_size-20) / (nr_of_legs-1)) * i + offset
			y = -curr * scale + self.canvas_size/2
			self.canvas_curr.create_oval(x - 4, y - 4, x + 4, y + 4, fill='red')
class MySettingsTab:
	"""'Settings' tab: all user inputs for the birdcage calculation.

	Holds one Tk variable per input field; radiobutton selections
	(leg/end-ring cross-section, high/low/band-pass configuration) control
	which dimension fields are shown via _guiSettingsAdjust().
	"""

	def __init__(self, parent):
		self.parent = parent
		self.tab = ttk.Frame(parent.tab_control)

		self.v_res_freq = tk.DoubleVar()  # variable for Resonance frequency
		self.v_rb_legs_selected = tk.IntVar()  # variable for state of radiobuttons
		self.v_rb_er_selected = tk.IntVar()  # variable for state of radiobuttons
		self.v_rb_config_selected = tk.IntVar()  # variable for state of radiobuttons
		self.v_nr_of_legs = tk.IntVar()
		self.v_coil_diameter = tk.DoubleVar()
		self.v_shield_diameter = tk.DoubleVar()
		self.v_bp_cap = tk.DoubleVar()  # predetermined band-pass capacitor value (pF)
		self.v_rb_bp = tk.IntVar()  # where the band-pass capacitor sits (LEG or ER)
		self.v_leg_length = tk.DoubleVar()
		self.v_er_width = tk.DoubleVar()
		self.v_leg_width = tk.DoubleVar()
		self.v_er_od = tk.DoubleVar()
		self.v_er_id = tk.DoubleVar()
		self.v_leg_od = tk.DoubleVar()
		self.v_leg_id = tk.DoubleVar()
		self.v_coil_shortaxis = tk.IntVar()
		self.v_coil_long_diameter = tk.DoubleVar()
		self.v_coil_short_diameter = tk.DoubleVar()

		# Initial radiobutton states.
		self.v_rb_legs_selected.set(self.parent.RECT)
		self.v_rb_er_selected.set(self.parent.RECT)
		self.v_rb_config_selected.set(self.parent.HIGHPASS)
		self.v_rb_bp.set(self.parent.LEG)

		self.v_er_seg_length = tk.DoubleVar()  # Calculated segment length

		self._setGui()

		# Re-layout the dimension fields whenever a radiobutton changes.
		self.v_rb_config_selected.trace("w", lambda *args: self._guiSettingsAdjust())
		self.v_rb_legs_selected.trace("w", lambda *args: self._guiSettingsAdjust())
		self.v_rb_er_selected.trace("w", lambda *args: self._guiSettingsAdjust())
		self._guiSettingsAdjust()

		self.setDefaults()

	def _setGui(self):
		"""Build all widgets of the settings tab and lay them out."""
		#todo make sub functions for each gui part
		lbl_title = tk.Label(self.tab, text="Circular Birdcage Coil ", font=myfont_bold, foreground="blue")

		# Leg cross-section selection.
		lf_type_of_legs = tk.LabelFrame(self.tab, text="Type of Leg", font=myfont_bold)
		rb_leg_r = tk.Radiobutton(lf_type_of_legs, text='Rectangular', value=self.parent.RECT, variable=self.v_rb_legs_selected)
		rb_leg_t = tk.Radiobutton(lf_type_of_legs, text='Tubular', value=self.parent.TUBE, variable=self.v_rb_legs_selected)
		rb_leg_r.pack(anchor="w")
		rb_leg_t.pack(anchor="w")

		# End-ring cross-section selection.
		lf_type_of_er = tk.LabelFrame(self.tab, text="Type of ER", font=myfont_bold)
		rb_er_r = tk.Radiobutton(lf_type_of_er, text='Rectangular', value=self.parent.RECT, variable=self.v_rb_er_selected)
		rb_er_t = tk.Radiobutton(lf_type_of_er, text='Tubular', value=self.parent.TUBE, variable=self.v_rb_er_selected)
		rb_er_r.pack(anchor="w")
		rb_er_t.pack(anchor="w")

		# High-/low-/band-pass configuration.
		lf_config = tk.LabelFrame(self.tab, text="Configuration", font=myfont_bold)
		rb_config_hp = tk.Radiobutton(lf_config, text='High-Pass', value=self.parent.HIGHPASS, variable=self.v_rb_config_selected)
		rb_config_lp = tk.Radiobutton(lf_config, text='Low-Pass', value=self.parent.LOWPASS, variable=self.v_rb_config_selected)
		rb_config_bp = tk.Radiobutton(lf_config, text='Band-Pass', value=self.parent.BANDPASS, variable=self.v_rb_config_selected)
		rb_config_hp.pack(anchor="w")
		rb_config_lp.pack(anchor="w")
		rb_config_bp.pack(anchor="w")

		# Band-pass only: predetermined capacitor value and its location.
		self.frm_bp = tk.LabelFrame(lf_config)
		lb_bp_cap = tk.Label(self.frm_bp, text="Predetermined\ncapacitor (pF)", justify='left', fg='blue')
		txt_bp_cap = my_tk.NumInput(self.frm_bp, text=self.v_bp_cap, width=5, bg="white", min_value=0.001)
		rb_bp_leg = tk.Radiobutton(self.frm_bp, text='On Leg', font=myfont_bold, value=self.parent.LEG, variable=self.v_rb_bp)
		rb_bp_er = tk.Radiobutton(self.frm_bp, text='On ER', font=myfont_bold, value=self.parent.ER, variable=self.v_rb_bp)
		lb_bp_cap.grid(row=0, sticky=tk.W, columnspan=2)
		txt_bp_cap.grid(row=1, column=1, rowspan=2, sticky=tk.W)
		rb_bp_leg.grid(row=1, sticky=tk.NW, padx=(0, 5))
		rb_bp_er.grid(row=2, sticky=tk.NW)
		self.frm_bp.pack(anchor="w")

		lf_nr_of_legs = tk.LabelFrame(self.tab, text="Number of Legs", font=myfont_bold)
		scale_nr_of_legs = my_tk.MyScale(lf_nr_of_legs, from_=8, to=32, resolution=4, tickinterval=4, orient=tk.HORIZONTAL, length=250,
										variable=self.v_nr_of_legs, command=lambda e: self.v_nr_of_legs.set(scale_nr_of_legs.get()))
		scale_nr_of_legs.pack()

		# Dimension inputs. The rectangular (width) and tubular (O.D./I.D.)
		# widgets are stacked on the same grid cells; _guiSettingsAdjust()
		# shows whichever set matches the selected cross-section.
		lf_dimensions = tk.LabelFrame(self.tab, text="Dimensions (cm)", font=myfont_bold)
		lb_leg_length = tk.Label(lf_dimensions, text="Leg Length")
		lb_coil_radius = tk.Label(lf_dimensions, text="Coil Diameter ")
		lb_shield_radius = tk.Label(lf_dimensions, text="RF shield Diameter ")
		txt_leg_length = my_tk.NumInput(lf_dimensions, text=self.v_leg_length, width=7, bg="white", min_value=0)
		txt_coil_radius = my_tk.NumInput(lf_dimensions, text=self.v_coil_diameter, width=7, bg="white", min_value=0)
		txt_shield_radius = my_tk.NumInput(lf_dimensions, text=self.v_shield_diameter, width=7, bg="white", min_value=0)
		self.lb_leg_width = tk.Label(lf_dimensions, text="Leg Width")
		self.lb_er_width = tk.Label(lf_dimensions, text="ER Seg. Width")
		self.txt_leg_width = my_tk.NumInput(lf_dimensions, text=self.v_leg_width, width=7, bg="white", min_value=0)
		self.txt_er_width = my_tk.NumInput(lf_dimensions, text=self.v_er_width, width=7, bg="white", min_value=0)
		self.lb_leg_od = tk.Label(lf_dimensions, text="Leg O.D.")
		self.lb_leg_id = tk.Label(lf_dimensions, text="Leg I.D.")
		self.lb_er_od = tk.Label(lf_dimensions, text="ER O.D.")
		self.lb_er_id = tk.Label(lf_dimensions, text="ER I.D.")
		self.txt_leg_od = my_tk.NumInput(lf_dimensions, text=self.v_leg_od, width=7, bg="white", min_value=0)
		self.txt_leg_id = my_tk.NumInput(lf_dimensions, text=self.v_leg_id, width=7, bg="white", min_value=0)
		self.txt_er_od = my_tk.NumInput(lf_dimensions, text=self.v_er_od, width=7, bg="white", min_value=0)
		self.txt_er_id = my_tk.NumInput(lf_dimensions, text=self.v_er_id, width=7, bg="white", min_value=0)
		# automatic segment length calculation textbox, label
		self.lb_seg_length = tk.Label(lf_dimensions, text="ER Seg. length", foreground='blue')
		self.txt_seg_length = my_tk.MyEntry(lf_dimensions, text=self.v_er_seg_length, fg='grey', read_only=True, decimals=MAX_PRECISION)
		self.txt_seg_length.setValue("-")

		lb_leg_length.grid(column=0, row=1, sticky=tk.W, pady=(0, 10))
		txt_leg_length.grid(column=1, row=1, sticky=tk.W, pady=(0, 10), padx=(0, 10))
		lb_coil_radius.grid(column=0, row=0, sticky=tk.W, pady=(0, 10))
		txt_coil_radius.grid(column=1, row=0, sticky=tk.W, pady=(0, 10))
		lb_shield_radius.grid(column=2, row=0, sticky=tk.W, pady=(0, 10))
		txt_shield_radius.grid(column=3, row=0, sticky=tk.W, pady=(0, 10), padx=(0, 10))
		self.lb_seg_length.grid(column=2, row=3, sticky=tk.W, pady=(0, 10))
		self.txt_seg_length.grid(column=3, row=3, sticky=tk.W, pady=(0, 10), padx=(0, 10))
		self.lb_leg_width.grid(column=0, row=2, sticky=tk.W, pady=(0, 10))
		self.txt_leg_width.grid(column=1, row=2, sticky=tk.W, pady=(0, 10), padx=(0, 10))
		self.lb_er_width.grid(column=2, row=1, sticky=tk.W, pady=(0, 10))
		self.txt_er_width.grid(column=3, row=1, sticky=tk.W, pady=(0, 10), padx=(0, 10))
		self.lb_leg_od.grid(column=0, row=2, sticky=tk.W, pady=(0, 10))
		self.txt_leg_od.grid(column=1, row=2, sticky=tk.W, pady=(0, 10), padx=(0, 10))
		self.lb_leg_id.grid(column=0, row=3, sticky=tk.W, pady=(0, 10))
		self.txt_leg_id.grid(column=1, row=3, sticky=tk.W, pady=(0, 10), padx=(0, 10))
		self.lb_er_od.grid(column=2, row=1, sticky=tk.W, pady=(0, 10))
		self.txt_er_od.grid(column=3, row=1, sticky=tk.W, pady=(0, 10), padx=(0, 10))
		self.lb_er_id.grid(column=2, row=2, sticky=tk.W, pady=(0, 10))
		self.txt_er_id.grid(column=3, row=2, sticky=tk.W, pady=(0, 10), padx=(0, 10))

		frm_f0 = tk.Frame(self.tab)
		lb_res_freq = tk.Label(frm_f0, font=myfont_bold, text="Resonant\nFreq. (MHz)", justify='left')
		txt_res_freq = my_tk.NumInput(frm_f0, text=self.v_res_freq, width=7, bg="white", min_value=0)
		lb_res_freq.grid(row=0, sticky=tk.W)
		txt_res_freq.grid(row=2, sticky=tk.W)

		btn = ttk.Button(self.tab, text="Calculate", command=self.parent.startCalculation)

		tk.Grid.columnconfigure(self.tab, 1, weight=0)
		tk.Grid.columnconfigure(self.tab, 0, weight=100)
		tk.Grid.columnconfigure(self.tab, 3, weight=1)
		tk.Grid.columnconfigure(self.tab, 4, weight=100)
		tk.Grid.rowconfigure(self.tab, 1, weight=1)
		tk.Grid.rowconfigure(self.tab, 10, weight=100)

		lbl_title.grid(column=1, row=0, sticky=tk.N+tk.EW, pady=(5, 10))
		lf_type_of_legs.grid(column=1, row=1, sticky=tk.NSEW, pady=(0, 10), padx=(5, 10))
		lf_type_of_er.grid(column=1, row=2, sticky=tk.NSEW, pady=(0, 10), padx=(5, 10))
		lf_config.grid(column=2, row=0, rowspan=2, sticky=tk.NSEW, pady=(0, 10), padx=(0, 10))
		lf_nr_of_legs.grid(column=2, row=2, columnspan=2, sticky=tk.NSEW, pady=(0, 10))
		lf_dimensions.grid(column=1, row=3, columnspan=3, sticky=tk.NSEW, pady=(0, 10), padx=(5, 0))
		frm_f0.grid(column=3, row=0, rowspan=2, sticky=tk.NSEW)
		btn.grid(column=1, row=4, columnspan=3)

	def validateInputs(self):
		"""Collect all inputs and check them for validity.

		Returns the dict of input values on success, or False after showing
		a warning dialog. All values must be non-zero except the shield
		diameter (0 means "no shield"), which instead must differ from the
		coil diameter.
		"""
		inputs_ = {"Resonance Frequency": self.v_res_freq.get(),
					"Coil Diameter": self.v_coil_diameter.get(),
					"Shield Diameter": self.v_shield_diameter.get(),
					"Nr Of Legs": self.v_nr_of_legs.get(),
					"Leg Length": self.v_leg_length.get(),
					"Leg Width": self.v_leg_width.get(),
					"Leg OD": self.v_leg_od.get(),
					"Leg ID": self.v_leg_id.get(),
					"ER Width": self.v_er_width.get(),
					"ER OD": self.v_er_od.get(),
					"ER ID": self.v_er_id.get(),
					"Bandpass Capacitor": self.v_bp_cap.get(),
					"Long Diameter": self.v_coil_long_diameter.get(),
					"Short Diameter": self.v_coil_short_diameter.get()}

		for key, value in inputs_.items():
			if key == "Shield Diameter":
				continue
			if value == 0:
				mb.showwarning("Input zero", "One or more inputs are zero.\nPlease input valid values.")
				return False
		if inputs_["Shield Diameter"] == inputs_["Coil Diameter"]:
			mb.showwarning("Zero shield distance", "Shield distance is equal to coil diameter.\nPlease input valid values.")
			return False
		return inputs_

	def setDefaults(self):
		"""Fill every input with its default value (a 7T head-coil-ish setup)."""
		self.v_res_freq.set(298)
		self.v_nr_of_legs.set(12)
		self.v_coil_diameter.set(30)
		self.v_shield_diameter.set(34)
		self.v_leg_length.set(20)
		self.v_leg_width.set(0.5)
		self.v_leg_od.set(1)
		self.v_leg_id.set(0.6)
		self.v_er_width.set(0.5)
		self.v_er_od.set(1)
		self.v_er_id.set(0.6)
		self.v_bp_cap.set(56)
		self.v_coil_shortaxis.set(self.parent.SHORT)
		self.v_coil_long_diameter.set(40)
		self.v_coil_short_diameter.set(30)

	def _guiSettingsAdjust(self):
		# adjusts the settings tab when clicking radiobuttons
		# Rectangular legs show a width field; tubular legs show O.D./I.D.
		if self.v_rb_legs_selected.get() == self.parent.RECT:
			self.lb_leg_width.grid()
			self.txt_leg_width.grid()
			self.lb_leg_od.grid_remove()
			self.txt_leg_od.grid_remove()
			self.lb_leg_id.grid_remove()
			self.txt_leg_id.grid_remove()
		else:
			self.lb_leg_width.grid_remove()
			self.txt_leg_width.grid_remove()
			self.lb_leg_od.grid()
			self.txt_leg_od.grid()
			self.lb_leg_id.grid()
			self.txt_leg_id.grid()

		# Same for the end ring; the read-only segment-length display moves
		# up a row when the O.D./I.D. pair is hidden.
		if self.v_rb_er_selected.get() == self.parent.RECT:
			self.lb_er_width.grid()
			self.txt_er_width.grid()
			self.lb_er_od.grid_remove()
			self.txt_er_od.grid_remove()
			self.lb_er_id.grid_remove()
			self.txt_er_id.grid_remove()
			self.lb_seg_length.grid(row=2)
			self.txt_seg_length.grid(row=2)
		else:
			self.lb_er_width.grid_remove()
			self.txt_er_width.grid_remove()
			self.lb_er_od.grid()
			self.txt_er_od.grid()
			self.lb_er_id.grid()
			self.txt_er_id.grid()
			self.lb_seg_length.grid(row=3)
			self.txt_seg_length.grid(row=3)

		# The predetermined-capacitor frame is only relevant for band-pass.
		if self.v_rb_config_selected.get() == self.parent.BANDPASS:
			self.frm_bp.pack()
		else:
			self.frm_bp.pack_forget()
class MyAboutWindow:
    """Modal-style 'About' dialog: logo, program name, website link and license viewer."""

    def __init__(self):
        # Build the window hidden, lay it out, center it, then reveal it to
        # avoid visible flicker while widgets are being placed.
        self.top = tk.Toplevel()
        self.top.transient(root)
        self.top.update_idletasks()
        self.top.withdraw()
        self.top.title(f"About {PROGRAM_NAME}") # {VERSION}")
        self.top.maxsize(400, 450)
        self.top.resizable(0, 0)
        self._setGui()
        self._centerWindow(self.top)
        self.top.deiconify()
    def _setGui(self):
        """Create and grid all widgets of the About window."""
        # logo
        self.top.img = img = tk.PhotoImage(file=ICON_FOLDER + "icon32.png")
        self.top.img = img = img.zoom(2) # needs top.img = to not get garbage collected
        image = tk.Label(self.top, image=img)
        lb_prog = tk.Label(self.top, text=f"{PROGRAM_NAME} {VERSION}", anchor='w')
        import webbrowser
        # Underlined blue label acting as a clickable hyperlink.
        myfont_underlined = default_font.copy()
        myfont_underlined.configure(underline=True)
        lb_site = tk.Label(self.top, text=f"{WEBSITE}", fg="blue", cursor="hand2", anchor='w', font=myfont_underlined)
        lb_site.bind("<Button-1>", lambda e: webbrowser.open("http://"+WEBSITE, new=0, autoraise=True))
        txt = f"{PROGRAM_NAME} is a program to easily determine the ideal capacitor to be used in a birdcage coil design.\n\n" \
              "An acknowledgement goes to PennState Health for the original version and the math used in this program."
        msg = tk.Message(self.top, text=txt, justify='left', anchor='nw')
        # Re-wrap the message text whenever the widget is resized.
        msg.bind("<Configure>", lambda e: msg.configure(width=e.width-10))
        # Buttons
        frm_button = tk.Frame(self.top)
        btn_license = ttk.Button(frm_button, text="License", command=self._showLicense, width=10)
        btn_ok = ttk.Button(frm_button, text="Ok", command=self.top.destroy, width=10)
        btn_license.pack(side="left", padx=(0, 10))
        btn_ok.pack(side="right")
        # Let column 1 / row 3 absorb any extra space.
        tk.Grid.columnconfigure(self.top, 1, weight=1)
        tk.Grid.rowconfigure(self.top, 3, weight=1)
        image.grid(column=0, row=0, rowspan=1, sticky=tk.NSEW, padx=(10, 10), pady=(10, 10))
        lb_prog.grid(column=1, row=0, sticky=tk.NSEW)
        lb_site.grid(column=0, row=1, columnspan=2, sticky=tk.NSEW, pady=(0, 10), padx=(10, 10))
        msg.grid(column=0, row=2, columnspan=2, sticky=tk.NSEW, pady=(0, 10), padx=(5, 5))
        frm_button.grid(column=0, row=4, columnspan=2, sticky=tk.N, pady=(10, 10))
    @staticmethod
    def _centerWindow(win):
        """Center *win* on screen using its current (laid-out) size."""
        win.update_idletasks()
        width = win.winfo_width()
        height = win.winfo_height()
        my_tk.centerWindow(win, width, height)
    def _showLicense(self):
        """Open a read-only, scrollable viewer for LICENSE.md from the working directory."""
        with open(os.path.join(os.getcwd(), "LICENSE.md"), 'r') as license_file:
            license_ = license_file.read()
        top = tk.Toplevel()
        top.transient(root)
        top.update_idletasks()
        top.withdraw()
        top.title(f"License") # {VERSION}")
        top.resizable(0, 0)
        # scrollable textbox
        text = tk.Text(top, wrap=tk.WORD)
        text.insert(tk.INSERT, license_)
        text.pack(side="left", expand=True)
        scroll_y = tk.Scrollbar(top, orient="vertical", command=text.yview)
        scroll_y.pack(side="left", expand=True, fill="y")
        text.configure(yscrollcommand=scroll_y.set, width=80, height=21)
        text.configure(state='disabled') # make readonly
        text.pack()
        self._centerWindow(top)
        top.deiconify()
if __name__ == "__main__":
    # Build the root hidden so font/widget setup does not flicker on screen.
    root = tk.Tk()
    root.withdraw()
    default_font = font.nametofont("TkDefaultFont") # set default font
    default_font.configure(family='freemono', size=11)
    myfont_bold = default_font.copy() # bold font
    myfont_bold.configure(weight="bold")
    myfont_small_bold = default_font.copy() # small bold font
    myfont_small_bold.configure(weight="bold", size=9)
    myfont_small = default_font.copy() # small font
    myfont_small.configure(size=8)
    mygui = MainApplication(root)
    root.deiconify()
    logger.info("Program start successfully")
    root.mainloop()
| 42.329866 | 193 | 0.726271 | 26,729 | 0.938255 | 0 | 0 | 163 | 0.005722 | 0 | 0 | 4,769 | 0.167404 |
9811d96bc8f9c0a47fda662e895dddafdce0f7bd | 1,057 | py | Python | tests/semver/constraints/test_multi_constraint.py | batisteo/poetry | 0667c67a9ebcc9250ad8d70f74f0905cc9f20ab2 | [
"MIT"
] | null | null | null | tests/semver/constraints/test_multi_constraint.py | batisteo/poetry | 0667c67a9ebcc9250ad8d70f74f0905cc9f20ab2 | [
"MIT"
] | null | null | null | tests/semver/constraints/test_multi_constraint.py | batisteo/poetry | 0667c67a9ebcc9250ad8d70f74f0905cc9f20ab2 | [
"MIT"
] | null | null | null | from poetry.semver.constraints.constraint import Constraint
from poetry.semver.constraints.multi_constraint import MultiConstraint
def test_multi_version_match_succeeds():
    # A version inside the (1.0, 1.2) open interval must satisfy the multi-constraint.
    lower_bound = Constraint('>', '1.0')
    upper_bound = Constraint('<', '1.2')
    candidate = Constraint('==', '1.1')
    interval = MultiConstraint((lower_bound, upper_bound))
    assert interval.matches(candidate)
def test_multi_version_provided_match_succeeds():
    # An overlapping provided range [1.1, 2.0) must satisfy the required (1.0, 1.2) range.
    required = MultiConstraint((Constraint('>', '1.0'), Constraint('<', '1.2')))
    provided = MultiConstraint((Constraint('>=', '1.1'), Constraint('<', '2.0')))
    assert required.matches(provided)
def test_multi_version_match_fails():
    # The exclusive upper bound 1.2 must reject an exact 1.2 candidate.
    lower_bound = Constraint('>', '1.0')
    upper_bound = Constraint('<', '1.2')
    candidate = Constraint('==', '1.2')
    interval = MultiConstraint((lower_bound, upper_bound))
    assert not interval.matches(candidate)
| 30.2 | 70 | 0.708609 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 83 | 0.078524 |
981358a60d12ba10bedc463e2907dbad81cfa191 | 1,683 | py | Python | epuap_watchdog/institutions/serializers.py | ad-m/epuap-watchdog | ff2dbbfe6c999e825dbf3f2bf2a94d8baa0a08ea | [
"MIT"
] | 2 | 2017-07-30T16:41:41.000Z | 2020-03-28T12:20:56.000Z | epuap_watchdog/institutions/serializers.py | ad-m/epuap-watchdog | ff2dbbfe6c999e825dbf3f2bf2a94d8baa0a08ea | [
"MIT"
] | 5 | 2017-07-18T12:13:46.000Z | 2017-07-28T15:48:38.000Z | epuap_watchdog/institutions/serializers.py | ad-m/epuap-watchdog | ff2dbbfe6c999e825dbf3f2bf2a94d8baa0a08ea | [
"MIT"
] | null | null | null | from rest_framework import serializers
from teryt_tree.rest_framework_ext.serializers import JednostkaAdministracyjnaSerializer
from .models import RESP, REGONError, REGON, JSTConnection, Institution, ESP
class RESPSerializer(serializers.HyperlinkedModelSerializer):
    """Serializer exposing the RESP model's basic fields, including its raw `data` payload."""

    class Meta:
        model = RESP
        fields = ['id', 'created', 'modified', 'institution_id', 'name', 'data']
class REGONErrorSerializer(serializers.HyperlinkedModelSerializer):
    """Serializer for REGONError records (stores the exception text for a REGON lookup)."""

    class Meta:
        model = REGONError
        fields = ['id', 'created', 'modified', 'regon_id', 'exception']
class REGONSerializer(serializers.HyperlinkedModelSerializer):
    """Serializer for REGON records; inlines related REGONError entries."""

    # Nested list of errors recorded for this REGON entry.
    regonerror_set = REGONErrorSerializer(many=True)

    class Meta:
        model = REGON
        fields = ['id', 'created', 'modified', 'institution_id', 'name', 'regon', 'regonerror_set', 'data']
class JSTConnectionSerializer(serializers.HyperlinkedModelSerializer):
    """Serializer linking an institution to a territorial unit (JST) by id only."""

    # jst = JednostkaAdministracyjnaSerializer()
    class Meta:
        model = JSTConnection
        fields = ['id', 'created', 'modified', 'institution_id', 'jst_id']
class ESPSerializer(serializers.HyperlinkedModelSerializer):
    """Serializer for ESP (electronic delivery box) records."""

    class Meta:
        model = ESP
        fields = ['id', 'created', 'modified', 'institution_id', 'name', 'active']
class InstitutionSerializer(serializers.HyperlinkedModelSerializer):
    """Top-level institution serializer nesting RESP, REGON, JST and ESP data."""

    resp = RESPSerializer()
    regon_data = REGONSerializer()
    jstconnection = JSTConnectionSerializer()
    esp_set = ESPSerializer(many=True)

    class Meta:
        model = Institution
        fields = ['id', 'created', 'modified', 'name', 'epuap_id', 'regon', 'active',
                  'esp_set', 'jstconnection', 'regon_data', 'resp']
| 32.365385 | 107 | 0.69697 | 1,459 | 0.866904 | 0 | 0 | 0 | 0 | 0 | 0 | 409 | 0.243018 |
9813904cd1f0fe02015ac63d50232c8db9af77e9 | 21,950 | py | Python | ross/fluid_flow/fluid_flow_coefficients.py | hiagopinacio/ross | 1bc84061f23df455d9e37cb11b244ac795c836ad | [
"MIT"
] | 1 | 2020-01-21T02:05:21.000Z | 2020-01-21T02:05:21.000Z | ross/fluid_flow/fluid_flow_coefficients.py | hiagopinacio/ross | 1bc84061f23df455d9e37cb11b244ac795c836ad | [
"MIT"
] | null | null | null | ross/fluid_flow/fluid_flow_coefficients.py | hiagopinacio/ross | 1bc84061f23df455d9e37cb11b244ac795c836ad | [
"MIT"
] | 1 | 2020-01-20T23:19:24.000Z | 2020-01-20T23:19:24.000Z | import warnings
from math import isnan
import numpy as np
from scipy import integrate
from ross.fluid_flow.fluid_flow_geometry import move_rotor_center
def calculate_oil_film_force(fluid_flow_object, force_type=None):
    """This function calculates the forces of the oil film in the N and T directions, ie in the
    opposite direction to the eccentricity and in the tangential direction.
    Parameters
    ----------
    fluid_flow_object: A FluidFlow object.
    force_type: str
        If set, calculates the oil film force matrix analytically considering the chosen type: 'short' or 'long'.
        If set to 'numerical', calculates the oil film force numerically.
    Returns
    -------
    radial_force: float
        Force of the oil film in the opposite direction to the eccentricity direction.
    tangential_force: float
        Force of the oil film in the tangential direction
    f_x: float
        Components of forces in the x direction
    f_y: float
        Components of forces in the y direction
    Examples
    --------
    >>> from ross.fluid_flow.fluid_flow import fluid_flow_example
    >>> my_fluid_flow = fluid_flow_example()
    >>> calculate_oil_film_force(my_fluid_flow) # doctest: +ELLIPSIS
    (...
    """
    # Closed-form short-bearing approximation (valid for small L/D ratios).
    if force_type != "numerical" and (
        force_type == "short" or fluid_flow_object.bearing_type == "short_bearing"
    ):
        radial_force = (
            0.5
            * fluid_flow_object.viscosity
            * (fluid_flow_object.radius_rotor / fluid_flow_object.radial_clearance) ** 2
            * (fluid_flow_object.length ** 3 / fluid_flow_object.radius_rotor)
            * (
                (
                    2
                    * fluid_flow_object.eccentricity_ratio ** 2
                    * fluid_flow_object.omega
                )
                / (1 - fluid_flow_object.eccentricity_ratio ** 2) ** 2
            )
        )
        tangential_force = (
            0.5
            * fluid_flow_object.viscosity
            * (fluid_flow_object.radius_rotor / fluid_flow_object.radial_clearance) ** 2
            * (fluid_flow_object.length ** 3 / fluid_flow_object.radius_rotor)
            * (
                (np.pi * fluid_flow_object.eccentricity_ratio * fluid_flow_object.omega)
                / (2 * (1 - fluid_flow_object.eccentricity_ratio ** 2) ** (3.0 / 2))
            )
        )
    # Closed-form long-bearing approximation.
    elif force_type != "numerical" and (
        force_type == "long" or fluid_flow_object.bearing_type == "long_bearing"
    ):
        radial_force = (
            6
            * fluid_flow_object.viscosity
            * (fluid_flow_object.radius_rotor / fluid_flow_object.radial_clearance) ** 2
            * fluid_flow_object.radius_rotor
            * fluid_flow_object.length
            * (
                (
                    2
                    * fluid_flow_object.eccentricity_ratio ** 2
                    * fluid_flow_object.omega
                )
                / (
                    (2 + fluid_flow_object.eccentricity_ratio ** 2)
                    * (1 - fluid_flow_object.eccentricity_ratio ** 2)
                )
            )
        )
        tangential_force = (
            6
            * fluid_flow_object.viscosity
            * (fluid_flow_object.radius_rotor / fluid_flow_object.radial_clearance) ** 2
            * fluid_flow_object.radius_rotor
            * fluid_flow_object.length
            * (
                (np.pi * fluid_flow_object.eccentricity_ratio * fluid_flow_object.omega)
                / (
                    (2 + fluid_flow_object.eccentricity_ratio ** 2)
                    * (1 - fluid_flow_object.eccentricity_ratio ** 2) ** 0.5
                )
            )
        )
    else:
        # Numerical path: integrate the pressure matrix over the bearing
        # surface, projecting each pressure sample onto the radial/tangential
        # directions via the angle to a reference vector.
        p_mat = fluid_flow_object.p_mat_numerical
        a = np.zeros([fluid_flow_object.nz, fluid_flow_object.ntheta])
        b = np.zeros([fluid_flow_object.nz, fluid_flow_object.ntheta])
        g1 = np.zeros(fluid_flow_object.nz)
        g2 = np.zeros(fluid_flow_object.nz)
        # Reference direction: from the rotor center to the first mesh point.
        base_vector = np.array(
            [
                fluid_flow_object.xre[0][0] - fluid_flow_object.xi,
                fluid_flow_object.yre[0][0] - fluid_flow_object.yi,
            ]
        )
        for i in range(fluid_flow_object.nz):
            # NOTE(review): only the first half of the circumferential points is
            # projected here (range(ntheta / 2)) — presumably the pressure field
            # is zero (cavitated) on the other half; confirm against the solver.
            for j in range(int(fluid_flow_object.ntheta / 2)):
                vector_from_rotor = np.array(
                    [
                        fluid_flow_object.xre[i][j] - fluid_flow_object.xi,
                        fluid_flow_object.yre[i][j] - fluid_flow_object.yi,
                    ]
                )
                angle_between_vectors = np.arccos(
                    np.dot(base_vector, vector_from_rotor)
                    / (np.linalg.norm(base_vector) * np.linalg.norm(vector_from_rotor))
                )
                # arccos can return NaN for nearly-parallel vectors (rounding);
                # treat that as a zero angle.
                if isnan(angle_between_vectors):
                    angle_between_vectors = 0
                # arccos only covers [0, pi]; shift for points past half a turn.
                if angle_between_vectors != 0 and j * fluid_flow_object.dtheta > np.pi:
                    angle_between_vectors += np.pi
                a[i][j] = p_mat[i][j] * np.cos(angle_between_vectors)
                b[i][j] = p_mat[i][j] * np.sin(angle_between_vectors)
        # Simpson integration: first around the circumference, then along z.
        for i in range(fluid_flow_object.nz):
            g1[i] = integrate.simps(a[i][:], fluid_flow_object.gama[0])
            g2[i] = integrate.simps(b[i][:], fluid_flow_object.gama[0])
        integral1 = integrate.simps(g1, fluid_flow_object.z_list)
        integral2 = integrate.simps(g2, fluid_flow_object.z_list)
        radial_force = -fluid_flow_object.radius_rotor * integral1
        tangential_force = fluid_flow_object.radius_rotor * integral2
    # Rotate (radial, tangential) into fixed (x, y) coordinates using the attitude angle.
    force_x = -radial_force * np.sin(
        fluid_flow_object.attitude_angle
    ) + tangential_force * np.cos(fluid_flow_object.attitude_angle)
    force_y = radial_force * np.cos(
        fluid_flow_object.attitude_angle
    ) + tangential_force * np.sin(fluid_flow_object.attitude_angle)
    return radial_force, tangential_force, force_x, force_y
def calculate_stiffness_and_damping_coefficients(fluid_flow_object):
    """This function calculates the bearing stiffness and damping matrices numerically.

    The rotor center is perturbed sinusoidally in x and then in y over one
    excitation period; the resulting oil-film forces are fitted by least
    squares to F = c0 + K*displacement + C*velocity, and the linear/velocity
    coefficients are returned.

    Note: this mutates fluid_flow_object (sets t, xp, yp and recomputes
    geometry/pressure), but restores the rotor center position each step.
    Parameters
    ----------
    fluid_flow_object: A FluidFlow object.
    Returns
    -------
    Two lists of floats
        A list of length four including stiffness floats in this order: kxx, kxy, kyx, kyy.
        And another list of length four including damping floats in this order: cxx, cxy, cyx, cyy.
        And
    Examples
    --------
    >>> from ross.fluid_flow.fluid_flow import fluid_flow_example
    >>> my_fluid_flow = fluid_flow_example()
    >>> calculate_stiffness_and_damping_coefficients(my_fluid_flow) # doctest: +ELLIPSIS
    ([428...
    """
    # N sample instants spread over one period of the perturbation frequency omegap.
    N = 6
    t = np.linspace(0, 2 * np.pi / fluid_flow_object.omegap, N)
    # Perturbation amplitude: a small fraction of the radial clearance.
    fluid_flow_object.xp = fluid_flow_object.radial_clearance * 0.0001
    fluid_flow_object.yp = fluid_flow_object.radial_clearance * 0.0001
    dx = np.zeros(N)
    dy = np.zeros(N)
    xdot = np.zeros(N)
    ydot = np.zeros(N)
    radial_force = np.zeros(N)
    tangential_force = np.zeros(N)
    force_xx = np.zeros(N)
    force_yx = np.zeros(N)
    force_xy = np.zeros(N)
    force_yy = np.zeros(N)
    X1 = np.zeros([N, 3])
    X2 = np.zeros([N, 3])
    F1 = np.zeros(N)
    F2 = np.zeros(N)
    F3 = np.zeros(N)
    F4 = np.zeros(N)
    for i in range(N):
        fluid_flow_object.t = t[i]
        # --- x perturbation: displace, solve pressure field, record forces ---
        delta_x = fluid_flow_object.xp * np.sin(
            fluid_flow_object.omegap * fluid_flow_object.t
        )
        move_rotor_center(fluid_flow_object, delta_x, 0)
        dx[i] = delta_x
        xdot[i] = (
            fluid_flow_object.omegap
            * fluid_flow_object.xp
            * np.cos(fluid_flow_object.omegap * fluid_flow_object.t)
        )
        fluid_flow_object.geometry_description()
        fluid_flow_object.calculate_pressure_matrix_numerical(direction="x")
        [
            radial_force[i],
            tangential_force[i],
            force_xx[i],
            force_yx[i],
        ] = calculate_oil_film_force(fluid_flow_object, force_type="numerical")
        # --- y perturbation: undo x displacement first, then displace in y ---
        delta_y = fluid_flow_object.yp * np.sin(
            fluid_flow_object.omegap * fluid_flow_object.t
        )
        move_rotor_center(fluid_flow_object, -delta_x, 0)
        move_rotor_center(fluid_flow_object, 0, delta_y)
        dy[i] = delta_y
        ydot[i] = (
            fluid_flow_object.omegap
            * fluid_flow_object.yp
            * np.cos(fluid_flow_object.omegap * fluid_flow_object.t)
        )
        fluid_flow_object.geometry_description()
        fluid_flow_object.calculate_pressure_matrix_numerical(direction="y")
        [
            radial_force[i],
            tangential_force[i],
            force_xy[i],
            force_yy[i],
        ] = calculate_oil_film_force(fluid_flow_object, force_type="numerical")
        # Restore the unperturbed position and state for the next sample.
        move_rotor_center(fluid_flow_object, 0, -delta_y)
        fluid_flow_object.geometry_description()
        fluid_flow_object.calculate_pressure_matrix_numerical()
        # Design matrices [1, displacement, velocity] and force samples.
        X1[i] = [1, dx[i], xdot[i]]
        X2[i] = [1, dy[i], ydot[i]]
        F1[i] = -force_xx[i]
        F2[i] = -force_xy[i]
        F3[i] = -force_yx[i]
        F4[i] = -force_yy[i]
    # Normal-equation least squares: P = (X^T X)^-1 X^T F for each force set.
    P1 = np.dot(
        np.dot(np.linalg.inv(np.dot(np.transpose(X1), X1)), np.transpose(X1)), F1
    )
    P2 = np.dot(
        np.dot(np.linalg.inv(np.dot(np.transpose(X2), X2)), np.transpose(X2)), F2
    )
    P3 = np.dot(
        np.dot(np.linalg.inv(np.dot(np.transpose(X1), X1)), np.transpose(X1)), F3
    )
    P4 = np.dot(
        np.dot(np.linalg.inv(np.dot(np.transpose(X2), X2)), np.transpose(X2)), F4
    )
    # Index 1 = displacement coefficient (stiffness), index 2 = velocity coefficient (damping).
    K = [P1[1], P2[1], P3[1], P4[1]]
    C = [P1[2], P2[2], P3[2], P4[2]]
    return K, C
def calculate_short_stiffness_matrix(fluid_flow_object):
    """Analytical stiffness coefficients for the short-bearing approximation.

    Parameters
    ----------
    fluid_flow_object: A FluidFlow object.
        Must provide `eccentricity_ratio`, `load` and `radial_clearance`.

    Returns
    -------
    list of floats
        Stiffness coefficients in the order [kxx, kxy, kyx, kyy].
    """
    # Shorthand for the eccentricity ratio keeps the closed-form expressions readable.
    e = fluid_flow_object.eccentricity_ratio
    # Common normalization factor shared by all four coefficients.
    h0 = 1.0 / (((np.pi ** 2) * (1 - e ** 2) + 16 * e ** 2) ** 1.5)
    # Load per unit clearance scales every coefficient.
    a = fluid_flow_object.load / fluid_flow_object.radial_clearance
    kxx = a * h0 * 4 * ((np.pi ** 2) * (2 - e ** 2) + 16 * e ** 2)
    kxy = (
        a * h0 * np.pi
        * ((np.pi ** 2) * (1 - e ** 2) ** 2 - 16 * e ** 4)
        / (e * np.sqrt(1 - e ** 2))
    )
    kyx = (
        -a * h0 * np.pi
        * ((np.pi ** 2) * (1 - e ** 2) * (1 + 2 * e ** 2)
           + (32 * e ** 2) * (1 + e ** 2))
        / (e * np.sqrt(1 - e ** 2))
    )
    kyy = (
        a * h0 * 4
        * ((np.pi ** 2) * (1 + 2 * e ** 2)
           + ((32 * e ** 2) * (1 + e ** 2)) / (1 - e ** 2))
    )
    return [kxx, kxy, kyx, kyy]
def calculate_short_damping_matrix(fluid_flow_object):
    """Analytical damping coefficients for the short-bearing approximation.

    Parameters
    ----------
    fluid_flow_object: A FluidFlow object.
        Must provide `eccentricity_ratio`, `load`, `radial_clearance` and `omega`.

    Returns
    -------
    list of floats
        Damping coefficients in the order [cxx, cxy, cyx, cyy].
    """
    # Shorthand for the eccentricity ratio keeps the closed-form expressions readable.
    e = fluid_flow_object.eccentricity_ratio
    # Common normalization factor shared by all four coefficients.
    h0 = 1.0 / (((np.pi ** 2) * (1 - e ** 2) + 16 * e ** 2) ** 1.5)
    # Damping scales with load per clearance, normalized by the spin speed.
    a = fluid_flow_object.load / (fluid_flow_object.radial_clearance * fluid_flow_object.omega)
    cxx = (
        a * h0 * 2 * np.pi * np.sqrt(1 - e ** 2)
        * ((np.pi ** 2) * (1 + 2 * e ** 2) - 16 * e ** 2) / e
    )
    cxy = -a * h0 * 8 * ((np.pi ** 2) * (1 + 2 * e ** 2) - 16 * e ** 2)
    # The cross-coupling terms are symmetric for this model.
    cyx = cxy
    cyy = (
        a * h0
        * (2 * np.pi * ((np.pi ** 2) * (1 - e ** 2) ** 2 + 48 * e ** 2))
        / (e * np.sqrt(1 - e ** 2))
    )
    return [cxx, cxy, cyx, cyy]
def find_equilibrium_position(
    fluid_flow_object,
    print_along=True,
    tolerance=1e-05,
    increment_factor=1e-03,
    max_iterations=10,
    increment_reduction_limit=1e-04,
    return_iteration_map=False,
):
    """This function returns an eccentricity value with calculated forces matching the load applied,
    meaning an equilibrium position of the rotor.
    It first moves the rotor center on x-axis, aiming for the minimum error in the force on x (zero), then
    moves on y-axis, aiming for the minimum error in the force on y (meaning load minus force on y equals zero).
    Parameters
    ----------
    fluid_flow_object: A FluidFlow object.
    print_along: bool, optional
        If True, prints the iteration process.
    tolerance: float, optional
    increment_factor: float, optional
        This number will multiply the first eccentricity found to reach an increment number.
    max_iterations: int, optional
    increment_reduction_limit: float, optional
        The error should always be approximating zero. If it passes zeros (for instance, from a positive error
        to a negative one), the iteration goes back one step and the increment is reduced. This reduction must
        have a limit to avoid long iterations.
    return_iteration_map: bool, optional
        If True, along with the eccentricity found, the function will return a map of position and errors in
        each step of the iteration.
    Returns
    -------
    None, or
    Matrix of floats
        A matrix [4, n], being n the number of iterations. In each line, it contains the x and y of the rotor
        center, followed by the error in force x and force y.
    Examples
    --------
    >>> from ross.fluid_flow.fluid_flow import fluid_flow_example2
    >>> my_fluid_flow = fluid_flow_example2()
    >>> find_equilibrium_position(my_fluid_flow, print_along=False,
    ... tolerance=0.1, increment_factor=0.01,
    ... max_iterations=5, increment_reduction_limit=1e-03)
    """
    # Initial force evaluation at the current rotor position.
    fluid_flow_object.calculate_coefficients()
    fluid_flow_object.calculate_pressure_matrix_numerical()
    r_force, t_force, force_x, force_y = calculate_oil_film_force(
        fluid_flow_object, force_type="numerical"
    )
    # Step size for moving the rotor center, scaled from the current eccentricity.
    increment = increment_factor * fluid_flow_object.eccentricity
    error_x = abs(force_x)
    error_y = abs(force_y - fluid_flow_object.load)
    error = max(error_x, error_y)
    k = 1
    map_vector = []
    # Outer loop: alternate x and y line searches until both force errors are
    # within tolerance or the iteration budget is exhausted.
    while error > tolerance and k <= max_iterations:
        increment_x = increment
        increment_y = increment
        iter_x = 0
        iter_y = 0
        previous_x = fluid_flow_object.xi
        previous_y = fluid_flow_object.yi
        infinite_loop_x_check = False
        infinite_loop_y_check = False
        if print_along:
            print("\nIteration " + str(k) + "\n")
        # Line search along x: try a trial step, keep it only if the error decreased.
        while error_x > tolerance:
            iter_x += 1
            move_rotor_center(fluid_flow_object, increment_x, 0)
            fluid_flow_object.calculate_coefficients()
            fluid_flow_object.calculate_pressure_matrix_numerical()
            (
                new_r_force,
                new_t_force,
                new_force_x,
                new_force_y,
            ) = calculate_oil_film_force(fluid_flow_object, force_type="numerical")
            new_error_x = abs(new_force_x)
            # Undo the trial step; it is re-applied below only if accepted.
            move_rotor_center(fluid_flow_object, -increment_x, 0)
            if print_along:
                print("Iteration in x axis " + str(iter_x))
                print("Force x: " + str(new_force_x))
                print("Previous force x: " + str(force_x))
                print("Increment x: ", str(increment_x))
                print("Error x: " + str(new_error_x))
                print("Previous error x: " + str(error_x) + "\n")
            # Sign change in force => overshot the zero crossing: shrink the step.
            if new_force_x * force_x < 0:
                infinite_loop_x_check = False
                increment_x = increment_x / 10
                if print_along:
                    print("Went beyond error 0. Reducing increment. \n")
                if abs(increment_x) < abs(increment * increment_reduction_limit):
                    if print_along:
                        print("Increment too low. Breaking x iteration. \n")
                    break
            # Error grew => try stepping the other way; bail out if it keeps flip-flopping.
            elif new_error_x > error_x:
                if print_along:
                    print("Error increased. Changing sign of increment. \n")
                increment_x = -increment_x
                if infinite_loop_x_check:
                    break
                else:
                    infinite_loop_x_check = True
            # Error decreased => accept the step and update the running state.
            else:
                infinite_loop_x_check = False
                move_rotor_center(fluid_flow_object, increment_x, 0)
                error_x = new_error_x
                force_x = new_force_x
                force_y = new_force_y
                error_y = abs(new_force_y - fluid_flow_object.load)
                error = max(error_x, error_y)
        # Line search along y: same strategy, targeting force_y == load.
        while error_y > tolerance:
            iter_y += 1
            move_rotor_center(fluid_flow_object, 0, increment_y)
            fluid_flow_object.calculate_coefficients()
            fluid_flow_object.calculate_pressure_matrix_numerical()
            (
                new_r_force,
                new_t_force,
                new_force_x,
                new_force_y,
            ) = calculate_oil_film_force(fluid_flow_object, force_type="numerical")
            new_error_y = abs(new_force_y - fluid_flow_object.load)
            move_rotor_center(fluid_flow_object, 0, -increment_y)
            if print_along:
                print("Iteration in y axis " + str(iter_y))
                print("Force y: " + str(new_force_y))
                print("Previous force y: " + str(force_y))
                print("Increment y: ", str(increment_y))
                print(
                    "Force y minus load: " + str(new_force_y - fluid_flow_object.load)
                )
                print(
                    "Previous force y minus load: "
                    + str(force_y - fluid_flow_object.load)
                )
                print("Error y: " + str(new_error_y))
                print("Previous error y: " + str(error_y) + "\n")
            if (new_force_y - fluid_flow_object.load) * (
                force_y - fluid_flow_object.load
            ) < 0:
                infinite_loop_y_check = False
                increment_y = increment_y / 10
                if print_along:
                    print("Went beyond error 0. Reducing increment. \n")
                if abs(increment_y) < abs(increment * increment_reduction_limit):
                    if print_along:
                        print("Increment too low. Breaking y iteration. \n")
                    break
            elif new_error_y > error_y:
                if print_along:
                    print("Error increased. Changing sign of increment. \n")
                increment_y = -increment_y
                if infinite_loop_y_check:
                    break
                else:
                    infinite_loop_y_check = True
            else:
                infinite_loop_y_check = False
                move_rotor_center(fluid_flow_object, 0, increment_y)
                error_y = new_error_y
                force_y = new_force_y
                force_x = new_force_x
                error_x = abs(new_force_x)
                error = max(error_x, error_y)
        if print_along:
            print("Iteration " + str(k))
            print("Error x: " + str(error_x))
            print("Error y: " + str(error_y))
            print(
                "Current x, y: ("
                + str(fluid_flow_object.xi)
                + ", "
                + str(fluid_flow_object.yi)
                + ")"
            )
        k += 1
        map_vector.append(
            [fluid_flow_object.xi, fluid_flow_object.yi, error_x, error_y]
        )
        # Stalled iteration (no movement in x or y) — further loops would repeat forever.
        if previous_x == fluid_flow_object.xi and previous_y == fluid_flow_object.yi:
            if print_along:
                print("Rotor center did not move during iteration. Breaking.")
            break
    if print_along:
        print(map_vector)
    if return_iteration_map:
        return map_vector
| 39.266547 | 113 | 0.579954 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5,381 | 0.245148 |
981440fe7da5408c2f393c5c158c741ef85a08d1 | 486 | py | Python | dashboard/admin.py | AliBigdeli/Django-Metric-Monitoring-App | a251dc9c4eab26561029437ad437f43bffc479f7 | [
"MIT"
] | null | null | null | dashboard/admin.py | AliBigdeli/Django-Metric-Monitoring-App | a251dc9c4eab26561029437ad437f43bffc479f7 | [
"MIT"
] | null | null | null | dashboard/admin.py | AliBigdeli/Django-Metric-Monitoring-App | a251dc9c4eab26561029437ad437f43bffc479f7 | [
"MIT"
] | null | null | null | from django.contrib import admin
from .models import Device,Metric
@admin.register(Device)
class DeviceAdmin(admin.ModelAdmin):
    """Admin list configuration for Device records."""

    list_display = ["name", "token", "user", "created_date"]
    search_fields = ["name", "token"]
    list_filter = ("user",)


@admin.register(Metric)
class MetricAdmin(admin.ModelAdmin):
    """Admin list configuration for Metric readings."""

    list_display = ["device", "temperature", "humidity", "created_date"]
    search_fields = ["device"]
    list_filter = ("device",)
| 28.588235 | 71 | 0.709877 | 331 | 0.68107 | 0 | 0 | 0 | 0 | 0 | 0 | 113 | 0.23251 |
98154190061bb9767d88942e0de497d9e1a07137 | 1,071 | py | Python | tryTimer.py | Timaos123/maintian_PlatoUtils | 6e3396ea303e2ac9b17b2eaa6d7c5db57ed1dc45 | [
"MIT"
] | null | null | null | tryTimer.py | Timaos123/maintian_PlatoUtils | 6e3396ea303e2ac9b17b2eaa6d7c5db57ed1dc45 | [
"MIT"
] | null | null | null | tryTimer.py | Timaos123/maintian_PlatoUtils | 6e3396ea303e2ac9b17b2eaa6d7c5db57ed1dc45 | [
"MIT"
] | null | null | null | import time
import numpy as np
import gc
# Benchmark 1: extend() with a single-element list per iteration.
a=[]
start=time.time()
for i in range(10000000):
    a.extend([[i,i+1,i+2]])
end=time.time()
print(end-start)
# Benchmark 2: plain append() per iteration.
a=[]
start=time.time()
for i in range(10000000):
    a.append([i,i+1,i+2])
end=time.time()
print(end-start)
# Benchmark 3: append() with the garbage collector disabled during the loop.
a=[]
start=time.time()
gc.disable()
for i in range(10000000):
    a.append([i,i+1,i+2])
gc.enable()
end=time.time()
print(end-start)
# Benchmark (disabled): insert(0, ...) is O(n) per call and far too slow to run here.
# a=[]
# start=time.time()
# for i in range(10000000):
#     a.insert(0,[i,i+1,i+2])
# end=time.time()
# print(end-start)
# Benchmark 4: pre-allocated list filled by index assignment.
a=np.zeros(10000000).tolist()
start=time.time()
for i in range(10000000):
    a[i]=[i,i+1,i+2]
end=time.time()
print(end-start)
from collections import deque
from queue import Queue
class LifoQueue(Queue):
    """Last-in-first-out queue built on Queue's internal hook methods, backed by a list."""

    def _init(self, maxsize):
        # Queue handles maxsize bookkeeping itself; we only provide the storage.
        self.queue = []

    def _qsize(self):
        return len(self.queue)

    def _put(self, item):
        self.queue.append(item)

    def _get(self):
        # Take the most recently added item (LIFO order).
        newest = self.queue[-1]
        del self.queue[-1]
        return newest
# Benchmark 5: pushing via the custom LifoQueue's _put hook (bypasses locking in put()).
start=time.time()
a=LifoQueue()
for i in range(10000000):
    a._put([i,i+1,i+2])
end=time.time()
print(end-start)
9815ebb912152a391b7b3289618744e5e4cc1f23 | 818 | py | Python | migrations/versions/a45942d815f_added_upload_table.py | green-mercury/SampleManagerWeb | 6dd2c9557e0285e1270c84375ebd6f8d10e422a4 | [
"Apache-2.0"
] | 4 | 2020-01-28T11:21:32.000Z | 2020-01-29T09:09:02.000Z | migrations/versions/a45942d815f_added_upload_table.py | green-mercury/SampleManagerWeb | 6dd2c9557e0285e1270c84375ebd6f8d10e422a4 | [
"Apache-2.0"
] | 5 | 2021-06-02T03:09:36.000Z | 2022-03-12T00:49:43.000Z | migrations/versions/a45942d815f_added_upload_table.py | HolgerGraef/MSM | 6dd2c9557e0285e1270c84375ebd6f8d10e422a4 | [
"Apache-2.0"
] | null | null | null | """added upload table
Revision ID: a45942d815f
Revises: 1de13fd625b1
Create Date: 2016-03-07 12:00:50.283535
"""
# revision identifiers, used by Alembic.
revision = 'a45942d815f'
down_revision = '1de13fd625b1'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the 'uploads' table with a foreign key to users."""
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('uploads',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('user_id', sa.Integer(), nullable=True),
    sa.Column('source', sa.String(length=256), nullable=True),
    sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    ### end Alembic commands ###
def downgrade():
    """Drop the 'uploads' table, reverting this migration."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('uploads')
    ### end Alembic commands ###
| 24.787879 | 63 | 0.677262 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 417 | 0.50978 |
9818a800cb69cee7cd1d2943f67320ac45add3c8 | 956 | py | Python | core/tests/test_views.py | honno/ascii-forever | 8364219db115229fa9eb0b059e9c0611dcb689cf | [
"MIT"
] | null | null | null | core/tests/test_views.py | honno/ascii-forever | 8364219db115229fa9eb0b059e9c0611dcb689cf | [
"MIT"
] | null | null | null | core/tests/test_views.py | honno/ascii-forever | 8364219db115229fa9eb0b059e9c0611dcb689cf | [
"MIT"
] | null | null | null | from django.urls import reverse
from pytest import mark
from core.models import *
urls = [reverse(name) for name in ["core:index", "core:arts"]]
@mark.parametrize("url", urls)
@mark.django_db
def test_nsfw_filter(url, django_user_model, client):
    """NSFW art is listed by default but hidden when the viewer's nsfw_pref is set to 'HA'."""
    # A followed artist with one SFW and one NSFW piece.
    target = django_user_model.objects.create(username="bob", password="pass")
    follower = django_user_model.objects.create(username="alice", password="pass")
    follower.following.add(target)
    sfw = Art(id=1, artist=target, title="sfw", text="sfw", nsfw=False)
    nsfw = Art(id=2, artist=target, title="nsfw", text="nsfw", nsfw=True)
    sfw.save()
    nsfw.save()
    client.force_login(follower)
    # Default preference: both pieces appear in the listing.
    response = client.get(url)
    assert sfw in response.context["arts"]
    assert nsfw in response.context["arts"]
    # With nsfw_pref = "HA" the NSFW piece must be filtered out.
    follower.nsfw_pref = "HA"
    follower.save()
    response = client.get(url)
    assert sfw in response.context["arts"]
    assert nsfw not in response.context["arts"]
| 25.837838 | 82 | 0.694561 | 0 | 0 | 0 | 0 | 806 | 0.843096 | 0 | 0 | 102 | 0.106695 |
981a4012f0e9207383228850425145d6abe23671 | 168 | py | Python | constants/nodetype.py | vladzaharia/meshnet | 87a48c678094870cc273f4940f122e899eb1f5e1 | [
"MIT"
] | 1 | 2021-02-16T05:29:03.000Z | 2021-02-16T05:29:03.000Z | constants/nodetype.py | vladzaharia/meshnet | 87a48c678094870cc273f4940f122e899eb1f5e1 | [
"MIT"
] | 1 | 2021-02-16T06:23:08.000Z | 2021-02-16T06:23:08.000Z | constants/nodetype.py | vladzaharia/meshnet | 87a48c678094870cc273f4940f122e899eb1f5e1 | [
"MIT"
] | null | null | null | # Node types
TYPE_NODE = b'\x10'
TYPE_NODE_NR = b'\x11'
# Gateway types
TYPE_GATEWAY = b'\x20'
TYPE_GATEWAY_TIME = b'\x21'
# Special types
TYPE_PROVISIONING = b'\xFF' | 16.8 | 27 | 0.714286 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 77 | 0.458333 |
981c4fce270cc5bfa4ae7432956bd7590c48d09a | 1,232 | py | Python | commissioning/log2cases/parselog.py | dobos/pysynphot | 5d2e0b52ceda78890940ac9239c2d88e149e0bed | [
"BSD-3-Clause"
] | 24 | 2015-01-04T23:38:21.000Z | 2022-02-01T00:11:07.000Z | commissioning/log2cases/parselog.py | dobos/pysynphot | 5d2e0b52ceda78890940ac9239c2d88e149e0bed | [
"BSD-3-Clause"
] | 126 | 2015-01-29T14:50:37.000Z | 2022-02-15T01:58:13.000Z | commissioning/log2cases/parselog.py | dobos/pysynphot | 5d2e0b52ceda78890940ac9239c2d88e149e0bed | [
"BSD-3-Clause"
] | 25 | 2015-02-09T12:12:02.000Z | 2021-09-09T13:06:54.000Z | """This script *replaces* the processlog.csh shell script. It has
more sophisticated logic to add the etc test name to the pysynphot
command as another keyword-value pair.
Modified to parse the modified line, then write a dictionary that will
be read by the gencases tool.
"""
import sys,re, pickle
from pysynphot import etc
import gencases
def run(fname):
    """Parse an ETC processing log and pickle a {ktuple: [test names]} lookup.

    Scans *fname* line by line; '] starting' / '] running' lines establish the
    current test name (the quoted token), and each subsequent 'command is'
    line is turned into a pysynphot command string (with the test name added
    as an ``etcid`` keyword), converted to a key tuple via
    ``gencases.line2ktuple`` and accumulated in a dictionary mapping each key
    tuple to the list of test names that produced it.

    The resulting dictionary is pickled to ``<fname>_lookup.pickle`` (with
    '.txt' replaced), for later consumption by the gencases tool.

    Parameters
    ----------
    fname : str
        Path to the log file, expected to end in '.txt'.
    """
    d = {}
    testname = None
    # Context managers guarantee both files are closed even on parse errors.
    with open(fname) as log:
        line = 'unopened'
        while len(line) > 0:
            line = log.readline().strip()
            if "] starting" in line or "] running" in line:
                # The test name is the single-quoted token on the line.
                x = re.search("'(?P<name>.*)'", line)
                testname = x.group('name')
            elif 'command is' in line:
                prefix, value = line.lstrip().split('command is')
                # Strip the trailing two characters and append the etc test
                # name as an extra keyword-value pair.
                cmd = '%s&etcid="%s"\n' % (value[0:-2], testname)
                # Entry in the new dictionary
                ktuple = gencases.line2ktuple(cmd)
                d.setdefault(ktuple, []).append(testname)
    # Save the resulting library. pickle requires a binary-mode file on
    # Python 3; the original 'w' text mode raised TypeError here.
    with open(fname.replace('.txt', '_lookup.pickle'), 'wb') as out:
        pickle.dump(d, out)
if __name__ == '__main__':
run(sys.argv[1])
| 29.333333 | 70 | 0.575487 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 463 | 0.375812 |
981cc398c8251fbd0b8066f2be2d71a32ba021d7 | 5,561 | py | Python | src/hypergraph_nets/reducers.py | yutian-zhao/STRIPS-HGN | 2b5024335fe0f1518c4a2bab76983c504df09726 | [
"MIT"
] | null | null | null | src/hypergraph_nets/reducers.py | yutian-zhao/STRIPS-HGN | 2b5024335fe0f1518c4a2bab76983c504df09726 | [
"MIT"
] | null | null | null | src/hypergraph_nets/reducers.py | yutian-zhao/STRIPS-HGN | 2b5024335fe0f1518c4a2bab76983c504df09726 | [
"MIT"
] | null | null | null | import torch
# import time
# from torch_scatter import scatter
# Maybe see if this is good: https://github.com/rusty1s/pytorch_scatter
# My implementation is quite suboptimal
# def time_fun(f, iter, *args):
# start_time = time.perf_counter()
# start = time.time()
# for i in range(iter):
# f(*args)
# print(str(f))
# print(time.perf_counter()-start_time)
# print(time.time()-start)
# return f(*args)
def _unsorted_segment_helper(
data: torch.Tensor, segment_ids: torch.Tensor, num_segments
):
assert data.shape[0] == segment_ids.shape[0]
if len(segment_ids.shape) == 1:
assert data.shape[0] == segment_ids.shape[0]
repeated_data, indices = data, segment_ids
else:
# FIXME: Bad hack, -1 indicates we're using zero padding, so ignore those nodes
# Repeat Data Tensor depending on number of segment_ids for that idx that are not -1
repeats = torch.sum(segment_ids != -1, dim=1)
# TODO: which one is better???
repeated_data = data.repeat_interleave(repeats, dim=0)
# Divide hyperedge feature by the number of receivers/senders in the given hyperedge?
# repeated_data = (data / repeats.reshape(-1, 1).float()).repeat_interleave(
# repeats, dim=0
# )
# Flatten list of Tensors into single Tensor
indices = segment_ids[segment_ids != -1]
assert repeated_data.shape[0] == indices.shape[0]
# Placeholder for the segments
segments = torch.zeros((num_segments, repeated_data.shape[1]))
return repeated_data, indices, segments
def torch_unsorted_segment_sum(
    data: torch.Tensor, segment_ids: torch.Tensor, num_segments
):
    """Sum rows of ``data`` into ``num_segments`` buckets chosen by ``segment_ids``.

    Mirrors tf.math.unsorted_segment_sum:
    https://www.tensorflow.org/api_docs/python/tf/math/unsorted_segment_sum
    """
    expanded, bucket_ids, accumulator = _unsorted_segment_helper(
        data, segment_ids, num_segments
    )
    # index_add performs the whole per-bucket summation in one call.
    return accumulator.index_add(0, bucket_ids, expanded)
# def torch_unsorted_segment_sum_2(
# data: torch.Tensor, segment_ids: torch.Tensor, num_segments
# ):
# """
# Compute sums along segments of a Tensor
# Better described here: https://www.tensorflow.org/api_docs/python/tf/math/unsorted_segment_sum
# """
# if len(segment_ids.shape) == 1:
# segments = torch.zeros((num_segments, *data.shape[1:]))
# segments.index_add_(0, segment_ids, data)
# return segments
# else:
# segments = torch.zeros((num_segments+1, *data.shape[1:]))
# segment_ids[segment_ids==-1] = num_segments # bad hack
# for i in range(segment_ids.shape[1]):
# segments = segments.index_add(0, segment_ids[:,i], data)
# return segments[:-1]
# def torch_unsorted_segment_sum_1(
# data: torch.Tensor, segment_ids: torch.Tensor, num_segments
# ):
# """
# Compute sums along segments of a Tensor
# Better described here: https://www.tensorflow.org/api_docs/python/tf/math/unsorted_segment_sum
# """
# if len(segment_ids.shape) == 1:
# return scatter(src=data, index=segment_ids, dim=0, dim_size=num_segments, reduce='sum')
# else:
# segments = torch.zeros((num_segments+1, *data.shape[1:]))
# segment_ids[segment_ids==-1] = num_segments # bad hack
# for i in range(segment_ids.shape[1]):
# segments = scatter(src=data, index=segment_ids[:,i], dim=0, out=segments, dim_size=num_segments+1, reduce='sum')
# return segments[:-1]
# def torch_unsorted_segment_sum_3(
# data: torch.Tensor, segment_ids: torch.Tensor, num_segments
# ):
# """
# Compute sums along segments of a Tensor
# Better described here: https://www.tensorflow.org/api_docs/python/tf/math/unsorted_segment_sum
# """
# repeated_data, indices, segments = _unsorted_segment_helper(
# data, segment_ids, num_segments
# )
# # Do the summation, i.e. sum by index
# sum_results = scatter(src=repeated_data, index=indices, dim=0, out=segments, reduce='sum')
# return sum_results
# def torch_unsorted_segment_mean(
# data: torch.Tensor, segment_ids: torch.Tensor, num_segments
# ):
# """
# Computes means along segments of a Tensor
# """
# repeated_data, indices, segments = _unsorted_segment_helper(
# data, segment_ids, num_segments
# )
#
# # Do the summation, i.e. sum by index
# sum_results = segments.index_add(0, indices, repeated_data)
#
# # Pytorch doesn't have an efficient implementation so we have to hack around
# idx_elems, existing_idx_counts = torch.unique(
# indices, sorted=True, return_counts=True
# )
#
# # Note: not all indices will be present in a segment. Use torch.ones not torch.zeros to avoid divide by 0
# idx_counts = torch.zeros(num_segments)
# idx_counts[idx_elems] = existing_idx_counts.float()
# idx_counts = idx_counts.reshape(-1, 1)
#
# mean_results = sum_results / idx_counts
# return mean_results
# device = torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu")
# print("Device", device)
# data = torch.tensor([[1, 2, 3], [4, 5, 6], [7, 8, 9]], dtype=torch.float).repeat(3,1).reshape(3,3,3)
# print(data)
# index = torch.tensor([[0,0,0], [4,4,4], [2,2,2]])
# index_neg = torch.tensor([[0,0], [-1,-1], [2,2]])
# index_sin = torch.tensor([0,4,2])
# num_segments=6
# segments = torch.zeros((num_segments, *data.shape[1:])) | 37.322148 | 126 | 0.664089 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4,423 | 0.795361 |
e21853ab6ec1a8c3451b991676d774192dce2fde | 211 | py | Python | examples/button.py | Akuli/tkinder | c360fbfe086ca09cdd856a8636de05b24e1b7093 | [
"MIT"
] | 23 | 2019-01-15T00:07:30.000Z | 2022-01-18T06:19:18.000Z | examples/button.py | Akuli/tkinder | c360fbfe086ca09cdd856a8636de05b24e1b7093 | [
"MIT"
] | 12 | 2019-01-13T19:51:52.000Z | 2021-05-17T17:55:51.000Z | examples/button.py | Akuli/pythotk | c360fbfe086ca09cdd856a8636de05b24e1b7093 | [
"MIT"
] | 7 | 2019-01-13T19:48:26.000Z | 2021-04-21T13:30:21.000Z | import teek
def on_click():
    """Button callback: report the click on stdout."""
    message = "You clicked me!"
    print(message)
# Build the demo UI: one window holding a single button.
window = teek.Window()
# The command callback fires whenever the button is clicked.
button = teek.Button(window, "Click me", command=on_click)
button.pack()
# Closing the window quits the teek main loop instead of just hiding it.
window.on_delete_window.connect(teek.quit)
teek.run()
| 16.230769 | 58 | 0.720379 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 27 | 0.127962 |
e2187da14d04338260778b407902ef0160f842f8 | 5,378 | py | Python | docarray/array/mixins/find.py | fastflair/docarray | 0bbdbc816b2f4a3b399779f6816875fbc1dfe862 | [
"Apache-2.0"
] | null | null | null | docarray/array/mixins/find.py | fastflair/docarray | 0bbdbc816b2f4a3b399779f6816875fbc1dfe862 | [
"Apache-2.0"
] | null | null | null | docarray/array/mixins/find.py | fastflair/docarray | 0bbdbc816b2f4a3b399779f6816875fbc1dfe862 | [
"Apache-2.0"
] | null | null | null | import abc
from typing import overload, Optional, Union, Dict, List, Tuple, Callable, TYPE_CHECKING
import numpy as np
from ...math import ndarray
from ...score import NamedScore
if TYPE_CHECKING:
from ...types import T, ArrayType
from ... import Document, DocumentArray
class FindMixin:
    """A mixin that provides find functionality to DocumentArrays
    Subclass should override :meth:`._find` not :meth:`.find`.
    """
    # Typed overloads declaring the supported query forms; the real
    # implementation is the untyped `find` below.
    @overload
    def find(self: 'T', query: 'ArrayType', **kwargs):
        ...
    @overload
    def find(self: 'T', query: Union['Document', 'DocumentArray'], **kwargs):
        ...
    @overload
    def find(self: 'T', query: Dict, **kwargs):
        ...
    @overload
    def find(self: 'T', query: str, **kwargs):
        ...
    def find(
        self: 'T',
        query: Union['DocumentArray', 'Document', 'ArrayType'],
        metric: Union[
            str, Callable[['ArrayType', 'ArrayType'], 'np.ndarray']
        ] = 'cosine',
        limit: Optional[Union[int, float]] = 20,
        metric_name: Optional[str] = None,
        exclude_self: bool = False,
        only_id: bool = False,
        **kwargs,
    ) -> Union['DocumentArray', List['DocumentArray']]:
        """Returns approximate nearest neighbors given an input query.
        :param query: the input query to search by
        :param limit: the maximum number of matches, when not given defaults to 20.
        :param metric_name: if provided, then match result will be marked with this string.
        :param metric: the distance metric.
        :param exclude_self: if set, Documents in results with same ``id`` as the query values will not be
            considered as matches. This is only applied when the input query is Document or DocumentArray.
        :param only_id: if set, then returning matches will only contain ``id``
        :param kwargs: other kwargs.
        :return: a list of DocumentArrays containing the closest Document objects for each of the queries in `query`.
        """
        # Local import (presumably to avoid a circular import at module load
        # time -- confirm).
        from ... import Document, DocumentArray
        if limit is not None:
            if limit <= 0:
                raise ValueError(f'`limit` must be larger than 0, receiving {limit}')
            else:
                limit = int(limit)
        # Fetch one extra candidate when excluding self so the final result
        # can still reach `limit` after the query document is filtered out.
        _limit = len(self) if limit is None else (limit + (1 if exclude_self else 0))
        # Normalize the query to a raw embedding array.
        if isinstance(query, (DocumentArray, Document)):
            if isinstance(query, Document):
                query = DocumentArray(query)
            _query = query.embeddings
        else:
            _query = query
        _, _ = ndarray.get_array_type(_query)
        n_rows, n_dim = ndarray.get_array_rows(_query)
        # Ensure query embedding to have the correct shape
        if n_dim != 2:
            _query = _query.reshape((n_rows, -1))
        # Default the score label to the metric's own name.
        metric_name = metric_name or (metric.__name__ if callable(metric) else metric)
        kwargs.update(
            {
                'limit': _limit,
                'only_id': only_id,
                'metric': metric,
                'metric_name': metric_name,
            }
        )
        _result = self._find(
            _query,
            **kwargs,
        )
        result: List['DocumentArray']
        if isinstance(_result, list) and isinstance(_result[0], DocumentArray):
            # already auto-boxed by the storage backend, e.g. pqlite
            result = _result
        elif (
            isinstance(_result, tuple)
            and isinstance(_result[0], np.ndarray)
            and isinstance(_result[1], np.ndarray)
        ):
            # do autobox for Tuple['np.ndarray', 'np.ndarray']
            dist, idx = _result
            result = []
            for _ids, _dists in zip(idx, dist):
                matches = DocumentArray()
                for _id, _dist in zip(_ids, _dists):
                    # Note, when match self with other, or both of them share the same Document
                    # we might have recursive matches .
                    # checkout https://github.com/jina-ai/jina/issues/3034
                    if only_id:
                        d = Document(id=self[_id].id)
                    else:
                        d = Document(self[int(_id)], copy=True)  # type: Document
                    # to prevent self-reference and override on matches
                    d.pop('matches')
                    d.scores[metric_name] = NamedScore(value=_dist)
                    matches.append(d)
                    if len(matches) >= _limit:
                        break
                result.append(matches)
        else:
            raise TypeError(
                f'unsupported type `{type(_result)}` returned from `._find()`'
            )
        # Drop the query documents themselves from the matches, then trim
        # back down to `limit` (we over-fetched by one above).
        if exclude_self and isinstance(query, DocumentArray):
            for i, q in enumerate(query):
                matches = result[i].traverse_flat('r', filter_fn=lambda d: d.id != q.id)
                if limit and len(matches) > limit:
                    result[i] = matches[:limit]
                else:
                    result[i] = matches
        # Single-query convenience: unwrap the one-element list.
        if len(result) == 1:
            return result[0]
        else:
            return result
    # Storage backends implement the actual nearest-neighbor search here.
    @abc.abstractmethod
    def _find(
        self, query: 'ArrayType', limit: int, **kwargs
    ) -> Tuple['np.ndarray', 'np.ndarray']:
        raise NotImplementedError
| 33.403727 | 118 | 0.551134 | 5,092 | 0.94682 | 0 | 0 | 479 | 0.089067 | 0 | 0 | 1,699 | 0.315917 |
e21a888a401f8d1f617db8d771f21b250370c0bb | 1,569 | py | Python | zgres/systemd.py | jinty/zgres | 88730e94bb543ec4d48c27523d02e3136b332173 | [
"MIT"
] | 12 | 2015-11-08T21:29:52.000Z | 2018-10-25T04:45:58.000Z | zgres/systemd.py | jinty/zgres | 88730e94bb543ec4d48c27523d02e3136b332173 | [
"MIT"
] | null | null | null | zgres/systemd.py | jinty/zgres | 88730e94bb543ec4d48c27523d02e3136b332173 | [
"MIT"
] | 6 | 2015-10-25T05:59:12.000Z | 2021-01-06T08:02:46.000Z | import os
from subprocess import check_call, call
def write_service(service_name, contents):
    """Write a systemd unit file in a "safe" manner.

    If the target file already holds exactly ``contents``, do nothing.
    Otherwise write the contents to a hidden temporary file in the same
    directory, then move that temporary file into place.

    Return True if the file was changed, else False.
    """
    assert '/' not in service_name
    target = '/lib/systemd/system/' + service_name
    if os.path.exists(target):
        with open(target, 'r') as existing:
            unchanged = existing.read() == contents
        if unchanged:
            return False
    staging = '/lib/systemd/system/.' + service_name + '.tmp'
    with open(staging, 'w') as staged:
        staged.write(contents)
    os.rename(staging, target)
    return True
def assert_enabled_and_running(service_name, reload=False, reload_daemon=False, restart=False):
    """Enable and start ``service_name`` via systemctl, with optional extras."""
    def systemctl(action):
        check_call(['systemctl', action, service_name])
    systemctl('enable')
    if reload_daemon:
        check_call(['systemctl', 'daemon-reload'])
    systemctl('start')  # do we need to check status?
    if reload:
        # maybe avoid if we just started the service
        systemctl('reload')
    if restart:
        # maybe avoid if we just started the service
        systemctl('restart')
def assert_disabled_and_stopped(service_name):
    """Disable ``service_name`` and stop it, tolerating a failed stop."""
    check_call(['systemctl', 'disable', service_name])
    # Plain call(): `stop` fails if the service does not exist.
    call(['systemctl', 'stop', service_name])
| 38.268293 | 103 | 0.67559 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 682 | 0.434672 |
e21afb57aa8ccce52815f0ad4d4f545a41684adb | 512 | py | Python | native/java/lang/double.py | wonderyue/TinyJVM | 5559730ab2aad35963fce977fb9b3ea78eb9a8e2 | [
"MIT"
] | null | null | null | native/java/lang/double.py | wonderyue/TinyJVM | 5559730ab2aad35963fce977fb9b3ea78eb9a8e2 | [
"MIT"
] | null | null | null | native/java/lang/double.py | wonderyue/TinyJVM | 5559730ab2aad35963fce977fb9b3ea78eb9a8e2 | [
"MIT"
] | null | null | null | import struct
def double_to_raw_long_bits(frame):
    """
    public static native long doubleToRawLongBits(double value);

    Reinterprets the IEEE-754 bit pattern of the double in local slot 0 as
    a signed 64-bit integer and pushes it onto the operand stack.
    """
    value = frame.get_local_double(0)
    b = struct.pack("d", value)
    # "q" is an 8-byte integer; the previous "l" format is only 4 bytes on
    # some platforms (e.g. 64-bit Windows) and fails to unpack a double.
    i = struct.unpack("q", b)[0]
    frame.push_operand_long(i)
def long_bits_to_double(frame):
    """
    public static native double longBitsToDouble(long bits);

    Reinterprets the signed 64-bit integer in local slot 0 as the IEEE-754
    double with that bit pattern and pushes it onto the operand stack.
    """
    i = frame.get_local_long(0)
    # "q" is an 8-byte integer; the previous "l" format is only 4 bytes on
    # some platforms (e.g. 64-bit Windows), which would corrupt the value.
    b = struct.pack("q", i)
    value = struct.unpack("d", b)[0]
    frame.push_operand_double(value)
| 23.272727 | 64 | 0.65625 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 160 | 0.3125 |
e21b633cad6ae7e10b4918cae9d3ced7f0c2f856 | 224 | py | Python | materiais/admin.py | lucgbrl/ecossistema-django | a374662be7e93a1f57b7318487074aa0bd905043 | [
"MIT"
] | null | null | null | materiais/admin.py | lucgbrl/ecossistema-django | a374662be7e93a1f57b7318487074aa0bd905043 | [
"MIT"
] | null | null | null | materiais/admin.py | lucgbrl/ecossistema-django | a374662be7e93a1f57b7318487074aa0bd905043 | [
"MIT"
] | null | null | null | from django.contrib import admin
from .models import Post, Material, Video, Section
# Register your models here.
# Plain registrations: each model is exposed in the Django admin with the
# default ModelAdmin options.
admin.site.register(Post)
admin.site.register(Section)
admin.site.register(Material)
admin.site.register(Video) | 32 | 50 | 0.8125 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 28 | 0.125 |
e21e9fda6c4a55682bbf0defc32a4a12f38a5131 | 6,493 | py | Python | utils.py | ericqian1/poker_sim | 0825b9d1e3e93dc4af37cf44568bd0baf6c5868a | [
"MIT"
] | null | null | null | utils.py | ericqian1/poker_sim | 0825b9d1e3e93dc4af37cf44568bd0baf6c5868a | [
"MIT"
] | null | null | null | utils.py | ericqian1/poker_sim | 0825b9d1e3e93dc4af37cf44568bd0baf6c5868a | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Wed Dec 4 13:54:27 2019
@author: eric.qian
"""
from collections import Counter
from itertools import combinations
# Numeric hand rankings, higher integer value is stronger hand
HAND_RANKINGS = {0: 'High Card',
                 1: 'One Pair',
                 2: 'Two Pair',
                 3: 'Trips',
                 4: 'Straight',
                 5: 'Flush',
                 6: 'Full House',
                 7: 'Quads',
                 8: 'Straight Flush'}
# Card values mapped to integer strengths, higher integer value stronger.
# The deuce is 1, so an ace-low straight shows up as [1,2,3,4,13].
CARD_VALS = {'2':1,
             '3':2,
             '4':3,
             '5':4,
             '6':5,
             '7':6,
             '8':7,
             '9':8,
             '10':9,
             'J':10,
             'Q':11,
             'K':12,
             'A':13}
# Cache mapper: inverse of CARD_VALS, used to render hand descriptions
REVERSE_VALS = {v:k for k,v in CARD_VALS.items()}
def check_flush(suits):
    """Return True when every entry of *suits* matches the first.

    Returns an explicit False (instead of the old implicit None) for a
    non-flush, and False rather than IndexError for an empty list.
    """
    if not suits:
        return False
    return suits.count(suits[0]) == len(suits)
def check_strt(card_vals):
    """Return True when *card_vals* (five card ints) form a straight.

    The wheel (A,2,3,4,5 -> [1,2,3,4,13]) is handled explicitly; any other
    straight must be five consecutive integers. Returns an explicit False
    rather than the old implicit None.
    """
    if not card_vals:
        return False
    ordered = sorted(card_vals)
    # Edge case of the A,2,3,4,5 straight
    if ordered == [1, 2, 3, 4, 13]:
        return True
    lowest = ordered[0]
    return [v - lowest for v in ordered] == [0, 1, 2, 3, 4]
def read_hand(hand):
    """
    hand :: list of str :: unordered str hand representation, e.g. ['As','Ah','2h','2s','Ks']
    =========
    returns (hand_strength, desc)

    hand_strength :: list of int :: [ranking, kicker1, kicker2, ...] in
        descending priority; directly comparable between hands (see
        eval_hands). For example Two Pair, aces and deuces with a K kicker
        is [2, 13, 1, 12].
    desc :: str :: full human-readable description of the hand

    Bug fixes over the original:
    - a wheel straight flush used to *return* a bare string instead of the
      (hand_strength, desc) tuple;
    - the ace in a wheel used to rank high, so A-2-3-4-5 wrongly beat any
      other straight; the ace now plays low.
    """
    # Split card values and suits; '10' is the only two-character rank.
    suits = [i[1] if '10' not in i else i[2] for i in hand]
    card_vals = [CARD_VALS[i[0]] if '10' not in i else CARD_VALS['10'] for i in hand]
    # Counting distinct values makes pair/trips/quads detection trivial.
    card_counter = Counter(card_vals)
    is_flush = check_flush(suits)
    is_strt = check_strt(card_vals)
    if is_flush and is_strt:
        ranking = 8
    elif is_flush:
        ranking = 5
    elif is_strt:
        ranking = 4
    elif len(card_counter) == 2:
        # Two distinct values: either 3+2 (full house) or 4+1 (quads).
        ranking = 6 if 3 in card_counter.values() else 7
    elif len(card_counter) == 3:
        # Three distinct values: either 3+1+1 (trips) or 2+2+1 (two pair).
        ranking = 3 if 3 in card_counter.values() else 2
    elif len(card_counter) == 4:
        ranking = 1
    else:
        ranking = 0
    # Kickers ordered by (multiplicity, value) descending: enough info to
    # compare any two hands lexicographically.
    sorted_high_cards = sorted(card_counter.items(), key=lambda x: (x[1], x[0]), reverse=True)
    hand_strength = [ranking] + [val for val, _count in sorted_high_cards]
    # Wheel straight: the ace plays low, so reorder the kickers to 5-4-3-2-A.
    if ranking in (4, 8) and hand_strength[1] == 13 and hand_strength[2] == 4:
        hand_strength = [ranking, 4, 3, 2, 1, 13]
    # Human-readable kicker names (index 0 is the ranking itself).
    desc_vals = [str(REVERSE_VALS[i]) if idx > 0 else 'None' for idx, i in enumerate(hand_strength)]
    if ranking == 8:
        if 13 in hand_strength and 4 in hand_strength:
            # Wheel straight flush (fixed: used to `return` a bare string).
            desc = 'Straight Flush 5 high'
        else:
            desc = 'Straight Flush ' + desc_vals[1] + ' high'
    elif ranking == 7:
        desc = 'Quad ' + desc_vals[1] + ' ' + desc_vals[2] + ' high'
    elif ranking == 6:
        desc = 'Full House ' + desc_vals[1] + ' full of ' + desc_vals[2]
    elif ranking == 5:
        desc = 'Flush ' + desc_vals[1] + ' high'
    elif ranking == 4:
        desc = 'Straight ' + desc_vals[1] + ' high'
    elif ranking == 3:
        desc = 'Trip ' + desc_vals[1]
    elif ranking == 2:
        desc = 'Two Pairs ' + desc_vals[1] + ' and ' + desc_vals[2] + ' ' + desc_vals[3] + ' high'
    elif ranking == 1:
        desc = 'One Pair ' + desc_vals[1] + ' ' + desc_vals[2] + desc_vals[3] + desc_vals[4] + ' high'
    else:
        desc = 'High Card ' + desc_vals[1] + ' ' + desc_vals[2] + desc_vals[3] + desc_vals[4] + desc_vals[5] + ' high'
    return hand_strength, desc
def eval_hands(hands):
    """
    hands :: list :: list of hand_strength lists (first output of read_hand)
    =======
    Returns the index of the strongest hand. On an exact tie the lowest
    index among the tied hands is returned. (The original docstring claimed
    an iterable was returned, but a single index always was; the bare
    ``except:`` is narrowed to the IndexError it was actually guarding.)
    """
    card = 0
    candidates = list(range(len(hands)))
    # Repeatedly keep only the hands with the best value at the current
    # kicker position; stop when a single winner remains or the kickers are
    # exhausted (a split pot).
    while len(candidates) != 1:
        try:
            strongest = max(hands[hand][card] for hand in candidates)
        except IndexError:
            # Ran past the last kicker: the remaining candidates are tied.
            break
        candidates = [c for c in candidates if hands[c][card] == strongest]
        card += 1
    return candidates[0]
def eval_combos(hands):
    """Pick the strongest among several hands.

    Returns (cards, hand_strength, desc) for the winning hand.
    """
    evaluated = [read_hand(h) for h in hands]
    hand_strs = [strength for strength, _ in evaluated]
    descs = [text for _, text in evaluated]
    winner = eval_hands(hand_strs)
    return hands[winner], hand_strs[winner], descs[winner]
def combos(cards, n = 5):
    """Return every n-card combination of *cards* as a list of lists."""
    return list(map(list, combinations(cards, n)))
| 31.066986 | 119 | 0.538118 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,356 | 0.362852 |
e21eb67206281500e398b6199cc031ce513a61af | 1,713 | py | Python | tests/types/test_boolean.py | arthurazs/py61850 | ba9c5f40ef21bfecd14a8d380e9ff512da9ba5bf | [
"MIT"
] | 3 | 2020-09-21T02:13:58.000Z | 2021-09-18T02:32:56.000Z | tests/types/test_boolean.py | arthurazs/py61850 | ba9c5f40ef21bfecd14a8d380e9ff512da9ba5bf | [
"MIT"
] | null | null | null | tests/types/test_boolean.py | arthurazs/py61850 | ba9c5f40ef21bfecd14a8d380e9ff512da9ba5bf | [
"MIT"
] | 2 | 2020-12-29T15:09:50.000Z | 2022-01-04T16:19:48.000Z | from pytest import fixture, raises
from py61850.types import Boolean
@fixture
def true():
    """A Boolean instance encoded from the Python value True."""
    return Boolean(True)
# === DECODE ===
def test_byte_true_min_raw_value():
    # 0x01 decodes as true and the raw byte is preserved verbatim.
    assert Boolean(b'\x01').raw_value == b'\x01'
def test_byte_true_min_value():
    assert Boolean(b'\x01').value is True
def test_byte_true_max_raw_value():
    # 0xFF also decodes as true, again preserving the raw byte.
    assert Boolean(b'\xFF').raw_value == b'\xFF'
def test_byte_true_max_value():
    assert Boolean(b'\xFF').value is True
def test_byte_false_raw_value():
    # 0x00 is the (only tested) false encoding.
    assert Boolean(b'\x00').raw_value == b'\x00'
def test_byte_false_value():
    assert Boolean(b'\x00').value is False
# === TRUE ===
def test_true_value(true):
    assert true.value is True
def test_true_raw_value(true):
    # Encoded true must not collide with the 0x00 false encoding.
    assert true.raw_value != b'\x00'
# === FALSE ===
def test_false_value():
    assert Boolean(False).value is False
def test_false_raw_value(true):
    # NOTE(review): the `true` fixture argument is unused here.
    assert Boolean(False).raw_value == b'\x00'
# === UNCHANGED VALUES ===
def test_raw_tag(true):
    # Tag byte for a Boolean is 0x83.
    assert true.raw_tag == b'\x83'
def test_tag(true):
    assert true.tag == 'Boolean'
def test_raw_length(true):
    # The value occupies exactly one byte.
    assert true.raw_length == b'\x01'
def test_length(true):
    assert true.length == 1
def test_bytes():
    # Full serialization is tag + length + value.
    assert bytes(Boolean(False)) == b'\x83\x01\x00'
def test_len(true):
    assert len(true) == 3
# === EXCEPTIONS ===
def test_encode_decode():
    # An int is neither a bool to encode nor bytes to decode.
    with raises(TypeError):
        Boolean(1)
def test_decode_below():
    # Decoding requires exactly one byte: zero bytes is rejected...
    with raises(ValueError):
        Boolean(b'')
def test_decode_above():
    # ...and so are two bytes.
    with raises(ValueError):
        Boolean(b'\x00\x00')
def test_none():
    with raises(TypeError):
        Boolean(None)
def test_none_empty():
    # No-argument construction is also a TypeError.
    with raises(TypeError):
        Boolean()
| 15.861111 | 51 | 0.664332 | 0 | 0 | 0 | 0 | 45 | 0.02627 | 0 | 0 | 220 | 0.12843 |
e21f915c5f29fa2f88567990fe37ec71534110ac | 129 | py | Python | demo/cat.py | zbentley/useful-bash | d1d9f4bde3d53564c037f6ef18e0bbaa2144ffa3 | [
"MIT"
] | 5 | 2017-07-27T13:50:15.000Z | 2021-08-14T18:52:52.000Z | demo/cat.py | zbentley/useful-bash | d1d9f4bde3d53564c037f6ef18e0bbaa2144ffa3 | [
"MIT"
] | 1 | 2020-09-18T18:04:29.000Z | 2020-09-19T23:02:28.000Z | demo/cat.py | zbentley/useful-bash | d1d9f4bde3d53564c037f6ef18e0bbaa2144ffa3 | [
"MIT"
] | 1 | 2020-09-19T23:15:25.000Z | 2020-09-19T23:15:25.000Z | #!/usr/bin/env python
import sys
with open(sys.argv[1], 'r') as f:
    # Stream line-by-line instead of materializing the whole file in memory
    # with readlines().
    for line in f:
        sys.stdout.write(line)
| 21.5 | 33 | 0.627907 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 24 | 0.186047 |
e220e53720f2fd8d160f1ff08fa40187d3250585 | 40,539 | py | Python | orgediting.py | waynezhang/orgextended | 853ae89f937d302c2dd9dad3ae98aa5c2485faaa | [
"MIT"
] | null | null | null | orgediting.py | waynezhang/orgextended | 853ae89f937d302c2dd9dad3ae98aa5c2485faaa | [
"MIT"
] | null | null | null | orgediting.py | waynezhang/orgextended | 853ae89f937d302c2dd9dad3ae98aa5c2485faaa | [
"MIT"
] | null | null | null | import sublime
import sublime_plugin
import datetime
import re
from pathlib import Path
import os
import fnmatch
import OrgExtended.orgparse.loader as loader
import OrgExtended.orgparse.node as node
import OrgExtended.orgparse.date as orgdate
from OrgExtended.orgparse.sublimenode import *
import OrgExtended.orgutil.util as util
import OrgExtended.orgutil.navigation as nav
import OrgExtended.orgutil.template as templateEngine
import logging
import sys
import traceback
import OrgExtended.orgfolding as folding
import OrgExtended.orgdb as db
import OrgExtended.asettings as sets
import OrgExtended.pymitter as evt
import OrgExtended.orgproperties as props
import OrgExtended.orgdatepicker as datep
import OrgExtended.orginsertselected as insSel
import OrgExtended.orglinks as orglink
import OrgExtended.orgneovi as nvi
import OrgExtended.orgagenda as oa
import OrgExtended.orgcheckbox as checkbox
import OrgExtended.orgnumberedlist as numberedlist
log = logging.getLogger(__name__)
# MOVING TO ANY DONE STATE:
# Support these:
#+STARTUP: lognotedone Prompt and stored below the item with a Closing Note heading.
#+STARTUP: logdone CLOSED: [TIMESTAMP] in LOGBOOK
# As well as configuration options
#
# PER TRANSITION MOVEMENT:
# @ - note
# ! - timestamp
# / - when leaving the state if next state doesn't log
#
# Then they go further with: :LOGGING: WAIT(@) logrepeat properties
#
# - 1) We need the transitions recorded in the node (in the todos list)
# - 2) We need a method to insert CLOSED: and or prompt and note
# - 3) We need to track the state transitions themselves (from / to)
#
# - 4) Habits break all this with LAST_REPEAT and To From transition text.
#
RE_CLOSED = re.compile(r"^\s*CLOSED:\s*\[.*\]")
def LocateClosed(view,node):
    """Return the view region of the node's CLOSED: line, or None if absent."""
    for r in range(node.start_row, node.local_end_row + 1):
        candidate = view.line(view.text_point(r, 0))
        if RE_CLOSED.search(view.substr(candidate)):
            return candidate
    return None
def InsertClosed(view, node, onDone=None):
    """Add or refresh the CLOSED: [timestamp] line directly under the heading."""
    stamp = OrgDate.format_clock(datetime.datetime.now(), active=False)
    existing = LocateClosed(view, node)
    if existing:
        # Already present: rewrite it in place with a fresh timestamp.
        view.ReplaceRegion(existing, node.indent() + "CLOSED: " + stamp, onDone)
    else:
        target_row = node.start_row + 1
        insert_at = view.text_point(target_row, 0)
        # When inserting past the last buffer row, open a new line first.
        prefix = "\n" if view.isBeyondLastRow(target_row) else ""
        view.Insert(insert_at, prefix + node.indent() + "CLOSED: " + stamp + "\n", onDone)
def RemoveClosed(view, node, onDone=None):
    """Delete the node's CLOSED: line if present; always signal onDone."""
    target = LocateClosed(view, node)
    if not target:
        evt.EmitIf(onDone)
        return
    view.ReplaceRegion(target.IncEnd(), "", onDone)
def IsDoneState(node, toState):
    """True when toState is one of the buffer's configured DONE keywords."""
    done_states = node.env.done_keys
    return toState in done_states
def ShouldRecur(node, fromState, toState):
    """When transitioning into a DONE state, return the node's first
    repeating timestamp (scheduled, then deadline, then plain active
    timestamps); otherwise None. A repeating item bounces back to TODO
    instead of closing."""
    if not IsDoneState(node, toState):
        return None
    for candidate in (node.scheduled, node.deadline):
        if candidate and candidate.repeating:
            return candidate
    for stamp in node.get_timestamps(active=True, point=True, range=True):
        if stamp and stamp.repeating:
            return stamp
    return None
def ShouldClose(node, fromState, toState):
    """True when this transition should stamp a CLOSED: line: the target is
    a DONE state, the item is not recurring, and the buffer has the
    `logdone` startup option set."""
    if ShouldRecur(node, fromState, toState):
        return False
    # NOTE: per-keyword todo transition markers (@/!) should feed in here too.
    target = toState.strip()
    if IsDoneState(node, target) and Startup.logdone in node.root.startup():
        return True
def InsertRecurrence(view, node, fromState, toState, onDone=None):
    """Record the state change in the LOGBOOK, then bump LAST_REPEAT."""
    # Produces e.g.: - State "DONE"       from "TODO"       [2009-09-29 Tue]
    stamp = OrgDate.format_clock(datetime.datetime.now(), active=False)
    def OnLogAdded():
        props.UpdateProperty(view, node, "LAST_REPEAT", stamp, onDone)
    entry = "- State {0:12} from {1:12} ".format('"' + toState + '"', '"' + fromState + '"')
    props.AddLogbook(view, node, entry, stamp, evt.Make(OnLogAdded))
def InsertNote(view, node, text, fromState, toState, onDone=None):
    """Append a timestamped closing note for this transition to the LOGBOOK."""
    stamp = OrgDate.format_clock(datetime.datetime.now(), active=False)
    heading = "Note (to:{0},at:{1}): ".format(toState, stamp)
    props.AddLogbook(view, node, heading, text, onDone)
def ShouldNote(node, fromState, toState):
    """True when moving into a DONE state should prompt the user for a
    closing note (startup option `lognotedone`); recurring items never do."""
    if ShouldRecur(node, fromState, toState):
        return False
    # NOTE: per-keyword todo transition markers (@/!) should feed in here too.
    target = toState.strip()
    if IsDoneState(node, target) and Startup.lognotedone in node.root.startup():
        return True
RE_T = re.compile(r'\s(?P<time><\s*\d+-\d+-\d+\s+[^>]+>)(\s+|$)')
# Use a menu to change the todo state of an item
class OrgTodoChangeCommand(sublime_plugin.TextCommand):
    """Show a quick panel of TODO keywords and apply the chosen state to the
    heading at point, handling the CLOSED stamp, closing note and repeating
    timestamps as dictated by the buffer's #+STARTUP options.

    The work is chained through callbacks because every buffer edit reloads
    the node, so each step must run only after the previous edit completed:
    on_done -> [note] -> [closed stamp] -> [recurrence log] -> [timestamps].
    """
    def on_totally_done(self):
        # Final step of the callback chain.
        evt.EmitIf(self.onDone)
    # recurrence needs to update the base timestamp!
    # This needs to respect the .+ ++ and + markers
    def on_update_timestamps_if_needed(self, row=0):
        """Find the next repeating active timestamp in the node, rewrite it
        to its next occurrence, and recurse via the edit callback."""
        # We have to reload our node as we updated things.
        self.node = db.Get().At(self.view, self.node.start_row)
        if(row > (self.node.local_end_row+1)):
            # NOTE(review): no `return` here, so on_totally_done() can fire a
            # second time at the bottom of this method; `row` also mixes
            # offset (first call: 0) and absolute (recursive call: i+1)
            # semantics in the range() below -- confirm intended.
            self.on_totally_done()
        for i in range(self.node.start_row+row, self.node.local_end_row+1):
            pt = self.view.text_point(i, 0)
            line = self.view.line(pt)
            txt = self.view.substr(line)
            m = RE_T.search(txt)
            now = datetime.datetime.now()
            if(m):
                tsl = OrgDate.list_from_str(m.group('time'))
                if(tsl):
                    t = tsl[0]
                    if(t.repeating):
                        next = t.start
                        next2 = t.end
                        # "+"  : advance one period from the stored date
                        # "++" : advance whole periods until in the future
                        # ".+" : advance one period from *now*
                        if(t.repeatpre == "+"):
                            next = t.next_repeat_from(oa.EnsureDateTime(next))
                        elif(t.repeatpre == "++"):
                            while(next < now):
                                next = t.next_repeat_from(oa.EnsureDateTime(next))
                        elif(t.repeatpre == ".+"):
                            next = t.next_repeat_from(now)
                        s = m.start(1)
                        e = m.end(1)
                        # Rebuild the repeat (+Nd) and warning (-Nd) cookies.
                        rpt = t.repeatpre + str(t.repeatnum) + t.repeatdwmy
                        wrn = ""
                        if(t.warning):
                            wrn = " " + t.warnpre + str(t.warnnum) + t.warndwmy
                        # Re-render the timestamp with/without a time range.
                        if(t.has_end()):
                            if(t.has_time()):
                                nout = txt[:s] + next.strftime("<%Y-%m-%d %a %H:%M-") + t.end.strftime("%H:%M ")+ rpt + wrn + ">" + txt[e:]
                            else:
                                # This really shouldn't happen.
                                nout = txt[:s] + next.strftime("<%Y-%m-%d %a ")+ rpt + wrn + ">" + txt[e:]
                        else:
                            if(t.has_time()):
                                nout = txt[:s] + next.strftime("<%Y-%m-%d %a %H:%M ") + rpt + wrn + ">" +txt[e:]
                            else:
                                nout = txt[:s] + next.strftime("<%Y-%m-%d %a ")+ rpt + wrn + ">" + txt[e:]
                        # Replace this line, then continue from the next row
                        # once the edit has been applied.
                        self.view.run_command("org_internal_replace", {"start": line.begin(), "end": line.end(), "text": nout, "onDone": evt.Make(lambda:self.on_update_timestamps_if_needed(i+1)) })
                        return
        self.on_totally_done()
    def do_recurrence_if_needed(self):
        """For repeating items: log the state change, then advance timestamps."""
        self.rec = ShouldRecur(self.node,self.fromState,self.newState)
        if(self.rec):
            InsertRecurrence(self.view, self.node, self.fromState, self.newState, evt.Make(self.on_update_timestamps_if_needed))
        else:
            self.on_totally_done()
    def do_close_if_needed(self):
        """Add the CLOSED: stamp when entering DONE, otherwise remove it."""
        if(ShouldClose(self.node,self.fromState,self.newState)):
            InsertClosed(self.view, self.node, evt.Make(self.do_recurrence_if_needed))
        else:
            RemoveClosed(self.view, self.node, evt.Make(self.do_recurrence_if_needed))
    def on_insert_note(self, text):
        # Input-panel callback: store the closing note, then continue.
        InsertNote(self.view, self.node, text, self.fromState, self.newState, evt.Make(self.do_close_if_needed))
    def do_note_if_needed(self):
        """Prompt for a closing note when the startup options request one."""
        if(ShouldNote(self.node,self.fromState, self.newState)):
            self.view.window().show_input_panel("("+self.fromState + ">>" + self.newState + ") Note:","", self.on_insert_note, None, None)
        else:
            self.do_close_if_needed()
    def on_done_st4(self,index,modifers):
        # ST4's quick-panel callback also receives modifier keys; ignore them.
        self.on_done(index)
    def on_done(self, index):
        """Quick-panel callback: rewrite the headline with the chosen state
        and kick off the note/close/recurrence chain."""
        if(index < 0):
            return
        newState = self.todoStates[index]
        if(newState == "none"):
            newState = ""
        # if we don't have a TODO state then we have to handle that as well.
        m = self.todoRe.search(self.bufferContents)
        fromState = None
        if(m == None):
            # No current keyword: match just the stars/priority prefix instead.
            self.todoRe = re.compile(r"^([*]+ (\[\#[a-zA-Z0-9]+\]\s+)?)( )*")
        else:
            fromState = m.group(3)
        if(newState != ""):
            newState += " "
        self.bufferContents = self.todoRe.sub(r"\g<1>" + newState, self.bufferContents)
        # We have to do the editing in sequence because the reloads can get mixed up otherwise
        if(fromState):
            self.fromState = fromState.strip()
        else:
            self.fromState = ""
        self.newState = newState.strip()
        # Recurring events do not hit the done state when you toggle them
        # They bounce back to TODO they just get a new note in them
        if(ShouldRecur(self.node, self.fromState,self.newState)):
            self.do_note_if_needed()
        else:
            self.view.ReplaceRegion(self.row,self.bufferContents, evt.Make(self.do_note_if_needed))
    def run(self, edit, onDone=None):
        """Entry point: collect the buffer's TODO keywords, capture the
        current headline and show the quick panel."""
        self.onDone = onDone
        self.node = db.Get().AtInView(self.view)
        #self.todoStates = sets.Get("todoStates", sets.defaultTodoStates)
        todos = self.node.env.all_todo_keys
        if(len(todos) > 0):
            self.todoStates = todos
            self.todoStates += ["none"]
        else:
            # NOTE(review): self.todoStates is not initialized on this path
            # before being iterated -- confirm this branch is reachable.
            for i in range(0, len(self.todoStates)):
                if(self.todoStates[i] == "|"):
                    self.todoStates[i] = "none"
        # ACTION vs DONE states
        # TODO" "FEEDBACK" "VERIFY" "|" "DONE" "DELEGATED
        row = self.node.start_row
        # Build a regex matching "stars [#priority] <any known keyword>".
        self.todoRe = r"^([*]+ (\[\#[a-zA-Z0-9]+\]\s+)?)("
        haveFirst = False
        for state in self.todoStates:
            if state != "|":
                if(haveFirst):
                    self.todoRe += "|"
                self.todoRe += state
                haveFirst = True
        self.todoRe += r")( )*"
        self.todoRe = re.compile(self.todoRe)
        sp = self.view.text_point(row,0)
        self.row = self.view.line(sp)
        self.bufferContents = self.view.substr(self.row)
        # Sublime Text 4 uses a callback signature with modifier flags.
        if(int(sublime.version()) <= 4096):
            self.view.window().show_quick_panel(self.todoStates, self.on_done, -1, -1)
        else:
            self.view.window().show_quick_panel(self.todoStates, self.on_done_st4, -1, -1)
# Use a menu to change the priority of an item
class OrgPriorityChangeCommand(sublime_plugin.TextCommand):
    """Show a quick panel of priorities and apply the chosen [#X] cookie to the
    heading at point."""
    def on_done_st4(self,index,modifers):
        """Quick-panel callback for ST4, which passes an extra modifiers argument."""
        self.on_done(index)
    def on_done(self, index):
        """Rewrite the cached heading line with the selected priority cookie."""
        if(index < 0):
            # Panel was cancelled.
            return
        newState = self.priorities[index]
        if(newState == "none"):
            # "none" removes the priority cookie entirely.
            newState = ""
        # if we don't have a TODO state then we have to handle that as well.
        m = self.Re.search(self.bufferContents)
        if(m == None):
            # No existing cookie: rebuild the regex to anchor after the stars
            # and any TODO keyword so the new cookie lands in the right spot.
            todos = self.node.env.all_todo_keys
            todos = '|'.join(todos)
            self.Re = re.compile(r"^([*]+\s+(" + todos + r")?\s*)( )*")
        if(newState != ""):
            newState = "[#" + newState + "] "
        self.bufferContents = self.Re.sub(r"\g<1>" + newState, self.bufferContents)
        self.view.ReplaceRegion(self.row, self.bufferContents, self.onDone)
        #self.view.run_command("org_internal_replace", {"start": self.row.begin(), "end": self.row.end(), "text": self.bufferContents, "onDone": self.onDone})
        #self.view.replace(self.edit, self.row, self.bufferContents)
    def run(self, edit, onDone = None):
        """Collect the node's priorities, cache the heading line and prompt."""
        self.onDone = onDone
        self.node = db.Get().AtInView(self.view)
        self.priorities = self.node.priorities()
        # Copy before appending so we don't mutate the node's own list.
        self.priorities = copy.copy(self.priorities)
        self.priorities.append("none")
        row = self.node.start_row
        # Heading stars and text up to an existing [#X] cookie, then the cookie.
        self.Re = r"^([*]+ [^\[\]]*\s*)(\[\#[a-zA-Z0-9]+\]\s+)"
        self.Re = re.compile(self.Re)
        sp = self.view.text_point(row,0)
        self.row = self.view.line(sp)
        self.bufferContents = self.view.substr(self.row)
        if(int(sublime.version()) <= 4096):
            self.view.window().show_quick_panel(self.priorities, self.on_done, -1, -1)
        else:
            # ST4 panels pass an extra modifiers argument to the callback.
            self.view.window().show_quick_panel(self.priorities, self.on_done_st4, -1, -1)
def indent_node(view, node, edit):
    """Demote a heading: add one star to its headline, shift its own body lines
    right by one column, and recurse over its children."""
    headline_pt = view.text_point(node.start_row, 0)
    view.insert(edit, headline_pt, "*")
    # Shift this node's own content to keep it aligned under the deeper heading.
    for body_row in range(node.start_row + 1, node.local_end_row + 1):
        view.insert(edit, view.text_point(body_row, 0), " ")
    # The whole subtree moves one level deeper as well.
    for child in node.children:
        indent_node(view, child, edit)
def indent_list(view, row, edit):
    """Indent a list row and all of its indentation-children by one level.

    NOTE(review): the inserted whitespace literal may have been collapsed in
    this copy; its width should mirror what deindent_list removes - confirm
    against the original source.
    """
    # Indent the node itself
    sp = view.text_point(row,0)
    view.insert(edit,sp," ")
    # Children are whatever rows are more deeply indented than this line.
    line = view.lineAt(row)
    children,crow = numberedlist.findChildrenByIndent(view, line)
    for r in range(row+1,crow):
        sp = view.text_point(r,0)
        view.insert(edit,sp," ")
def deindent_list(view, row, edit):
    """Remove one indent level (a tab, or leading spaces) from a list row and
    from every indentation-child beneath it."""
    # Get my position and ensure this node CAN de-indent
    sp = view.text_point(row,0)
    ep = view.text_point(row,1)
    np = view.text_point(row,2)
    # First two columns (space indent) vs first column (tab indent).
    # NOTE(review): bufferContents spans two columns but the literal it is
    # compared with may have been whitespace-collapsed in this copy - confirm
    # the exact indent width against the original source.
    bufferContents = view.substr(sublime.Region(sp,np))
    bufferContentsS = view.substr(sublime.Region(sp,ep))
    wasTab = bufferContentsS == "\t"
    if(bufferContents == " " or wasTab):
        if(wasTab):
            view.erase(edit,sublime.Region(sp,ep))
        else:
            view.erase(edit,sublime.Region(sp,np))
        # Children only de-indent when the parent row itself could.
        line = view.lineAt(row)
        children,crow = numberedlist.findChildrenByIndent(view, line)
        for r in range(row+1,crow):
            sp = view.text_point(r,0)
            ep = view.text_point(r,1)
            np = view.text_point(r,2)
            bufferContents = view.substr(sublime.Region(sp,np))
            bufferContentsS = view.substr(sublime.Region(sp,ep))
            wasTab = bufferContentsS == "\t"
            if(bufferContents == " " or wasTab):
                if(wasTab):
                    view.erase(edit,sublime.Region(sp,ep))
                else:
                    view.erase(edit,sublime.Region(sp,np))
def deindent_node(view, node, edit):
    """Promote a heading: strip one star from the headline, one column of
    indent from its body lines, and recurse into its children."""
    # Get my position and ensure this node CAN de-indent
    sp = view.text_point(node.start_row,0)
    ep = view.text_point(node.start_row,1)
    np = view.text_point(node.start_row,2)
    # Second character of the headline: only a multi-star heading can lose one.
    bufferContents = view.substr(sublime.Region(ep,np))
    if(bufferContents == "*"):
        view.erase(edit,sublime.Region(sp,ep))
        # Now erase a space at the front of my contents.
        for i in range(node.start_row+1,node.local_end_row+1):
            sp = view.text_point(i,0)
            ep = view.text_point(i,1)
            bufferContents = view.substr(sublime.Region(sp,ep))
            if(bufferContents == " " or bufferContents == "\t"):
                view.erase(edit,sublime.Region(sp,ep))
        # The whole subtree is promoted along with this node.
        for n in node.children:
            deindent_node(view, n, edit)
    else:
        log.debug("Did not get star, not deindenting it " + str(len(bufferContents)) + " " + bufferContents)
# Thing is a region, and first line of the thing tuple
# things is a list of thing
def sort_things_alphabetically(things,reverse=False):
    """Sort a list of (region, first_line) tuples in place by their first line."""
    things.sort(key=lambda item: item[1], reverse=reverse)
class OrgSortListCommand(sublime_plugin.TextCommand):
    """Sort the numbered or unordered list at the cursor alphabetically."""
    def run(self, edit):
        # Get a list of things
        # Each "thing" is ((start_row, end_row), first_line_text).
        things = None
        wasNumbered = False
        if(numberedlist.isNumberedLine(self.view)):
            wasNumbered = True
            things = numberedlist.getListAtPointForSorting(self.view)
        elif(checkbox.isUnorderedList(self.view.getLine(self.view.curRow()))):
            things = checkbox.getListAtPointForSorting(self.view)
        if(not things):
            log.error(" Could not sort at point")
            return
        # Build macro region
        # The macro region spans from the first row of the first item to the
        # end of the last row of the last item.
        start = things[0][0][0]
        end = things[len(things)-1][0][1]
        sp = self.view.text_point(start,0)
        ep = self.view.text_point(end,0)
        ep = self.view.line(ep).end()
        reg = sublime.Region(sp,ep)
        # Sort the things
        sort_things_alphabetically(things)
        # Copy from macro region to sorted version
        buffer = ""
        for thing in things:
            bs = self.view.text_point(thing[0][0],0)
            be = self.view.text_point(thing[0][1]-1,0)
            be = self.view.line(be).end()
            breg = sublime.Region(bs,be)
            # Normalize trailing whitespace so items join cleanly.
            ss = self.view.substr(breg).rstrip() + "\n"
            buffer += ss
        # Replace the macro region with new str
        self.view.replace(edit, reg, buffer)
        if(wasNumbered):
            # Re-number since the items are now in a different order.
            self.view.run_command('org_update_numbered_list')
        pass
class OrgSelectSubtreeCommand(sublime_plugin.TextCommand):
    """Select the whole subtree (heading plus all descendants) at the cursor."""
    def run(self, edit):
        current = db.Get().AtInView(self.view)
        # Only act on a real heading, never the synthetic root.
        if(not current or type(current) == node.OrgRootNode or current._index <= 1):
            return
        start_pt = self.view.text_point(current.start_row, 0)
        last_line = self.view.line(self.view.text_point(current.end_row, 0))
        # Include the trailing newline of the final line.
        subtree = sublime.Region(start_pt, last_line.end() + 1)
        selection = self.view.sel()
        selection.clear()
        selection.add(subtree)
class OrgCopySubtreeCommand(sublime_plugin.TextCommand):
    """Copy the whole subtree (heading plus all descendants) to the clipboard."""
    def run(self, edit):
        current = db.Get().AtInView(self.view)
        # Only act on a real heading, never the synthetic root.
        if(not current or type(current) == node.OrgRootNode or current._index <= 1):
            return
        start_pt = self.view.text_point(current.start_row, 0)
        last_line = self.view.line(self.view.text_point(current.end_row, 0))
        # Include the trailing newline of the final line.
        subtree_text = self.view.substr(sublime.Region(start_pt, last_line.end() + 1))
        sublime.set_clipboard(subtree_text)
        nvi.TestAndSetClip(self.view, subtree_text)
class OrgSelectEntityCommand(sublime_plugin.TextCommand):
    """Select just the current heading and its own body (no child headings)."""
    def run(self, edit):
        current = db.Get().AtInView(self.view)
        # Only act on a real heading, never the synthetic root.
        if(not current or type(current) == node.OrgRootNode or current._index <= 1):
            return
        start_pt = self.view.text_point(current.start_row, 0)
        # local_end_row stops before any child heading.
        last_line = self.view.line(self.view.text_point(current.local_end_row, 0))
        entity = sublime.Region(start_pt, last_line.end() + 1)
        selection = self.view.sel()
        selection.clear()
        selection.add(entity)
class OrgCopyEntityCommand(sublime_plugin.TextCommand):
    """Copy just the current heading and its own body (no child headings)."""
    def run(self, edit):
        current = db.Get().AtInView(self.view)
        # Only act on a real heading, never the synthetic root.
        if(not current or type(current) == node.OrgRootNode or current._index <= 1):
            return
        start_pt = self.view.text_point(current.start_row, 0)
        # local_end_row stops before any child heading.
        last_line = self.view.line(self.view.text_point(current.local_end_row, 0))
        entity_text = self.view.substr(sublime.Region(start_pt, last_line.end() + 1))
        sublime.set_clipboard(entity_text)
        nvi.TestAndSetClip(self.view, entity_text)
class OrgCopyLinkHrefCommand(sublime_plugin.TextCommand):
    """Copy the href of the org link under the cursor to the clipboard."""
    def run(self, edit):
        if(self.view.match_selector(self.view.sel()[0].begin(), "orgmode.link")):
            pt = self.view.sel()[0].end()
            # Scope regions for whole links and just their href portions.
            links = self.view.find_by_selector("orgmode.link")
            hrefs = self.view.find_by_selector("orgmode.link.href")
            reg = None
            # Find the link whose line contains the cursor, then the href
            # region that sits on that same line.
            for link in links:
                line = self.view.line(link.begin())
                if(line.contains(pt)):
                    for href in hrefs:
                        if(line.contains(href.begin())):
                            reg = href
                            break
                    break
            if(reg):
                nodetext = self.view.substr(reg)
                sublime.set_clipboard(nodetext)
                nvi.TestAndSetClip(self.view, nodetext)
class OrgSelectLinkHrefCommand(sublime_plugin.TextCommand):
    """Select the href of the org link under the cursor."""
    def run(self, edit):
        if(self.view.match_selector(self.view.sel()[0].begin(), "orgmode.link")):
            pt = self.view.sel()[0].end()
            # Scope regions for whole links and just their href portions.
            links = self.view.find_by_selector("orgmode.link")
            hrefs = self.view.find_by_selector("orgmode.link.href")
            reg = None
            # Find the link whose line contains the cursor, then the href
            # region that sits on that same line.
            for link in links:
                line = self.view.line(link.begin())
                if(line.contains(pt)):
                    for href in hrefs:
                        if(line.contains(href.begin())):
                            reg = href
                            break
                    break
            if(reg):
                self.view.sel().clear()
                self.view.sel().add(reg)
class OrgMoveHeadingUpCommand(sublime_plugin.TextCommand):
    """Move the current heading (with its whole subtree) above its previous
    sibling, keeping the cursor column on the moved heading."""
    def run(self, edit):
        curNode = db.Get().AtInView(self.view)
        if(curNode and type(curNode) != node.OrgRootNode and curNode._index > 1):
            targetNode = curNode.get_sibling_up()
            if(targetNode):
                # Remember the cursor column so we can restore it afterwards.
                _, c = self.view.curRowCol()
                # Cut the current subtree (including the trailing newline)...
                sp = self.view.text_point(curNode.start_row, 0)
                ep = self.view.text_point(curNode.end_row, 0)
                lastLine = self.view.line(ep)
                reg = sublime.Region(sp, lastLine.end()+1)
                nodetext = self.view.substr(reg)
                # ...and re-insert it at the start of the previous sibling.
                sp = self.view.text_point(targetNode.start_row, 0)
                self.view.erase(edit,reg)
                self.view.insert(edit,sp,nodetext)
                # Put the cursor on the moved heading at the original column.
                self.view.sel().clear()
                np = self.view.text_point(targetNode.start_row, c)
                self.view.sel().add(np)
class OrgMoveHeadingDownCommand(sublime_plugin.TextCommand):
    """Move the current heading (with its whole subtree) below its next sibling."""
    def run(self, edit):
        curNode = db.Get().AtInView(self.view)
        if(curNode and type(curNode) != node.OrgRootNode and curNode._index < (len(curNode.env._nodes) - 1)):
            targetNode = curNode.get_sibling_down()
            if(targetNode):
                # Swap roles: cutting the *lower* sibling and re-inserting it
                # above the current node is equivalent to moving the current
                # heading down, and keeps the region math simple.
                curNode, targetNode = targetNode, curNode
                sp = self.view.text_point(curNode.start_row, 0)
                ep = self.view.text_point(curNode.end_row, 0)
                r = self.view.line(ep)
                reg = sublime.Region(sp, r.end()+1)
                nodetext = self.view.substr(reg)
                sp = self.view.text_point(targetNode.start_row, 0)
                # If the block being cut ends the buffer there is no trailing
                # newline to carry along, so append one ourselves.
                if(curNode.is_last_node() and curNode.end_row >= self.view.endRow()):
                    line = self.view.substr(self.view.line(self.view.text_point(self.view.endRow(),0)))
                    isEmpty = line.strip() == ""
                    if(not isEmpty):
                        nodetext = nodetext + "\n"
                self.view.erase(edit,reg)
                self.view.insert(edit,sp,nodetext)
class OrgInsertHeadingSiblingCommand(sublime_plugin.TextCommand):
    """Insert a new heading at the same level as the current node, after it,
    using the level-N heading snippet."""
    def run(self, edit):
        curNode = db.Get().AtInView(self.view)
        needsNewline = False
        if(not curNode):
            # No org node at point: append a level-1 heading at end of buffer.
            level = 1
            here = sublime.Region(self.view.size(),self.view.size())
            reg = here
            text = self.view.substr(self.view.line(reg))
            if(text.strip() != ""):
                needsNewline = True
        else:
            level = curNode.level
            reg = curNode.region(self.view,True) # trim ending whitespace
            if(level == 0):
                # Root-level node: treat as level 1 and append at end of buffer.
                level = 1
                here = sublime.Region(self.view.size(),self.view.size())
                text = self.view.substr(self.view.line(reg))
                if(text.strip() != ""):
                    needsNewline = True
            else:
                # Insert right after the current node's region.
                here = sublime.Region(reg.end(),reg.end())
                text = self.view.substr(self.view.line(here))
                if(text.strip() != ""):
                    needsNewline = True
        self.view.sel().clear()
        self.view.sel().add(reg.end())
        self.view.show(here)
        if(needsNewline):
            self.view.insert(edit,self.view.sel()[0].begin(),'\n')
        # Disable auto_indent while the snippet runs so the stars land in
        # column 0, then restore the user's setting.
        ai = sublime.active_window().active_view().settings().get('auto_indent')
        self.view.settings().set('auto_indent',False)
        self.view.run_command("insert_snippet", {"name" : "Packages/OrgExtended/orgsnippets/heading"+str(level)+".sublime-snippet"})
        sublime.active_window().active_view().settings().set('auto_indent',ai)
class OrgInsertHeadingChildCommand(sublime_plugin.TextCommand):
    """Insert a new child heading (one level deeper) after the current node,
    using the level-(N+1) heading snippet.

    edit   -- Sublime edit token.
    onDone -- optional event fired after the snippet has been inserted.
    """
    def run(self, edit, onDone=None):
        curNode = db.Get().AtInView(self.view)
        needsNewline = False
        if(not curNode):
            # No node at point: fall back to the last node in the file, if any.
            file = db.Get().FindInfo(self.view)
            if(len(file.org) > 0):
                curNode = file.org[len(file.org) - 1]
        if(not curNode):
            # Empty file: work from the start of the last line.
            # (Fixed: the original also did "reg = here" here, which raised
            # NameError because "here" is not bound until below.)
            level = 1
            l = self.view.line(self.view.size())
            reg = sublime.Region(l.start(),l.start())
        else:
            level = curNode.level
            reg = curNode.region(self.view, True)
        if(level == 0):
            # Root-level node: treat as level 1 and append at end of buffer.
            # (Fixed: was "view.size()", a NameError - must be self.view.)
            level = 1
            here = sublime.Region(self.view.size(),self.view.size())
        else:
            here = sublime.Region(reg.end(),reg.end())
        text = self.view.substr(self.view.line(here))
        if(text.strip() != ""):
            needsNewline = True
        if(not needsNewline):
            ll = self.view.line(reg.end())
            text = self.view.substr(ll)
            if(text.strip() == "" and len(text) > 0):
                # This is an empty line! Have to work at the front of this line!
                # Or we will insert to an odd location!
                reg = sublime.Region(ll.start(), ll.start())
        self.view.sel().clear()
        self.view.sel().add(reg.end())
        self.view.show(here)
        if(needsNewline):
            self.view.insert(edit,self.view.sel()[0].begin(),'\n')
        # Disable auto_indent while the snippet runs so the stars land in
        # column 0, then restore the user's setting.
        ai = sublime.active_window().active_view().settings().get('auto_indent')
        self.view.settings().set('auto_indent',False)
        self.view.run_command("insert_snippet", {"name" : "Packages/OrgExtended/orgsnippets/heading"+str((level+1))+".sublime-snippet"})
        sublime.active_window().active_view().settings().set('auto_indent',ai)
        evt.EmitIf(onDone)
# This will insert whatever text you provide as a child heading of the current node
class OrgInsertTextAsChildHeadingCommand(sublime_plugin.TextCommand):
    """Insert *heading* as a child heading (one level deeper) after the
    current node.

    edit    -- Sublime edit token.
    heading -- heading text to insert; treated as empty when omitted.
    onDone  -- optional event fired after the insert.
    """
    def run(self, edit, heading=None, onDone=None):
        # Guard against a missing heading so string concatenation below
        # cannot raise TypeError.
        if(heading == None):
            heading = ""
        curNode = db.Get().AtInView(self.view)
        if(not curNode):
            # No node at point: fall back to the last node in the file, if any.
            file = db.Get().FindInfo(self.view)
            if(len(file.org) > 0):
                curNode = file.org[len(file.org) - 1]
        if(not curNode):
            # Empty file: work from the start of the last line.
            # (Fixed: the original also did "reg = here" here, which raised
            # NameError because "here" was never bound on this path.)
            level = 1
            l = self.view.line(self.view.size())
            reg = sublime.Region(l.start(),l.start())
        else:
            level = curNode.level
            reg = curNode.region(self.view)
        if(level == 0):
            # Root-level node: insert as a level-1 child.
            # (The original also built a "here" region with an unqualified
            # "view.size()" NameError; "here" was dead code - its only use,
            # self.view.show(here), was commented out - so it is removed.)
            level = 1
        self.view.sel().clear()
        self.view.sel().add(reg.end()+1)
        self.view.insert(edit,self.view.sel()[0].begin(),'\n' + ('*'*(level+1)) + ' ' + heading)
        evt.EmitIf(onDone)
class OrgInsertTodayInactiveCommand(sublime_plugin.TextCommand):
    """Insert an inactive date stamp for today at the cursor."""
    def run(self, edit):
        stamp = orgdate.OrgDate.format_date(datetime.datetime.now(), False)
        self.view.insert(edit, self.view.sel()[0].begin(), stamp)
class OrgInsertNowInactiveCommand(sublime_plugin.TextCommand):
    """Insert an inactive date-and-time stamp for right now at the cursor."""
    def run(self, edit):
        stamp = orgdate.OrgDate.format_clock(datetime.datetime.now(), False)
        self.view.insert(edit, self.view.sel()[0].begin(), stamp)
class OrgInsertTodayActiveCommand(sublime_plugin.TextCommand):
    """Insert an active date stamp for today at the cursor."""
    def run(self, edit):
        stamp = orgdate.OrgDate.format_date(datetime.datetime.now(), True)
        self.view.insert(edit, self.view.sel()[0].begin(), stamp)
class OrgInsertNowActiveCommand(sublime_plugin.TextCommand):
    """Insert an active date-and-time stamp for right now at the cursor."""
    def run(self, edit):
        stamp = orgdate.OrgDate.format_clock(datetime.datetime.now(), True)
        self.view.insert(edit, self.view.sel()[0].begin(), stamp)
class OrgInsertDateInactiveCommand(sublime_plugin.TextCommand):
    """Pick a date interactively and insert it, inactive, at the cursor.

    NOTE(review): uses bare OrgDate (elsewhere this file says orgdate.OrgDate)
    and format_clock despite the "Date" name - confirm the import and the
    intended stamp format against the full file.
    """
    def insert(self, date):
        # Date-picker callback; date is None when the pick was cancelled.
        if(date):
            self.view.Insert(self.view.sel()[0].begin(), OrgDate.format_clock(date.start, active=False))
    def run(self, edit):
        datep.Pick(evt.Make(self.insert))
class OrgInsertDateActiveCommand(sublime_plugin.TextCommand):
    """Pick a date interactively and insert it, active, at the cursor.

    NOTE(review): uses bare OrgDate (elsewhere this file says orgdate.OrgDate)
    and format_clock despite the "Date" name - confirm the import and the
    intended stamp format against the full file.
    """
    def insert(self, date):
        # Date-picker callback; date is None when the pick was cancelled.
        if(date):
            self.view.Insert(self.view.sel()[0].begin(), OrgDate.format_clock(date.start, active=True))
    def run(self, edit):
        datep.Pick(evt.Make(self.insert))
class OrgBaseTimestampCommand(sublime_plugin.TextCommand):
    """Base for commands that insert a prefixed timestamp line (SCHEDULED:,
    DEADLINE:, plain) directly under the current heading."""
    def __init__(self,unknown=None, prefix=None):
        super(OrgBaseTimestampCommand, self).__init__(unknown)
        # Text placed before the timestamp, e.g. "SCHEDULED: \n".
        self.prefix = prefix
    def insert(self, date):
        """Date-picker callback: insert the picked date, or undo the prefix
        line we already inserted when the pick was cancelled."""
        if(date):
            self.view.Insert(self.view.sel()[0].begin(), OrgDate.format_clock(date.start, active=True))
        else:
            # Cancelled: remove the prefix line and restore the old selection.
            self.view.Erase(self.reg)
            self.view.sel().clear()
            self.view.sel().add(self.oldsel)
    def run(self, edit, dateval=None):
        # A string dateval is parsed into an org date object first.
        if(type(dateval) == str):
            dateval = orgdate.OrgDateFreeFloating.from_str(dateval)
        # TODO: Find scheduled and replace it as well.
        node = db.Get().AtInView(self.view)
        if(node and not node.is_root()):
            self.oldsel = self.view.sel()[0]
            pt = self.view.text_point(node.start_row,0)
            l = self.view.line(pt)
            # Last row handling If we are the last row we can't jump over the newline
            # we have to add one.
            nl = ""
            addnl = 1
            if(self.view.isBeyondLastRow(node.start_row+1)):
                nl = "\n"
                addnl = 0
            # Insert "<indent><prefix>" on a new line under the heading and
            # remember its region so a cancelled pick can erase it again.
            insertpt = l.end() + addnl
            endpt = insertpt + len(nl) + len(node.indent()) + len(self.prefix)
            self.reg = sublime.Region(insertpt, endpt)
            self.view.insert(edit, insertpt, nl + node.indent() + self.prefix)
            # Park the cursor at the end of the new line, where the date goes.
            pt = self.view.text_point(node.start_row+1,0)
            l = self.view.line(pt)
            self.view.sel().clear()
            self.view.sel().add(l.end())
            if(dateval == None):
                datep.Pick(evt.Make(self.insert))
            else:
                self.insert(dateval)
class OrgScheduleCommand(OrgBaseTimestampCommand):
    """Insert a SCHEDULED: timestamp line under the current heading."""
    def __init__(self,unknown=None):
        super().__init__(unknown, "SCHEDULED: \n")
class OrgDeadlineCommand(OrgBaseTimestampCommand):
    """Insert a DEADLINE: timestamp line under the current heading."""
    def __init__(self,unknown=None):
        super().__init__(unknown, "DEADLINE: \n")
class OrgActiveTimestampCommand(OrgBaseTimestampCommand):
    """Insert a bare active timestamp line under the current heading."""
    def __init__(self,unknown=None):
        super().__init__(unknown, " \n")
class OrgInsertClosedCommand(sublime_plugin.TextCommand):
    """Insert a "CLOSED: <inactive stamp>" line under the current heading."""
    def run(self, edit):
        node = db.Get().AtInView(self.view)
        # NOTE(review): node is not checked for None before is_root() - this
        # raises AttributeError outside any node; confirm callers guarantee one.
        if(not node.is_root()):
            self.oldsel = self.view.sel()[0]
            pt = self.view.text_point(node.start_row,0)
            l = self.view.line(pt)
            # Last row handling If we are the last row we can't jump over the newline
            # we have to add one.
            nl = ""
            addnl = 1
            if(self.view.isBeyondLastRow(node.start_row+1)):
                nl = "\n"
                addnl = 0
            now = datetime.datetime.now()
            toInsert = orgdate.OrgDate.format_clock(now, False)
            self.view.insert(edit, l.end() + addnl, nl + node.indent() + "CLOSED: "+toInsert+"\n")
# ================================================================================
# Heading line split into the heading text and an optional trailing
# :tag1:tag2: block (tags are colon-delimited, no spaces or colons inside).
RE_TAGS = re.compile(r'^(?P<heading>[*]+[^:]+\s*)(\s+(?P<tags>[:]([^: ]+[:])+))?$')
class OrgInsertTagCommand(sublime_plugin.TextCommand):
    """Add a tag to the current heading, either from the *text* argument or by
    prompting with the known tags."""
    def OnDone(self, text):
        """Apply *text* as a tag on the heading at point (no-op if empty)."""
        if(not text):
            return
        node = db.Get().AtInView(self.view)
        if(node):
            if not text in node.tags:
                (region, line) = self.view.getLineAndRegion(node.start_row)
                m = RE_TAGS.search(line)
                # NOTE(review): m is not checked for None; assumes every
                # heading line matches RE_TAGS - confirm (e.g. headings
                # containing ':' in odd places).
                if(m.group('tags') != None):
                    # Append to the existing :tag: block.
                    tags = m.group('tags') + text + ":"
                else:
                    tags = " :" + text + ":"
                # Pad the heading to column 70 so tags line up.
                toline = "{0:70}{1}".format(m.group('heading'), tags)
                self.view.ReplaceRegion(region,toline,self.onDone)
            else:
                log.debug("Tag already part of node")
                evt.EmitIf(self.onDone)
    def run(self, edit, text=None, onDone=None):
        self.onDone = onDone
        self.text = text.strip() if text != None else text
        if(self.text != None and self.text != ""):
            # Tag supplied by the caller: apply it directly.
            self.OnDone(self.text)
        else:
            # Otherwise prompt with the database's known tags.
            self.input = insSel.OrgInput()
            self.input.run("Tag:",db.Get().tags,evt.Make(self.OnDone))
# ================================================================================
class OrgInsertArchiveTagCommand(sublime_plugin.TextCommand):
    """Tag the current heading with :ARCHIVE: by delegating to org_insert_tag."""
    def OnDone(self):
        evt.EmitIf(self.onDone)
    def run(self, edit, onDone=None):
        self.onDone = onDone
        args = {"onDone": evt.Make(self.OnDone), "text": "ARCHIVE"}
        self.view.run_command("org_insert_tag", args)
# ================================================================================
class OrgInsertCustomIdCommand(sublime_plugin.TextCommand):
    """Prompt for a CUSTOM_ID and store it as a property on the current node."""
    def on_done(self, text):
        if(not text):
            return
        # Spaces are not legal in a custom id; substitute dashes.
        ident = text.replace(" ", "-")
        target = db.Get().AtInView(self.view)
        if(target and not target.is_root()):
            props.UpdateProperty(self.view, target, "CUSTOM_ID", ident, self.onDone)
    def run(self, edit, onDone=None):
        self.onDone = onDone
        self.input = insSel.OrgInput()
        self.input.run("Custom Id:", db.Get().customids, evt.Make(self.on_done))
# ================================================================================
class OrgSetTodayCommand(sublime_plugin.TextCommand):
    """Mark the current heading as the unique "TODAY" node: strip the TODAY
    CUSTOM_ID from whichever node currently holds it, then set it here."""
    def run(self, edit, onDone=None):
        self.onDone = onDone
        idValue = "TODAY"
        node = db.Get().AtInView(self.view)
        if(not node or node.is_root()):
            log.debug("Cannot update root node or non existent node as today")
            return
        # Remove the marker from the previous TODAY node, if any.
        file, at = db.Get().FindByCustomId(idValue)
        if(file != None and at != None):
            node = file.At(at)
            if(node):
                props.RemoveProperty(self.view, node, "CUSTOM_ID")
        # Re-resolve the node at point (the removal may have edited the view).
        node = db.Get().AtInView(self.view)
        if(node and not node.is_root()):
            props.UpdateProperty(self.view,node,"CUSTOM_ID",idValue,self.onDone)
def get_view_for_silent_edit_file(file):
    """Return a view on *file* for background editing, without stealing focus.

    Reuses an already-open view when possible; otherwise opens the file and
    immediately restores focus to the previously active view.
    """
    window = sublime.active_window()
    existing = window.find_open_file(file.filename)
    if(existing):
        return existing
    # Not open yet: open it, then hand focus straight back so the edit
    # stays "silent" from the user's point of view.
    focused = window.active_view()
    opened = window.open_file(file.filename, sublime.ENCODED_POSITION)
    window.focus_view(focused)
    return opened
# ================================================================================
class RunEditingCommandOnToday:
    """Locate the heading carrying CUSTOM_ID "TODAY", open its file silently,
    run *command* on that heading, then save and reload the file."""
    def __init__(self, view, command, cmds = None):
        self.view = view
        self.command = command
        # Fixed: the original used a mutable default (cmds = {}) that
        # onLoaded() mutates via cmds["onDone"] = ..., leaking state across
        # instances that omit cmds. Use a fresh dict per instance instead.
        self.cmds = {} if cmds is None else cmds
    def onSaved(self):
        """After saving, reload the db for the edited view and signal done."""
        db.Get().Reload(self.savedView)
        evt.EmitIf(self.onDone)
    def onEdited(self):
        # NOTE the save here doesn't seem to be working
        # Not sure why. BUT...
        view = self.savedView
        view.run_command("save")
        sublime.set_timeout_async(lambda: self.onSaved(), 100)
    def onLoaded(self):
        """Once the file's view is ready, move to the TODAY node and run the
        editing command, chaining onEdited via a one-shot event."""
        view = self.savedView
        self.n.move_cursor_to(view)
        eventName = util.RandomString()
        evt.Get().once(eventName, self.onEdited)
        log.debug("Trying to run: " + self.command)
        cmds = self.cmds
        cmds["onDone"] = eventName
        view.run_command(self.command, cmds)
    def Run(self,onDone = None):
        """Kick off the whole sequence; warns if no TODAY node exists."""
        self.onDone = onDone
        idValue = "TODAY"
        file, at = db.Get().FindByCustomId(idValue)
        if(file != None and at != None):
            node = file.At(at)
            if(node):
                self.n = node
                self.f = file
                self.savedView = get_view_for_silent_edit_file(file)
                # Give time for the document to be opened.
                sublime.set_timeout_async(lambda: self.onLoaded(), 200)
                return
            else:
                log.warning("COULD NOT LOCATE TODAY")
        else:
            log.warning("Could not locate today")
# Append text to a node
class OrgAppendTextCommand(sublime_plugin.TextCommand):
    """Append *text* as an indented body line at the end of the current node.

    edit   -- Sublime edit token.
    text   -- the text to append (indented two spaces per heading level).
    onDone -- optional event fired after the insert.
    """
    def run(self, edit, text="", onDone=None):
        curNode = db.Get().AtInView(self.view)
        if(not curNode):
            # No node at point: fall back to the last node in the file, if any.
            file = db.Get().FindInfo(self.view)
            if(len(file.org) > 0):
                curNode = file.org[len(file.org) - 1]
        if(not curNode):
            # Empty file: work from the start of the last line.
            # (Fixed: the original also did "reg = here" here, which raised
            # NameError because "here" was never bound on this path.)
            level = 1
            l = self.view.line(self.view.size())
            reg = sublime.Region(l.start(),l.start())
        else:
            level = curNode.level
            reg = curNode.region(self.view)
        if(level == 0):
            # Root-level node: indent as a level-1 body line.
            # (The original also built a "here" region with an unqualified
            # "view.size()" NameError; "here" was dead code - its only use,
            # self.view.show(here), was commented out - so it is removed.)
            level = 1
        self.view.sel().clear()
        self.view.sel().add(reg.end() + 1)
        self.view.insert(edit,self.view.sel()[0].begin(),'\n' + (' '*(level*2)) + text)
        evt.EmitIf(onDone)
class OrgLinkToTodayCommand(sublime_plugin.TextCommand):
    """Schedule the current heading and add a link to it (plus a child heading
    copy) under the TODAY node."""
    def OnDone(self):
        evt.EmitIf(self.onDone)
    def InsertLink(self):
        """Append the prepared link to the TODAY node."""
        self.ed = RunEditingCommandOnToday(self.view, "org_append_text", {'text': self.link})
        self.ed.Run(evt.Make(self.OnDone))
    def run(self, edit, onDone=None):
        self.onDone = onDone
        # Schedule this item so it is in the agenda
        self.view.run_command("org_schedule", {"dateval": str(datetime.datetime.now())})
        # Create a link to the current location so we can insert it in our today item
        self.link = orglink.CreateLink(self.view)
        curNode = db.Get().AtInView(self.view)
        # Should we add a heading to this?
        if(curNode and not curNode.is_root()):
            # First mirror the heading under TODAY, then chain the link insert.
            self.ed = RunEditingCommandOnToday(self.view, "org_insert_text_as_child_heading", {'heading': curNode.heading})
            self.ed.Run(evt.Make(self.InsertLink))
        else:
            self.InsertLink()
class OrgEnterOnHeadingCommand(sublime_plugin.TextCommand):
    """Handle Enter on a heading: insert a newline plus body indentation.

    Indent -- heading level used to compute the indent width.
    NOTE(review): the whitespace literals below may have been collapsed in
    this copy; confirm the exact indent width against the original source.
    """
    def run(self, edit, Indent=1):
        indent = (" " * Indent) + " "
        self.view.run_command("org_internal_insert", {"location": self.view.sel()[0].begin(), "text": "\n" + indent})
| 41.535861 | 197 | 0.574681 | 31,355 | 0.773453 | 0 | 0 | 0 | 0 | 0 | 0 | 5,601 | 0.138163 |
e221b1914e820e512f676c3a926c07afe30f2d05 | 526 | py | Python | examples/verilator/main.py | splhack/mantle | 42fe81df22f4556f891f77b9c9acfd7e8bbe648b | [
"MIT"
] | 33 | 2018-06-11T22:58:23.000Z | 2021-11-22T04:16:36.000Z | examples/verilator/main.py | splhack/mantle | 42fe81df22f4556f891f77b9c9acfd7e8bbe648b | [
"MIT"
] | 90 | 2017-08-31T01:12:13.000Z | 2021-09-17T04:07:07.000Z | examples/verilator/main.py | splhack/mantle | 42fe81df22f4556f891f77b9c9acfd7e8bbe648b | [
"MIT"
] | 12 | 2017-09-01T03:25:36.000Z | 2020-10-14T21:43:12.000Z | from magma import *
from magma.verilator.verilator import compile as compileverilator
from mantle.verilog.gates import And, XOr
def f(a, b, c):
return (a & b) ^ c
#main = DefineCircuit('main', "a", In(Bit), "b", In(Bit), "c", In(Bit), "d", Out(Bit), 'CLK', In(Bit))
main = DefineCircuit('main', "a", In(Bit), "b", In(Bit), "c", In(Bit), "d", Out(Bit))
t = And(2)(main.a,main.b)
d = XOr(2)(t,main.c)
wire(d,main.d)
EndCircuit()
compile("main", main)
compileverilator('sim_main.cpp', main, f)
#print(testvectors(main))
| 26.3 | 102 | 0.638783 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 165 | 0.313688 |
e222ff90231f967a2d2347ddf5dcea5c451c243b | 3,908 | py | Python | models/model.py | andersro93/School.ICT441.TextGenerator | e2efe2253a3cb24a196358d0074209340503069a | [
"MIT"
] | null | null | null | models/model.py | andersro93/School.ICT441.TextGenerator | e2efe2253a3cb24a196358d0074209340503069a | [
"MIT"
] | null | null | null | models/model.py | andersro93/School.ICT441.TextGenerator | e2efe2253a3cb24a196358d0074209340503069a | [
"MIT"
] | null | null | null | from nt import DirEntry
import os
import self as self
from keras import Model as KerasModel
class Model(object):
"""
Base class for the different models in this project
"""
_files: dict = {}
""" The files that is used to parse the model """
_raw_content: str = ''
""" The files contents as one long string """
_assets_path: str = 'assets_test'
""" Path to the assets to read from """
_model: KerasModel = None
""" The Keras model that is used """
_model_path: str = None
""" The path where to save and use the model from """
_activation_method: str = 'softmax'
""" Activation method to use """
_optimizer: str = 'adam'
""" Optimizer method to use """
_loss_method: str = 'categorical_crossentropy'
""" Loss method to use """
_training_data_encoding: str = 'utf-8'
""" The encoding used on the training data """
def print_model_summary(self) -> self:
"""
Returns a model summary if the model has been created
:return: str
"""
if self._model:
self._model.summary()
return
print('No model has been created yet')
return self
def load_weights(self, weights: str) -> self:
"""
Loads weights from the given weights file
:param weights:
:return: self
"""
if not self._model:
print('No model has been created, please create the model first!')
return
self._model.load_weights(weights)
self.compile_model()
return self
def compile_model(self) -> self:
"""
Compiles the model and runs some optimizer on it
:return: self
"""
if not self._model:
print('No model has been created, please create the model first!')
return
self._model.compile(loss=self._loss_method, optimizer=self._optimizer)
return self
def _read_data_from_assets(self) -> self:
"""
Reads and parses the data from the assets folder into the object itself
:return: self
"""
for directory in os.scandir(self._get_assets_full_path()):
self._parse_directory(directory)
return self
def _concat_assets_content_to_one_string(self) -> self:
"""
Concatenates the contents from all the assets to one string
:return: self
"""
for key, value in self._files.items():
self._raw_content = self._raw_content + value
self._raw_content = self._raw_content.lower()
return self
def _parse_directory(self, directory: DirEntry) -> self:
"""
Recursively parses the given directory and starts to parse any found files
:param directory:
:return: self
"""
entry: DirEntry
for entry in os.scandir(directory):
if entry.is_dir():
self._parse_directory(entry)
else:
self._parse_file(entry)
return self
def _parse_file(self, file: DirEntry) -> self:
"""
Tries to parse the given file and puts it in self._files dictionary
:param file: DirEntry
:return: self
"""
data: str
try:
with open(file.path, 'r', encoding=self._training_data_encoding) as file_reader:
data = file_reader.read()
file_reader.close()
except Exception:
print(f"Unable to parse file: {file.path}")
return
self._files[file.path] = data
return self
def _get_assets_full_path(self) -> str:
"""
Returns a computed full path to the directory where the assets are located as a string
:return: str
"""
return os.path.join(os.path.dirname(os.path.dirname(__file__)), self._assets_path)
| 27.138889 | 94 | 0.58956 | 3,812 | 0.975435 | 0 | 0 | 0 | 0 | 0 | 0 | 1,620 | 0.414534 |
e22361b6b85eb5d012588be5245ff521ec8b6afb | 930 | py | Python | GmailWrapper_JE/venv/Lib/site-packages/requests_oauthlib/compliance_fixes/fitbit.py | JE-Chen/je_old_repo | a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5 | [
"MIT"
] | null | null | null | GmailWrapper_JE/venv/Lib/site-packages/requests_oauthlib/compliance_fixes/fitbit.py | JE-Chen/je_old_repo | a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5 | [
"MIT"
] | null | null | null | GmailWrapper_JE/venv/Lib/site-packages/requests_oauthlib/compliance_fixes/fitbit.py | JE-Chen/je_old_repo | a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5 | [
"MIT"
] | null | null | null | """
The Fitbit API breaks from the OAuth2 RFC standard by returning an "errors"
object list, rather than a single "error" string. This puts hooks in place so
that oauthlib can process an error in the results from access token and refresh
token responses. This is necessary to prevent getting the generic red herring
MissingTokenError.
"""
from json import loads, dumps
from oauthlib.common import to_unicode
def fitbit_compliance_fix(session):
def _missing_error(r):
token = loads(r.text)
if "errors" in token:
# Set the error to the first one we have
token["error"] = token["errors"][0]["errorType"]
r._content = to_unicode(dumps(token)).encode("UTF-8")
return r
session.register_compliance_hook("access_token_response", _missing_error)
session.register_compliance_hook("refresh_token_response", _missing_error)
return session
| 35.769231 | 80 | 0.708602 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 473 | 0.508602 |
e2246947e66e0b1245fc29fca7409f6373480374 | 1,609 | py | Python | merge_intervals.py | KevinLuo41/LeetCodeInPython | 051e1aab9bab17b0d63b4ca73473a7a00899a16a | [
"Apache-2.0"
] | 19 | 2015-01-19T19:36:09.000Z | 2020-03-18T03:10:12.000Z | merge_intervals.py | CodingVault/LeetCodeInPython | 051e1aab9bab17b0d63b4ca73473a7a00899a16a | [
"Apache-2.0"
] | null | null | null | merge_intervals.py | CodingVault/LeetCodeInPython | 051e1aab9bab17b0d63b4ca73473a7a00899a16a | [
"Apache-2.0"
] | 12 | 2015-04-25T14:20:38.000Z | 2020-09-27T04:59:59.000Z | #!/usr/bin/env python
# encoding: utf-8
"""
merge_intervals.py
Created by Shengwei on 2014-07-07.
"""
# https://oj.leetcode.com/problems/merge-intervals/
# tags: medium, array, interval
"""
Given a collection of intervals, merge all overlapping intervals.
For example,
Given [1,3],[2,6],[8,10],[15,18],
return [1,6],[8,10],[15,18].
"""
# https://gist.github.com/senvey/772e0afd345934cee475
# Definition for an interval.
# class Interval:
# def __init__(self, s=0, e=0):
# self.start = s
# self.end = e
class Solution:
# @param intervals, a list of Interval
# @return a list of Interval
def merge(self, intervals):
if intervals is None or len(intervals) <= 1:
return intervals
result = []
intervals = sorted(intervals, key=lambda i: i.start)
# this step must be after the intervals are sorted
start, end = intervals[0].start, intervals[0].end
for interval in intervals:
n_start, n_end = interval.start, interval.end
# becasue the intervals are sorted now, start
# must be less than or equal to n_start, so
# if n_start <= end, it starts in [start, end]
if n_start <= end:
if n_end >= end:
end = n_end
# if n_end < end: pass
else:
result.append(Interval(start, end))
start, end = n_start, n_end
# IMPORTANT! remember to post-process
result.append(Interval(start, end))
return result
| 27.741379 | 65 | 0.573027 | 1,079 | 0.670603 | 0 | 0 | 0 | 0 | 0 | 0 | 821 | 0.510255 |
e226e53adb49fe2e348b71d72b9e0931ef0a41cb | 3,495 | py | Python | pesto-orm/pesto_orm/dialect/mysql/domain.py | Dreampie/pesto | 3d48637c61396cdc3a5427b8f51378c06c2473d7 | [
"Apache-2.0"
] | 13 | 2020-05-22T09:32:48.000Z | 2021-09-09T09:57:43.000Z | pesto-orm/pesto_orm/dialect/mysql/domain.py | Dreampie/pesto | 3d48637c61396cdc3a5427b8f51378c06c2473d7 | [
"Apache-2.0"
] | null | null | null | pesto-orm/pesto_orm/dialect/mysql/domain.py | Dreampie/pesto | 3d48637c61396cdc3a5427b8f51378c06c2473d7 | [
"Apache-2.0"
] | 2 | 2020-05-25T18:05:20.000Z | 2020-05-25T18:50:22.000Z | # coding=utf-8
import functools
import re
import traceback

from pesto_common.config.configer import Configer
from pesto_common.log.logger_factory import LoggerFactory
from pesto_orm.core.base import db_config
from pesto_orm.core.executor import ExecutorFactory
from pesto_orm.core.model import BaseModel
from pesto_orm.core.repository import BaseRepository
from pesto_orm.dialect.base import DefaultDialect
logger = LoggerFactory.get_logger('dialect.mysql.domain')
class MySQLDialect(DefaultDialect):
    """SQL dialect implementation targeting MySQL."""

    def get_db_type(self):
        """Return the dialect identifier used in configuration."""
        return 'mysql'

    def paginate_with(self, sql, page_number, page_size):
        """Append a LIMIT/OFFSET clause to ``sql`` for the requested page.

        A query asking for exactly one row (page 1, size 1) that already
        matches the single-select pattern is returned unchanged.
        """
        wants_single_row = page_number == 1 and page_size == 1
        if wants_single_row and re.match(DefaultDialect.select_single_pattern, sql) is not None:
            return sql
        offset = (page_number - 1) * page_size
        return '%s LIMIT %d OFFSET %d' % (sql, page_size, offset)
# Only wire up the MySQL connector when the configuration selects MySQL.
db_type = Configer.get('db.type')
if db_type == 'mysql':
    import mysql.connector as connector
    # Hand the driver module and its connection options to the executor factory.
    db_config['target'] = connector
    db_config['use_pure'] = True
    from mysql.connector.conversion import MySQLConverter
    class NumpyMySQLConverter(MySQLConverter):
        ''' A mysql.connector Converter that handles Numpy types '''
        def _float32_to_mysql(self, value):
            return float(value)
        def _float64_to_mysql(self, value):
            return float(value)
        def _int32_to_mysql(self, value):
            return int(value)
        def _int64_to_mysql(self, value):
            return int(value)
    # Register the converter so numpy scalar values can be bound as SQL params.
    db_config['converter_class'] = NumpyMySQLConverter
# Module-level singletons shared by the model/repository classes below.
mysqlExecutor = ExecutorFactory.get_executor(db_config=db_config)
mysqlDialect = MySQLDialect()
class MysqlBaseModel(BaseModel):
    """Active-record style model bound to the shared MySQL executor/dialect."""

    def __init__(self, db_name=None, table_name=None, table_alias=None, primary_key='id'):
        super(MysqlBaseModel, self).__init__(db_name, table_name, table_alias, primary_key)

    def get_dialect(self):
        """Return the module-level MySQL dialect singleton."""
        return mysqlDialect

    def get_executor(self):
        """Return the module-level MySQL executor singleton."""
        return mysqlExecutor
class MysqlBaseRepository(BaseRepository):
    """Repository bound to the shared MySQL executor/dialect singletons."""

    def __init__(self, model_class=None):
        super(MysqlBaseRepository, self).__init__(model_class)

    def get_dialect(self):
        """Return the module-level MySQL dialect singleton."""
        return mysqlDialect

    def get_executor(self):
        """Return the module-level MySQL executor singleton."""
        return mysqlExecutor
def transaction(rollback_exceptions=None):
    """Decorator that runs the wrapped callable inside a MySQL transaction.

    Args:
        rollback_exceptions: optional list of exception classes that should
            trigger a rollback. An empty/None value means *any* exception
            rolls the transaction back; in that case the exception is logged
            and swallowed (preserving the original contract). An exception
            whose class is not in a non-empty list is committed and re-raised.

    Returns:
        The decorating function. The wrapped callable now propagates the
        original function's return value (previously it was discarded and
        ``None`` was always returned — a bug fixed here).
    """
    if rollback_exceptions is None:
        # Avoid the shared mutable-default-argument pitfall of the
        # original ``rollback_exceptions=[]`` signature.
        rollback_exceptions = []

    def wrap(func):
        @functools.wraps(func)  # preserve name/docstring for logging & introspection
        def to_do(*args, **kwargs):
            try:
                mysqlExecutor.begin_transaction()
                return_value = func(*args, **kwargs)
                logger.info('Transaction method: ' + func.__name__)
                mysqlExecutor.commit_transaction()
                return return_value
            except Exception as e:
                if len(rollback_exceptions) == 0 or e.__class__ in rollback_exceptions:
                    # Matched (or match-all) exception: roll back, log, swallow.
                    mysqlExecutor.rollback_transaction()
                    logger.error('Method execute error. method: ' + str(func.__name__) + ', error:' + traceback.format_exc() + ', transaction roll back.')
                else:
                    # Unlisted exception: keep the work and surface the error
                    # with its original traceback.
                    mysqlExecutor.commit_transaction()
                    raise
            finally:
                mysqlExecutor.close_transaction()
        return to_do
    return wrap
| 29.369748 | 155 | 0.650358 | 1,417 | 0.403819 | 0 | 0 | 0 | 0 | 0 | 0 | 336 | 0.095754 |
e2280586bc7cd2b0488f23077c0f84da82148064 | 10,454 | py | Python | surround/tests/experiment_reader_test.py | ScottyB/surround | f9a86f5b5677de7bd5763d26de4a6f18c36f2a4d | [
"BSD-3-Clause"
] | null | null | null | surround/tests/experiment_reader_test.py | ScottyB/surround | f9a86f5b5677de7bd5763d26de4a6f18c36f2a4d | [
"BSD-3-Clause"
] | null | null | null | surround/tests/experiment_reader_test.py | ScottyB/surround | f9a86f5b5677de7bd5763d26de4a6f18c36f2a4d | [
"BSD-3-Clause"
] | null | null | null | import os
import re
import unittest
import shutil
import subprocess
import logging
import zipfile
from io import BytesIO
from ..experiment.file_storage_driver import FileStorageDriver
from ..experiment.experiment_reader import ExperimentReader
from ..experiment.experiment_writer import ExperimentWriter
class ExperimentReaderTest(unittest.TestCase):
    """Integration tests for ExperimentReader over a file-backed store.

    setUp materialises a real experiment store under ``temporary/`` using
    ExperimentWriter plus the ``surround init`` CLI, so every test reads
    genuine on-disk artifacts; tearDown removes the whole tree.
    """
    def setUp(self):
        # Register two projects in the file-backed experiment store.
        writer = ExperimentWriter(storage_url="temporary/experiments", storage_driver=FileStorageDriver)
        writer.write_project("test_project", "test_description")
        writer.write_project("test_project_two", "test_description_2")
        # Scaffold both project directories with the surround CLI
        # (assumes ``surround`` is on PATH — this is an integration test).
        process = subprocess.Popen(['surround', 'init', '-p', 'test_project', '-d', 'test_description', '-w', 'no'], cwd="temporary", stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        process.wait()
        process.stdout.close()
        process.stderr.close()
        process = subprocess.Popen(['surround', 'init', '-p', 'test_project_two', '-d', 'test_description', '-w', 'no'], cwd="temporary", stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        process.wait()
        process.stdout.close()
        process.stderr.close()
        # Give each project a dummy model file so experiments cache it.
        os.makedirs(os.path.join("temporary", "test_project", "models"))
        with open("temporary/test_project/models/test.model", "w+") as f:
            f.write("WEIGHTS")
        os.makedirs(os.path.join("temporary", "test_project_two", "models"))
        with open("temporary/test_project_two/models/test.model", "w+") as f:
            f.write("WEIGHTS")
        # Record two experiments per project with known metrics; the
        # logging.info calls end up in each experiment's log.txt.
        writer.start_experiment("test_project", "temporary/test_project", args={'mode': 'batch'}, notes=['test', 'note'])
        logging.info("test_log")
        writer.stop_experiment(metrics={'test_metric': 0.1})
        writer.start_experiment("test_project", "temporary/test_project", args={'mode': 'batch'}, notes=['test', 'note'])
        logging.info("test_log")
        writer.stop_experiment(metrics={'test_metric': 0.2})
        writer.start_experiment("test_project_two", "temporary/test_project_two", args={'mode': 'batch'}, notes=['test', 'note'])
        logging.info("test_log")
        writer.stop_experiment(metrics={'test_metric': 0.2})
        writer.start_experiment("test_project_two", "temporary/test_project_two", args={'mode': 'batch'}, notes=['test', 'note'])
        writer.stop_experiment(metrics={'test_metric': 0.3})
        # Experiment folders are named by start timestamp; sort for stable order.
        self.folder_names = os.listdir("temporary/experiments/experimentation/test_project/experiments")
        self.folder_names_2 = os.listdir("temporary/experiments/experimentation/test_project_two/experiments")
        self.folder_names = sorted(self.folder_names)
        self.folder_names_2 = sorted(self.folder_names_2)
    def tearDown(self):
        # Wipe the entire on-disk fixture tree created by setUp.
        shutil.rmtree("temporary")
    def test_get_projects(self):
        # Both projects should be listed with name/description/timestamp fields.
        reader = ExperimentReader(storage_url="temporary/experiments", storage_driver=FileStorageDriver)
        projects = reader.get_projects()
        self.assertIsInstance(projects, list)
        expected_names = ["test_project", "test_project_two"]
        expected_descs = ["test_description", "test_description_2"]
        for proj in projects:
            self.assertIn("project_name", proj)
            self.assertIn("project_description", proj)
            self.assertIn("last_time_updated", proj)
            self.assertIn(proj['project_name'], expected_names)
            self.assertIn(proj['project_description'], expected_descs)
            # ISO-8601 timestamp with microseconds.
            self.assertRegex(proj['last_time_updated'], r"^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{6}$")
    def test_get_project(self):
        # Lookup by name returns the project dict; unknown names yield None.
        reader = ExperimentReader(storage_url="temporary/experiments", storage_driver=FileStorageDriver)
        project = reader.get_project("test_project")
        self.assertEqual("test_project", project["project_name"])
        self.assertEqual("test_description", project["project_description"])
        self.assertRegex(project['last_time_updated'], r"^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{6}$")
        project = reader.get_project("doesnt exist")
        self.assertIsNone(project)
    def test_has_project(self):
        reader = ExperimentReader(storage_url="temporary/experiments", storage_driver=FileStorageDriver)
        self.assertTrue(reader.has_project("test_project"))
        self.assertTrue(reader.has_project("test_project_two"))
        self.assertFalse(reader.has_project("non-exist"))
    def test_get_experiment(self):
        # A single experiment exposes execution info, captured logs and results.
        reader = ExperimentReader(storage_url="temporary/experiments", storage_driver=FileStorageDriver)
        exp = reader.get_experiment("test_project", self.folder_names[0])
        self.assertIn("execution_info", exp)
        self.assertIn("logs", exp)
        self.assertIn("results", exp)
        self.assertEqual(exp["execution_info"]["start_time"], self.folder_names[0])
        self.assertIn("INFO:root:test_log", exp["logs"][0])
        self.assertEqual(exp["results"]["start_time"], self.folder_names[0])
    def test_get_experiments(self):
        reader = ExperimentReader(storage_url="temporary/experiments", storage_driver=FileStorageDriver)
        exps = reader.get_experiments("test_project")
        for exp in exps:
            self.assertIn(exp['execution_info']['start_time'], self.folder_names + self.folder_names_2)
            self.assertIn("INFO:root:test_log", exp["logs"][0])
            self.assertIn(exp['results']['start_time'], self.folder_names + self.folder_names_2)
    def test_has_experiment(self):
        reader = ExperimentReader(storage_url="temporary/experiments", storage_driver=FileStorageDriver)
        for name in self.folder_names:
            self.assertTrue(reader.has_experiment("test_project", name))
        for name in self.folder_names_2:
            self.assertTrue(reader.has_experiment("test_project_two", name))
        self.assertFalse(reader.has_experiment("non-exist", "non-exists"))
        self.assertFalse(reader.has_experiment("test_project", "non-exists"))
        self.assertFalse(reader.has_experiment("test_project_two", "non-exists"))
    def test_get_experiment_files(self):
        # Every experiment folder should contain the four standard artifacts.
        reader = ExperimentReader(storage_url="temporary/experiments", storage_driver=FileStorageDriver)
        expected_files = [
            "code.zip",
            "results.json",
            "execution_info.json",
            "log.txt"
        ]
        for name in self.folder_names:
            files = reader.get_experiment_files("test_project", name)
            for f in expected_files:
                self.assertIn(f, files)
        for name in self.folder_names_2:
            files = reader.get_experiment_files("test_project_two", name)
            for f in expected_files:
                self.assertIn(f, files)
    def test_get_cache_files(self):
        # Cached model archives are named model-<timestamp>-<hash>.zip.
        reader = ExperimentReader(storage_url="temporary/experiments", storage_driver=FileStorageDriver)
        files = reader.get_project_cache("test_project")
        for f in files:
            self.assertRegex(f, r"^model-[T0-9\-]{26}-[a-z0-9]+\.zip$")
    def test_pull_experiment_files(self):
        # Pulled files are returned as raw bytes.
        reader = ExperimentReader(storage_url="temporary/experiments", storage_driver=FileStorageDriver)
        log = reader.pull_experiment_file("test_project", self.folder_names[0], "log.txt")
        self.assertIsNotNone(log)
        log = log.decode('utf-8')
        self.assertIn("INFO:root:test_log", log.rstrip())
        log = reader.pull_experiment_file("test_project_two", self.folder_names_2[0], "log.txt")
        self.assertIsNotNone(log)
        log = log.decode('utf-8')
        self.assertIn("INFO:root:test_log", log.rstrip())
    def test_pull_cache_files(self):
        # The cached archive should contain the model file written in setUp.
        reader = ExperimentReader(storage_url="temporary/experiments", storage_driver=FileStorageDriver)
        cache_files = reader.get_project_cache("test_project")
        self.assertGreater(len(cache_files), 0)
        model = reader.pull_cache_file("test_project", cache_files[0])
        self.assertIsNotNone(model)
        with open("temporary/model.zip", "wb+") as f:
            f.write(model)
        with zipfile.ZipFile("temporary/model.zip", "r") as f:
            model_file = f.read("models/test.model")
            model_file = model_file.decode('utf-8')
            self.assertEqual("WEIGHTS", model_file)
    def test_pull_model(self):
        # Pulling by model hash must return the same bytes as the cache file.
        reader = ExperimentReader(storage_url="temporary/experiments", storage_driver=FileStorageDriver)
        cache_files = reader.get_project_cache("test_project")
        self.assertGreater(len(cache_files), 0)
        self.assertRegex(cache_files[0], r"^model.+-(.+)\.zip$")
        expected_file = reader.pull_cache_file("test_project", cache_files[0])
        model_hash = re.match(r"^model.+-(.+)\.zip$", cache_files[0]).group(1)
        pulled_file = reader.pull_model("test_project", model_hash)
        self.assertIsNotNone(pulled_file)
        self.assertEqual(expected_file, pulled_file)
    def test_replicate_file(self):
        # Replicating to a path should reconstruct the project tree on disk.
        reader = ExperimentReader(storage_url="temporary/experiments", storage_driver=FileStorageDriver)
        output = reader.replicate("test_project", self.folder_names[0], file_path="temporary/replication")
        self.assertEqual(output, "temporary/replication")
        expected_files = [
            '.surround',
            'models/test.model',
            'test_project/',
            'test_project/stages/baseline.py',
            'dodo.py',
        ]
        self.assertTrue(os.path.exists("temporary/replication"))
        for expected in expected_files:
            self.assertTrue(os.path.exists(os.path.join("temporary/replication", expected)))
        with open("temporary/replication/models/test.model", "r") as f:
            self.assertEqual("WEIGHTS", f.read())
    def test_replicate_zip(self):
        # Without a file_path, replicate returns the archive as bytes.
        reader = ExperimentReader(storage_url="temporary/experiments", storage_driver=FileStorageDriver)
        output_zip = reader.replicate("test_project", self.folder_names[0])
        output_zip = BytesIO(output_zip)
        self.assertIsNotNone(output_zip)
        expected_files = [
            '.surround/config.yaml',
            'models/test.model',
            'test_project/__main__.py',
            'test_project/stages/baseline.py',
            'dodo.py',
        ]
        with zipfile.ZipFile(output_zip, "r") as f:
            for expected in expected_files:
                self.assertIn(expected, f.namelist())
| 41.816 | 185 | 0.668261 | 10,146 | 0.970538 | 0 | 0 | 0 | 0 | 0 | 0 | 2,828 | 0.270518 |
e228a846a1a3922a30fabd5f090ee4a98bd3e4e7 | 1,806 | py | Python | quickmail/commands/clear.py | avikumar15/quick-mail-cli | 4282fb62794671de89d5d8efc3697b0a0d540230 | [
"MIT"
] | 7 | 2021-03-20T06:22:44.000Z | 2021-11-03T14:43:18.000Z | quickmail/commands/clear.py | avikumar15/quick-email-cli | 4282fb62794671de89d5d8efc3697b0a0d540230 | [
"MIT"
] | 2 | 2021-11-04T03:21:57.000Z | 2021-11-04T03:24:06.000Z | quickmail/commands/clear.py | avikumar15/quick-mail-cli | 4282fb62794671de89d5d8efc3697b0a0d540230 | [
"MIT"
] | null | null | null | from __future__ import print_function
import os
from argparse import ArgumentParser, Namespace
from zope.interface import implementer
from quickmail.commands import ICommand
from quickmail.utils.misc import quick_mail_dir, heavy_tick, quick_mail_template_dir
@implementer(ICommand)
class ClearCommand:
    """Command that wipes saved message bodies (and optionally credentials)."""

    def add_arguments(self, parser: ArgumentParser) -> None:
        """Register the ``--justdoit`` flag and the command description."""
        parser.add_argument(
            '-j',
            '--justdoit',
            action='store_true',
            help='clear storage including the credentials and token')
        parser.description = (
            'Use the clear command to clear all email body that are saved in your home directories. '
            'Additionally, pass --justdoit to remove the credential files as well')

    @staticmethod
    def _purge(directory: str, prefix: str, suffixes) -> None:
        """Delete every file in *directory* whose name ends with one of *suffixes*.

        ``prefix`` is prepended to each name to build the removal path,
        matching the original path-construction conventions.
        """
        for name in os.listdir(directory):
            if name.endswith(suffixes):
                os.remove(prefix + name)

    def run_command(self, args: Namespace):
        """Remove stored files, honouring ``--justdoit`` for credentials."""
        if not os.path.exists(quick_mail_dir):
            print('Storage already is empty ' + heavy_tick)
            return
        if args.justdoit:
            # Credentials/token live in the mail dir as json/pickle files.
            self._purge(quick_mail_dir, quick_mail_dir + '/', ('.json', '.pickle'))
        else:
            # Saved message bodies and templates are plain text files.
            self._purge(quick_mail_dir, quick_mail_dir + '/', ('.txt',))
            self._purge(quick_mail_template_dir, quick_mail_template_dir, ('.txt',))
        print('Storage cleared ' + heavy_tick + heavy_tick)

    def get_desc(self) -> str:
        """Return the one-line description shown in the command list."""
        return 'clear the body of message from local or even the token if --justdoit argument is added'
e22911017f694984b51868ace74cab9c0ac7a861 | 1,247 | py | Python | python/int_questions/intersection.py | kellyjovan/algoholics-anon | 0aec27a3cb8b54a60348c7c6f82f251355f7d698 | [
"MIT"
] | 17 | 2019-06-07T16:50:18.000Z | 2021-12-27T16:40:13.000Z | python/cracking-coding/linked_list/intersection.py | ChristianEdwardPadilla/algoholics-anon | ee95348227caceffc621ef0962db7e81a2efd996 | [
"MIT"
] | 5 | 2020-01-31T16:01:49.000Z | 2021-10-20T07:13:58.000Z | python/cracking-coding/linked_list/intersection.py | ChristianEdwardPadilla/algoholics-anon | ee95348227caceffc621ef0962db7e81a2efd996 | [
"MIT"
class Node:
    """Singly linked list node holding a value and a next pointer."""

    def __init__(self, value):
        self.value = value
        self.next = None

# Have no idea how to do this
# import sys
# sys.path.insert(0, '../../data_structures')
# import node

def intersection(l1: Node, l2: Node) -> Node:
    """Return the first node shared by two singly linked lists, or None.

    Strategy: if the tail nodes differ the lists never meet. Otherwise
    advance the head of the longer list by the length difference, then
    walk both lists in lockstep until the cursors coincide.
    """
    l1_end, len1 = get_tail(l1)
    l2_end, len2 = get_tail(l2)
    # Lists that share any node necessarily share the same tail node.
    if l1_end != l2_end:
        return None
    # Align both cursors at the same distance from the shared tail.
    if len1 > len2:
        l1 = move_head(l1, len1 - len2)
    else:
        l2 = move_head(l2, len2 - len1)
    while l1 != l2:
        l1 = l1.next
        l2 = l2.next
    # NOTE: removed a leftover debug print of the node values here.
    return l1

def move_head(head: Node, pos: int) -> Node:
    """Advance ``head`` forward by ``pos`` links and return the new node."""
    current = head
    while pos > 0:
        current = current.next
        pos -= 1
    return current

def get_tail(head: Node) -> (Node, int):
    """Return the last node of the list and the number of links traversed."""
    current = head
    length = 0
    while current.next is not None:  # idiomatic identity test (was == None)
        current = current.next
        length += 1
    return (current, length)
# Demo: build the shared tail "c"->"a"->"r" ...
inter = Node('c')
inter.next = Node('a')
inter.next.next = Node('r')
# ... attach it after "r"->"a"->"c"->"e" (i.e. "racecar") ...
l1 = Node('r')
l1.next = Node('a')
l1.next.next = Node('c')
l1.next.next.next = Node('e')
l1.next.next.next.next = inter
# ... and after "r"->"e"->"d" (i.e. "redcar").
l2 = Node('r')
l2.next = Node('e')
l2.next.next = Node('d')
l2.next.next.next = inter
# The two lists intersect at `inter`, so this prints 'c'.
res = intersection(l1, l2)
print(res.value)
e22a7e102c77cfb0c69aac520a8607ca2b2c88b3 | 550 | py | Python | bank_bot/tests/conftest.py | Tengro/larp_bankbot | 22d5ea49d5f507da74fb3b1f106c24ad52cb9e68 | [
"MIT"
] | 3 | 2019-07-27T15:20:49.000Z | 2019-10-14T13:10:55.000Z | bank_bot/tests/conftest.py | Tengro/larp_bankbot | 22d5ea49d5f507da74fb3b1f106c24ad52cb9e68 | [
"MIT"
] | 1 | 2021-06-01T23:55:12.000Z | 2021-06-01T23:55:12.000Z | bank_bot/tests/conftest.py | Tengro/larp_bankbot | 22d5ea49d5f507da74fb3b1f106c24ad52cb9e68 | [
"MIT"
] | null | null | null | import os
import pytest
from bank_bot.banking_system import Database
class MockMessage(object):
    """Minimal stand-in for a Telegram message object used by the tests."""

    def __init__(self, from_who, chat_id, message_text):
        # Mirror the payload shape the bot reads from real messages.
        sender = {"id": from_who}
        chat = {"id": chat_id}
        self.json = {"from": sender, "chat": chat}
        self.text = message_text
@pytest.fixture
def database():
    """Yield an initialised Database backed by a throwaway file, then delete it."""
    db_path = "test_database.db"
    db = Database(db_path)
    db.initialize_system()
    yield db
    # Teardown runs after the test body (pytest resumes the generator).
    os.remove(db_path)
@pytest.fixture
def mock_message():
    """Provide a canned MockMessage instance for handler tests."""
    return MockMessage(2, 2, "Mock")
| 25 | 71 | 0.703636 | 188 | 0.341818 | 177 | 0.321818 | 287 | 0.521818 | 0 | 0 | 44 | 0.08 |
e22cdecef594a7a4d01026c734e302cfb7902186 | 673 | py | Python | django/users/migrations/0004_auto_20160408_1032.py | BD2KGenomics/brca-website | 243bee560d5714f7cf5d98d06c83be345f1a11b4 | [
"Apache-2.0"
] | 5 | 2016-01-12T01:29:50.000Z | 2017-03-10T08:34:52.000Z | django/users/migrations/0004_auto_20160408_1032.py | BD2KGenomics/brca-website-deprecated | 243bee560d5714f7cf5d98d06c83be345f1a11b4 | [
"Apache-2.0"
] | 141 | 2015-08-06T18:51:37.000Z | 2017-04-03T20:41:30.000Z | django/users/migrations/0004_auto_20160408_1032.py | BD2KGenomics/brca-website-deprecated | 243bee560d5714f7cf5d98d06c83be345f1a11b4 | [
"Apache-2.0"
] | 8 | 2015-08-08T00:32:18.000Z | 2016-07-29T16:05:44.000Z | # -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-04-08 10:32
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    # Auto-generated schema migration: adds the account-activation fields
    # (activation_key, key_expires) to the MyUser model.

    dependencies = [
        ('users', '0003_myuser_has_image'),
    ]
    operations = [
        migrations.AddField(
            model_name='myuser',
            name='activation_key',
            # Token stored during e-mail activation; blank until issued.
            field=models.CharField(blank=True, max_length=40),
        ),
        migrations.AddField(
            model_name='myuser',
            name='key_expires',
            # Defaults to the row's creation time for existing users.
            field=models.DateTimeField(default=django.utils.timezone.now),
        ),
    ]
| 24.925926 | 74 | 0.616642 | 487 | 0.723626 | 0 | 0 | 0 | 0 | 0 | 0 | 145 | 0.215453 |
e22ed24dd1982b59d460dc15a81e37cb147cdf17 | 582 | py | Python | src/page_object_pattern/test_template.py | paulbodean88/automation-design-patterns | b160f317a0c0a1de409908f938fbeab0772c8147 | [
"MIT"
] | 14 | 2017-07-25T10:11:06.000Z | 2022-03-25T10:17:25.000Z | src/page_object_pattern/test_template.py | paulbodean88/automation-design-patterns | b160f317a0c0a1de409908f938fbeab0772c8147 | [
"MIT"
] | 3 | 2017-07-23T17:19:14.000Z | 2017-07-24T19:54:52.000Z | src/page_object_pattern/test_template.py | paulbodean88/automation-design-patterns | b160f317a0c0a1de409908f938fbeab0772c8147 | [
"MIT"
] | 5 | 2019-08-29T02:35:04.000Z | 2020-02-24T14:39:09.000Z | """
Description:
- Test Template class.
Methods:
- test setup
- test teardown
- test implementation
@author: Paul Bodean
@date: 26/12/2017
"""
import unittest
from selenium import webdriver
class TestTemplate(unittest.TestCase):
    """Test template: opens the Wikipedia main page before each test case."""

    def setUp(self):
        """Launch a Chrome session and navigate to the page under test."""
        driver = webdriver.Chrome()
        driver.get("https://en.wikipedia.org/wiki/Main_Page")
        self.driver = driver

    def tearDown(self):
        """Close the browser session after each test."""
        self.driver.quit()
| 18.1875 | 66 | 0.603093 | 371 | 0.637457 | 0 | 0 | 0 | 0 | 0 | 0 | 333 | 0.572165 |
e22fce042441cdd77adbcb54853ba9d70a939d7d | 14,940 | py | Python | gmacpyutil/gmacpyutil/profiles_test.py | rgayon/macops | 1181ca269c9ae3235c1e9e7ae1bad4755b33c299 | [
"Apache-2.0"
] | 758 | 2015-01-05T19:48:20.000Z | 2022-02-18T10:44:52.000Z | gmacpyutil/gmacpyutil/profiles_test.py | rgayon/macops | 1181ca269c9ae3235c1e9e7ae1bad4755b33c299 | [
"Apache-2.0"
] | 161 | 2015-04-17T21:15:42.000Z | 2019-05-27T03:05:19.000Z | gmacpyutil/gmacpyutil/profiles_test.py | rgayon/macops | 1181ca269c9ae3235c1e9e7ae1bad4755b33c299 | [
"Apache-2.0"
] | 106 | 2015-01-20T21:21:00.000Z | 2022-03-04T00:15:41.000Z | """Tests for profiles module."""
import mock
from google.apputils import basetest
import profiles
class ProfilesModuleTest(basetest.TestCase):
  """Tests for the module-level helpers in profiles."""

  def testGenerateUUID(self):
    uuid = profiles.GenerateUUID('a')
    self.assertIsInstance(uuid, str)
    self.assertTrue(uuid.isupper())
    # Deterministic: the same input always maps to the same UUID.
    self.assertEqual(uuid, profiles.GenerateUUID('a'))
  def testValidatePayload(self):
    # An empty payload is missing every required key, so validation fails.
    with self.assertRaises(profiles.PayloadValidationError):
      profiles.ValidatePayload({})
    payload = {profiles.PAYLOADKEYS_IDENTIFIER: 'a',
               profiles.PAYLOADKEYS_DISPLAYNAME: 'a',
               profiles.PAYLOADKEYS_TYPE: 'com.apple.welcome.to.1984'}
    profiles.ValidatePayload(payload)
    # Validation fills in defaults for UUID, Enabled, and Version in place.
    self.assertEqual(payload.get(profiles.PAYLOADKEYS_UUID),
                     profiles.GenerateUUID('a'))
    self.assertEqual(payload.get(profiles.PAYLOADKEYS_ENABLED), True)
    self.assertEqual(payload.get(profiles.PAYLOADKEYS_VERSION), 1)
class ProfileClassTest(basetest.TestCase):
  """Tests for the Profile class."""
  # Helper: build a minimally valid profile, optionally with one payload.
  def _GetValidProfile(self, include_payload=True):
    profile = profiles.Profile()
    profile.Set(profiles.PAYLOADKEYS_DISPLAYNAME, 'Acme Corp Config Profile')
    profile.Set(profiles.PAYLOADKEYS_IDENTIFIER, 'com.acme.configprofile')
    profile.Set(profiles.PAYLOADKEYS_ORG, 'Acme Corp')
    profile.Set(profiles.PAYLOADKEYS_SCOPE, ['System', 'User'])
    profile.Set(profiles.PAYLOADKEYS_TYPE, 'Configuration')
    if include_payload:
      profile.AddPayload(self._GetValidPayload())
    return profile
  # Helper: the smallest payload dict that passes validation.
  def _GetValidPayload(self):
    test_payload = {profiles.PAYLOADKEYS_IDENTIFIER: 'com.test.payload',
                    profiles.PAYLOADKEYS_DISPLAYNAME: 'Test Payload',
                    profiles.PAYLOADKEYS_TYPE: 'com.apple.welcome.to.1984'}
    return test_payload
  def testInit(self):
    """Test the __init__ method."""
    profile = profiles.Profile()
    self.assertIsNotNone(profile._profile)
    self.assertEqual(profile._profile[profiles.PAYLOADKEYS_CONTENT], [])
  # Get falls back to the internal dict; content defaults to an empty list.
  def testGet(self):
    profile = profiles.Profile()
    profile._profile['TestKey'] = 'TestValue'
    self.assertEqual(profile.Get(profiles.PAYLOADKEYS_CONTENT), [])
    self.assertEqual(profile.Get('TestKey'), 'TestValue')
  def testSet(self):
    profile = profiles.Profile()
    profile.Set('TestKey', 'TestValue')
    profile.Set('OtherKey', 'OtherValue')
    self.assertEqual(profile._profile['TestKey'], 'TestValue')
    self.assertEqual(profile._profile['OtherKey'], 'OtherValue')
  # str() of a profile is its display name.
  def testStr(self):
    profile = self._GetValidProfile()
    self.assertEqual(profile.__str__(), 'Acme Corp Config Profile')
  # Non-dict payloads are rejected; valid ones land in the content list.
  def testAddPayload(self):
    profile = self._GetValidProfile(include_payload=False)
    test_payload = self._GetValidPayload()
    with self.assertRaises(profiles.PayloadValidationError):
      profile.AddPayload('Payloads should be dicts')
    profile.AddPayload(test_payload)
    self.assertEqual(profile.Get(profiles.PAYLOADKEYS_CONTENT), [test_payload])
  # Validation requires at least one payload and fills UUID/Version defaults.
  def testValidateProfile(self):
    profile = profiles.Profile()
    with self.assertRaises(profiles.ProfileValidationError):
      profile._ValidateProfile()
    profile = self._GetValidProfile(include_payload=False)
    with self.assertRaises(profiles.ProfileValidationError):
      profile._ValidateProfile()
    profile.AddPayload(self._GetValidPayload())
    profile._ValidateProfile()
    self.assertIsNotNone(profile.Get(profiles.PAYLOADKEYS_UUID))
    self.assertIsNotNone(profile.Get(profiles.PAYLOADKEYS_VERSION))
  # Save delegates to plistlib.writePlist with the internal dict.
  @mock.patch.object(profiles.plistlib, 'writePlist')
  def testSaveSuccess(self, mock_writeplist):
    profile = self._GetValidProfile()
    profile.Save('/tmp/hello')
    mock_writeplist.assert_called_once_with(profile._profile, '/tmp/hello')
  # IOError from plistlib is wrapped in ProfileSaveError.
  @mock.patch.object(profiles.plistlib, 'writePlist')
  def testSaveIOError(self, mock_writeplist):
    profile = self._GetValidProfile()
    mock_writeplist.side_effect = IOError
    with self.assertRaises(profiles.ProfileSaveError):
      profile.Save('/tmp/hello')
    mock_writeplist.assert_called_once_with(profile._profile, '/tmp/hello')
  # Install saves to a temp file then shells out to /usr/bin/profiles.
  @mock.patch.object(profiles.gmacpyutil, 'RunProcess')
  @mock.patch.object(profiles.Profile, 'Save')
  def testInstallSuccess(self, mock_save, mock_runprocess):
    profile = self._GetValidProfile()
    mock_runprocess.return_value = ['Output', None, 0]
    profile.Install()
    mock_save.assert_called_once_with(mock.ANY)
    mock_runprocess.assert_called_once_with(
        [profiles.CMD_PROFILES, '-I', '-F', mock.ANY],
        sudo=None, sudo_password=None)
  # A sudo password is forwarded to both sudo and sudo_password kwargs.
  @mock.patch.object(profiles.gmacpyutil, 'RunProcess')
  @mock.patch.object(profiles.Profile, 'Save')
  def testInstallSudoPassword(self, mock_save, mock_runprocess):
    profile = self._GetValidProfile()
    mock_runprocess.return_value = ['Output', None, 0]
    profile.Install(sudo_password='ladygagaeatssocks')
    mock_save.assert_called_once_with(mock.ANY)
    mock_runprocess.assert_called_once_with(
        [profiles.CMD_PROFILES, '-I', '-F', mock.ANY],
        sudo='ladygagaeatssocks', sudo_password='ladygagaeatssocks')
  # A non-zero exit status surfaces as ProfileInstallationError with output.
  @mock.patch.object(profiles.gmacpyutil, 'RunProcess')
  @mock.patch.object(profiles.Profile, 'Save')
  def testInstallCommandFail(self, mock_save, mock_runprocess):
    profile = self._GetValidProfile()
    mock_runprocess.return_value = ['Output', 'Errors', 42]
    with self.assertRaisesRegexp(profiles.ProfileInstallationError,
                                 'Profile installation failed!\n'
                                 'Output, Errors, 42'):
      profile.Install(sudo_password='ladygagaeatssocks')
    mock_save.assert_called_once_with(mock.ANY)
    mock_runprocess.assert_called_once_with(
        [profiles.CMD_PROFILES, '-I', '-F', mock.ANY],
        sudo='ladygagaeatssocks', sudo_password='ladygagaeatssocks')
  # A RunProcess exception is likewise wrapped in ProfileInstallationError.
  @mock.patch.object(profiles.gmacpyutil, 'RunProcess')
  @mock.patch.object(profiles.Profile, 'Save')
  def testInstallCommandException(self, mock_save, mock_runprocess):
    profile = self._GetValidProfile()
    mock_runprocess.side_effect = profiles.gmacpyutil.GmacpyutilException
    with self.assertRaisesRegexp(profiles.ProfileInstallationError,
                                 'Profile installation failed!\n'):
      profile.Install(sudo_password='ladygagaeatssocks')
    mock_save.assert_called_once_with(mock.ANY)
    mock_runprocess.assert_called_once_with(
        [profiles.CMD_PROFILES, '-I', '-F', mock.ANY],
        sudo='ladygagaeatssocks', sudo_password='ladygagaeatssocks')
class NetworkProfileClassTest(basetest.TestCase):
"""Tests for the NetworkProfile class."""
def testInit(self):
profile = profiles.NetworkProfile('testuser')
self.assertEqual(profile.Get(profiles.PAYLOADKEYS_DISPLAYNAME),
'Network Profile (testuser)')
self.assertEqual(profile.Get(profiles.PAYLOADKEYS_DESCRIPTION),
'Network authentication settings')
self.assertEqual(profile.Get(profiles.PAYLOADKEYS_IDENTIFIER),
'com.megacorp.networkprofile')
self.assertEqual(profile.Get(profiles.PAYLOADKEYS_SCOPE),
['System', 'User'])
self.assertEqual(profile.Get(profiles.PAYLOADKEYS_TYPE), 'Configuration')
self.assertEqual(profile.Get(profiles.PAYLOADKEYS_CONTENT), [])
def testGenerateID(self):
profile = profiles.NetworkProfile('testuser')
self.assertEqual(profile._GenerateID('test_suffix'),
'com.megacorp.networkprofile.test_suffix')
self.assertEqual(profile._GenerateID('another_suffix'),
'com.megacorp.networkprofile.another_suffix')
@mock.patch.object(profiles.NetworkProfile, 'AddPayload')
@mock.patch.object(profiles.crypto, 'load_privatekey')
@mock.patch.object(profiles.crypto, 'load_certificate')
@mock.patch.object(profiles.crypto, 'PKCS12Type')
@mock.patch.object(profiles.certs, 'Certificate')
def testAddMachineCertificateSuccess(self, mock_certificate, mock_pkcs12,
mock_loadcert, mock_loadkey,
mock_addpayload):
mock_certobj = mock.MagicMock()
mock_certobj.subject_cn = 'My Cert Subject'
mock_certobj.osx_fingerprint = '0011223344556677889900'
mock_certificate.return_value = mock_certobj
mock_pkcs12obj = mock.MagicMock()
mock_pkcs12obj.export.return_value = '-----PKCS12 Data-----'
mock_pkcs12.return_value = mock_pkcs12obj
mock_loadcert.return_value = 'certobj'
mock_loadkey.return_value = 'keyobj'
profile = profiles.NetworkProfile('testuser')
profile.AddMachineCertificate('fakecert', 'fakekey')
mock_pkcs12.assert_called_once_with()
mock_pkcs12obj.set_certificate.assert_called_once_with('certobj')
mock_pkcs12obj.set_privatekey.assert_called_once_with('keyobj')
mock_pkcs12obj.export.assert_called_once_with('0011223344556677889900')
mock_loadcert.assert_called_once_with(1, 'fakecert')
mock_loadkey.assert_called_once_with(1, 'fakekey')
mock_addpayload.assert_called_once_with(
{profiles.PAYLOADKEYS_IDENTIFIER:
'com.megacorp.networkprofile.machine_cert',
profiles.PAYLOADKEYS_TYPE: 'com.apple.security.pkcs12',
profiles.PAYLOADKEYS_DISPLAYNAME: 'My Cert Subject',
profiles.PAYLOADKEYS_ENABLED: True,
profiles.PAYLOADKEYS_VERSION: 1,
profiles.PAYLOADKEYS_CONTENT: profiles.plistlib.Data(
'-----PKCS12 Data-----'),
profiles.PAYLOADKEYS_UUID: mock.ANY,
'Password': '0011223344556677889900'})
@mock.patch.object(profiles.crypto, 'load_privatekey')
@mock.patch.object(profiles.crypto, 'load_certificate')
@mock.patch.object(profiles.crypto, 'PKCS12Type')
@mock.patch.object(profiles.certs, 'Certificate')
def testAddMachineCertificateInvalidKey(self, mock_certificate, mock_pkcs12,
mock_loadcert, mock_loadkey):
mock_certobj = mock.MagicMock()
mock_certobj.subject_cn = 'My Cert Subject'
mock_certobj.osx_fingerprint = '0011223344556677889900'
mock_certificate.return_value = mock_certobj
mock_pkcs12obj = mock.MagicMock()
mock_pkcs12obj.export.side_effect = profiles.crypto.Error
mock_pkcs12.return_value = mock_pkcs12obj
mock_loadcert.return_value = 'certobj'
mock_loadkey.return_value = 'keyobj_from_different_cert'
profile = profiles.NetworkProfile('testuser')
with self.assertRaises(profiles.CertificateError):
profile.AddMachineCertificate('fakecert', 'otherfakekey')
@mock.patch.object(profiles.certs, 'Certificate')
def testAddMachineCertificateBadCert(self, mock_certificate):
mock_certificate.side_effect = profiles.certs.CertError
profile = profiles.NetworkProfile('testuser')
with self.assertRaises(profiles.CertificateError):
profile.AddMachineCertificate('fakecert', 'fakekey')
@mock.patch.object(profiles.NetworkProfile, 'AddPayload')
@mock.patch.object(profiles.certs, 'Certificate')
def testAddAnchorCertificateSuccess(self, mock_certificate, mock_addpayload):
mock_certobj = mock.MagicMock()
mock_certobj.subject_cn = 'My Cert Subject'
mock_certobj.osx_fingerprint = '0011223344556677889900'
mock_certificate.return_value = mock_certobj
profile = profiles.NetworkProfile('testuser')
profile.AddAnchorCertificate('my_cert')
mock_certificate.assert_called_once_with('my_cert')
mock_addpayload.assert_called_once_with(
{profiles.PAYLOADKEYS_IDENTIFIER:
'com.megacorp.networkprofile.0011223344556677889900',
profiles.PAYLOADKEYS_TYPE: 'com.apple.security.pkcs1',
profiles.PAYLOADKEYS_DISPLAYNAME: 'My Cert Subject',
profiles.PAYLOADKEYS_CONTENT: profiles.plistlib.Data('my_cert'),
profiles.PAYLOADKEYS_ENABLED: True,
profiles.PAYLOADKEYS_VERSION: 1,
profiles.PAYLOADKEYS_UUID: mock.ANY})
@mock.patch.object(profiles.certs, 'Certificate')
def testAddAnchorCertificateBadCert(self, mock_certificate):
mock_certificate.side_effect = profiles.certs.CertError
profile = profiles.NetworkProfile('testuser')
with self.assertRaises(profiles.CertificateError):
profile.AddAnchorCertificate('test_cert')
  @mock.patch.object(profiles.NetworkProfile, 'AddPayload')
  def testAddNetworkPayloadSSID(self, mock_addpayload):
    """Wireless payloads use EAP type 13 (EAP-TLS) pinned to the stored certs/servers."""
    profile = profiles.NetworkProfile('test_user')
    profile._auth_cert = '00000000-AUTH-CERT-UUID-00000000'
    profile._anchor_certs = ['00000000-ANCH-ORCE-RTUU-ID000000']
    profile.AddTrustedServer('radius.company.com')
    profile.AddNetworkPayload('SSID')
    # Expected EAP config: trust anchored on the stored cert UUID and the
    # registered RADIUS server name, with no trust exceptions allowed.
    eap_client_data = {'AcceptEAPTypes': [13],
                       'PayloadCertificateAnchorUUID':
                           ['00000000-ANCH-ORCE-RTUU-ID000000'],
                       'TLSTrustedServerNames':
                           ['radius.company.com'],
                       'TLSAllowTrustExceptions': False}
    mock_addpayload.assert_called_once_with(
        {'AutoJoin': True,
         'SetupModes': ['System', 'User'],
         'PayloadCertificateUUID': '00000000-AUTH-CERT-UUID-00000000',
         'EncryptionType': 'WPA',
         'Interface': 'BuiltInWireless',
         profiles.PAYLOADKEYS_DISPLAYNAME: 'SSID',
         profiles.PAYLOADKEYS_IDENTIFIER:
             'com.megacorp.networkprofile.ssid.SSID',
         profiles.PAYLOADKEYS_TYPE: 'com.apple.wifi.managed',
         'SSID_STR': 'SSID',
         'EAPClientConfiguration': eap_client_data})
  @mock.patch.object(profiles.NetworkProfile, 'AddPayload')
  def testAddNetworkPayloadWired(self, mock_addpayload):
    """Wired payloads target FirstActiveEthernet with 'Any' encryption."""
    profile = profiles.NetworkProfile('test_user')
    profile._auth_cert = '00000000-AUTH-CERT-UUID-00000000'
    profile._anchor_certs = ['00000000-ANCH-ORCE-RTUU-ID000000']
    profile.AddTrustedServer('radius.company.com')
    profile.AddNetworkPayload('wired')
    # Same EAP-TLS trust configuration as the wireless case.
    eap_client_data = {'AcceptEAPTypes': [13],
                       'PayloadCertificateAnchorUUID':
                           ['00000000-ANCH-ORCE-RTUU-ID000000'],
                       'TLSTrustedServerNames':
                           ['radius.company.com'],
                       'TLSAllowTrustExceptions': False}
    mock_addpayload.assert_called_once_with(
        {'AutoJoin': True,
         'SetupModes': ['System', 'User'],
         'PayloadCertificateUUID': '00000000-AUTH-CERT-UUID-00000000',
         'EncryptionType': 'Any',
         'Interface': 'FirstActiveEthernet',
         profiles.PAYLOADKEYS_DISPLAYNAME: 'Wired',
         profiles.PAYLOADKEYS_IDENTIFIER:
             'com.megacorp.networkprofile.wired',
         profiles.PAYLOADKEYS_TYPE: 'com.apple.firstactiveethernet.managed',
         'EAPClientConfiguration': eap_client_data})
if __name__ == '__main__':
  # Delegate to Google's basetest runner when executed directly.
  basetest.main()
| 40.16129 | 79 | 0.71178 | 14,783 | 0.989491 | 0 | 0 | 10,111 | 0.676774 | 0 | 0 | 3,103 | 0.207697 |
e23054b80783b903c8f24c3008cf90e547c8f3cc | 835 | py | Python | Hackerearth Set/NaviAndMath.py | Siddharth2016/PYTHON3_prog | 9dfa258d87f5b00779d39d9de9a49c1c6cea06be | [
"MIT"
] | 2 | 2019-02-26T14:06:53.000Z | 2019-02-27T17:13:01.000Z | Hackerearth Set/NaviAndMath.py | Siddharth2016/PYTHON3_prog | 9dfa258d87f5b00779d39d9de9a49c1c6cea06be | [
"MIT"
] | null | null | null | Hackerearth Set/NaviAndMath.py | Siddharth2016/PYTHON3_prog | 9dfa258d87f5b00779d39d9de9a49c1c6cea06be | [
"MIT"
] | 2 | 2017-12-26T07:59:57.000Z | 2018-06-24T03:35:05.000Z | # NAVI AND MATH
def power(base, exp):
    """Return (base ** exp) mod 1_000_000_007.

    Replaces the hand-rolled square-and-multiply loop with the builtin
    three-argument ``pow``, which performs the same modular exponentiation
    in C.  The original loop never executed for exp <= 0 and returned
    ``1 % mod``; that behavior is preserved explicitly, because builtin
    ``pow`` with a *negative* exponent would instead compute a modular
    inverse (callers in this script only pass positive exponents).

    :param base: integer base
    :param exp: integer exponent
    :return: base**exp modulo 1000000007
    """
    MOD = 1000000007
    if exp <= 0:
        # Matches the original loop-skip result for non-positive exponents.
        return 1 % MOD
    return pow(base, exp, MOD)
mod = 1000000007
# For every test case, brute-force subsets of the array via bitmasks and
# report the maximum of (product * modular_inverse(sum)) mod p.
for i in range(int(input().strip())):
    ans = "Case #" + str(i+1) + ': '
    N = int(input().strip())
    Arr = [int(a) for a in input().strip().split()]
    # NOTE(review): mask starts at 3, skipping masks 1 and 2 (the two
    # lowest-bit singletons) — presumably to require >= 2 elements, though
    # other singleton masks (4, 8, ...) are still enumerated; confirm intent.
    mask = 3
    maxx = -1
    #ct = 0
    while mask<(1<<N):
        p = 0
        sm = 0
        ml = 1
        #ct += 1
        # Accumulate sum and modular product over the elements selected by mask.
        for j in range(0,N,1):
            if mask&(1<<j):
                sm += Arr[j]
                ml = (ml*Arr[j])%mod
                #print(Arr[j])
        # Fermat's little theorem: sm^(mod-2) is the inverse of sm mod the prime.
        p = (ml*power(sm,mod-2))%mod
        if maxx<p:
            maxx = p
            #print(maxx)
        mask += 1
        #print(ct)
    ans += str(maxx)
    print(ans)
| 21.973684 | 51 | 0.42515 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 78 | 0.093413 |
e23065a0cac10137de0062f9b29000d28da9f538 | 10,241 | py | Python | hw2/stud/models/absa_classifier.py | LeonardoEmili/Aspect-Based-Sentiment-Analysis | d6a37132931401c1a6d4a53208055cc507e2a7a8 | [
"MIT"
] | 6 | 2021-09-19T20:35:54.000Z | 2022-03-20T08:07:30.000Z | hw2/stud/models/absa_classifier.py | LeonardoEmili/Aspect-Based-Sentiment-Analysis | d6a37132931401c1a6d4a53208055cc507e2a7a8 | [
"MIT"
] | null | null | null | hw2/stud/models/absa_classifier.py | LeonardoEmili/Aspect-Based-Sentiment-Analysis | d6a37132931401c1a6d4a53208055cc507e2a7a8 | [
"MIT"
] | null | null | null | from typing import *
from dataclasses import asdict
from transformers import BertModel
from torch.nn.utils.rnn import pad_sequence
from stud.models.ner_classifier import NERClassifier
from stud.models.polarity_classifier import PolarityClassifier
from stud.constants import LOGGER_TRAIN_LOSS, LOGGER_VALID_LOSS, LOGGER_TEST_LOSS
from stud.torch_utils import batch_scatter_mean
from torchtext.vocab import Vocab
from stud import utils
import pytorch_lightning as pl
import torch
import torch.nn as nn
def model_from(hparams: utils.HParams, polarity_hparams: Optional[utils.HParams] = None) -> nn.Module:
    """Build the model selected by ``hparams``.

    When ``polarity_hparams`` is supplied, two pre-trained checkpoints
    (NER for task A, polarity for task B) are loaded and combined into an
    AspectMultistepClassifier.  Otherwise the model named by
    ``hparams.model_name`` is constructed from scratch.
    """
    if polarity_hparams:
        ner = NERClassifier.load_from_checkpoint(hparams.ner_model_path, hparams=hparams)
        polarity = PolarityClassifier.load_from_checkpoint(polarity_hparams.polarity_model_path, hparams=polarity_hparams)
        return AspectMultistepClassifier(ner, polarity)

    name = hparams.model_name
    if name == 'bert_lstm':
        encoder = BertModel.from_pretrained(utils.get_bert_path(hparams))
        # BERT is used as a frozen feature extractor.
        encoder.eval()
        return BERTLSTMClassification(hparams, encoder)
    if name == 'multistep_classifier':
        raise Exception(f'Missing implementation of AspectMultistepClassifier!')
    raise Exception(f'{name} not supported!')
class BERTLSTMClassification(nn.Module):
    """BiLSTM tagger on top of a frozen BERT encoder.

    BERT hidden states are merged across the configured layers, the
    WordPiece vectors are averaged back into token vectors, and a
    bidirectional LSTM followed by a two-layer head produces per-token
    tag scores.
    """

    def __init__(
        self,
        hparams: utils.HParams,
        bert: BertModel
    ):
        super().__init__()
        self.hparams = hparams
        self.bert = bert
        hidden = hparams.hidden_dim
        self.lstm = nn.LSTM(self.bert_output_dim, hidden,
                            batch_first=True, bidirectional=True)
        # The LSTM is bidirectional, hence 2 * hidden input features.
        self.fc1 = nn.Linear(2 * hidden, hidden)
        self.fc2 = nn.Linear(hidden, len(hparams.output_vocab))
        self.dropout = nn.Dropout(hparams.dropout)

    @property
    def bert_output_dim(self) -> int:
        """Encoder output width under the chosen layer-pooling strategy."""
        if self.hparams.strategy == 'cat':
            return self.hparams.input_dim * len(self.hparams.layers_to_merge)
        return self.hparams.input_dim

    def forward(self, x: Dict[str, torch.Tensor]) -> torch.Tensor:
        """Return unnormalised tag logits for every token in the batch."""
        with torch.no_grad():
            encoded = self.bert(**x['batch'], output_hidden_states=True)
        merged = utils.merge_layers(
            encoded.hidden_states, strategy=self.hparams.strategy,
            layers_to_merge=self.hparams.layers_to_merge
        )
        # Collapse WordPiece embeddings back into whole-token embeddings.
        tokens = batch_scatter_mean(merged, x['mask'])
        lstm_out, _ = self.lstm(self.dropout(tokens))
        activated = torch.relu(self.dropout(self.fc1(lstm_out)))
        return self.fc2(activated)
class ABSAClassifier(pl.LightningModule):
    '''
    NER classifier identifies aspect terms and polarities for task A+B.

    :param hparams: hyperparameters and target vocab to set up the model
    '''

    def __init__(self, hparams: utils.HParams):
        super().__init__()
        self.save_hyperparameters(asdict(hparams))
        self.output_dim = len(hparams.output_vocab)
        self.model = model_from(hparams)
        # Optional external scorer invoked in test_epoch_end.
        self.evaluate_callback = None
        self.ignore_index = hparams.output_vocab.get_default_index()
        self.loss_fn = nn.CrossEntropyLoss(ignore_index=self.ignore_index)
        # Accumulators filled batch-by-batch in test_step.
        self.aspect_predictions = []
        self.aspect_indexes = []
        self.gold = []

    @property
    def bio_itos(self) -> Tuple[List[int], List[int], int]:
        ''' Returns the extended BIO indexes: (ids of B-* tags, ids of I-* tags, id of O). '''
        return (
            utils.vocab_tokens_startswith(self.hparams.output_vocab, 'B'),
            utils.vocab_tokens_startswith(self.hparams.output_vocab, 'I'),
            self.hparams.output_vocab['O'])

    def training_step(
        self,
        batch: Tuple[Dict[str, torch.Tensor], Dict[str, torch.Tensor]],
        batch_idx: int
    ) -> torch.Tensor:
        ''' Computes and logs the per-epoch cross-entropy loss over NER tags. '''
        x, y = batch
        y_hat = self.model(x)
        loss = self.loss_fn(y_hat.view(-1, self.output_dim), y['ner'].view(-1).long())
        metrics = {LOGGER_TRAIN_LOSS: loss}
        self.log_dict(metrics, on_step=False, on_epoch=True)
        return loss

    @torch.no_grad()
    def evaluation(
        self,
        batch: Tuple[Dict[str, torch.Tensor], Dict[str, torch.Tensor]]
    ) -> Tuple[torch.Tensor, torch.Tensor]:
        ''' Runs a gradient-free forward pass; loss is 0 when no labels exist (test_only). '''
        x, y = batch
        y_hat = self.model(x)
        loss = 0
        if not self.hparams.test_only:
            loss = self.loss_fn(y_hat.view(-1, self.output_dim), y['ner'].view(-1).long())
        return loss, y_hat

    def validation_step(
        self,
        batch: Tuple[Dict[str, torch.Tensor], Dict[str, torch.Tensor]],
        batch_idx: int
    ):
        ''' Logs the validation loss for one batch. '''
        loss, logits = self.evaluation(batch)
        metrics = {LOGGER_VALID_LOSS: loss}
        self.log_dict(metrics, on_step=False, on_epoch=True)

    def test_step(
        self,
        batch: Tuple[Dict[str, torch.Tensor], Dict[str, torch.Tensor]],
        batch_idx: int = 0
    ) -> Tuple[List[List[str]], List[List[str]]]:
        ''' Decodes aspect spans and polarities for one batch and accumulates them. '''
        x, y = batch
        loss, logits = self.evaluation(batch)
        y_hat = logits.argmax(-1)
        # Turn per-token tag ids into per-sentence aspect index spans plus
        # the tag ids inside each span (used later to derive the polarity).
        aspect_indexes, polarity_terms = zip(*[utils.extract_aspect_indices(
            prediction_idxs, length, *self.bio_itos, return_tensors=True
        ) for prediction_idxs, length in zip(y_hat.detach().cpu(), x['lengths'])])
        aspect_terms = self.batch_aspects_lookup(x['tokens'], aspect_indexes)
        polarity_terms = self.batch_aggregate_polarities(polarity_terms)
        self.aspect_indexes += aspect_indexes
        self.aspect_predictions += polarity_terms
        if not self.hparams.test_only:
            self.gold += [list(zip(*labels)) for labels in zip(y['aspect'], y['polarity_labels'])]
            assert len(self.aspect_indexes) == len(self.gold)
        metrics = {LOGGER_TEST_LOSS: loss}
        self.log_dict(metrics)
        return aspect_terms, polarity_terms

    def batch_aggregate_polarities(
        self,
        batch_idxs: List[int]
    ) -> List[List[str]]:
        ''' Maps each aspect's tag-id span to a single polarity label per sentence. '''
        return [[utils.aggregate_polarities(idxs, self.hparams.output_vocab) for idxs in sent_idxs] for sent_idxs in batch_idxs]

    def aspects_lookup(
        self,
        tokens: List[str],
        idxs: List[List[int]],
        sep: str = ' '
    ) -> List[List[str]]:
        ''' Returns the collection of tokens indexed by idxs. '''
        # NOTE(review): tokens appear to be spaCy-like objects exposing
        # ``.text`` (the List[str] annotation looks inaccurate) — confirm.
        return [sep.join([tokens[k].text for k in idx]) for idx in idxs]

    def batch_aspects_lookup(
        self,
        batch_tokens: List[List[str]],
        indexes: List[List[List[int]]]
    ) -> List[List[List[str]]]:
        ''' Batch-version of aspects_lookup. '''
        return [self.aspects_lookup(tokens, idxs) for idxs, tokens in zip(indexes, batch_tokens)]

    def test_epoch_end(self, outputs: Tuple[List[List[str]], List[List[str]]]) -> None:
        ''' Flattens per-batch outputs into predictions and, when gold labels exist, scores them. '''
        aspect_terms, polarity_terms = zip(*[sample for batch in outputs for sample in zip(*batch)])
        self.predictions = [{'targets': list(zip(*pred))} for pred in zip(aspect_terms, polarity_terms)]
        if not self.hparams.test_only and self.evaluate_callback:
            # Gold targets are prefixed with a dummy leading element (1) to
            # match the evaluator's expected tuple arity.
            self.gold = [{'targets': [(1, *t) for t in terms]} for terms in self.gold]
            scores, precision, recall, f1 = self.evaluate_callback(self.gold, self.predictions)
            self.log_dict({'precision': precision, 'recall': recall, 'f1': f1})

    def configure_optimizers(self):
        ''' Adam with library-default hyperparameters. '''
        return torch.optim.Adam(self.model.parameters())
class AspectMultistepClassifier(pl.LightningModule):
'''
Experiment with a multistep classifier that predicts labels for A+B using
individually trained models for task A and task B.
'''
    def __init__(
        self,
        ner_model: pl.LightningModule,
        polarity_model: pl.LightningModule
    ):
        """Wire together a trained NER model (task A) and polarity model (task B)."""
        super().__init__()
        self.ner_model = ner_model
        self.polarity_model = polarity_model
        # Accumulators filled across test batches by evaluation().
        self.predictions = []
        self.gold = []
        self.aspect_pred = []
        self.polarity_pred = []
    @torch.no_grad()
    def evaluation(
        self,
        batch: Tuple[Dict[str, torch.Tensor], Dict[str, torch.Tensor]]
    ):
        """Two-step inference: model A finds aspect spans, model B labels their polarity.

        Side effects: appends to aspect_pred, polarity_pred, predictions and
        gold; mutates x by adding the 'aspect_indexes' key consumed by the
        polarity model.
        """
        x, y = batch
        y_hat = self.ner_model.model(x).argmax(-1)
        # Extract aspect indexes from the first model
        x['aspect_indexes'], _ = zip(*[utils.extract_aspect_indices(
            prediction_idxs, length, *self.ner_model.bio_idxs, return_tensors=True
        ) for prediction_idxs, length in zip(y_hat, x['lengths'])])
        self.aspect_pred += [[' '.join([sent_tokens[idx].text for idx in idxs])
                              for idxs in sent_idx]
                             for sent_idx, sent_tokens in zip(x['aspect_indexes'], x['tokens'])]
        y_hat = self.polarity_model.model(x).argmax(-1)
        aspects_indexes = pad_sequence(
            [torch.ones(len(idxs)) for idxs in x['aspect_indexes']],
            batch_first=True, padding_value=self.polarity_model.ignore_index)
        # Apply masking to aspect_indexes, ignoring padded elements
        aspects_mask = aspects_indexes != self.polarity_model.ignore_index
        # Extract predictions from the second model
        self.polarity_pred += [pred[mask].tolist() for pred, mask in zip(y_hat, aspects_mask)]
        # Pair predictions of model A with predictions of model B (i.e. list of (term_i, polarity_i))
        # NOTE(review): aspect_pred/polarity_pred hold *all* batches seen so
        # far, so this re-appends pairings for earlier sentences on every
        # batch — looks like a duplication bug; confirm before relying on
        # self.predictions.
        self.predictions += [list(zip(aspects, predictions))
                             for aspects, predictions in zip(self.aspect_pred, self.polarity_pred)]
        # Store gold labels to perform evaluation
        gold_mask = y['polarity'] != self.polarity_model.ignore_index
        self.gold += [list(zip(aspects, polarities[mask].int().tolist()))
                      for aspects, polarities, mask in zip(y['aspect'], y['polarity'], gold_mask)]
def test_step(
self,
batch: Tuple[Dict[str, torch.Tensor], Dict[str, torch.Tensor]],
batch_idx: int
):
x, y = batch
logits = self.evaluation(batch)
| 40.003906 | 128 | 0.650229 | 8,825 | 0.861732 | 0 | 0 | 2,781 | 0.271556 | 0 | 0 | 1,276 | 0.124597 |
e2307c5662b8b852eaadcd3190cbb2fd42ef7f6a | 4,596 | py | Python | license_plate_images_generator.py | aboerzel/ANPR-keras | c49fe4031752227d99d4acccccb35f6cb896db68 | [
"MIT"
] | 6 | 2019-07-24T05:18:27.000Z | 2020-04-25T12:45:04.000Z | license_plate_images_generator.py | aboerzel/ANPR-keras | c49fe4031752227d99d4acccccb35f6cb896db68 | [
"MIT"
] | null | null | null | license_plate_images_generator.py | aboerzel/ANPR-keras | c49fe4031752227d99d4acccccb35f6cb896db68 | [
"MIT"
] | 2 | 2019-03-23T05:16:15.000Z | 2020-06-22T03:12:38.000Z | import argparse
import json
import os.path
import random
import re
import time
import cv2
import numpy as np
import requests
from PIL import Image
from config import config
class GermanLicensePlateImagesGenerator:
    """Generates labelled German license-plate images via nummernschild.heisnbrg.net.

    For each generated plate number a front picture (with inspection
    stickers) and a rear picture are downloaded and stored as
    ``<output>/<F|R>#<license number>.png``.
    """

    def __init__(self, output):
        """
        :param output: directory into which plate images are written
        """
        self.output = output
        # Valid German county marks (plate prefixes) loaded from the config
        # JSON.  The file handle is now closed deterministically (the
        # original leaked it via a bare open(...).read()).
        with open(config.GERMAN_COUNTY_MARKS, encoding='utf-8') as county_file:
            self.COUNTRY_MARKS = np.asarray([d['CM'] for d in json.loads(county_file.read())])
        self.LETTERS = "ABCDEFGHIJKLMNOPQRSTUVWXYZÄÖÜ"
        self.DIGITS = "0123456789"
        # Federal-state codes used for the inspection sticker.
        self.COUNTRIES = ['BW', 'BY', 'BE', 'BB', 'HB', 'HH', 'HE', 'MV', 'NI', 'NW', 'RP', 'SL', 'SN', 'ST', 'SH', 'TH']
        self.MONTHS = ['01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12']
        self.YEARS = ['06', '07', '08', '09', '10', '11', '12', '13', '14', '15', '16', '17']
        # Re-seed from OS entropy so repeated runs produce different plates.
        random.seed()

    @staticmethod
    def get_image_url(license_number, country, month, year):
        """Build the web-service URL that starts rendering the given plate.

        The plate text is escaped for the service: '-' -> %3A and the
        umlauts Ä/Ö/Ü to their Latin-1 percent escapes.
        """
        license_number = license_number.replace("-", "%3A").replace("Ä", "%C4").replace("Ö", "%D6").replace("Ü", "%DC")
        return "http://nummernschild.heisnbrg.net/fe/task?action=startTask&kennzeichen={0}&kennzeichenZeile2=&engschrift=false&pixelHoehe=32&breiteInMM=520&breiteInMMFest=true&sonder=FE&dd=01&mm=01&yy=00&kreis={1}&kreisName=&humm={2}&huyy={3}&sonderKreis=LEER&mm1=01&mm2=01&farbe=SCHWARZ&effekt=KEIN&tgaDownload=false".format(
            license_number, country, month, year)

    def __generate_license_number(self):
        """Create a random, plausible German license number 'CM-L[L]D...'."""
        country = random.choice(self.COUNTRY_MARKS)
        letter_count = random.randint(1, 2)
        letters = "{}".format(random.choice(self.LETTERS)) if letter_count == 1 else "{}{}".format(
            random.choice(self.LETTERS), random.choice(self.LETTERS))
        # NOTE(review): the original conditional ("1 if letter_count == 2
        # else 1") evaluated to 1 in both branches; collapsed to a constant
        # and renamed from the builtin-shadowing name ``min``.
        min_digits = 1
        digit_count = random.randint(min_digits, max((8 - len(country) - letter_count), 4))
        digits = "".join(random.choice(self.DIGITS) for _ in range(digit_count))
        return "{}-{}{}".format(country, letters, digits)

    def __create_license_plate_picture(self, n, license_number, country, front):
        """Request one plate rendering and save it as a grayscale PNG.

        :param n: running index, used only for progress output
        :param license_number: plate text, e.g. "B-AB123"
        :param country: federal-state code for the inspection sticker
        :param front: True renders the front plate (with month/year stickers)
        :return: True if a new image was stored, False otherwise
        """
        file_path = self.output + '/{0}#{1}.png'.format("F" if front else "R", license_number)
        if os.path.exists(file_path):
            return False

        month = random.choice(self.MONTHS) if front else ''
        year = random.choice(self.YEARS) if front else ''

        # Step 1: start the rendering task on the server.
        create_image_url = GermanLicensePlateImagesGenerator.get_image_url(license_number, country, month, year)
        r = requests.get(create_image_url)
        if r.status_code != 200:
            return False

        # The response embeds the task id as <id>...</id>.
        task_id = re.compile('<id>(.*?)</id>', re.DOTALL | re.IGNORECASE).findall(
            r.content.decode("utf-8"))[0]

        # Step 2: poll the task status once.
        status_url = 'http://nummernschild.heisnbrg.net/fe/task?action=status&id=%s' % task_id
        time.sleep(.200)
        r = requests.get(status_url)
        if r.status_code != 200:
            return False

        # Step 3: fetch the rendered image.
        show_image_url = 'http://nummernschild.heisnbrg.net/fe/task?action=showInPage&id=%s'
        show_image_url = show_image_url % task_id
        time.sleep(.200)
        r = requests.get(show_image_url)
        if r.status_code != 200:
            return False

        # sometimes the web service returns a corrupted image, check the image by getting the size and skip if corrupted
        try:
            # np.frombuffer replaces the deprecated np.fromstring (same result).
            buffer = np.frombuffer(r.content, np.uint8)
            image = cv2.imdecode(buffer, cv2.IMREAD_COLOR)
            image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
            im = Image.fromarray(image)  # don't use cv2.imwrite() because there is a bug with utf-8 encoded filepaths
            im.save(file_path)
            print("{0:06d} : {1}".format(n, file_path))
            return True
        except Exception:
            # Was a bare ``except:``, which also swallowed SystemExit and
            # KeyboardInterrupt; narrowed to Exception.
            return False

    def generate(self, items):
        """Generate *items* front/rear image pairs.

        NOTE(review): the inner loop keeps generating *new* plate numbers
        for the same index ``n`` until a front-image creation fails
        (duplicate file or network error) — confirm this is intended.
        """
        for n in range(items):
            while True:
                license_number = self.__generate_license_number()
                country = random.choice(self.COUNTRIES)
                if not self.__create_license_plate_picture(n, license_number, country, True):
                    break
                time.sleep(.200)
                self.__create_license_plate_picture(n, license_number, country, False)
                time.sleep(.200)
# Command-line interface: how many plates to generate and where to put them.
# NOTE(review): this runs at import time; consider a __main__ guard.
ap = argparse.ArgumentParser()
ap.add_argument("-i", "--items", default="60000", help="Number of items to generate")
ap.add_argument("-o", "--output", default=config.PLATE_IMAGES, help="Output path")
args = vars(ap.parse_args())
lpdg = GermanLicensePlateImagesGenerator(os.path.abspath(args["output"]))
lpdg.generate(int(args["items"]))
| 41.405405 | 326 | 0.625109 | 4,084 | 0.88744 | 0 | 0 | 574 | 0.124728 | 0 | 0 | 1,048 | 0.227727 |
e230b228e297b8e956c0badf85c55a74344705b8 | 4,697 | py | Python | SUIBE_DID_Data_Manager/weidentity/localweid.py | SUIBE-Blockchain/SUIBE_DID_Data_Manager | d38f3f37463f36802eb6acb578f8e17faf878c79 | [
"MIT"
] | null | null | null | SUIBE_DID_Data_Manager/weidentity/localweid.py | SUIBE-Blockchain/SUIBE_DID_Data_Manager | d38f3f37463f36802eb6acb578f8e17faf878c79 | [
"MIT"
] | 2 | 2020-10-21T07:05:43.000Z | 2020-10-22T17:10:53.000Z | SUIBE_DID_Data_Manager/weidentity/localweid.py | SUIBE-Blockchain/SUIBE_DID_Data_Manager | d38f3f37463f36802eb6acb578f8e17faf878c79 | [
"MIT"
] | null | null | null | # -*- coding:utf8 -*-
import os
from eth_account import Account
import sha3
import base64
import hashlib
from ecdsa import SigningKey, SECP256k1
# DID methods accepted by verify_did(); currently only WeIdentity ("weid").
DID_TYPE = ["weid"]
def create_privkey():
    """Return 32 cryptographically secure random bytes for use as a private key."""
    return os.urandom(32)
def create_ecdsa_privkey():
    """Generate a fresh SECP256k1 signing key (ecdsa library object)."""
    return SigningKey.generate(curve=SECP256k1)
def create_random_weid():
    """Create a new WeID from a freshly generated random private key.

    The chain-id segment is left as the literal placeholder ``CHAIN_ID``
    and is substituted later (see update_did_chain_id) when the identity
    is written to a chain.
    """
    # Generate a random 32-byte key and derive the account address from it.
    privkey = create_privkey()
    account = generate_addr(priv=privkey.hex())
    addr = account["payload"]["addr"]
    # Assemble the WeID; CHAIN_ID is filled in at up-link time.
    weid = "did:weid:CHAIN_ID:{addr}".format(addr=addr)
    data = {
        "privateKeyHex": account["payload"]["priv"],
        "publicKeyHex": account["payload"]["pubv"],
        "privateKeyInt": str(int(account["payload"]["priv"], 16)),
        "publicKeyInt": str(int(account["payload"]["pubv"], 16)),
        "weId": weid,
    }
    return data
def create_watting_weid(privkey):
    """Create a WeID from a caller-supplied hex private key.

    ("watting" is presumably a typo for "waiting"; the name is kept for
    backward compatibility.)  Unlike create_random_weid, the produced WeID
    has no chain-id segment at all: ``did:weid:<addr>``.

    :param privkey: hex-encoded private key string
    """
    account = generate_addr(priv=privkey)
    addr = account["payload"]["addr"]
    weid = "did:weid:{addr}".format(addr=addr)
    data = {
        "privateKeyHex": account["payload"]["priv"],
        "publicKeyHex": account["payload"]["pubv"],
        "privateKeyInt": str(int(account["payload"]["priv"], 16)),
        "publicKeyInt": str(int(account["payload"]["pubv"], 16)),
        "weId": weid,
    }
    return data
def create_weid_by_privkey(privkey, chain_id):
    """Create a WeID bound to *chain_id* from a private key.

    :param privkey: private key, either "0x"-prefixed hex or a decimal
                    integer string (converted to hex internally)
    :param chain_id: chain identifier placed into the WeID
    """
    if privkey[:2] == "0x":
        account = generate_addr(priv=privkey[2:])
    else:
        account = generate_addr(priv=hex(int(privkey))[2:])
    addr = account["payload"]["addr"]
    weid = "did:weid:{chain_id}:{addr}".format(chain_id=chain_id, addr=addr)
    data = {
        "privateKeyHex": account["payload"]["priv"],
        "publicKeyHex": account["payload"]["pubv"],
        "privateKeyInt": str(int(account["payload"]["priv"], 16)),
        "publicKeyInt": str(int(account["payload"]["pubv"], 16)),
        # NOTE(review): this key is lowercase "weid" while the sibling
        # factories use "weId" — likely an inconsistency; left unchanged
        # because callers may depend on it.
        "weid": weid,
    }
    return data
def generate_addr(priv=None):
    """Derive an Ethereum-style account (address + key pair).

    :param priv: hex-encoded private key; when None a new random account
                 is created.
    :return: {"result": "success", "payload": {"addr", "priv", "pubv"}} on
             success, or {"result": "error", "error": <exception>} when the
             supplied key cannot be loaded.
    """
    if priv is None:  # identity check (was the non-idiomatic ``priv == None``)
        account = Account.create()
    else:
        try:
            account = Account.privateKeyToAccount(priv)
        except Exception as e:
            return {"result": "error", "error": e}
    return {"result": "success",
            "payload":
                {"addr": account.address,
                 "priv": account.privateKey.hex(),
                 "pubv": str(account._key_obj.public_key).lower()
                 }}
def update_did_chain_id(did, chain_id):
    """Substitute the ``CHAIN_ID`` placeholder inside *did* with *chain_id*.

    The original performed an opaque append/overwrite/join dance on the
    split list; direct concatenation is equivalent for the single-
    placeholder DIDs produced by this module.  As before, an IndexError is
    raised when the placeholder is absent.

    :param did: DID containing the literal "CHAIN_ID" placeholder
    :param chain_id: chain identifier string to insert
    :return: the DID with the placeholder replaced
    """
    parts = did.split("CHAIN_ID")
    return parts[0] + chain_id + parts[1]
def verify_did(did):
    """Validate the textual structure of a WeIdentity DID.

    Expected shape: ``did:weid:<chain_id>:0x<address>``.

    :param did: DID string to check
    :return: True when the DID is well-formed, otherwise a (Chinese)
             error-message string describing the first failing check.
    """
    verify_data = did.split(":")
    # Guard added: the original indexed verify_data[3] unconditionally and
    # crashed with IndexError on DIDs with fewer than four segments.
    if len(verify_data) < 4:
        return "请提供正确的did。"
    if verify_data[0] != "did":
        return "请提供正确的did。"
    if verify_data[1] not in DID_TYPE:
        return "请提供正确的DID Type。"
    # if verify_data[2] == "CHAIN_ID":
    #     return "请指定正确的chain id。"
    if verify_data[3][:2] != "0x":
        return "请输入正确的did。"
    return True
def Hash(msg):
    """Return the keccak-256 digest of *msg* (bytes) as a hex string."""
    k = sha3.keccak_256()
    k.update(msg)
    return k.hexdigest()
def ethtype_to_int_priv_pubv(priv, pubv):
    """
    Convert an eth-style key pair to the decimal-string format expected by
    WeIdentity.
    :param priv: type: bytes
    :param pubv: public key as a hex string (e.g. "0x04ab...")
    :return: {"priv": <decimal str>, "pubv": <decimal str>}
    """
    private_key = int.from_bytes(priv, byteorder='big', signed=False)
    # SECURITY: the original used eval(pubv), which executes arbitrary code
    # if pubv is ever attacker-controlled.  int(pubv, 16) parses the same
    # "0x..." hex strings without code execution.
    public_key = int(pubv, 16)
    return {"priv": str(private_key), "pubv": str(public_key)}
def int_to_ethtype_priv_pubv(priv, pubv):
    """Inverse of ethtype_to_int_priv_pubv. Not implemented yet (stub returns None)."""
    pass
def base64_decode(base_data):
    """Decode a base64 payload.

    :param base_data: base64-encoded ``str`` or ``bytes``
    :return: the decoded raw ``bytes``
    """
    return base64.b64decode(base_data)
def base64_encode(bytes_data):
"""
base64加密
:param bytes_data:
:return:
"""
base_data = base64.b64encode(bytes_data)
return bytes.decode(base_data)
def binary_to_list(bin):
    """Convert a bytes object to a list of integer byte values.

    Replaces the manual enumerate/append loop with the direct ``list()``
    conversion (iterating bytes already yields ints).  The parameter name
    shadows the builtin ``bin``; kept for keyword-argument compatibility.
    """
    return list(bin)
def list_to_binary(list):
    """Convert a list of integer byte values (0-255) back to bytes.

    Replaces the quadratic ``b'' +=`` loop with the direct ``bytes()``
    constructor.  The parameter name shadows the builtin ``list``; kept
    for keyword-argument compatibility.
    """
    return bytes(list)
def ecdsa_sign(encode_transaction, privkey):
    """Sign a base64-encoded transaction with a SECP256k1 private key.

    :param encode_transaction: base64-encoded raw transaction bytes
    :param privkey: private key as hex string or raw bytes
    :return: base64-encoded ECDSA signature string
    """
    if isinstance(privkey, str):
        privkey = bytes.fromhex(privkey)
    signning_key = SigningKey.from_string(privkey, curve=SECP256k1)
    # encode_transaction = respBody['respBody']['encodedTransaction']
    # base64-decode the wire payload back to raw transaction bytes.
    transaction = base64_decode(encode_transaction)
    # keccak-256 hash of the raw transaction (hex string).
    hashedMsg = Hash(transaction)
    bytes_hashed = bytes(bytearray.fromhex(hashedMsg))
    # Sign the digest; the ecdsa library applies SHA-256 internally.
    signature = signning_key.sign(bytes_hashed, hashfunc=hashlib.sha256)
    # base64-encode the signature for transport.
    transaction_encode = base64_encode(signature)
    return transaction_encode
e231b3e0f6c6a8c2301795d5b2f82321cd2ce52c | 1,969 | py | Python | poco/commands/start.py | gaborkolarovics/poco | 31237ce9138c134e7780cd3ed7459cd5f4e07834 | [
"MIT"
] | 52 | 2017-10-13T06:11:41.000Z | 2021-12-08T08:08:48.000Z | poco/commands/start.py | gaborkolarovics/poco | 31237ce9138c134e7780cd3ed7459cd5f4e07834 | [
"MIT"
] | 28 | 2017-08-02T13:55:53.000Z | 2017-09-07T14:31:55.000Z | poco/commands/start.py | gaborkolarovics/poco | 31237ce9138c134e7780cd3ed7459cd5f4e07834 | [
"MIT"
] | 8 | 2018-02-16T10:51:27.000Z | 2022-02-04T11:17:44.000Z | import os
from .abstract_command import AbstractCommand
from ..services.state_utils import StateUtils
from ..services.state import StateHolder
from ..services.command_handler import CommandHandler
from ..services.console_logger import ColorPrint
class Start(AbstractCommand):
    """Start ('up') a project from the catalog with an optional plan."""

    command = ["start", "up"]
    args = ["[<project/plan>]"]
    args_descriptions = {"[<project/plan>]": "Name of the project in the catalog and/or name of the project's plan"}
    description = "Run: 'poco start nginx/default' or 'poco up nginx/default' to start nginx project (docker, helm " \
                  "or kubernetes) with the default plan."

    run_command = "start"
    need_checkout = True

    def prepare_states(self):
        """Resolve the project name/work dir and build the compose handler."""
        StateUtils.calculate_name_and_work_dir()
        StateUtils.prepare("compose_handler")

    def resolve_dependencies(self):
        """Fail fast when the catalog entry has no repository, then require a poco file."""
        repo_missing = StateHolder.catalog_element is not None and not StateUtils.check_variable('repository')
        if repo_missing:
            ColorPrint.exit_after_print_messages(message="Repository not found for: " + str(StateHolder.name))
        self.check_poco_file()

    def execute(self):
        """Run checkouts when configured, dispatch the command, then print any end message."""
        if self.need_checkout:
            StateHolder.compose_handler.run_checkouts()
        CommandHandler().run(self.run_command)
        if hasattr(self, "end_message"):
            ColorPrint.print_info(getattr(self, "end_message"))

    @staticmethod
    def check_poco_file():
        """Abort with a helpful 'poco init' hint when no poco.yml can be located."""
        if StateUtils.check_variable('poco_file'):
            return
        base_dir = StateHolder.repository.target_dir if StateHolder.repository is not None else os.getcwd()
        poco_file = str(base_dir) + '/poco.yml'
        ColorPrint.print_error(message="Poco file not found: " + poco_file)
        ColorPrint.exit_after_print_messages(message="Use 'poco init " + StateHolder.name +
                                                     "', that will generate a default poco file for you",
                                             msg_type="warn")
| 42.804348 | 118 | 0.655663 | 1,720 | 0.87354 | 0 | 0 | 607 | 0.308278 | 0 | 0 | 463 | 0.235145 |
e2327222b3a6ee2598025ec347bc46a24be18c32 | 172 | py | Python | last_stone_weight.py | spencercjh/sync-leetcode-today-problem-python3-example | 4957e5eadb697334741df0fc297bec2edaa9e2ab | [
"Apache-2.0"
] | null | null | null | last_stone_weight.py | spencercjh/sync-leetcode-today-problem-python3-example | 4957e5eadb697334741df0fc297bec2edaa9e2ab | [
"Apache-2.0"
] | null | null | null | last_stone_weight.py | spencercjh/sync-leetcode-today-problem-python3-example | 4957e5eadb697334741df0fc297bec2edaa9e2ab | [
"Apache-2.0"
] | null | null | null |
class LastStoneWeight:
"""
https://leetcode-cn.com/problems/last-stone-weight/
"""
def lastStoneWeight(self, stones: List[int]) -> int:
| 17.2 | 56 | 0.575581 | 161 | 0.936047 | 0 | 0 | 0 | 0 | 0 | 0 | 67 | 0.389535 |
e233625c28cad064126485489cb5b21f0fc984ce | 8,098 | py | Python | netguardmitm/netguardmitm.py | RTN-Team/netguard-mitm | 76f8d03da2f377c8c102410befca7140c7e5d156 | [
"MIT"
] | null | null | null | netguardmitm/netguardmitm.py | RTN-Team/netguard-mitm | 76f8d03da2f377c8c102410befca7140c7e5d156 | [
"MIT"
] | null | null | null | netguardmitm/netguardmitm.py | RTN-Team/netguard-mitm | 76f8d03da2f377c8c102410befca7140c7e5d156 | [
"MIT"
] | null | null | null | from netfilterqueue import NetfilterQueue
import socket
from os import system
import scapy.all as scapy
import scapy_http.http
from urllib.parse import urlparse, parse_qs
# Markers recording which netguard.io API call was seen last; used to pair
# the following HTTP response with the request that triggered it.
REQUEST_LOGIN = 0
REQUEST_PROTECT = 1
class NetGuardMITM:
    """Transparent man-in-the-middle for NetGuard traffic via NetfilterQueue.

    Packets to/from ``ip`` are diverted into an nfqueue by iptables rules;
    HTTP traffic to netguard.io is inspected and dispatched to the
    user-assignable ``*_callback`` hooks, each of which may veto a packet
    by returning False (the packet is then dropped).
    """

    def __init__(self, ip, queue_number=1):
        """
        :param ip: IP address whose TCP traffic should be intercepted.
        :param queue_number: NFQUEUE number used for the iptables rules.
        """
        self.ip = ip
        self.queue_number = queue_number
        self.log_callback = None
        # Learned lazily from the first observed request to netguard.io.
        self.netguard_server_ip = None
        self.login_request_callback = None
        self.protect_request_callback = None
        self.login_response_callback = None
        self.protect_response_callback = None
        self.file_upload_packet_callback = None
        self.file_download_packet_callback = None
        # Track a multi-packet HTTP body (upload or download) in flight.
        self.file_transfer_in_progress = False
        self.file_transfer_bytes_remaining = 0
        # Kind of the last API request seen (REQUEST_LOGIN/REQUEST_PROTECT).
        self.__last_request = None

    def log(self, message):
        """Forward *message* to the configured log callback, if any."""
        if self.log_callback:
            self.log_callback(message)

    def packet_callback(self, raw_packet):
        """
        Main call back of sent and received packets.
        :param raw_packet: The packet that is being sent/received.
        """
        packet = scapy.IP(raw_packet.get_payload())
        accept = True
        if packet.haslayer("HTTP"):
            # NOTE(review): tcp_layer is extracted but never used.
            tcp_layer = packet.getlayer("TCP")
            http_layer = packet.getlayer("HTTP")
            # Continuation packets of a file transfer carry a Raw layer and
            # no request/response line; route them by traffic direction.
            if packet.haslayer("Raw") and self.file_transfer_in_progress:
                if packet.dst == self.netguard_server_ip:
                    accept = self.handle_file_upload_packet(raw_packet, packet)
                elif packet.src == self.netguard_server_ip:
                    accept = self.handle_file_download_packet(raw_packet, packet)
                else:
                    accept = True
            if "HTTP Request" in http_layer:
                accept = self.handle_request(raw_packet, packet)
            elif "HTTP Response" in http_layer:
                accept = self.handle_response(raw_packet, packet)
        if accept:
            raw_packet.accept()
        else:
            raw_packet.drop()

    def handle_request(self, raw_packet, packet):
        """
        Handles HTTP requests sent towards netguard.io. All other requests are ignored and therefore accepted.
        :param raw_packet: The raw packet as obtained by NetfilterQueue
        :param packet: The scapy representation of the HTTP packet.
        :return True if the packet should be accepted, False otherwise.
        """
        accept = True
        http_layer = packet.getlayer("HTTP")
        request = http_layer["HTTP Request"]
        if request.Host != b"netguard.io":
            return accept
        # Record the (current) netguard.io IP.
        self.netguard_server_ip = packet.dst
        # Parse URL.
        o = urlparse(request.Path)
        arguments = parse_qs(o.query)
        # Check which API call is being made and invoke corresponding callback.
        # NOTE(review): arguments[b"username"]/[b"password"] raise KeyError
        # when the query string omits them — confirm that's acceptable here.
        if request.Method == b"GET":
            if o.path == b"/API/login.php" and self.login_request_callback:
                self.__last_request = REQUEST_LOGIN
                accept = self.login_request_callback(raw_packet, packet, arguments[b"username"], arguments[b"password"])
        elif request.Method == b"POST":
            if o.path == b"/API/protect.php":
                if self.protect_request_callback:
                    accept = self.protect_request_callback(raw_packet, packet, arguments[b"username"], arguments[b"password"])
                # The POST body (the file to protect) may span many packets.
                self.__last_request = REQUEST_PROTECT
                self.file_transfer_in_progress = True
                self.file_transfer_bytes_remaining = int(request.fields["Content-Length"])
        return accept

    def handle_response(self, raw_packet, packet):
        """
        Handles a single HTTP response from netguard.io. All other responses are ignored and therefore accepted.
        :param raw_packet: The raw packet as obtained by NetfilterQueue.
        :param packet: The scapy representation of the HTTP packet.
        :return: True if the packet should be accepted, False otherwise.
        """
        accept = True
        if packet.src != self.netguard_server_ip:
            return accept
        http_layer = packet.getlayer("HTTP")
        response = http_layer["HTTP Response"]
        body = packet.getlayer("Raw")
        # NOTE: We assume that the response comes directly after the request.
        # This might not be accurate, as packets can be reordered during the transmission.
        # For more reliable results, check sequence numbers of packets.
        # Check what kind of response we're dealing with.
        if self.__last_request == REQUEST_LOGIN and self.login_response_callback:
            accept = self.login_response_callback(raw_packet, packet, body)
            self.__last_request = None
        elif self.__last_request == REQUEST_PROTECT:
            if self.protect_response_callback:
                accept = self.protect_response_callback(raw_packet, packet, body)
            if "Content-Length" in response.fields:
                # The protected file comes back as the response body,
                # potentially spread over many packets.
                self.file_transfer_in_progress = True
                self.file_transfer_bytes_remaining = int(response.fields["Content-Length"])
                self.handle_file_download_packet(raw_packet, packet)
            self.__last_request = None
        return accept

    def handle_file_upload_packet(self, raw_packet, packet):
        """
        Handles a single HTTP packet containing (a chunk of) the file to be uploaded to netguard.io.
        :param raw_packet: The raw packet as obtained by NetfilterQueue.
        :param packet: The scapy representation of the HTTP packet.
        :return: True if the packet should be accepted, False otherwise.
        """
        accept = True
        raw_layer = packet.getlayer("Raw")
        self.file_transfer_bytes_remaining -= len(raw_layer.load)
        if self.file_upload_packet_callback:
            accept = self.file_upload_packet_callback(raw_packet, packet, raw_layer.load, self.file_transfer_bytes_remaining)
        self.file_transfer_in_progress = self.file_transfer_bytes_remaining > 0
        return accept

    def handle_file_download_packet(self, raw_packet, packet):
        """
        Handles a single HTTP packet containing (a chunk of) the protected file that is being downloaded from
        the netguard.io server.
        :param raw_packet: The raw packet as obtained by NetfilterQueue.
        :param packet: THe scapy representation of the HTTP packet.
        :return: True if the packet should be accepted, False otherwise.
        """
        accept = True
        raw_layer = packet.getlayer("Raw")
        self.file_transfer_bytes_remaining -= len(raw_layer.load)
        if self.file_download_packet_callback:
            accept = self.file_download_packet_callback(raw_packet, packet, raw_layer.load, self.file_transfer_bytes_remaining)
        self.file_transfer_in_progress = self.file_transfer_bytes_remaining > 0
        return accept

    def do_mitm(self):
        """
        Performs the man-in-the-middle attack. This function is blocking.
        """
        try:
            # Add necessary IP table entries.
            self.log("Updating IP tables...")
            system("iptables -A INPUT -d {} -p tcp -j NFQUEUE --queue-num {}".format(self.ip, self.queue_number))
            system("iptables -A OUTPUT -s {} -p tcp -j NFQUEUE --queue-num {}".format(self.ip, self.queue_number))
            # Bind to filter queue.
            nfqueue = NetfilterQueue()
            nfqueue.bind(self.queue_number, self.packet_callback)
            s = socket.fromfd(nfqueue.get_fd(), socket.AF_UNIX, socket.SOCK_STREAM)
            try:
                self.log("Running MITM...")
                nfqueue.run_socket(s)
            except KeyboardInterrupt:
                # Ctrl-C is the expected way to stop; fall through to cleanup.
                pass
            self.log("Closing sockets...")
            s.close()
            nfqueue.unbind()
        finally:
            # Remove IP table entries.
            # NOTE(review): this deletes whatever rule is *first* in each
            # chain — only correct when nothing else inserted a rule above
            # ours in the meantime.
            self.log("Restoring IP tables.")
            system("iptables -D INPUT 1")
            system("iptables -D OUTPUT 1")
| 40.089109 | 127 | 0.63917 | 7,884 | 0.973574 | 0 | 0 | 0 | 0 | 0 | 0 | 2,575 | 0.31798 |
e23422dd40279f3c778ce0b0151d008ed9279e87 | 1,660 | py | Python | wdra_extender/settings.py | Southampton-RSG/wdra-extender | cfbcc8633ef13002a8f0266c782380c9c6f4ae55 | [
"BSD-3-Clause"
] | null | null | null | wdra_extender/settings.py | Southampton-RSG/wdra-extender | cfbcc8633ef13002a8f0266c782380c9c6f4ae55 | [
"BSD-3-Clause"
] | 28 | 2020-07-10T14:05:18.000Z | 2022-02-01T15:31:16.000Z | wdra_extender/settings.py | Southampton-RSG/wdra-extender | cfbcc8633ef13002a8f0266c782380c9c6f4ae55 | [
"BSD-3-Clause"
] | null | null | null | """Config options for WDRAX.
Gets settings from environment variables or .env/settings.ini file.
"""
import pathlib
from decouple import AutoConfig
# Project root: used as the search path for settings.ini/.env and to build
# default file locations.
# NOTE(review): pathlib.Path(__name__) builds a path from the *module name*,
# not the file location, so BASE_DIR effectively resolves relative to the
# current working directory. Confirm whether Path(__file__) was intended.
BASE_DIR = pathlib.Path(__name__).absolute().parent
# Reads each setting from environment variables or a settings.ini/.env file
# found under BASE_DIR.
config = AutoConfig(search_path=str(BASE_DIR))  # pylint: disable=invalid-name
LOG_LEVEL = config('LOG_LEVEL', default='INFO')
#: Directory from which plugins are loaded
PLUGIN_DIR = config('PLUGIN_DIR',
                    cast=pathlib.Path,
                    default=BASE_DIR.joinpath('plugins'))
#: Directory into which output zip files should be placed
OUTPUT_DIR = config('OUTPUT_DIR',
                    cast=pathlib.Path,
                    default=BASE_DIR.joinpath('media'))
# Redis connection settings - also used to build the Celery URLs below.
REDIS_HOST = config('REDIS_HOST', default=None)
REDIS_PORT = config('REDIS_PORT', cast=int, default=6379)
REDIS_DB = config('REDIS_DB', default='0')
# Celery broker defaults to the Redis instance above; remains None when no
# REDIS_HOST is configured.
CELERY_BROKER_URL = config(
    'CELERY_BROKER_URL',
    default=(None if REDIS_HOST is None else
             f'redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}'))
# Celery results are stored in the same backend as the broker by default.
CELERY_RESULT_BACKEND = config(
    'CELERY_RESULT_BACKEND',
    default=CELERY_BROKER_URL
)
# Database defaults to a local SQLite file in the project root.
SQLALCHEMY_DATABASE_URI = config(
    'SQLALCHEMY_DATABASE_URI',
    default=f'sqlite:///{BASE_DIR.joinpath("db.sqlite3")}')
# Dotted module paths of the tweet provider implementations to use.
TWEET_PROVIDERS = [
    'wdra_extender.extract.tweet_providers.redis_provider',
    'wdra_extender.extract.tweet_providers.twarc_provider',
]
# Twitter API credentials; all default to None when not configured.
TWITTER_CONSUMER_KEY = config('TWITTER_CONSUMER_KEY', default=None)
TWITTER_CONSUMER_SECRET = config('TWITTER_CONSUMER_SECRET', default=None)
TWITTER_ACCESS_TOKEN = config('TWITTER_ACCESS_TOKEN', default=None)
TWITTER_ACCESS_TOKEN_SECRET = config('TWITTER_ACCESS_TOKEN_SECRET', default=None)
# Disable Flask-SQLAlchemy's modification-tracking event system.
SQLALCHEMY_TRACK_MODIFICATIONS = False
| 31.320755 | 81 | 0.734337 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 648 | 0.390361 |
e2349ddee45820710716c607505fad37b5e41eba | 3,066 | py | Python | tests/contrib/zerorpc/zeropc_tests.py | trancee/apm-agent-python | 04f014661f18b871b8b1970978b639b92f627e5f | [
"BSD-3-Clause"
] | 1 | 2019-04-01T10:23:45.000Z | 2019-04-01T10:23:45.000Z | tests/contrib/zerorpc/zeropc_tests.py | alirezarezvani/apm-agent-python | 5a287a4968dcaf1d5ea84f15f9793df24cf39a3d | [
"BSD-3-Clause"
] | null | null | null | tests/contrib/zerorpc/zeropc_tests.py | alirezarezvani/apm-agent-python | 5a287a4968dcaf1d5ea84f15f9793df24cf39a3d | [
"BSD-3-Clause"
] | 2 | 2020-02-04T22:03:00.000Z | 2021-03-23T13:17:14.000Z | # BSD 3-Clause License
#
# Copyright (c) 2019, Elasticsearch BV
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import random
import shutil
import sys
import tempfile
import pytest
from elasticapm.contrib.zerorpc import Middleware
zerorpc = pytest.importorskip("zerorpc")
gevent = pytest.importorskip("gevent")
has_unsupported_pypy = hasattr(sys, "pypy_version_info") and sys.pypy_version_info < (2, 6)
@pytest.mark.skipif(has_unsupported_pypy, reason="Failure with pypy < 2.6")
def test_zerorpc_middleware_with_reqrep(elasticapm_client):
    """Exceptions raised during a zerorpc req/rep call are captured by the middleware.

    Spawns a zerorpc server exposing the stdlib ``random`` module on an ipc
    endpoint; calling ``choice([])`` remotely raises IndexError on the server,
    which must be recorded by the elasticapm middleware as an unhandled error.
    """
    scratch_dir = tempfile.mkdtemp()
    endpoint = "ipc://{0}".format(os.path.join(scratch_dir, "random_zeroserver"))
    try:
        # The middleware has to be registered before server/client creation.
        zerorpc.Context.get_instance().register_middleware(Middleware(client=elasticapm_client))
        rpc_server = zerorpc.Server(random)
        rpc_server.bind(endpoint)
        gevent.spawn(rpc_server.run)

        rpc_client = zerorpc.Client()
        rpc_client.connect(endpoint)
        # random.choice([]) raises IndexError server-side; zerorpc forwards
        # it to the caller as a RemoteError.
        with pytest.raises(zerorpc.exceptions.RemoteError) as raised:
            rpc_client.choice([])
        rpc_client.close()
        rpc_server.close()
    finally:
        shutil.rmtree(scratch_dir, ignore_errors=True)

    remote_error = raised.value
    assert remote_error.name == "IndexError"

    assert len(elasticapm_client.events) == 1
    captured = elasticapm_client.events[0]["errors"][0]["exception"]
    assert captured["type"] == "IndexError"
    assert captured["handled"] is False
    top_frame = captured["stacktrace"][0]
    assert top_frame["function"] == "choice"
    assert top_frame["module"] == "random"
| 39.818182 | 96 | 0.737443 | 0 | 0 | 0 | 0 | 1,165 | 0.379974 | 0 | 0 | 1,776 | 0.579256 |
e23593b3827e34e746568409163528438deb1378 | 9,595 | py | Python | pyFAI/gui/calibration/RingCalibration.py | vallsv/pyFAI | 64143652c2b219978ec370bf2fa215af01f937c2 | [
"MIT"
] | null | null | null | pyFAI/gui/calibration/RingCalibration.py | vallsv/pyFAI | 64143652c2b219978ec370bf2fa215af01f937c2 | [
"MIT"
] | 1 | 2016-09-12T11:58:05.000Z | 2016-09-12T11:58:05.000Z | pyFAI/gui/calibration/RingCalibration.py | vallsv/pyFAI | 64143652c2b219978ec370bf2fa215af01f937c2 | [
"MIT"
] | null | null | null | # coding: utf-8
# /*##########################################################################
#
# Copyright (c) 2016 European Synchrotron Radiation Facility
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# ###########################################################################*/
from __future__ import absolute_import
__authors__ = ["V. Valls"]
__license__ = "MIT"
__date__ = "09/01/2018"
import logging
import numpy
import pyFAI.utils
from pyFAI.geometryRefinement import GeometryRefinement
from pyFAI.peak_picker import PeakPicker
_logger = logging.getLogger(__name__)
class RingCalibration(object):
    """Refine a detector geometry from control points picked on Debye-Scherrer rings.

    Wraps a pyFAI :class:`GeometryRefinement` (least-squares fit of the
    sample-detector distance, PONI coordinates, rotations and optionally the
    wavelength) together with a :class:`PeakPicker` operating on the
    calibration image.
    """

    def __init__(self, image, mask, calibrant, detector, wavelength, peaks, method):
        """
        :param image: 2D intensity array of the calibration image
        :param mask: mask array (stored; not applied directly here)
        :param calibrant: pyFAI calibrant providing the reference ring angles
        :param detector: pyFAI detector describing the pixel geometry
        :param wavelength: wavelength in meter
        :param peaks: control points passed to GeometryRefinement
        :param method: peak-search method name forwarded to PeakPicker
        """
        self.__image = image
        self.__mask = mask
        self.__calibrant = calibrant
        self.__calibrant.set_wavelength(wavelength)
        self.__detector = detector
        self.__wavelength = wavelength
        self.__init(peaks, method)
        # By default the wavelength is kept fixed during refinement.
        fixed = pyFAI.utils.FixedParameters()
        fixed.add("wavelength")
        self.__fixed = fixed
        # NOTE(review): __init() above just stored a residual in __residual
        # and __peakResidual; resetting both to None here discards the
        # peak-selection residual (getPeakResidual() will return None until
        # recomputed). Confirm this reset is intentional.
        self.__residual = None
        self.__peakResidual = None

    def __initgeoRef(self):
        """
        Tries to initialise the GeometryRefinement (dist, poni, rot)
        Returns a dictionary of key value pairs
        """
        defaults = {"dist": 0.1, "poni1": 0.0, "poni2": 0.0,
                    "rot1": 0.0, "rot2": 0.0, "rot3": 0.0}
        if self.__detector:
            try:
                # Start with the PONI at the middle of the detector surface.
                p1, p2, _p3 = self.__detector.calc_cartesian_positions()
                defaults["poni1"] = p1.max() / 2.
                defaults["poni2"] = p2.max() / 2.
            except Exception as err:
                _logger.warning(err)
        # if ai:
        #     for key in defaults.keys():  # not PARAMETERS which holds wavelength
        #         val = getattr(self.ai, key, None)
        #         if val is not None:
        #             defaults[key] = val
        return defaults

    def __init(self, peaks, method):
        """Create the geometry refinement and peak picker, and run a first
        fit of the geometry with the wavelength held fixed."""
        defaults = self.__initgeoRef()
        fixed = pyFAI.utils.FixedParameters()
        fixed.add("wavelength")
        geoRef = GeometryRefinement(data=peaks,
                                    wavelength=self.__wavelength,
                                    detector=self.__detector,
                                    calibrant=self.__calibrant,
                                    **defaults)
        # First refinement, based on the picked peaks only.
        self.__residual = geoRef.refine2(1000000, fix=fixed)
        self.__peakResidual = self.__residual
        self.__previousResidual = None
        peakPicker = PeakPicker(data=self.__image,
                                calibrant=self.__calibrant,
                                wavelength=self.__wavelength,
                                detector=self.__detector,
                                method=method)
        self.__peakPicker = peakPicker
        self.__geoRef = geoRef

    def init(self, peaks, method):
        """Re-create the refinement and peak picker from a new peak selection."""
        self.__init(peaks, method)

    def update(self, image, mask, calibrant, detector, wavelength=None):
        """Update the calibration inputs without re-running any fit."""
        self.__image = image
        self.__mask = mask
        self.__calibrant = calibrant
        self.__detector = detector
        if wavelength is not None:
            self.__wavelength = wavelength

    def __computeResidual(self):
        # Mean chi2 per control point; the wavelength-aware variant is used
        # when the wavelength is free.
        if "wavelength" in self.__fixed:
            return self.__geoRef.chi2() / self.__geoRef.data.shape[0]
        else:
            return self.__geoRef.chi2_wavelength() / self.__geoRef.data.shape[0]

    def __refine(self, maxiter=1000000, fix=None):
        # Dispatch to the refinement flavour matching the fixed parameters:
        # refine2 keeps the wavelength, refine2_wavelength also fits it.
        if "wavelength" in self.__fixed:
            return self.__geoRef.refine2(maxiter, fix)
        else:
            return self.__geoRef.refine2_wavelength(maxiter, fix)

    def refine(self, max_iter=500):
        """
        Contains the common geometry refinement part.

        Repeatedly refines the geometry until the residual stops improving
        (or ``max_iter`` rounds have run), then drops the cached arrays of
        the refinement so they are recomputed with the new geometry.
        """
        self.__calibrant.set_wavelength(self.__wavelength)
        self.__peakPicker.points.setWavelength_change2th(self.__wavelength)
        self.__previousResidual = self.getResidual()
        previous_residual = float("+inf")
        print("Initial residual: %s" % previous_residual)
        for count in range(max_iter):
            residual = self.__refine(10000, fix=self.__fixed)
            print("Residual: %s" % residual)
            # Stop as soon as an iteration no longer improves the fit; the
            # last *improving* residual is the one kept in __residual.
            if residual >= previous_residual:
                break
            previous_residual = residual
            self.__residual = residual
        print("Final residual: %s (after %s iterations)" % (residual, count))
        # Invalidate cached 2theta / solid-angle / chi arrays.
        self.__geoRef.del_ttha()
        self.__geoRef.del_dssa()
        self.__geoRef.del_chia()

    def getResidual(self):
        """Returns the residual computed from the current fitting.

        Lazily recomputed when invalidated (e.g. after fromGeometryModel).
        """
        if self.__residual is None:
            self.__residual = self.__computeResidual()
        return self.__residual

    def getPreviousResidual(self):
        """Returns the previous residual computed before the last fitting."""
        return self.__previousResidual

    def getPeakResidual(self):
        """Returns the residual computed from the peak selection."""
        return self.__peakResidual

    def getRings(self):
        """
        Overlay a contour-plot

        Returns, for each expected calibrant ring visible in the image's
        2theta range, the list of contour segments (pixel coordinates)
        tracing that ring. Empty list when no ring falls in range.
        """
        tth = self.__geoRef.twoThetaArray(self.__peakPicker.shape)
        tth_max = tth.max()
        tth_min = tth.min()
        if self.__calibrant:
            # Keep only the reference angles covered by the detector.
            angles = [i for i in self.__calibrant.get_2th()
                      if (i is not None) and (i >= tth_min) and (i <= tth_max)]
            if len(angles) == 0:
                return []
        else:
            return []
        # FIXME use documented function
        # NOTE(review): matplotlib._cntr is a private API (removed in newer
        # matplotlib releases -- confirm); this needs porting to a public
        # contouring interface.
        import matplotlib._cntr
        x, y = numpy.mgrid[:tth.shape[0], :tth.shape[1]]
        contour = matplotlib._cntr.Cntr(x, y, tth)
        rings = []
        for angle in angles:
            # trace() returns the segments followed by their path codes.
            res = contour.trace(angle)
            nseg = len(res) // 2
            segments, _codes = res[:nseg], res[nseg:]
            rings.append(segments)
        return rings

    def getBeamCenter(self):
        """Return the beam center (y, x) in image coordinates, or None when
        the Fit2D conversion fails."""
        try:
            f2d = self.__geoRef.getFit2D()
            return f2d["centerY"], f2d["centerX"]
        except TypeError:
            return None

    def getPoni(self):
        """Returns the PONI coord in image coordinate.

        That's an approximation of the PONI coordinate at pixel precision
        (position of the solid-angle maximum); None when the PONI does not
        lie on the detector.
        """
        solidAngle = self.__geoRef.solidAngleArray(shape=self.__image.shape)
        index = numpy.argmax(solidAngle)
        coord = numpy.unravel_index(index, solidAngle.shape)
        dmin = self.__geoRef.dssa.min()
        dmax = self.__geoRef.dssa.max()
        # Accept the maximum only if it is very close to the global top of
        # the solid-angle surface; otherwise the PONI is outside the image.
        if dmax > 1 - (dmax - dmin) * 0.001:
            return coord
        else:
            return None

    def toGeometryModel(self, model):
        """Copy the refined geometry into the GUI geometry *model*."""
        model.lockSignals()
        # Wavelength is held in meter by pyFAI; the model stores Angstrom.
        model.wavelength().setValue(self.__geoRef.wavelength * 1e10)
        model.distance().setValue(self.__geoRef.dist)
        model.poni1().setValue(self.__geoRef.poni1)
        model.poni2().setValue(self.__geoRef.poni2)
        model.rotation1().setValue(self.__geoRef.rot1)
        model.rotation2().setValue(self.__geoRef.rot2)
        model.rotation3().setValue(self.__geoRef.rot3)
        model.unlockSignals()

    def fromGeometryModel(self, model, resetResidual=True):
        """Load the geometry from the GUI *model* into the refinement.

        :param model: GUI geometry model (wavelength in Angstrom)
        :param bool resetResidual: when True, cached residuals are
            invalidated so they are recomputed lazily by getResidual()
        """
        wavelength = model.wavelength().value() * 1e-10
        self.__calibrant.setWavelength_change2th(wavelength)
        self.__geoRef.wavelength = wavelength
        self.__geoRef.dist = model.distance().value()
        self.__geoRef.poni1 = model.poni1().value()
        self.__geoRef.poni2 = model.poni2().value()
        self.__geoRef.rot1 = model.rotation1().value()
        self.__geoRef.rot2 = model.rotation2().value()
        self.__geoRef.rot3 = model.rotation3().value()
        if resetResidual:
            self.__previousResidual = None
            self.__residual = None

    def fromGeometryConstriansModel(self, contraintsModel):
        """Load the fixed/free parameter flags from the GUI constraints model.

        NOTE(review): method and parameter names contain typos
        ("Constrians"/"contraints"); kept as-is for API compatibility.
        """
        # FIXME take care of range values
        fixed = pyFAI.utils.FixedParameters()
        if contraintsModel.wavelength().isFixed():
            fixed.add("wavelength")
        if contraintsModel.distance().isFixed():
            fixed.add("dist")
        if contraintsModel.poni1().isFixed():
            fixed.add("poni1")
        if contraintsModel.poni2().isFixed():
            fixed.add("poni2")
        if contraintsModel.rotation1().isFixed():
            fixed.add("rot1")
        if contraintsModel.rotation2().isFixed():
            fixed.add("rot2")
        if contraintsModel.rotation3().isFixed():
            fixed.add("rot3")
        self.__fixed = fixed
| 36.903846 | 84 | 0.605419 | 7,999 | 0.833663 | 0 | 0 | 0 | 0 | 0 | 0 | 2,370 | 0.247004 |
e236809ac5baf6f429907ed386884a66c65abed5 | 790 | py | Python | metrics_layer/core/parse/manifest.py | Zenlytic/metrics_layer | 45e291186c9171b44222a49444153c5df14985c4 | [
"Apache-2.0"
] | 5 | 2021-11-11T15:39:23.000Z | 2022-03-17T19:54:17.000Z | metrics_layer/core/parse/manifest.py | Zenlytic/metrics_layer | 45e291186c9171b44222a49444153c5df14985c4 | [
"Apache-2.0"
] | 10 | 2021-11-23T21:44:56.000Z | 2022-03-21T02:01:51.000Z | metrics_layer/core/parse/manifest.py | Zenlytic/metrics_layer | 45e291186c9171b44222a49444153c5df14985c4 | [
"Apache-2.0"
] | null | null | null | class Manifest:
def __init__(self, definition: dict):
self._definition = definition
def exists(self):
return self._definition is not None and self._definition != {}
def _resolve_node(self, name: str):
key = next((k for k in self._definition["nodes"].keys() if name == k.split(".")[-1]), None)
if key is None:
raise ValueError(
f"Could not find the ref {name} in the co-located dbt project."
" Please check the name in your dbt project."
)
return self._definition["nodes"][key]
def resolve_name(self, name: str):
node = self._resolve_node(name)
# return f"{node['database']}.{node['schema']}.{node['alias']}"
return f"{node['schema']}.{node['alias']}"
| 37.619048 | 99 | 0.583544 | 789 | 0.998734 | 0 | 0 | 0 | 0 | 0 | 0 | 223 | 0.282278 |
e237d3872eb66e61cab21518227533afd94c87e8 | 252 | py | Python | desafio/desafio051.py | henriquekirchheck/Curso-em-video-Python | 1a29f68515313af85c8683f626ba35f8fcdd10e7 | [
"MIT"
] | null | null | null | desafio/desafio051.py | henriquekirchheck/Curso-em-video-Python | 1a29f68515313af85c8683f626ba35f8fcdd10e7 | [
"MIT"
] | null | null | null | desafio/desafio051.py | henriquekirchheck/Curso-em-video-Python | 1a29f68515313af85c8683f626ba35f8fcdd10e7 | [
"MIT"
] | null | null | null | print('=====================')
print(' 10 Termos de um PA')
print('=====================')
p = int(input('Primeiro Termo: '))
r = int(input('Razão: '))
for loop in range(p, ((r * 10) + p), r):
print('{} ->' .format(loop), end=' ')
print('Acabou') | 25.2 | 41 | 0.452381 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 113 | 0.44664 |