hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 248 | max_stars_repo_name stringlengths 5 125 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 248 | max_issues_repo_name stringlengths 5 125 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 248 | max_forks_repo_name stringlengths 5 125 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 5 2.06M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.03M | alphanum_fraction float64 0 1 | count_classes int64 0 1.6M | score_classes float64 0 1 | count_generators int64 0 651k | score_generators float64 0 1 | count_decorators int64 0 990k | score_decorators float64 0 1 | count_async_functions int64 0 235k | score_async_functions float64 0 1 | count_documentation int64 0 1.04M | score_documentation float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
faa101944e1ea1419bd8c69ecd6e50fa2ceaf965 | 2,280 | py | Python | source/0A_write_cgi_v1.4.py | SpencerEricksen/PCBA_downloads_oxphos | 62f5b7a6faf2743afa31368d3e8d529e248c815d | [
"MIT"
] | null | null | null | source/0A_write_cgi_v1.4.py | SpencerEricksen/PCBA_downloads_oxphos | 62f5b7a6faf2743afa31368d3e8d529e248c815d | [
"MIT"
] | null | null | null | source/0A_write_cgi_v1.4.py | SpencerEricksen/PCBA_downloads_oxphos | 62f5b7a6faf2743afa31368d3e8d529e248c815d | [
"MIT"
] | null | null | null |
# script to read-in CID list and write out cgi XML files
# for molecule downloads from pubchem using PUG REST
# usage: 1_write_cgi_PUG_v1.4.py pcba-aid411_activities.csv
import pandas as pd
import sys
def dump_cgi_xml( outfile, cid_list, AID, AID_chunk ):
    '''Write a PUG REST cgi XML request file for fetching the given CIDs.

    The fixed XML preamble and closing tags are emitted verbatim around one
    <PCT-ID-List_uids_E> entry per CID; a CID of '0' is treated as a
    missing-compound placeholder and skipped.  The handle is closed before
    returning.  AID and AID_chunk are accepted for call-site symmetry but
    are not used inside this function.
    '''
    header = (
        '<?xml version="1.0"?>\n'
        '<!DOCTYPE PCT-Data PUBLIC "-//NCBI//NCBI PCTools/EN" "http://pubchem.ncbi.nlm.nih.gov/pug/pug.dtd">\n'
        "<PCT-Data>\n"
        " <PCT-Data_input>\n"
        " <PCT-InputData>\n"
        " <PCT-InputData_download>\n"
        " <PCT-Download>\n"
        " <PCT-Download_uids>\n"
        " <PCT-QueryUids>\n"
        " <PCT-QueryUids_ids>\n"
        " <PCT-ID-List>\n"
        " <PCT-ID-List_db>pccompound</PCT-ID-List_db>\n"
        " <PCT-ID-List_uids>\n"
    )
    footer = (
        " </PCT-ID-List_uids>\n"
        " </PCT-ID-List>\n"
        " </PCT-QueryUids_ids>\n"
        " </PCT-QueryUids>\n"
        " </PCT-Download_uids>\n"
        ' <PCT-Download_format value="smiles"/>\n'
        ' <PCT-Download_compression value="gzip"/>\n'
        ' <PCT-Download_use-3d value="false"/>\n'
        " </PCT-Download>\n"
        " </PCT-InputData_download>\n"
        " </PCT-InputData>\n"
        " </PCT-Data_input>\n"
        "</PCT-Data>\n"
    )
    # one uid element per compound id, skipping the '0' placeholder
    entries = "".join(
        " <PCT-ID-List_uids_E>" + str(cid) + "</PCT-ID-List_uids_E>\n"
        for cid in cid_list
        if cid != '0'
    )
    outfile.write(header + entries + footer)
    outfile.close()
# command line: path to a CSV named like "pcba-aid411_activities.csv"
AID_CID_list = sys.argv[1]
# pull the assay id (e.g. "aid411") out of the filename
AID = AID_CID_list.split('-')[1].split('.')[0]
df = pd.read_csv( AID_CID_list, index_col='PUBCHEM_CID' )
# CIDs as strings; dump_cgi_xml() drops the '0' placeholder entries
cids = [ str(cid) for cid in df.index.to_list() ]
count = 0
chunk = 0
tmp_cid_list = []
# split the CID list into chunks of 250,000 per request file
# (presumably a PUG request size limit -- confirm against PUG docs)
for cid in cids:
    tmp_cid_list.append(cid)
    if count < 249999:
        count += 1
    else:
        out_xml_file = open('./fetch_CGIs/pc_fetch_'+AID+'_'+str(chunk)+'.cgi', 'w')
        dump_cgi_xml( out_xml_file, tmp_cid_list, AID, chunk )
        count = 0
        chunk += 1
        tmp_cid_list = []
# write the remainder; NOTE(review): this also writes a chunk file with no
# uid entries when len(cids) is an exact multiple of 250,000 -- confirm
# that downstream fetching tolerates an empty uid list
out_xml_file = open('./fetch_CGIs/pc_fetch_'+AID+'_'+str(chunk)+'.cgi', 'w')
dump_cgi_xml( out_xml_file, tmp_cid_list, AID, chunk )
| 31.666667 | 107 | 0.548684 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,318 | 0.57807 |
faa1e8c5adf5fa05097413ffe672e4a2b342669b | 1,974 | py | Python | src/pymor/analyticalproblems/instationary.py | mahgadalla/pymor | ee2806b4c93748e716294c42454d611415da7b5e | [
"Unlicense"
] | 1 | 2021-07-26T12:58:50.000Z | 2021-07-26T12:58:50.000Z | src/pymor/analyticalproblems/instationary.py | mahgadalla/pymor | ee2806b4c93748e716294c42454d611415da7b5e | [
"Unlicense"
] | null | null | null | src/pymor/analyticalproblems/instationary.py | mahgadalla/pymor | ee2806b4c93748e716294c42454d611415da7b5e | [
"Unlicense"
] | null | null | null | # -*- coding: utf-8 -*-
# This file is part of the pyMOR project (http://www.pymor.org).
# Copyright 2013-2017 pyMOR developers and contributors. All rights reserved.
# License: BSD 2-Clause License (http://opensource.org/licenses/BSD-2-Clause)
from pymor.core.interfaces import ImmutableInterface
class InstationaryProblem(ImmutableInterface):
    """Instationary problem description.

    This class describes an instationary problem of the form ::

        |    ∂_t u(x, t, μ) + A(u(x, t, μ), t, μ) = f(x, t, μ),
        |                              u(x, 0, μ) = u_0(x, μ)

    where A, f are given by the problem's `stationary_part` and
    t is allowed to vary in the interval [0, T].

    Parameters
    ----------
    stationary_part
        The stationary part of the problem.
    initial_data
        |Function| providing the initial values u_0.
    T
        The final time T.
    parameter_space
        |ParameterSpace| for the problem.  Defaults to the parameter space
        of `stationary_part`.
    name
        Name of the problem.  Defaults to ``'instationary_' +
        stationary_part.name``.

    Attributes
    ----------
    T
    stationary_part
    parameter_space
    name
    """

    # shadow the base class's read-only `with_arguments` property so that
    # __init__ can assign a concrete set per instance
    with_arguments = None  # with_arguments is a read-only property in the base class
    # keyword arguments handled by this class itself in with_(); everything
    # else is forwarded to the stationary part
    _own_with_arguments = frozenset({'stationary_part', 'initial_data', 'T', 'parameter_space', 'name'})

    def __init__(self, stationary_part, initial_data, T=1., parameter_space=None, name=None):
        self.stationary_part = stationary_part
        self.initial_data = initial_data
        self.T = T
        # fall back to the stationary part's parameter space / name
        self.parameter_space = parameter_space or stationary_part.parameter_space
        self.name = name or ('instationary_' + stationary_part.name)
        # accepted with_() arguments: our own plus the stationary part's
        self.with_arguments = self._own_with_arguments.union(stationary_part.with_arguments)

    def with_(self, **kwargs):
        """Return a copy with the given attributes replaced.

        Keyword arguments not in `_own_with_arguments` are forwarded to
        ``stationary_part.with_()``.
        """
        arguments = {k: kwargs.pop(k, getattr(self, k)) for k in self._own_with_arguments}
        arguments['stationary_part'] = arguments['stationary_part'].with_(**kwargs)
        return InstationaryProblem(**arguments)
| 34.631579 | 104 | 0.66464 | 1,680 | 0.847629 | 0 | 0 | 0 | 0 | 0 | 0 | 1,140 | 0.575177 |
faa39971968922b9da43428bb86ae50cc3a8e49d | 3,183 | py | Python | pituophis/cli.py | dotcomboom/Pituophis | fbd07497a79a637f7bba4916458561cfdde9d698 | [
"BSD-2-Clause"
] | 30 | 2019-02-15T16:24:21.000Z | 2022-03-08T00:52:26.000Z | pituophis/cli.py | dotcomboom/Pituophis | fbd07497a79a637f7bba4916458561cfdde9d698 | [
"BSD-2-Clause"
] | 16 | 2019-02-12T00:54:52.000Z | 2020-10-28T16:27:35.000Z | pituophis/cli.py | dotcomboom/Pituophis | fbd07497a79a637f7bba4916458561cfdde9d698 | [
"BSD-2-Clause"
] | 2 | 2019-09-02T04:26:21.000Z | 2020-05-15T20:46:35.000Z | import importlib
import sys
import pituophis
# check if the user is running the script with the correct number of arguments
def _flag_value(short, long_):
    """Return the argv value following `short` or `long_`, or None.

    Fixes a crash in the original parsing, which tested for either flag
    form but always looked up the index of the *short* form, so the long
    options (e.g. ``--host``) raised ValueError.
    """
    for flag in (short, long_):
        if flag in sys.argv:
            return sys.argv[sys.argv.index(flag) + 1]
    return None


if len(sys.argv) < 2:
    # if not, print the usage
    print('usage: pituophis [command] cd [options]')
    print('Commands:')
    print(' serve [options]')
    print(' fetch [url] [options]')
    print('Server Options:')
    print(' -H, --host=HOST\t\tAdvertised host (default: 127.0.0.1)')
    print(' -p, --port=PORT\t\tPort to bind to (default: 70)')
    print(' -a, --advertised-port=PORT\tPort to advertise')
    print(' -d, --directory=DIR\t\tDirectory to serve (default: pub/)')
    print(' -A, --alt-handler=HANDLER\tAlternate handler to use if 404 error is generated (python file with it defined as "def alt(request):")')
    print(' -s, --send-period\t\tSend a period at the end of each response (default: False)')
    print(' -D, --debug\t\t\tPrint requests as they are received (default: False)')
    print(' -v, --version\t\t\tPrint version')
    print('Fetch Options:')
    print(' -o, --output=FILE\t\tFile to write to (default: stdout)')
else:
    # check if the user is serving or fetching
    if sys.argv[1] == 'serve':
        # advertised host (default: 127.0.0.1)
        host = _flag_value('-H', '--host')
        if host is None:
            host = '127.0.0.1'
        # port to bind to (default: 70)
        port = _flag_value('-p', '--port')
        port = 70 if port is None else int(port)
        # port to advertise (default: None, i.e. same as bound port)
        advertised_port = _flag_value('-a', '--advertised-port')
        if advertised_port is not None:
            advertised_port = int(advertised_port)
        # directory to serve (default: pub/)
        pub_dir = _flag_value('-d', '--directory')
        if pub_dir is None:
            pub_dir = 'pub/'
        # alternate 404 handler, loaded from the named module
        # NOTE(review): the usage text says the function is named "alt" but
        # the code loads the attribute "handler" -- confirm which is right
        alt_handler = _flag_value('-A', '--alt-handler')
        if alt_handler is None:
            alt_handler = False
        else:
            # get the function from the file
            alt_handler = getattr(
                importlib.import_module(alt_handler), 'handler')
        # send a period at the end of each response
        send_period = '-s' in sys.argv or '--send-period' in sys.argv
        # print requests as they are received
        debug = '-D' in sys.argv or '--debug' in sys.argv
        # NOTE(review): -v/--version is advertised in the usage text but has
        # no implementation here
        # start the server
        pituophis.serve(host=host, port=port, advertised_port=advertised_port,
                        handler=pituophis.handle, pub_dir=pub_dir, alt_handler=alt_handler,
                        send_period=send_period, debug=debug)
    elif sys.argv[1] == 'fetch':
        # url to fetch
        url = sys.argv[2]
        # output file (default: stdout)
        output = _flag_value('-o', '--output')
        if output is None:
            output = 'stdout'
        # start the fetch
        o = pituophis.get(url)
        if output == 'stdout':
            sys.stdout.buffer.write(o.binary)
        else:
            # `with` closes the file; the original's explicit f.close() was
            # redundant and has been removed
            with open(output, 'wb') as f:
                f.write(o.binary)
faa43f92ed4419078ca8c87c5823af07d1f539bd | 1,711 | py | Python | test/synth_mcmc.py | chris-stock/pyglm | fff2e91b0936275f37a7860bd75f867b560f4993 | [
"MIT"
] | 1 | 2017-03-11T19:10:17.000Z | 2017-03-11T19:10:17.000Z | test/synth_mcmc.py | chris-stock/pyglm | fff2e91b0936275f37a7860bd75f867b560f4993 | [
"MIT"
] | null | null | null | test/synth_mcmc.py | chris-stock/pyglm | fff2e91b0936275f37a7860bd75f867b560f4993 | [
"MIT"
] | null | null | null | # Run as script using 'python -m test.synth'
import cPickle
import os
import scipy.io
from models.model_factory import *
from inference.gibbs import gibbs_sample
from utils.avg_dicts import average_list_of_dicts
from synth_harness import initialize_test_harness
from plotting.plot_results import plot_results
from population import Population
def run_synth_test():
    """ Run a test with synthetic data and MCMC inference

    Builds a population/model pair via the shared test harness, optionally
    initializes the sampler state from a pickled standard-GLM MLE fit,
    runs Gibbs sampling, pickles the samples to the results directory, and
    plots the average of the last 20% of samples against the true model.
    (Python 2 code: uses `print` statements and cPickle.)
    """
    options, popn, data, popn_true, x_true = initialize_test_harness()

    # If x0 specified, load x0 from file
    x0 = None
    if options.x0_file is not None:
        with open(options.x0_file, 'r') as f:
            print "Initializing with state from: %s" % options.x0_file
            mle_x0 = cPickle.load(f)
            # HACK: We're assuming x0 came from a standard GLM
            mle_model = make_model('standard_glm', N=data['N'])
            mle_popn = Population(mle_model)
            mle_popn.set_data(data)
            # draw a sample in this population's parameterization, then
            # overwrite it with the converted MLE state
            x0 = popn.sample()
            x0 = convert_model(mle_popn, mle_model, mle_x0, popn, popn.model, x0)

    # Perform inference
    N_samples = 1000
    x_smpls = gibbs_sample(popn, data, x0=x0, N_samples=N_samples)

    # Save results
    results_file = os.path.join(options.resultsDir, 'results.pkl')
    print "Saving results to %s" % results_file
    with open(results_file, 'w') as f:
        cPickle.dump(x_smpls, f, protocol=-1)

    # Plot average of last 20% of samples
    smpl_frac = 0.2
    plot_results(popn,
                 x_smpls[-1*int(smpl_frac*N_samples):],
                 popn_true=popn_true,
                 x_true=x_true,
                 resdir=options.resultsDir)

if __name__ == "__main__":
    run_synth_test()
| 32.903846 | 81 | 0.661017 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 363 | 0.212157 |
faa4586ea3fbf6fea84324823f11f12d5b8ac80d | 1,297 | py | Python | pysh/transforms/autoimport.py | drslump/pysh | 673cdf2b5ea95dc3209cb294bb91cb2f298bb888 | [
"MIT"
] | 3 | 2018-07-09T04:39:24.000Z | 2020-11-27T05:44:56.000Z | pysh/transforms/autoimport.py | drslump/pysh | 673cdf2b5ea95dc3209cb294bb91cb2f298bb888 | [
"MIT"
] | null | null | null | pysh/transforms/autoimport.py | drslump/pysh | 673cdf2b5ea95dc3209cb294bb91cb2f298bb888 | [
"MIT"
] | 1 | 2018-08-02T21:57:11.000Z | 2018-08-02T21:57:11.000Z | """
Every name reference is swapped for a call to ``__autoimport__``, which
will check if it's part of the locals or globals, falling back to trying
an import before giving up.
"""
from importlib import import_module
from ast import NodeTransformer, copy_location, fix_missing_locations, \
AST, Call, Name, Load, Str, keyword
from typing import Any, Union, Dict
__all__ = ['__autoimport__']
class AutoImportTransformer(NodeTransformer):
    """AST pass that wraps every loaded name in an ``__autoimport__`` call."""

    def visit_Name(self, node: Name) -> Union[Name, Call]:
        """Rewrite ``x`` (load context only) into ``__autoimport__('x')``."""
        # store/delete contexts must keep the original Name node untouched
        if not isinstance(node.ctx, Load):
            return node
        wrapper = Call(
            func=Name(id='__autoimport__', ctx=Load()),
            args=[Str(s=node.id)],
            keywords=[],
        )
        copy_location(wrapper, node)
        fix_missing_locations(wrapper)
        return wrapper
def __autoimport__(name: str) -> Any:
import inspect
f_back = inspect.currentframe().f_back #type: ignore
if name in f_back.f_locals:
return f_back.f_locals[name]
if name in f_back.f_globals:
return f_back.f_globals[name]
try:
return import_module(name)
except ImportError:
pass
raise NameError(name)
def parser(node: AST) -> AST:
    """Run the auto-import rewrite over *node* and return the transformed tree."""
    return AutoImportTransformer().visit(node)
| 23.160714 | 72 | 0.652274 | 447 | 0.344641 | 0 | 0 | 0 | 0 | 0 | 0 | 225 | 0.173477 |
faa460343b71b091ee5a98825ebabe0d6f30a3f0 | 4,983 | py | Python | spyre/spyre/widgets/task.py | zhong-lab/optics | 9de1942d9a128183ecb3d360b160b27126e7b8f0 | [
"BSD-2-Clause"
] | 1 | 2022-03-27T07:47:19.000Z | 2022-03-27T07:47:19.000Z | spyre/spyre/widgets/task.py | zhong-lab/optics | 9de1942d9a128183ecb3d360b160b27126e7b8f0 | [
"BSD-2-Clause"
] | null | null | null | spyre/spyre/widgets/task.py | zhong-lab/optics | 9de1942d9a128183ecb3d360b160b27126e7b8f0 | [
"BSD-2-Clause"
] | 4 | 2019-11-08T22:39:04.000Z | 2021-11-05T02:39:37.000Z | from PyQt5 import QtWidgets, QtCore, QtGui
from pyqtgraph import SignalProxy
class TaskWidget(QtWidgets.QWidget):
    """Widget exposing start/stop controls and progress display for a task.

    `task` is expected to provide `progressed`, `exception_raised` and
    `running` signals, be callable to start, and expose `stop()` and a
    `name` attribute -- TODO confirm against the task implementation.
    """

    def __init__(self, task, rate_limit=0.01, parent=None):
        super().__init__(parent=parent)
        self.task = task
        self.init_ui()
        # throttle progress updates through a pyqtgraph SignalProxy so a
        # fast-firing task cannot flood the GUI thread
        proxy_config = {
            'signal': self.task.progressed,
            'delay': 0.01,
            'rateLimit': rate_limit,
            'slot': self.update,
        }
        self.task.exception_raised.connect(lambda: self.update_run_state(state='error'))
        self.task.running.connect(lambda r: self.update_run_state(state='run' if r else 'stop'))
        self.proxy = SignalProxy(**proxy_config)
        self.running = False
        return

    def init_ui(self):
        """Build the static layout: title, play/stop buttons, progress bar,
        and the elapsed-time / state labels."""
        task_label = QtWidgets.QLabel('<h3>{}</h3>'.format(self.task.name))

        control_layout = QtWidgets.QHBoxLayout()
        play_icon = self.style().standardIcon(QtWidgets.QStyle.SP_MediaPlay)
        stop_icon = self.style().standardIcon(QtWidgets.QStyle.SP_MediaStop)
        start_button = QtWidgets.QToolButton()
        stop_button = QtWidgets.QToolButton()
        start_button.clicked.connect(lambda: self.run_state(state='run'))
        stop_button.clicked.connect(lambda: self.run_state(state='stop'))
        start_button.setIcon(play_icon)
        stop_button.setIcon(stop_icon)
        control_layout.addWidget(start_button)
        control_layout.addWidget(stop_button)

        top_layout = QtWidgets.QHBoxLayout()
        top_layout.addWidget(task_label)
        top_layout.addLayout(control_layout)

        self.progress_bar = QtWidgets.QProgressBar()
        self.progress_bar.setTextVisible(True)
        self.time_label = QtWidgets.QLabel()
        self.state_label = QtWidgets.QLabel()

        bottom_layout = QtWidgets.QHBoxLayout()
        bottom_layout.addWidget(self.time_label)
        bottom_layout.addStretch()
        bottom_layout.addWidget(self.state_label)

        outer_layout = QtWidgets.QVBoxLayout()
        outer_layout.addLayout(top_layout)
        outer_layout.addWidget(self.progress_bar)
        outer_layout.addLayout(bottom_layout)
        self.setLayout(outer_layout)
        return

    @QtCore.pyqtSlot(object)
    def update(self, args):
        """Refresh the progress bar and time label from a progress tuple.

        `args` is ``(depth, n, total, elapsed)``; `total` may be None for
        tasks of unknown length, in which case the bar is switched to
        indeterminate (busy) mode.  No-op unless the task is running.
        """
        if not self.running:
            return
        depth, n, total, elapsed = args
        if total is not None:
            iter_word = 'iterations' if total > 1 else 'iteration'
            pct = 100 * n / total
            if n:
                # linear extrapolation of total run time from progress so far
                tot_time = (elapsed / n) * total
                rem_time = tot_time - elapsed
            else:
                rem_time = None
            progfmt = "{completed} of {total} {iter_word} complete ({pct:1.1f}%)"
            self.progress_bar.setFormat(progfmt.format(**{
                'completed': n,
                'total': total,
                'iter_word': iter_word,
                'pct': pct,
            }))
            self.progress_bar.setRange(0, 100)
            self.progress_bar.setValue(pct)
            timefmt = "{elapsed:s} elapsed" if rem_time is None else "{elapsed:s} elapsed; {rem:s} remaining"
            self.time_label.setText(timefmt.format(**{
                'elapsed': readable_seconds(elapsed),
                'rem': readable_seconds(rem_time) if rem_time is not None else None,
            }))
        else:
            iter_word = 'iterations' if n > 1 else 'iteration'
            progfmt = "{completed} {iter_word} complete"
            # range (0, 0) puts QProgressBar into indeterminate mode
            self.progress_bar.setRange(0, 0)
            self.progress_bar.setValue(0)
            self.progress_bar.setFormat(progfmt.format(**{
                'completed': n,
                'iter_word': iter_word,
            }))
            timefmt = "{elapsed:s} elapsed"
            self.time_label.setText(timefmt.format(**{
                'elapsed': readable_seconds(elapsed),
            }))
        return

    def run_state(self, state):
        """Start or stop the task in response to the toolbar buttons."""
        if state == 'run':
            self.task()
            self.running = True
        if state == 'error':
            pass
        if state == 'stop':
            self.task.stop()
            self.running = False
        self.update_run_state(state)
        return

    def update_run_state(self, state):
        """Reflect a run-state change ('run' / 'error' / 'stop') in the UI."""
        if state == 'run':
            self.state_label.setText('Running')
        if state == 'error':
            self.state_label.setText('Exception encountered')
        if state == 'stop':
            self.state_label.setText('Stopped')
            # reset the bar out of busy mode and back to zero
            self.progress_bar.setRange(0, 100)
            self.progress_bar.setValue(0)
        return
def readable_seconds(seconds):
    """Format a duration in seconds as e.g. '1 h 2 m 3 s'.

    Fractional seconds are truncated; zero (after truncation) yields '0 s'.
    Zero-valued components are omitted from the output.
    """
    total = int(seconds)
    if not total:
        return '0 s'
    hours, remainder = divmod(total, 3600)
    mins, secs = divmod(remainder, 60)
    parts = []
    if hours:
        parts.append('{} h'.format(hours))
    if mins:
        parts.append('{} m'.format(mins))
    if secs:
        parts.append('{} s'.format(secs))
    return ' '.join(parts)
| 35.848921 | 109 | 0.589003 | 4,489 | 0.900863 | 0 | 0 | 1,686 | 0.33835 | 0 | 0 | 483 | 0.09693 |
faa59189fcb0db287ca016ded409105514d4c263 | 15,742 | py | Python | tests/test_fluids_bw92.py | trhallam/digirock | 05b1199d741a384345a4930605be97369c9ec270 | [
"MIT"
] | null | null | null | tests/test_fluids_bw92.py | trhallam/digirock | 05b1199d741a384345a4930605be97369c9ec270 | [
"MIT"
] | 2 | 2022-02-28T08:51:53.000Z | 2022-02-28T13:24:33.000Z | tests/test_fluids_bw92.py | trhallam/digirock | 05b1199d741a384345a4930605be97369c9ec270 | [
"MIT"
] | null | null | null | """Test functions for pem.fluid.bw92 module
"""
import pytest
from pytest import approx
from _pytest.fixtures import SubRequest
from hypothesis import given, settings, strategies as st
import numpy as np
import digirock.fluids.bw92 as bw92
from .strategies import n_varshp_arrays
@pytest.fixture(scope="module")
def tol():
return {
"rel": 0.05, # relative testing tolerance in percent
"abs": 0.00001, # absolute testing tolerance
}
def test_GAS_R():
    # universal gas constant in J/(mol K), to the precision used by the module
    assert bw92.GAS_R == 8.31441
# p (MPa), t (degC)
@pytest.mark.parametrize(
"args,ans", (((10 * 1e6, 273.15), 0.00045422), ((50 * 1e6, 373.15), 0.00010747))
)
@given(data=st.data())
def test_gas_vmol(args, ans, data, tol):
(test_p, test_t), result_shape = data.draw(n_varshp_arrays(args))
test = bw92.gas_vmol(test_t, test_p)
assert np.allclose(test, ans, rtol=tol["rel"])
assert test.shape == result_shape
# p (MPa), t (degC), m (methane molecular weight)
@pytest.mark.parametrize(
"args,ans",
(
((10 * 1e6, 273.15, 16.04), 35313.5783218),
((50 * 1e6, 373.15, 16.04), 149248.08786351),
),
)
@given(data=st.data())
def test_gas_density(args, ans, data, tol):
(test_p, test_t, test_m), result_shape = data.draw(n_varshp_arrays(args))
test = bw92.gas_density(test_m, test_t, test_p)
assert np.allclose(test, ans, rtol=tol["rel"])
assert test.shape == result_shape
# p (MPa), t (degC)
@pytest.mark.parametrize(
"args,ans",
(((np.r_[10 * 1e6, 50 * 1e6], 273.15), 2e-08),),
)
def test_gas_isotherm_comp(args, ans, tol):
v1, v2 = bw92.gas_vmol(args[1], args[0])
assert bw92.gas_isotherm_comp(v1, v2, args[0][0], args[0][1]) == approx(ans)
# t (degC), m (methane molecular weight),
@pytest.mark.parametrize(
"args,ans",
(
((273.15, 16.04), 16.8278695),
((373.15, 16.04), 18.30335126),
),
)
@given(data=st.data())
def test_gas_isotherm_vp(args, ans, data, tol):
(test_t, test_m), result_shape = data.draw(n_varshp_arrays(args))
test = bw92.gas_isotherm_vp(test_m, test_t)
assert np.allclose(test, ans, rtol=tol["rel"])
assert test.shape == result_shape
# G spec grav
@pytest.mark.parametrize(
"args,ans",
(((0.56,), 4.665312),), # methane
)
@given(data=st.data())
def test_gas_pseudocrit_pres(args, ans, data, tol):
(test_G,), result_shape = data.draw(n_varshp_arrays(args))
test = bw92.gas_pseudocrit_pres(test_G)
assert np.allclose(test, ans, rtol=tol["rel"])
assert test.shape == result_shape
# fmt: off
# p (MPa), G spec grav
@pytest.mark.parametrize(
"args,ans",
(
((10 * 1e6, 0.56,), 2143479.36429546,),
((50 * 1e6, 0.56), 10717396.82147732),
),
)
# fmt: on
@given(data=st.data())
def test_gas_pseudored_pres(args, ans, data, tol):
(
test_p,
test_G,
), result_shape = data.draw(n_varshp_arrays(args))
test = bw92.gas_pseudored_pres(test_p, test_G)
assert np.allclose(test, ans, rtol=tol["rel"])
assert test.shape == result_shape
# G spec grav
@pytest.mark.parametrize(
"args,ans",
(((0.56,), 190.34),), # methane
)
@given(data=st.data())
def test_gas_pseudocrit_temp(args, ans, data, tol):
(test_G,), result_shape = data.draw(n_varshp_arrays(args))
test = bw92.gas_pseudocrit_temp(test_G)
assert np.allclose(test, ans, rtol=tol["rel"])
assert test.shape == result_shape
# t (degC), G (spec grav)
# fmt: off
@pytest.mark.parametrize(
"args,ans",
(
((273.15, 0.56,), 2.87012714,),
((373.15, 0.56), 3.39550278),
),
)
# fmt: on
@given(data=st.data())
def test_gas_pseudored_temp(args, ans, data, tol):
(
test_t,
test_G,
), result_shape = data.draw(n_varshp_arrays(args))
test = bw92.gas_pseudored_temp(test_t, test_G)
assert np.allclose(test, ans, rtol=tol["rel"])
assert test.shape == result_shape
# p (MPa), t (degC), G (spec grav)
@pytest.mark.parametrize(
"args,ans",
(
((10 * 1e6, 273.15, 0.56), 0.5289487894),
((50 * 1e6, 373.15, 0.56), 0.46664469),
),
)
@given(data=st.data())
def test_gas_oga_density(args, ans, data, tol):
(
test_p,
test_t,
test_G,
), result_shape = data.draw(n_varshp_arrays(args))
test = bw92.gas_oga_density(test_t, test_p, test_G)
assert np.allclose(test, ans, rtol=tol["rel"])
assert test.shape == result_shape
def test_gas_oga_density_warning():
    # these inputs should trigger a UserWarning -- presumably because they
    # fall outside the correlation's validity range; confirm against the
    # gas_oga_density implementation
    with pytest.warns(UserWarning):
        bw92.gas_oga_density(4.5, 4.5, 1)
# p (MPa), t (degC), G (spec grav)
@pytest.mark.parametrize(
"args,ans",
(
((10 * 1e6, 273.15, 0.56), 673174274.6197122),
((50 * 1e6, 373.15, 0.56), 1.87375111e10),
),
)
@given(data=st.data())
def test_gas_adiabatic_bulkmod(args, ans, data, tol):
(
test_p,
test_t,
test_G,
), result_shape = data.draw(n_varshp_arrays(args))
test = bw92.gas_adiabatic_bulkmod(test_t, test_p, test_G)
assert np.allclose(test, ans, rtol=tol["rel"])
assert test.shape == result_shape
# p (MPa), t (degC), G (spec grav)
@pytest.mark.parametrize(
"args,ans",
(
((10 * 1e6, 273.15, 0.56), 0.0204339351378),
((50 * 1e6, 373.15, 0.56), 0.03011878),
),
)
@given(data=st.data())
def test_gas_adiabatic_viscosity(args, ans, data, tol):
(
test_p,
test_t,
test_G,
), result_shape = data.draw(n_varshp_arrays(args))
test = bw92.gas_adiabatic_viscosity(test_t, test_p / 1e6, test_G)
assert np.allclose(test, ans, rtol=tol["rel"])
assert test.shape == result_shape
# p (MPa), rho (g/cc)
@pytest.mark.parametrize(
"args,ans",
(
((10 * 1e6, 0.8), 0.8068623025),
((10 * 1e6, 0.9), 0.90521056),
((50 * 1e6, 0.8), 0.83179781),
((50 * 1e6, 0.9), 0.92477031),
),
)
@given(data=st.data())
def test_oil_isothermal_density(args, ans, data, tol):
(
test_p,
test_rho,
), result_shape = data.draw(n_varshp_arrays(args))
test = bw92.oil_isothermal_density(test_rho, test_p / 1e6)
assert np.allclose(test, ans, rtol=tol["rel"])
assert test.shape == result_shape
# p (MPa), rho (g/cc), t (degC)
@pytest.mark.parametrize(
"args,ans",
(
((10 * 1e6, 0.8, 273.15), 0.63475419),
((10 * 1e6, 0.9, 273.15), 0.71212423),
((50 * 1e6, 0.8, 273.15), 0.65437082),
((50 * 1e6, 0.9, 273.15), 0.72751178),
((10 * 1e6, 0.8, 373.15), 0.57827437),
((10 * 1e6, 0.9, 373.15), 0.6487601),
((50 * 1e6, 0.8, 373.15), 0.59614553),
((50 * 1e6, 0.9, 373.15), 0.65437082),
),
)
@given(data=st.data())
def test_oil_density(args, ans, data, tol):
(
test_p,
test_rho,
test_t
), result_shape = data.draw(n_varshp_arrays(args))
test = bw92.oil_density(test_rho, test_p / 1e6, test_t)
assert np.allclose(test, ans, rtol=tol["rel"])
assert test.shape == result_shape
# rho (g/cc), G (spec_grav), rg (L/L), t (degC)
@pytest.mark.parametrize(
"args,ans",
(
((0.8, 0.56, 120, 273.15), 1.57823582),
)
)
@given(data=st.data())
def test_oil_fvf(args, ans, data, tol):
(
test_rho,
test_G,
test_rg,
test_t,
), result_shape = data.draw(n_varshp_arrays(args))
test = bw92.oil_fvf(test_rho, test_G, test_rg, test_t)
assert np.allclose(test, ans, rtol=tol["rel"])
assert test.shape == result_shape
# rho (g/cc), G (spec_grav), p (MPa), t (degC)
@pytest.mark.parametrize(
"args,ans",
(((0.8, 0.6, 50, 100), 415.709664),)
)
@given(data=st.data())
def test_oil_rg_rho(args, ans, data, tol):
(
test_rho,
test_G,
test_p,
test_t,
), result_shape = data.draw(n_varshp_arrays(args))
test = bw92.oil_rg(test_rho, test_G, test_p, test_t, mode="rho")
assert np.allclose(test, ans, rtol=tol["rel"])
assert test.shape == result_shape
# rho (g/cc), G (spec_grav), p (MPa), t (degC)
@pytest.mark.parametrize(
"args,ans",
(((45, 0.6, 50, 100), 415.709664),)
)
@given(data=st.data())
def test_oil_rg_api(args, ans, data, tol):
(
test_rho,
test_G,
test_p,
test_t,
), result_shape = data.draw(n_varshp_arrays(args))
test = bw92.oil_rg(test_rho, test_G, test_p, test_t, mode="api")
assert np.allclose(test, ans, rtol=tol["rel"])
assert test.shape == result_shape
def test_oil_rg_bad_mode():
    # any mode other than the 'rho'/'api' values used above must be rejected
    with pytest.raises(ValueError):
        assert bw92.oil_rg(1, 1, 1, 1, mode="bad_mode")
# rho0, g, rg, b0
@pytest.mark.parametrize(
"args,ans",
(
((0.8, 0.6, 50, 1.1), 0.76),
((0.9, 0.6, 70, 1.1), 0.864),
((0.9, 0.6, 70, 0.0), 0.0)
),
)
@given(data=st.data())
def test_oil_rho_sat(args, ans, data, tol):
(
test_rho0,
test_g,
test_rg,
test_b0,
), result_shape = data.draw(n_varshp_arrays(args))
test = bw92.oil_rho_sat(test_rho0, test_g, test_rg, test_b0)
assert np.allclose(test, ans, rtol=tol["rel"])
assert test.shape == result_shape
# rho0, rg, b0
@pytest.mark.parametrize(
"args,ans",
(
((0.8, 50, 1.1), 0.69264069264),
((0.9, 70, 1.1), 0.764655904),
((0.9, 70, 0.0), 0.0)),
)
@given(data=st.data())
def test_oil_rho_pseudo(args, ans, data, tol):
(
test_rho0,
test_rg,
test_b0,
), result_shape = data.draw(n_varshp_arrays(args))
test = bw92.oil_rho_pseudo(test_rho0, test_rg, test_b0)
assert np.allclose(test, ans, rtol=tol["rel"])
assert test.shape == result_shape
# rho0,p,t,g,rg,b0
@pytest.mark.parametrize(
"args,ans",
(
((0.8, 50, 100, 0.6, 120), 1101.21832685),
),
)
@given(data=st.data())
def test_oil_velocity_nobo(args, ans, data, tol):
(
test_rho0,
test_p,
test_t,
test_g,
test_rg,
), result_shape = data.draw(n_varshp_arrays(args))
test = bw92.oil_velocity(test_rho0, test_p, test_t, test_g, test_rg, b0=None)
assert np.allclose(test, ans, rtol=tol["rel"])
assert test.shape == result_shape
# rho0,p,t,g,rg,b0
@pytest.mark.parametrize(
"args,ans",
(
((0.8, 50, 100, 0.6, 120), 1206.74469093),
),
)
@given(data=st.data())
def test_oil_velocity_bo(args, ans, data, tol):
(
test_rho0,
test_p,
test_t,
test_g,
test_rg,
), result_shape = data.draw(n_varshp_arrays(args))
test = bw92.oil_velocity(test_rho0, test_p, test_t, test_g, test_rg, b0=np.r_[1.1])
assert np.allclose(test, ans, rtol=tol["rel"])
# assert np.squeeze(test).shape == result_shape
# rho, vp
@pytest.mark.parametrize(
"args,ans",
(
((0.8, 1200), 1.152),
),
)
@given(data=st.data())
def test_oil_bulkmod(args, ans, data, tol):
(
test_rho,
test_vp,
), result_shape = data.draw(n_varshp_arrays(args))
test = bw92.oil_bulkmod(test_rho, test_vp)
assert np.allclose(test, ans, rtol=tol["rel"])
assert test.shape == result_shape
# p (MPa), t (degC)
@pytest.mark.parametrize(
"args,ans", (((10 * 1e6, 273.15), 1063.23709), ((50 * 1e6, 373.15), 847.72401465))
)
@given(data=st.data())
@settings(deadline=None) # due to njit
def test_wat_velocity_pure(args, ans, data, tol):
(
test_p,
test_t,
), result_shape = data.draw(n_varshp_arrays(args))
test = bw92.wat_velocity_pure(test_t, test_p / 1e6)
assert np.allclose(test, ans, rtol=tol["rel"])
assert test.shape == result_shape
# p (MPa), t (degC), sal (ppm)
@pytest.mark.parametrize(
"args,ans", (((10 * 1e6, 273.15, 32000), 1095.70072), ((50 * 1e6, 373.15, 150000), 980.48475247))
)
@given(data=st.data())
@settings(deadline=None) # due to njit
def test_wat_velocity_brine(args, ans, data, tol):
(
test_p,
test_t,
test_sal
), result_shape = data.draw(n_varshp_arrays(args))
test = bw92.wat_velocity_brine(test_t, test_p / 1e6, test_sal / 1e6)
assert np.allclose(test, ans, rtol=tol["rel"])
# p (MPa), t (degC)
@pytest.mark.parametrize(
"args,ans", (((10 * 1e6, 273.15), 0.77622433), ((50 * 1e6, 373.15), 0.66363597))
)
@given(data=st.data())
def test_wat_density_pure(args, ans, data, tol):
(
test_p,
test_t,
), result_shape = data.draw(n_varshp_arrays(args))
test = bw92.wat_density_pure(test_t, test_p / 1e6)
assert np.allclose(test, ans, rtol=tol["rel"])
assert test.shape == result_shape
# p (MPa), t (degC), sal (ppm)
@pytest.mark.parametrize(
"args,ans", (((10 * 1e6, 273.15, 32000), 0.80405636), ((50 * 1e6, 373.15, 150000), 0.79606398))
)
@given(data=st.data())
def test_wat_density_brine(args, ans, data, tol):
(
test_p,
test_t,
test_sal
), result_shape = data.draw(n_varshp_arrays(args))
test = bw92.wat_density_brine(test_t, test_p / 1e6, test_sal / 1e6)
assert np.allclose(test, ans, rtol=tol["rel"])
# p (MPa), t (degC), sal (ppm)
@pytest.mark.parametrize(
    "args,ans", (((10 * 1e6, 273.15, 32000), None), ((50 * 1e6, 373.15, 150000), None))
)
def test_wat_salinity_brine(args, ans):
    """Round-trip check: the density computed from a salinity should invert
    back to that salinity (within 250 ppm)."""
    (
        test_p,
        test_t,
        test_sal
    ) = args
    test_den = bw92.wat_density_brine(test_t, test_p / 1e6, test_sal / 1e6)
    test = bw92.wat_salinity_brine(test_t, test_p / 1e6, test_den) * 1e6
    assert test == approx(test_sal, abs=250)
# rho (g/cc), v (m/s)
@pytest.mark.parametrize(
"args,ans", (((1.0, 1300), 1.69), ((1.1, 1450), 2.31275))
)
@given(data=st.data())
def test_wat_bulkmod(args, ans, data, tol):
(
test_rho,
test_vp,
), result_shape = data.draw(n_varshp_arrays(args))
test = bw92.wat_bulkmod(test_rho, test_vp)
assert np.allclose(test, ans, rtol=tol["rel"])
assert test.shape == result_shape
# rho (g/cc), v (vfrac), ...
@pytest.mark.parametrize(
"args,ans",
(
((2, 0.5, 2, 0.5), 2),
((2, 0.5, 1, 0.5), 1.5),
((2, 0.3, 2, 0.3, 2,), 2),
((2, 0.5, 1, 0.5, 2,), 1.5),
)
)
@given(data=st.data())
def test_mixed_density(args, ans, data, tol):
args, result_shape = data.draw(n_varshp_arrays(args))
test = bw92.mixed_density(*args)
assert np.allclose(test, ans, rtol=tol["rel"])
assert test.shape == result_shape
# rho (g/cc), v (m/s)
@pytest.mark.parametrize(
"args,ans", (((1.0, 1300), 1.69), ((1.1, 1450), 2.31275))
)
@given(data=st.data())
def test_bulkmod(args, ans, data, tol):
(
test_rho,
test_vp,
), result_shape = data.draw(n_varshp_arrays(args))
test = bw92.bulkmod(test_rho, test_vp)
assert np.allclose(test, ans, rtol=tol["rel"])
assert test.shape == result_shape
# k (GPa), v (vfrac), ...
@pytest.mark.parametrize(
"args,ans",
(
((2, 0.5, 2, 0.5), 2),
((2, 0.5, 1, 0.5), 1.3333),
((2, 0.3, 2, 0.3, 2,), 2),
((2, 0.5, 1, 0.5, 2,), 1.3333),
)
)
@given(data=st.data())
def test_woods_bulkmod(args, ans, data, tol):
args, result_shape = data.draw(n_varshp_arrays(args))
test = bw92.woods_bulkmod(*args)
assert np.allclose(test, ans, rtol=tol["rel"])
assert test.shape == result_shape
| 28.363964 | 102 | 0.581629 | 0 | 0 | 0 | 0 | 14,100 | 0.895693 | 0 | 0 | 1,556 | 0.098844 |
faa609968ff5a4b42dcd92861e9a5c5ca3e91929 | 3,103 | py | Python | apps/site/api/views/abstract_views.py | LocalGround/localground | aa5a956afe7a84a7763a3b23d62a9fd925831cd7 | [
"Apache-2.0"
] | 9 | 2015-05-29T22:22:20.000Z | 2022-02-01T20:39:00.000Z | apps/site/api/views/abstract_views.py | LocalGround/localground | aa5a956afe7a84a7763a3b23d62a9fd925831cd7 | [
"Apache-2.0"
] | 143 | 2015-01-22T15:03:40.000Z | 2020-06-27T01:55:29.000Z | apps/site/api/views/abstract_views.py | LocalGround/localground | aa5a956afe7a84a7763a3b23d62a9fd925831cd7 | [
"Apache-2.0"
] | 5 | 2015-03-16T20:51:49.000Z | 2017-02-07T20:48:49.000Z | from localground.apps.lib.helpers import get_timestamp_no_milliseconds
from localground.apps.site.api import filters
from localground.apps.site import models
from rest_framework import generics, status, exceptions
from localground.apps.site.api.serializers.user_profile_serializer import \
UserProfileSerializer
from django.core.exceptions import ValidationError
from rest_framework.exceptions import APIException
from localground.apps.site.api.permissions import \
CheckProjectPermissions, CheckUserCanPostToProject
class QueryableListCreateAPIView(generics.ListCreateAPIView):
    """List/create view whose metadata includes the available query filters."""
    def metadata(self, request):
        """Extend the parent metadata payload with a list of available filters."""
        # imported lazily, mirroring the original module layout
        from localground.apps.lib.helpers import QueryParser
        from django.utils.datastructures import SortedDict
        meta = SortedDict(
            super(QueryableListCreateAPIView, self).metadata(request)
        )
        try:
            parser = QueryParser(self.model, request.GET.get('query'))
            meta['filters'] = parser.to_dict_list()
        except Exception:
            # a malformed or absent query simply yields no filter metadata
            pass
        return meta
class QueryableListAPIView(generics.ListAPIView):
    """Read-only list view whose metadata includes the available query filters."""
    def metadata(self, request):
        """Extend the parent metadata payload with a list of available filters."""
        # imported lazily, mirroring the original module layout
        from localground.apps.lib.helpers import QueryParser
        from django.utils.datastructures import SortedDict
        meta = SortedDict(
            super(QueryableListAPIView, self).metadata(request)
        )
        try:
            parser = QueryParser(self.model, request.GET.get('query'))
            meta['filters'] = parser.to_dict_list()
        except Exception:
            # a malformed or absent query simply yields no filter metadata
            pass
        return meta
class QueryableRetrieveUpdateDestroyView(
        generics.RetrieveUpdateDestroyAPIView):
    """Detail view whose metadata includes the available query filters."""
    def metadata(self, request):
        """Extend the parent metadata payload with a list of available filters.

        Mirrors the metadata() overrides on the two Queryable list views above.
        """
        from localground.apps.lib.helpers import QueryParser
        from django.utils.datastructures import SortedDict
        # BUGFIX: super() previously named QueryableListCreateAPIView, which is
        # not in this class's MRO, so calling metadata() raised
        # "TypeError: super(type, obj): obj must be an instance or subtype of type"
        ret = super(QueryableRetrieveUpdateDestroyView, self).metadata(request)
        ret = SortedDict(ret)
        try:
            query = QueryParser(self.model, request.GET.get('query'))
            ret['filters'] = query.to_dict_list()
        except Exception:
            # a malformed or absent query simply yields no filter metadata
            pass
        return ret
class MediaList(QueryableListCreateAPIView):
    """List/create endpoint for media objects, filtered per project and user."""
    # SQL filtering plus a mandatory project filter on every request
    filter_backends = (filters.SQLFilterBackend, filters.RequiredProjectFilter)
    permission_classes = (CheckProjectPermissions, CheckUserCanPostToProject)
    # file-extension whitelist; empty here -- presumably subclasses restrict it
    ext_whitelist = []
    def get_queryset(self):
        """Return the user's own objects, or public ones for anonymous access."""
        # NOTE(review): is_authenticated is *called* here, which matches
        # Django < 1.10; on newer Django it is a property -- confirm version
        if self.request.user.is_authenticated():
            return self.model.objects.get_objects(self.request.user)
        else:
            # anonymous users may only see public objects, keyed by access_key
            return self.model.objects.get_objects_public(
                access_key=self.request.GET.get('access_key')
            )
class MediaInstance(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve/update/delete endpoint for a single media object."""
    def get_queryset(self):
        """Return all objects with their owner pre-fetched in the same query."""
        return self.model.objects.select_related('owner').all()
| 36.081395 | 79 | 0.705769 | 2,563 | 0.825975 | 0 | 0 | 0 | 0 | 0 | 0 | 355 | 0.114405 |
faa6a4c65241a389b815a30946e018a7f9d065de | 1,859 | py | Python | mymoney/apps/banktransactiontags/tests/test_models.py | ychab/mymoney | 9ee665d40648fd0b95f6e90d82ccf2bfc791e8af | [
"BSD-3-Clause"
] | 67 | 2015-12-22T10:27:34.000Z | 2022-03-10T21:33:18.000Z | mymoney/apps/banktransactiontags/tests/test_models.py | clebercarmo/mymoney | 9ee665d40648fd0b95f6e90d82ccf2bfc791e8af | [
"BSD-3-Clause"
] | null | null | null | mymoney/apps/banktransactiontags/tests/test_models.py | clebercarmo/mymoney | 9ee665d40648fd0b95f6e90d82ccf2bfc791e8af | [
"BSD-3-Clause"
] | 35 | 2016-08-07T11:43:51.000Z | 2022-02-21T21:20:06.000Z | import unittest
from mymoney.apps.bankaccounts.factories import BankAccountFactory
from mymoney.core.factories import UserFactory
from ..factories import BankTransactionTagFactory
from ..models import BankTransactionTag
class ManagerTestCase(unittest.TestCase):
    """Tests for BankTransactionTag.objects.get_user_tags_queryset()."""
    def setUp(self):
        # one tag per user so ownership filtering can be observed
        self.owner = UserFactory(username='owner')
        self.not_owner = UserFactory(username='not_owner')
        self.banktransactiontags = [
            BankTransactionTagFactory(owner=self.owner),
            BankTransactionTagFactory(owner=self.not_owner),
        ]
    def tearDown(self):
        # wipe all users and tags so tests do not leak state into each other
        UserFactory._meta.model.objects.all().delete()
        BankTransactionTagFactory._meta.model.objects.all().delete()
    def test_get_user_tags_without_bankaccount(self):
        # only the user's own tag is returned when no bank account is shared
        tags = BankTransactionTag.objects.get_user_tags_queryset(self.owner)
        self.assertListEqual(
            [self.banktransactiontags[0].pk],
            sorted([tag.pk for tag in tags])
        )
    def test_get_user_tags_with_bankaccount(self):
        # tags of co-owners of a shared bank account are included as well
        superowner = UserFactory(username='superowner', user_permissions='admin')
        banktransactiontag = BankTransactionTagFactory(owner=superowner)
        BankAccountFactory(owners=[self.owner, superowner])
        tags = BankTransactionTag.objects.get_user_tags_queryset(self.owner)
        # NOTE(review): the expected ordering assumes auto-increment pks
        # created in sequence -- confirm if the pk strategy changes
        self.assertListEqual(
            [
                self.banktransactiontags[0].pk,
                banktransactiontag.pk,
            ],
            sorted([tag.pk for tag in tags])
        )
class RelationshipTestCase(unittest.TestCase):
    """Tests for the tag -> owner foreign-key cascade behaviour."""
    def test_delete_owner(self):
        # deleting a user must cascade and delete their tags
        owner = UserFactory(username='owner')
        banktransactiontag = BankTransactionTagFactory(owner=owner)
        owner.delete()
        with self.assertRaises(BankTransactionTag.DoesNotExist):
            banktransactiontag.refresh_from_db()
| 32.051724 | 81 | 0.692846 | 1,631 | 0.877353 | 0 | 0 | 0 | 0 | 0 | 0 | 44 | 0.023669 |
faa7367cd6852d4396d5b354611f83726f61a9ab | 690 | py | Python | users/migrations/0017_auto_20190828_1311.py | dhanupandey12/Blog | fcd274b7249c255786b46cf81d6e949a903e9a53 | [
"MIT"
] | null | null | null | users/migrations/0017_auto_20190828_1311.py | dhanupandey12/Blog | fcd274b7249c255786b46cf81d6e949a903e9a53 | [
"MIT"
] | null | null | null | users/migrations/0017_auto_20190828_1311.py | dhanupandey12/Blog | fcd274b7249c255786b46cf81d6e949a903e9a53 | [
"MIT"
] | null | null | null | # Generated by Django 2.1.5 on 2019-08-28 07:41
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated by Django makemigrations; applied migrations are part of
    # the schema history and must not be edited after deployment.
    dependencies = [
        ('users', '0016_friends'),
    ]
    operations = [
        # human-readable admin names for the Friends model
        migrations.AlterModelOptions(
            name='friends',
            options={'verbose_name': 'Friend List', 'verbose_name_plural': 'Friend List'},
        ),
        # human-readable admin names for the MessageData model
        migrations.AlterModelOptions(
            name='messagedata',
            options={'verbose_name': 'Messages', 'verbose_name_plural': 'Messages'},
        ),
        # rename field Friends.friends -> Friends.friendList
        migrations.RenameField(
            model_name='friends',
            old_name='friends',
            new_name='friendList',
        ),
    ]
| 25.555556 | 90 | 0.575362 | 605 | 0.876812 | 0 | 0 | 0 | 0 | 0 | 0 | 236 | 0.342029 |
faaa7770b0b932ab885702144bdccdde0f72ae42 | 819 | py | Python | NLP/Platsbanken/CustomStopwords.py | lambda-snail/PythonMix | f2308a9a9cc1caa120b45de15dfa58b3c807d498 | [
"MIT"
] | null | null | null | NLP/Platsbanken/CustomStopwords.py | lambda-snail/PythonMix | f2308a9a9cc1caa120b45de15dfa58b3c807d498 | [
"MIT"
] | null | null | null | NLP/Platsbanken/CustomStopwords.py | lambda-snail/PythonMix | f2308a9a9cc1caa120b45de15dfa58b3c807d498 | [
"MIT"
] | null | null | null | """ A list of stopwords to filter out, in addition to those that are already being filtered by the built-in toolkit. """
# Extra Swedish/English stopwords for job-ad text; membership is what matters,
# so the duplicate 'fler' entry present in the original list was removed.
CustomStopwords = ['alltid', 'fler', 'ta', 'tar', 'sker', 'redan', 'who', 'what', 'gilla', 'big', 'something',
                   'cv', 'snart', 'minst', 'kunna', '000', 'hr-plus', 'enligt', 'is/it', 'vill', 'samt',
                   'tjänsten', 'kommer', 'hos', 'goda', 'person', 'tror', 'skapa', 'ge', 'ger', 'sitta', 'sitter', 'sitt',
                   'även', 'del', 'ansökan', 'söker', 'både', 'arbeta', 'bland', 'annat', 'års', 'göra', 'gör', 'rätt',
                   'många']
# Punctuation tokens emitted by the tokenizer that carry no meaning.
Punctuation = ['”', '·', '’', "''", '--', '-', '–']
# Employer names that would otherwise dominate term frequencies.
CompanyNames = ['visma', 'tietoevry', 'columbus', 'barkfors', '//www.instagram.com/columbussverige/']
# URL scheme tokens.
Technologies = ['http', 'https']
| 58.5 | 123 | 0.5116 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 555 | 0.663876 |
faaaa6a2ed4e84a8136fbdc2d54d77b3af907a93 | 5,612 | py | Python | root_gnn/trainer.py | Calvin-Qiu/TopReco | e9f7f0067530bc9140cd5ca51a134882fe259dbd | [
"Apache-2.0"
] | null | null | null | root_gnn/trainer.py | Calvin-Qiu/TopReco | e9f7f0067530bc9140cd5ca51a134882fe259dbd | [
"Apache-2.0"
] | null | null | null | root_gnn/trainer.py | Calvin-Qiu/TopReco | e9f7f0067530bc9140cd5ca51a134882fe259dbd | [
"Apache-2.0"
] | null | null | null | import tensorflow as tf
from tensorflow.compat.v1 import logging
logging.set_verbosity("INFO")
logging.info("TF Version:{}".format(tf.__version__))
try:
import horovod.tensorflow as hvd
no_horovod = False
except ModuleNotFoundError:
logging.warning("No horvod module, cannot perform distributed training")
no_horovod = True
import os
import six
from types import SimpleNamespace
import pprint
import time
import functools
import numpy as np
from tensorflow.python.profiler import profiler_v2 as profiler
from graph_nets import utils_tf
from graph_nets import utils_np
import sonnet as snt
from root_gnn.utils import load_yaml
from root_gnn.src.datasets import graph
from root_gnn import model as all_models
from root_gnn import losses
verbosities = ['DEBUG','ERROR', "FATAL", "INFO", "WARN"]
printer = pprint.PrettyPrinter(indent=2)
def read_dataset(filenames):
    """Resolve *filenames* (a glob pattern) and build a parsed TFRecord dataset.

    Each record is mapped through graph.parse_tfrec_function with autotuned
    parallelism.

    :param filenames: glob pattern(s) accepted by tf.io.gfile.glob
    :return: (dataset, matched_filenames) tuple
    """
    AUTO = tf.data.experimental.AUTOTUNE
    tr_filenames = tf.io.gfile.glob(filenames)
    # (the previously computed, unused `n_files` local was removed)
    dataset = tf.data.TFRecordDataset(tr_filenames)
    dataset = dataset.map(graph.parse_tfrec_function, num_parallel_calls=AUTO)
    return dataset, tr_filenames
def loop_dataset(datasets, batch_size):
    """Yield (inputs, targets) pairs from *datasets*, optionally batched.

    With batch_size > 0, consecutive pairs are concatenated along axis 0 into
    batches of exactly batch_size items; a trailing partial batch is dropped.
    With batch_size <= 0, items are yielded unchanged one at a time.
    """
    if batch_size > 0:
        in_list = []
        target_list = []
        for dataset in datasets:
            inputs_tr, targets_tr = dataset
            in_list.append(inputs_tr)
            target_list.append(targets_tr)
            if len(in_list) == batch_size:
                yield (utils_tf.concat(in_list, axis=0),
                       utils_tf.concat(target_list, axis=0))
                # BUGFIX: reset the accumulators; previously they were never
                # cleared, so len(in_list) could never equal batch_size again
                # and only the first batch was ever yielded
                in_list = []
                target_list = []
    else:
        for dataset in datasets:
            yield dataset
class TrainerBase(object):
    """Base trainer wiring a model, optimizer and TFRecord datasets together.

    Subclasses are expected to set ``self.model``/``self.loss_fcn``, provide a
    ``self.ckpt_manager`` checkpoint manager, and implement the ``apply`` and
    ``update_metrics`` hooks used during validation.
    """
    def __init__(self, input_dir, output_dir, lr,
                 batch_size, num_epochs,
                 num_iters,
                 decay_lr=True,  # if to use decay learning rate...
                 decay_lr_start_epoch=10,
                 patterns='*', distributed=False, verbose="INFO", *args, **kwargs):
        self.model = None
        self.loss_fcn = None
        self.num_iters = num_iters
        # datasets
        self.input_dir = input_dir
        self.output_dir = output_dir
        # BUGFIX: batch_size was accepted but never stored, while
        # input_signature() and the loop_dataset() calls read
        # self.train_batch_size -- causing AttributeError/TypeError at runtime
        self.train_batch_size = batch_size
        # create optimizer
        self.init_lr = lr
        self.lr = tf.Variable(lr, trainable=False, name='lr', dtype=tf.float32)
        self.optimizer = snt.optimizers.Adam(learning_rate=self.lr)
        self.num_epochs = tf.constant(num_epochs, dtype=tf.int32)
        self.decay_lr_start_epoch = tf.constant(decay_lr_start_epoch, dtype=tf.int32)
        self.decay_lr = decay_lr  # if use decay lr
        # perform distributed training
        self.distributed = distributed
        # metrics to be recorded
        self.metric_dict = {}
    def setup_training_loop(self, model, loss_fcn):
        """Compile the per-batch gradient update as a tf.function for *model*."""
        input_signature = self.input_signature()
        def update_step(inputs, targets):
            print("Tracing update_step")
            with tf.GradientTape() as tape:
                output_ops = model(inputs, self.num_iters)
                loss_ops_tr = loss_fcn(targets, output_ops)
                # average the per-iteration losses over the message-passing steps
                loss_op_tr = tf.math.reduce_sum(loss_ops_tr) / tf.constant(
                    self.num_iters, dtype=tf.float32)
            gradients = tape.gradient(loss_op_tr, model.trainable_variables)
            self.optimizer.apply(gradients, model.trainable_variables)
            return loss_op_tr
        self.training_step = tf.function(update_step, input_signature=input_signature)
    def update_step(self, model, loss_fcn):
        """Compile the training step, run one epoch, checkpoint, run post-hook."""
        # BUGFIX: setup_training_loop requires (model, loss_fcn); the previous
        # zero-argument call raised a TypeError
        self.setup_training_loop(model, loss_fcn)
        self.train_one_epoch()
        # NOTE(review): self.ckpt_manager is never created in this class;
        # subclasses must set it before calling update_step
        self.ckpt_manager.save()
        self.after_train_one_epoch()
    def eval(self, model):
        """Evaluate *model*; must be implemented by subclasses."""
        raise NotImplementedError
    def after_train_one_epoch(self):
        """Hook executed after each training epoch; subclasses may override."""
        pass
    def validate_one_epoch(self):
        """Run the validation dataset through the model and update metrics."""
        # BUGFIX: loop_dataset requires a batch size; the previous
        # one-argument call raised a TypeError
        for data in loop_dataset(self.data_val, self.train_batch_size):
            inputs, targets = data
            # NOTE(review): apply()/update_metrics() are expected from subclasses
            outputs = self.apply(inputs)
            if len(outputs) > 1:
                outputs = outputs[-1]
            self.update_metrics(targets, outputs)
    def load_training_data(self, filenames):
        """Load the training dataset and count its graphs (one full pass)."""
        self.data_train, _ = read_dataset(filenames)
        self.ngraphs_train = sum([1 for _ in self.data_train])
    def load_validating_data(self, filenames):
        """Load the validation dataset and count its graphs (one full pass)."""
        self.data_val, _ = read_dataset(filenames)
        self.ngraphs_val = sum([1 for _ in self.data_val])
    def load_testing_data(self, filenames):
        """Load the testing dataset and count its graphs (one full pass)."""
        self.data_test, _ = read_dataset(filenames)
        self.ngraphs_test = sum([1 for _ in self.data_test])
    def optimizer(self, lr):
        # NOTE(review): this method is shadowed by the self.optimizer attribute
        # assigned in __init__ and is effectively unreachable on instances;
        # kept unchanged for interface compatibility
        self.optimizer = snt.optimizers.Adam(lr)
    def input_signature(self):
        """Build the tf.function input signature from one sample training batch."""
        with_batch_dim = False
        input_list = []
        target_list = []
        for dd in self.data_train.take(self.train_batch_size).as_numpy_iterator():
            input_list.append(dd[0])
            target_list.append(dd[1])
        inputs = utils_tf.concat(input_list, axis=0)
        targets = utils_tf.concat(target_list, axis=0)
        input_signature = (
            graph.specs_from_graphs_tuple(inputs, with_batch_dim),
            graph.specs_from_graphs_tuple(targets, with_batch_dim),
        )
        return input_signature
    def train_one_epoch(self):
        """Iterate the training dataset once; return (total_loss, num_batches)."""
        num_batches = 0
        total_loss = 0
        # BUGFIX: pass the stored batch size (see validate_one_epoch)
        for inputs in loop_dataset(self.data_train, self.train_batch_size):
            inputs_tr, targets_tr = inputs
            total_loss += self.training_step(inputs_tr, targets_tr).numpy()
            num_batches += 1
        return total_loss, num_batches
| 31.886364 | 108 | 0.65556 | 3,848 | 0.685674 | 551 | 0.098182 | 0 | 0 | 0 | 0 | 317 | 0.056486 |
faaad1e0a17c9ccfde15b59721ef3b9545cff063 | 871 | py | Python | tests/test_vera.py | j-faria/vera | 96cbdb61c98c3527416611155b29a03a2bc66b15 | [
"MIT"
] | null | null | null | tests/test_vera.py | j-faria/vera | 96cbdb61c98c3527416611155b29a03a2bc66b15 | [
"MIT"
] | null | null | null | tests/test_vera.py | j-faria/vera | 96cbdb61c98c3527416611155b29a03a2bc66b15 | [
"MIT"
] | null | null | null | # import pytest
def test_imports():
import vera
from vera import RV
def test_dace():
from vera.query_dace import get_observations
_ = get_observations('HD10180', verbose=False)
def test_read_rdb():
from vera import RV
from os.path import dirname, join
here = dirname(__file__)
s = RV(join(here, 'data_file.rdb'), star='dummy', sigmaclip=False)
print(s)
def test_DACE():
from vera import DACE
s = DACE.HD10180
print(s)
def test_KOBE(capsys):
from vera import KOBE
# not in target list
_ = KOBE.HD100
cap = capsys.readouterr()
assert cap.out == 'Cannot find "HD100" in KOBE target list.\n'
# no access to data
s = KOBE.KOBE_001
assert s is None
def test_plot():
import matplotlib.pyplot as plt
from vera import DACE
s = DACE.HD10180
s.plot()
plt.close('all')
| 18.531915 | 70 | 0.652124 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 134 | 0.153846 |
faab2433ad95cb7f0eab338db2a54fe92593e98a | 11,664 | py | Python | UMDPythonhangtestudo.py | FourplayHangman/Hangman-Kules | 47f3f0deb8f1ee6f62c17807c014ba3865e4bcf5 | [
"Apache-2.0"
] | null | null | null | UMDPythonhangtestudo.py | FourplayHangman/Hangman-Kules | 47f3f0deb8f1ee6f62c17807c014ba3865e4bcf5 | [
"Apache-2.0"
] | null | null | null | UMDPythonhangtestudo.py | FourplayHangman/Hangman-Kules | 47f3f0deb8f1ee6f62c17807c014ba3865e4bcf5 | [
"Apache-2.0"
] | 1 | 2017-11-27T01:41:10.000Z | 2017-11-27T01:41:10.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Project: Hangman
File: hangman.py
Author: Korvin F. Ezüst
Created: 2017-11-24
IDE: PyCharm Community Edition
Synopsis:
hangman.py [ARGUMENT]
Description:
A simple hangman game that runs in the command line.
To play the game, figure out a proverb by guessing
letters. If you guess a letter that's not in the
proverb and you've already guessed it, you get
a penalty. The game ends when you guess all letters
correctly or when the hangman is finished.
Optional arguments
-h, --help
show this docstring and exit
Notes:
The proverbs come from a text file in the resources folder.
The proverb file's first line contains an alphabet
including all the single characters the proverbs have
but excluding punctuation marks and characters not used
in non-contracted words like quotation marks or apostrophes.
For example for English it's abcdefghijklmnopqrstuvwxyz
and for Hungarian it's aábcdeéfghiíjklmnoóöőpqrstuúüűvwxyz
Each proverb is in a new line and there are no blank
lines in the file. Blank lines would cause the game
not to work properly.
The file ends with the END_OF_FILE string, after that
everything's ignored.
Exit codes:
0: Program exited without errors
1: one or modules couldn't be loaded
2: incorrect argument passed in command line
"""
import os
import random
import sys
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
from lib import get_ui_strings
__author__ = "Korvin F. Ezüst"
__copyright__ = "Copyright (c) 2017., Korvin F. Ezüst"
__license__ = "Apache 2.0"
__version__ = "1.1"
__email__ = "dev@korvin.eu"
__status__ = "Development"
# TODO: test on Windows
def get_proverb(filename):
    """Return one random proverb line from *filename*.

    The file's first line holds the alphabet and is never returned; lines from
    the END_OF_FILE marker onwards are ignored. Previously the file was read
    twice and the counter variable was reused for both counting and iteration;
    this version reads once and also handles a file without the marker
    (the last line no longer loses its final character in that case).

    :param filename: absolute or relative path to a proverbs file
    :type filename: str
    :return: a proverb
    :rtype: str
    """
    with open(filename) as f:
        lines = f.read().splitlines()
    # index of the END_OF_FILE marker, or one past the end when it is missing
    for end, line in enumerate(lines):
        if "END_OF_FILE" in line:
            break
    else:
        end = len(lines)
    # skip the alphabet (index 0); pick uniformly among the proverbs
    return random.choice(lines[1:end])
def get_alphabet(filename):
    """Return the uppercased alphabet stored on the first line of *filename*.

    :param filename: the proverbs file
    :type filename: str
    :return: uppercase alphabet
    :rtype: str
    """
    with open(filename) as source:
        first_line = source.readline()
    return first_line.strip().upper()
def draw_hangman(x):
    """Show the gallows state for step *x*.

    Step 0 returns the empty-gallows ASCII drawing as a string; every other
    step displays a turtle image via matplotlib and returns None.

    :param x: current step
    :type x: int
    :return: simple ASCII art (step 0 only)
    :rtype: str
    """
    if x == 0:
        rows = ["", " " + "—" * 15, " |" + " " * 9 + "|"] + [" |"] * 6
        return "\n".join(rows) + "\n" + " " + "—" * 8
    # steps 1-5 map to turtle6.jpg .. turtle2.jpg; anything else shows turtle1.jpg
    image_name = "turtle%d.jpg" % (7 - x) if 1 <= x <= 5 else "turtle1.jpg"
    plt.imshow(mpimg.imread(image_name))
    plt.show()
def incomplete_proverb(pvb, lst, abc):
    """Mask the proverb: every alphabet letter not yet guessed becomes '_'.

    Assumes everything is uppercase.

    :param pvb: a proverb
    :type pvb: str
    :param lst: known letters
    :type lst: list
    :param abc: the alphabet used in the proverbs
    :type abc: str
    :return: proverb with underscores replacing unknown letters
    :rtype: str
    """
    return "".join(
        "_" if (char in abc and char not in lst) else char
        for char in pvb
    )
def wrong_guesses_to_display(lst):
    """Join the single-character guesses in *lst* into a comma-separated string.

    Multi-character items (the "+1" penalty markers) are skipped.

    :param lst: list of strings
    :type lst: list
    :return: a string
    :rtype: str
    """
    return ", ".join(item for item in lst if len(item) == 1)
def complete_proverb(pvb):
    """Return True when no underscore (unknown letter) remains in *pvb*.

    Assumes the proverb already has underscores replacing unknown letters.

    :param pvb: a proverb
    :type pvb: str
    :return: True | False
    :rtype: bool
    """
    return "_" not in pvb
def letter_only(guess, abc):
    """Check that the player's guess is exactly one letter of the alphabet.

    :param guess: the player's guess
    :type guess: str
    :param abc: the alphabet used in the proverbs (uppercase)
    :type abc: str
    :return: True | False
    :rtype: bool
    """
    return len(guess) == 1 and guess.upper() in abc
def used_letters(guess, pvb, lst):
    """Record *guess* in *lst* when it is not in the proverb; return sorted list.

    Mutates *lst* in place when the guess is wrong. Assumes everything is
    uppercase.

    :param guess: the player's guess, a single letter
    :type guess: str
    :param pvb: the proverb
    :type pvb: str
    :param lst: known letters
    :type lst: list
    :return: known letters updated and sorted
    :rtype: list
    """
    if guess in pvb:
        return sorted(lst)
    lst.append(guess)
    return sorted(lst)
def in_proverb(guess, pvb):
    """Check whether the guessed letter occurs in the proverb.

    Assumes everything is uppercase.

    :param guess: a single letter
    :type guess: str
    :param pvb: the proverb
    :type pvb: str
    :return: True | False
    :rtype: bool
    """
    return guess in pvb
def already_guessed(guess, lst):
    """Check whether the guess was already made.

    Assumes everything is uppercase.

    :param guess: a single letter
    :type guess: str
    :param lst: the list of guesses
    :type lst: list
    :return: True | False
    :rtype: bool
    """
    return guess in lst
def get_max_guess_number():
    """Return how many wrong guesses end the game.

    :return: max guess number
    :rtype: int
    """
    max_wrong_guesses = 6
    return max_wrong_guesses
if __name__ == '__main__':
    # Usage message printed for unrecognized arguments
    message = "Argument unrecognized.\n" \
              "Usage:\n" \
              " game.py\n" \
              " game.py -h\n" \
              " game.py --help"
    # Check arguments: none -> play; -h/--help -> docstring; anything else -> usage
    if len(sys.argv) == 1:
        pass
    elif len(sys.argv) > 2:
        print(message)
        sys.exit(2)
    elif sys.argv[1] == "-h" or sys.argv[1] == "--help":
        print(__doc__)
        sys.exit(2)
    else:
        print(message)
        sys.exit(2)
    language_file = os.path.join("resources", "lang.csv")
    language_list = get_ui_strings.get_language_list(language_file)
    # ANSI escape sequence that clears the command line (tested only on Linux)
    cls = "\033[H\033[J"
    print(cls, end="")
    # Ask the player to choose a language by number
    for i, l in enumerate(language_list):
        print(f" {i + 1}: {l}")
    selection = 0
    while selection < 1 or selection > len(language_list):
        selection = input("--> ")
        if selection == "exit" or selection == "quit":
            sys.exit(0)
        try:
            selection = int(selection)
        except ValueError:
            # BUGFIX: previously `pass` left `selection` as a string, so the
            # while-condition `selection < 1` raised TypeError on any
            # non-numeric input; resetting to 0 keeps the prompt loop going
            selection = 0
    language = language_list[selection - 1]
    # UI strings corresponding to the selected language, from lang.csv
    string_list = get_ui_strings.get_strings(language_file, language)
    # File name and path of the proverbs file for this language
    prv_file = string_list[1]
    prv_path = os.path.join("resources", prv_file)
    proverb = get_proverb(prv_path)
    alphabet = get_alphabet(prv_path)
    # Welcome message; wait for ENTER before starting
    print(cls, end="")
    print(string_list[4])
    input()
    # Bye message, shown on every exit path
    bye = string_list[5]
    # The game is played entirely in uppercase
    proverb = proverb.upper()
    # Letters guessed and NOT in the proverb (plus "+1" penalty markers)
    non_matches = []
    # Letters guessed and in the proverb
    matches = []
    # The proverb with underscores replacing unknown letters
    incomplete = incomplete_proverb(proverb, matches, alphabet)
    message = ""
    # Main loop: ends when the hangman is finished or the proverb is solved
    while len(non_matches) < get_max_guess_number():
        print(cls, end="")
        print(draw_hangman(len(non_matches)))
        inc_guesses = wrong_guesses_to_display(sorted(non_matches))
        # Show incorrect guesses and the current state of the proverb
        print(f"{string_list[6]}".replace("VARIABLE", f"{inc_guesses}"))
        print(f"{string_list[7]}".replace("VARIABLE", f"{incomplete}"))
        print(message)
        # Prompt until a single valid letter (or exit/quit) is entered
        g = None
        while g is None:
            g = input(f"{string_list[8]}")
            if letter_only(g, alphabet) is False:
                if g == "exit" or g == "quit":
                    print(bye)
                    sys.exit(0)
                g = None
                # invalid input message
                print(f"{string_list[9]}")
            else:
                g = g.upper()
        # Classify the guess
        if already_guessed(g, matches):
            # correct guess already given
            message = f"{string_list[10]}"
        elif already_guessed(g, non_matches):
            # incorrect guess repeated: a penalty marker advances the hangman
            message = f"{string_list[11]}"
            non_matches.append("+1")
        elif in_proverb(g, proverb):
            matches.append(g)
            message = ""
        else:
            non_matches.append(g)
            message = ""
        # Refresh the masked proverb with the new guess applied
        incomplete = incomplete_proverb(proverb, matches, alphabet)
        if complete_proverb(incomplete):
            print("\n")
            print(incomplete, "\n")
            # win message
            print(f"{string_list[12]}")
            print(bye)
            sys.exit(0)
    # Out of guesses: show the final hangman, reveal the proverb, lose message
    print(cls, end="")
    print(draw_hangman(len(non_matches)), "\n")
    print(proverb.upper(), "\n")
    print(f"{string_list[13]}")
    print(bye)
    sys.exit(0)
faab28a77efcc6a6f4a4d08a57dae03f29543153 | 4,974 | py | Python | tutorialworkflowresult.py | olegxtend/cdl_2021_demo | 63bc7e5f7c9f5297b2b14ad864bb2a05d913f0ed | [
"MIT"
] | 5 | 2021-07-16T16:18:21.000Z | 2021-11-28T03:57:13.000Z | tutorialworkflowresult.py | olegxtend/cdl_2021_demo | 63bc7e5f7c9f5297b2b14ad864bb2a05d913f0ed | [
"MIT"
] | null | null | null | tutorialworkflowresult.py | olegxtend/cdl_2021_demo | 63bc7e5f7c9f5297b2b14ad864bb2a05d913f0ed | [
"MIT"
] | 13 | 2021-07-16T15:46:05.000Z | 2021-08-03T05:26:49.000Z | import json
import os
import typing
import codecs
import typing
import os
import json
import dill
from dataclasses import dataclass, field
ENCODED_PICKLE = "encodedpickle"
class TutorialJsonIOManager(typing.List[str]):
    """Reads step results from a JSON file and decodes pickled payloads."""
    def __init__(
        self,
        # NOTE conceptually a literal union of encoding names, not plain str,
        # but typing.Literal does not exist in python 3.7, so any value is
        # accepted here to avoid mypy errors
        encoding_scheme="base64",
        file_name: str = "result.json",
        field_name: str = "result",
    ):
        self.encoding_scheme = encoding_scheme
        self.file_name = file_name
        self.field_name = field_name
    def deserialize(self, val: typing.Any) -> typing.Any:
        """Decode *val* into a python object.

        A list consisting entirely of strings is treated as a chunked,
        encoded pickle: join the chunks, base64-decode, then unpickle.
        A string of the form ``/app/_.json`` names a result file to read and
        decode. Anything else is returned unchanged as a raw value.
        """
        if isinstance(val, list) and all(isinstance(part, str) for part in val):
            payload = "".join(val)
            return dill.loads(codecs.decode(payload.encode(), self.encoding_scheme))
        looks_like_result_file = (
            isinstance(val, str)
            and val.startswith(os.sep + "app")
            and val.endswith(".json")
        )
        if looks_like_result_file:
            return self.read(val)
        return val
    def read(self, file_path: str) -> typing.Any:
        """Read a result file and decode its embedded value.

        Files must be valid json, so the actual value lives under the
        configured field, e.g.
        ``{"type": "encodedpickle", "result": "__base64-encoded pickle__"}``.
        """
        with open(file_path, "r") as handle:
            document = json.load(handle)
        return self.deserialize(document[self.field_name])
@dataclass
class TaskResult:
    """Decoded inputs and result of a single workflow task/step."""
    # map of input name to value
    inputs: typing.Dict[str, typing.Any] = field(default_factory=dict)
    # decoded step result; None when the step record had no "result" entry
    result: typing.Optional[typing.Any] = None
@dataclass
class TutorialWorkflowResult:
    """Aggregated results of a workflow run, rendered as a terminal-wide box."""
    # map of task name to TaskResult
    tasks: typing.Dict[str, TaskResult] = field(default_factory=dict)
    def __str__(self) -> str:
        """Render one line per task inside a box sized to the terminal width.

        Falls back to 80 columns when stdout is not attached to a terminal
        (pipes, CI, tests), where os.get_terminal_size() raises OSError.
        """
        try:
            width = os.get_terminal_size().columns
        except OSError:
            width = 80
        s = "\n" + " ┌" + "-" * (width - 2) + "\n"
        for step_name, step_result in self.tasks.items():
            s += f" ├ {step_name:15s} : {step_result.result}\n"
        s += " └" + "-" * (width - 2) + "\n"
        return s
def _deserialize_result(io_manager: TutorialJsonIOManager, result: typing.Dict[str, typing.Any]):
    """Decode one step's result entry, unpickling it when flagged as encoded."""
    raw = result["result"]
    if result["type"] != ENCODED_PICKLE:
        return raw
    return io_manager.deserialize(raw)
def _deserialize_inputs(
    io_manager: TutorialJsonIOManager, inputs: typing.Dict, workflow_result_json: typing.Dict
):
    """Decode every input of one step into a name -> value mapping.

    Literal inputs carry a "type"/"value" pair; artifact inputs reference an
    upstream step whose result is decoded instead. Entries with neither key
    are silently skipped.
    """
    decoded = {}
    for name, spec in inputs.items():
        if "type" in spec:
            # raw input value, possibly an encoded pickle
            value = spec["value"]
            if spec["type"] == ENCODED_PICKLE:
                value = io_manager.deserialize(value)
            decoded[name] = value
        elif "sourceArtifactName" in spec:
            # input produced by a previous step: decode that step's result
            upstream = workflow_result_json[spec["sourceStepID"]]["result"]
            decoded[name] = _deserialize_result(io_manager, upstream)
    return decoded
def _load_workflowresult_from_dict(workflow_result_json: dict) -> TutorialWorkflowResult:
    """Build a TutorialWorkflowResult from the raw per-step JSON mapping."""
    decoder = TutorialJsonIOManager()
    aggregated = TutorialWorkflowResult()
    # step ids only key the raw records; tasks are keyed by their stepName
    for step in workflow_result_json.values():
        step_inputs = (
            _deserialize_inputs(decoder, step["inputs"], workflow_result_json)
            if "inputs" in step
            else {}
        )
        step_result = (
            _deserialize_result(decoder, step["result"]) if "result" in step else None
        )
        aggregated.tasks[step["stepName"]] = TaskResult(
            inputs=step_inputs,
            result=step_result,
        )
    return aggregated
def load_cached_workflowresult(file_path: str) -> TutorialWorkflowResult:
    """Load a cached workflow-result JSON file and decode it."""
    with open(file_path) as handle:
        raw_result = json.load(handle)
    return _load_workflowresult_from_dict(raw_result)
| 32.940397 | 97 | 0.620225 | 2,838 | 0.56988 | 0 | 0 | 648 | 0.13012 | 0 | 0 | 1,375 | 0.276104 |
faabfacf609a3780238f931eb1d3d9a233d1bf6c | 3,382 | py | Python | examples/sim_dust.py | Guo-Jian-Wang/cmbNNCS | cd55e0a2344aa5182d099cf559bc986ae0351cb7 | [
"MIT"
] | null | null | null | examples/sim_dust.py | Guo-Jian-Wang/cmbNNCS | cd55e0a2344aa5182d099cf559bc986ae0351cb7 | [
"MIT"
] | null | null | null | examples/sim_dust.py | Guo-Jian-Wang/cmbNNCS | cd55e0a2344aa5182d099cf559bc986ae0351cb7 | [
"MIT"
] | null | null | null | import sys
sys.path.append('../')
sys.path.append('../..')
import cmbnncs.utils as utils
import cmbnncs.spherical as spherical
import cmbnncs.simulator as simulator
import numpy as np
import time
start_time = time.time()
def sim_Dust(dust_seed, frequ, amplitude_randn, spectralIndex_randn, temp_randn):
    """Simulate one dust-foreground realization and return its output maps.

    Uses the module-level global ``nside`` defined in the script section below.

    :param dust_seed: RNG seed for the realization (cast to int)
    :param frequ: list of frequencies passed to WriteMap
    :param amplitude_randn: amplitude randomization mode string (e.g. '0.1One')
    :param spectralIndex_randn: spectral-index randomization mode string
    :param temp_randn: temperature randomization mode string
    :return: ComDust.out_put -- presumably the (map_I, map_Q, map_U) triple
        unpacked by the caller; confirm against simulator.DustComponents
    """
###    ComDust = simulator.DustComponents(nside, 3)
    ComDust = simulator.DustComponents(nside, 1)#use this
##    ComDust.ReadParameter('paramsML.ini')#don't use
    #ParametersSampling() don't use when using model 3 in DustComponents(nside, 2)
    ComDust.ParametersSampling()
    print (ComDust.paramsample, '\n')
    ComDust.RealizationSampling( seed = int(dust_seed), amplitude_randn=amplitude_randn,
                                spectralIndex_randn=spectralIndex_randn, temp_randn=temp_randn)
    ComDust.WriteMap(frequencies = frequ)
    out_put = ComDust.out_put
    return out_put
#%% generate the Dust full map - training (test) data
# Map resolution parameter passed to the simulator -- presumably HEALPix
# nside; confirm against cmbnncs.simulator
nside = 512
# temp_randn = '0'
temp_randn = '0.05Multi'
# amplitude_randn = '0'; spectralIndex_randn = '0' #training set: 1000 #
amplitude_randn = '0'; spectralIndex_randn = '0.1One' #training set: 1000 ##
# amplitude_randn = '0'; spectralIndex_randn = '0.1Multi' #training set: 1000 #
# amplitude_randn = '0.1One'; spectralIndex_randn = '0' #training set: 1000 #
# amplitude_randn = '0.1One'; spectralIndex_randn = '0.1One' #training set: 1000 #
# amplitude_randn = '0.1One'; spectralIndex_randn = '0.1Multi' #training set: 1000 #
# amplitude_randn = '0.1Multi'; spectralIndex_randn = '0' #training set: 1000 #
# amplitude_randn = '0.1Multi'; spectralIndex_randn = '0.1One' #training set: 1000 #
# amplitude_randn = '0.1Multi'; spectralIndex_randn = '0.1Multi' #training set: 1000 #
# Chunking: this run generates samples [part_n*part_size, (part_n+1)*part_size)
part_n = 0 #0,1,...
part_size = 1000
frequencies = [100, 143, 217, 353] #for Planck
# frequencies = [85, 95, 145, 155, 220, 270] #for CMB-S4
print ('dust_freqs: %s'%frequencies, 'part_n: %s'%part_n, 'part_size: %s'%part_size, 'start_n: %s'%(part_n*part_size))
# Fixed seed so every chunk draws from the same reproducible pool of seeds
np.random.seed(2)#note!!!
Dustseed = np.random.choice(1000000, 50000, replace=False)
for i in range(part_size):
    for freq in frequencies:
        # simulate I/Q/U dust maps for this seed and frequency
        map_I, map_Q, map_U = sim_Dust(Dustseed[i+part_n*part_size], [freq], amplitude_randn,
                                       spectralIndex_randn, temp_randn=temp_randn)
        # project each spherical map onto the flat "piece plane" layout
        map_I_piece = spherical.sphere2piecePlane(map_I, nside=nside)
        map_Q_piece = spherical.sphere2piecePlane(map_Q, nside=nside)
        map_U_piece = spherical.sphere2piecePlane(map_U, nside=nside)
        # save each Stokes component under a directory encoding the settings
        utils.savenpy('samples/full_map_nside%s/Foregrounds_oneModel/Dust/Dust_A%s_Beta%s_T%s_%sGHz_I'%(nside,amplitude_randn,spectralIndex_randn,temp_randn,freq),
                      'Dust_%s'%(i+part_n*part_size), map_I_piece, dtype=np.float32)
        utils.savenpy('samples/full_map_nside%s/Foregrounds_oneModel/Dust/Dust_A%s_Beta%s_T%s_%sGHz_Q'%(nside,amplitude_randn,spectralIndex_randn,temp_randn,freq),
                      'Dust_%s'%(i+part_n*part_size), map_Q_piece, dtype=np.float32)
        utils.savenpy('samples/full_map_nside%s/Foregrounds_oneModel/Dust/Dust_A%s_Beta%s_T%s_%sGHz_U'%(nside,amplitude_randn,spectralIndex_randn,temp_randn,freq),
                      'Dust_%s'%(i+part_n*part_size), map_U_piece, dtype=np.float32)
#%%
print ('\n', "Time elapsed: %.3f" %((time.time()-start_time)/60), "mins")
faadc11e3f24c6373636e3bb5814da8ad0ad2c17 | 14,140 | py | Python | anima/env/nukeEnv.py | tws0002/anima | 73c256d1f7716a2db7933d6d8519a51333c7e5b4 | [
"BSD-2-Clause"
] | null | null | null | anima/env/nukeEnv.py | tws0002/anima | 73c256d1f7716a2db7933d6d8519a51333c7e5b4 | [
"BSD-2-Clause"
] | null | null | null | anima/env/nukeEnv.py | tws0002/anima | 73c256d1f7716a2db7933d6d8519a51333c7e5b4 | [
"BSD-2-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright (c) 2012-2018, Anima Istanbul
#
# This module is part of anima-tools and is released under the BSD 2
# License: http://www.opensource.org/licenses/BSD-2-Clause
import os
import nuke
from nukescripts import *
from anima.env import empty_reference_resolution
from anima.env.base import EnvironmentBase
class Nuke(EnvironmentBase):
    """Nuke environment implementation for the anima pipeline.

    Wraps Nuke's Python API (``nuke`` module) to implement the generic
    :class:`EnvironmentBase` interface: saving/opening/exporting Stalker
    ``Version`` instances, managing frame range, fps and the main Write node.
    """
    # Environment identifier and the scene-file extension handled by Nuke.
    name = "Nuke"
    extensions = ['.nk']
    def __init__(self, name='', version=None):
        """Initialize the Nuke environment.

        :param name: optional environment name passed to the base class
        :param version: optional current ``Version`` instance
        """
        super(Nuke, self).__init__(name=name, version=version)
        # and add you own modifications to __init__
        # cache the root node of the currently open Nuke session
        self._root = self.get_root_node()
        # name prefix used to identify the pipeline's main Write node
        self._main_output_node_name = "MAIN_OUTPUT"
    def get_root_node(self):
        """Return the root node of the current Nuke session."""
        return nuke.toNode("root")
    def save_as(self, version, run_pre_publishers=True):
        """Save the current Nuke script as the given ``version``.

        Uses Nuke's own Python binding (``nuke.scriptSaveAs``).  Also creates
        the main Write node, applies shot frame range / project fps for first
        versions, and records the previously open version as the parent.

        :param version: the ``Version`` instance to save to
        :param run_pre_publishers: accepted for interface compatibility;
            not used by this implementation
        :return: True
        """
        # get the current version, and store it as the parent of the new version
        current_version = self.get_current_version()
        # first initialize the version path
        version.update_paths()
        # set the extension to '.nk'
        version.extension = self.extensions[0]
        # set created_with to let the UI show Nuke icon in versions list
        version.created_with = self.name
        # set project_directory
        # self.project_directory = os.path.dirname(version.absolute_path)
        # create the main write node
        self.create_main_write_node(version)
        # replace read and write node paths
        # self.replace_external_paths()
        # create the path before saving
        try:
            os.makedirs(version.absolute_path)
        except OSError:
            # path already exists OSError
            pass
        # set frame range
        # if this is a shot related task set it to shots resolution
        is_shot_related_task = False
        shot = None
        from stalker import Shot
        for task in version.task.parents:
            if isinstance(task, Shot):
                is_shot_related_task = True
                shot = task
                break
        # set scene fps from the project settings
        project = version.task.project
        self.set_fps(project.fps)
        # only the very first version forces frame range / resolution
        if version.version_number == 1:
            if is_shot_related_task:
                # just set if the frame range is not 1-1
                if shot.cut_in != 1 and shot.cut_out != 1:
                    self.set_frame_range(
                        shot.cut_in,
                        shot.cut_out
                    )
                imf = shot.image_format
            else:
                imf = project.image_format
            # TODO: set the render resolution later
            # self.set_resolution(
            #     imf.width,
            #     imf.height,
            #     imf.pixel_aspect
            # )
        nuke.scriptSaveAs(version.absolute_full_path)
        if current_version:
            # update the parent info
            version.parent = current_version
            # update database with new version info
            from stalker.db.session import DBSession
            DBSession.commit()
        return True
    def export_as(self, version):
        """Export the selected nodes to the given ``version`` path.

        :param version: the ``Version`` instance to export to
        :return: True
        """
        # set the extension to '.nk'
        version.update_paths()
        version.extension = self.extensions[0]
        nuke.nodeCopy(version.absolute_full_path)
        return True
    def open(self, version, force=False, representation=None,
             reference_depth=0, skip_update_check=False):
        """Open the given ``version`` in Nuke.

        The extra keyword arguments are accepted for interface compatibility
        with other environments and are not used here.

        :return: an empty reference resolution (nothing needs updating)
        """
        nuke.scriptOpen(version.absolute_full_path)
        # set the project_directory
        # self.project_directory = os.path.dirname(version.absolute_path)
        # TODO: file paths in different OS'es should be replaced with the current one
        # Check if the file paths are starting with a string matching one of the
        # OS'es project_directory path and replace them with a relative one
        # matching the current OS
        # replace paths
        # self.replace_external_paths()
        # return True to specify everything was ok and an empty list
        # for the versions those needs to be updated
        return empty_reference_resolution()
    def import_(self, version, use_namespace=True):
        """Paste the given ``version``'s script into the current session.

        :param use_namespace: accepted for interface compatibility; unused
        :return: True
        """
        nuke.nodePaste(version.absolute_full_path)
        return True
    def get_current_version(self):
        """Finds the Version instance from the current open file.
        If it can't find any then returns None.
        :return: :class:`~oyProjectManager.models.version.Version`
        """
        # the root node's 'name' knob holds the script's full path
        full_path = self._root.knob('name').value()
        return self.get_version_from_full_path(full_path)
    def get_version_from_recent_files(self):
        """It will try to create a
        :class:`~oyProjectManager.models.version.Version` instance by looking at
        the recent files list.
        It will return None if it can not find one.
        :return: :class:`~oyProjectManager.models.version.Version`
        """
        # walk the recent file list from most recent until one resolves
        i = 1
        while True:
            try:
                full_path = nuke.recentFile(i)
            except RuntimeError:
                # no recent file anymore just return None
                return None
            i += 1
            version = self.get_version_from_full_path(full_path)
            if version is not None:
                return version
    def get_version_from_project_dir(self):
        """Tries to find a Version from the current project directory
        :return: :class:`~oyProjectManager.models.version.Version`
        """
        versions = self.get_versions_from_path(self.project_directory)
        version = None
        if versions:
            version = versions[0]
        return version
    def get_last_version(self):
        """Return the best-guess current ``Version``.

        Tries the open script first, then the recent-file list, then the
        project directory; returns None if all three fail.
        """
        version = self.get_current_version()
        # read the recent file list
        if version is None:
            version = self.get_version_from_recent_files()
        # get the latest possible Version instance by using the workspace path
        if version is None:
            version = self.get_version_from_project_dir()
        return version
    def get_frame_range(self):
        """Return the current frame range as an ``(start, end)`` int tuple."""
        #self._root = self.get_root_node()
        startFrame = int(self._root.knob('first_frame').value())
        endFrame = int(self._root.knob('last_frame').value())
        return startFrame, endFrame
    def set_frame_range(self, start_frame=1, end_frame=100,
                        adjust_frame_range=False):
        """Set the session's first/last frame knobs.

        :param adjust_frame_range: accepted for interface compatibility; unused
        """
        self._root.knob('first_frame').setValue(start_frame)
        self._root.knob('last_frame').setValue(end_frame)
    def set_fps(self, fps=25):
        """Set the session fps."""
        self._root.knob('fps').setValue(fps)
    def get_fps(self):
        """Return the session fps as an int."""
        return int(self._root.knob('fps').getValue())
    def set_resolution(self, width, height, pixel_aspect=1.0):
        """Sets the resolution of the current scene
        :param width: The width of the output image
        :param height: The height of the output image
        :param pixel_aspect: The pixel aspect ratio
        """
        # TODO: set resolution later (intentionally a no-op for now)
        pass
    def get_main_write_nodes(self):
        """Return all Write nodes whose name starts with the main-output prefix.

        :return: list of Write nodes (possibly empty)
        """
        # list all the write nodes in the current file
        all_main_write_nodes = []
        for write_node in nuke.allNodes("Write"):
            if write_node.name().startswith(self._main_output_node_name):
                all_main_write_nodes.append(write_node)
        return all_main_write_nodes
    def create_main_write_node(self, version):
        """Create the main Write node if missing and point it at the version's
        output path.

        The output path encodes project code, version nice name and number,
        and the Write node's file format (png by default).
        """
        # list all the write nodes in the current file
        main_write_nodes = self.get_main_write_nodes()
        # check if there is a write node or not
        if not len(main_write_nodes):
            # create one with correct output path
            main_write_node = nuke.nodes.Write()
            main_write_node.setName(self._main_output_node_name)
            main_write_nodes.append(main_write_node)
        for main_write_node in main_write_nodes:
            # set the output path
            output_file_name = ""
            output_file_name = version.task.project.code + "_"
            # get the output format from the node's own file_type knob
            output_format_enum = \
                main_write_node.knob('file_type').value().strip()
            if output_format_enum == '':
                # set it to png by default
                output_format_enum = 'png'
                main_write_node.knob('file_type').setValue(output_format_enum)
            elif output_format_enum == 'ffmpeg':
                # ffmpeg writes a mov container
                output_format_enum = 'mov'
            elif output_format_enum == 'targa':
                output_format_enum = 'tga'
            output_file_name += '%s_v%03d' % (
                version.nice_name, version.version_number
            )
            # image sequences get a frame-number placeholder; movies do not
            if output_format_enum != 'mov':
                output_file_name += ".####." + output_format_enum
            else:
                output_file_name += '.' + output_format_enum
            # check if it is a stereo comp
            # if it is enable separate view rendering
            # set the output path (forward slashes so Nuke is OS-agnostic)
            output_file_full_path = os.path.join(
                version.absolute_path,
                'Outputs',
                version.take_name,
                'v%03d' % version.version_number,
                output_format_enum,
                output_file_name
            ).replace("\\", "/")
            # create the path
            try:
                os.makedirs(
                    os.path.dirname(
                        output_file_full_path
                    )
                )
            except OSError:
                # path already exists
                pass
            # set the output file path
            main_write_node.knob("file").setValue(output_file_full_path)
    def replace_external_paths(self, mode=0):
        """Make Read/Write/Geo node file paths relative to the project dir.

        :param mode: accepted for interface compatibility; unused here
        """
        # TODO: replace file paths if project_directory changes
        # check if the project_directory is still the same
        # if it is do the regular replacement
        # but if it is not then expand all the paths to absolute paths
        # convert the given path to tcl environment script
        from anima import utils
        def rep_path(path):
            # rewrite an absolute path as relative to the project directory
            return utils.relpath(self.project_directory, path, "/", "..")
        # collect every node class that carries a 'file' knob we manage
        allNodes = nuke.allNodes()
        readNodes = [node for node in allNodes if node.Class() == "Read"]
        writeNodes = [node for node in allNodes if node.Class() == "Write"]
        readGeoNodes = [node for node in allNodes if node.Class() == "ReadGeo"]
        readGeo2Nodes = [node for node in allNodes if
                         node.Class() == "ReadGeo2"]
        writeGeoNodes = [node for node in allNodes if
                         node.Class() == "WriteGeo"]
        def nodeRep(nodes):
            """Rewrite each node's 'file' knob: expand env vars and '~',
            normalize slashes, then make it project-relative.
            """
            [node["file"].setValue(
                rep_path(
                    os.path.expandvars(
                        os.path.expanduser(
                            node["file"].getValue()
                        )
                    ).replace('\\', '/')
                )
            ) for node in nodes]
        nodeRep(readNodes)
        nodeRep(writeNodes)
        nodeRep(readGeoNodes)
        nodeRep(readGeo2Nodes)
        nodeRep(writeGeoNodes)
    @property
    def project_directory(self):
        """The project directory.
        Set it to the project root, and set all your paths relative to this
        directory.
        """
        root = self.get_root_node()
        # TODO: root node gets lost, fix it
        # there is a bug in Nuke, the root node get lost time to time find
        # the source and fix it.
        # if root is None:
        #     # there is a bug about Nuke,
        #     # sometimes it losses the root node, while it shouldn't
        #     # I can't find the source
        #     # so instead of using the root node,
        #     # just return the os.path.dirname(version.path)
        #
        #     return os.path.dirname(self.version.path)
        return root["project_directory"].getValue()
    @project_directory.setter
    def project_directory(self, project_directory_in):
        # normalize to forward slashes before storing on the root node
        project_directory_in = project_directory_in.replace("\\", "/")
        root = self.get_root_node()
        root["project_directory"].setValue(project_directory_in)
    def create_slate_info(self):
        """Returns info about the current shot which will contribute to the
        shot slate
        :return: string
        """
        version = self.get_current_version()
        # NOTE(review): version.task is used as the "shot" here — assumes the
        # current version's task is shot-like (has cut_in/cut_out/handles).
        shot = version.task
        # create a jinja2 template
        import jinja2
        template = jinja2.Template("""Project: {{shot.project.name}}
Shot: {{shot.name}}
Frame Range: {{shot.cut_in}}-{{shot.cut_out}}
Handles: +{{shot.handle_at_start}}, -{{shot.handle_at_end}}
Artist: {% for resource in shot.resources %}{{resource.name}}{%- if loop.index != 1%}, {% endif -%}{% endfor %}
Version: v{{'%03d'|format(version.version_number)}}
Status: {{version.task.status.name}}
""")
        template_vars = {
            "shot": shot,
            "version": version
        }
        return template.render(**template_vars)
| 33.037383 | 111 | 0.585007 | 13,800 | 0.975955 | 0 | 0 | 1,088 | 0.076945 | 0 | 0 | 5,552 | 0.392645 |
faae62e74233d85e1942ad4875600b68b5c29875 | 99 | py | Python | trade_remedies_api/config/settings/bdd.py | uktrade/trade-remedies-api | fbe2d142ef099c7244788a0f72dd1003eaa7edce | [
"MIT"
] | 1 | 2020-08-13T10:37:15.000Z | 2020-08-13T10:37:15.000Z | trade_remedies_api/config/settings/bdd.py | uktrade/trade-remedies-api | fbe2d142ef099c7244788a0f72dd1003eaa7edce | [
"MIT"
] | 4 | 2020-09-10T13:41:52.000Z | 2020-12-16T09:00:21.000Z | trade_remedies_api/config/settings/bdd.py | uktrade/trade-remedies-api | fbe2d142ef099c7244788a0f72dd1003eaa7edce | [
"MIT"
] | null | null | null | from .base import * # noqa
# BDD/test settings overlay: extend the base settings (imported above with
# ``from .base import *``) with the API test app and its URL configuration.
INSTALLED_APPS += [
    "api_test",
]
# Route all URLs through the test app's urlconf while running BDD tests.
ROOT_URLCONF = "api_test.urls"
| 12.375 | 30 | 0.646465 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 31 | 0.313131 |
faaed7d726fb97374e33fe05e85c8cca35b6cc7d | 2,015 | py | Python | web/certificates/migrations/0011_doublecoutingregistration_doublecoutingregistrationinputoutput.py | MTES-MCT/biocarburants | ff084916e18cdbdc41400f36fa6cc76a5e05900e | [
"MIT"
] | 4 | 2020-03-22T18:13:12.000Z | 2021-01-25T10:33:31.000Z | web/certificates/migrations/0011_doublecoutingregistration_doublecoutingregistrationinputoutput.py | MTES-MCT/carbure | 2876756b760ab4866fa783bb40e61a046eebb1ab | [
"MIT"
] | 20 | 2020-07-06T14:33:14.000Z | 2022-03-15T16:54:17.000Z | web/certificates/migrations/0011_doublecoutingregistration_doublecoutingregistrationinputoutput.py | MTES-MCT/biocarburants | ff084916e18cdbdc41400f36fa6cc76a5e05900e | [
"MIT"
] | 4 | 2020-04-03T12:19:12.000Z | 2021-06-15T12:20:57.000Z | # Generated by Django 3.2.4 on 2021-07-22 09:40
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Create the double-counting certificate tables.

    Adds ``DoubleCoutingRegistration`` (the certificate itself) and
    ``DoubleCoutingRegistrationInputOutput`` (its feedstock/biofuel scope).
    NOTE(review): "Couting" is a typo for "Counting" in the model names, but
    it is part of the migration history and must not be edited here.
    """
    dependencies = [
        ('core', '0175_lotv2_lots_v2_year_87d135_idx'),
        ('certificates', '0010_auto_20210509_1038'),
    ]
    operations = [
        migrations.CreateModel(
            name='DoubleCoutingRegistration',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('certificate_id', models.CharField(max_length=64)),
                ('certificate_holder', models.CharField(max_length=256)),
                ('registered_address', models.TextField()),
                ('valid_from', models.DateField()),
                ('valid_until', models.DateField()),
            ],
            options={
                'verbose_name': 'Certificat Double Compte',
                'verbose_name_plural': 'Certificats Double Compte',
                'db_table': 'double_counting_registrations',
            },
        ),
        migrations.CreateModel(
            name='DoubleCoutingRegistrationInputOutput',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('biofuel', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.biocarburant')),
                ('certificate', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='certificates.doublecoutingregistration')),
                ('feedstock', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.matierepremiere')),
            ],
            options={
                'verbose_name': 'Périmètre Certificat Double Compte',
                'verbose_name_plural': 'Périmètres Certificats Double Compte',
                'db_table': 'double_counting_registrations_scope',
            },
        ),
    ]
| 43.804348 | 144 | 0.60794 | 1,893 | 0.937593 | 0 | 0 | 0 | 0 | 0 | 0 | 693 | 0.343239 |
faaf8287b96dcdd8bc10415fc576deae3677d866 | 2,206 | py | Python | floodsystem/plot.py | caizicharles/IA-Floodwarning | 2f4931d000ca98b6c304507a422aabb49b4bd231 | [
"MIT"
] | null | null | null | floodsystem/plot.py | caizicharles/IA-Floodwarning | 2f4931d000ca98b6c304507a422aabb49b4bd231 | [
"MIT"
] | null | null | null | floodsystem/plot.py | caizicharles/IA-Floodwarning | 2f4931d000ca98b6c304507a422aabb49b4bd231 | [
"MIT"
] | null | null | null | from .datafetcher import fetch_measure_levels
from .stationdata import build_station_list, update_water_levels
from .flood import stations_highest_rel_level
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
from datetime import datetime, timedelta
from floodsystem.station import inconsistent_typical_range_stations
import datetime
stations = build_station_list()
def run():
    """Return the names of the 6 stations with the highest relative water level."""
    monitored = build_station_list()
    update_water_levels(monitored)
    # stations_highest_rel_level yields (name, relative_level) pairs,
    # already ordered; keep only the station names.
    ranked = stations_highest_rel_level(monitored, 6)
    return [entry[0] for entry in ranked]
# Names of the highest-risk stations; drop the first entry so five remain.
stations_at_risk = run()
stations_at_risk.pop(0)
# Report stations whose typical range data is inconsistent.
y = inconsistent_typical_range_stations(stations)
print(y)
# Refresh the module-level station list with current water levels.
update_water_levels(stations)
def plot_water_levels(station, dates, levels):
    """Plot water level over time for one station, with typical-range lines.

    :param station: station name used as the plot title
    :param dates: sequence of dates (x axis)
    :param levels: water levels in metres (y axis)

    NOTE(review): reads the module-level global ``typical_range`` (set by the
    loop below just before each call) rather than taking it as a parameter —
    calling this function outside that loop will fail or use stale data.
    """
    # Build constant high/low series, one value per date, from the global.
    typical_range_high = []
    typical_range_low = []
    for i in range(len(dates)):
        typical_range_high.append(typical_range[0])
        typical_range_low.append(typical_range[1])
    plt.plot(dates , levels , label="water level")
    plt.xlabel("data")
    plt.ylabel("water level (m)")
    plt.xticks(rotation=45);
    plt.title(station)
    plt.tight_layout()
    plt.plot(dates , typical_range_high , "-y" , label="typical high")
    plt.plot(dates , typical_range_low , "-o" , label="typical low")
    plt.legend()
    plt.show()
# Plot the last 10 days of levels for each at-risk station.
counter = 0
for i in stations:
    if i.name in stations_at_risk:
        dt = 10  # days of history to fetch
        dates, levels = fetch_measure_levels(i.measure_id , dt = datetime.timedelta(days=dt))
        # plot_water_levels reads this module-level global (see its docstring)
        typical_range = i.typical_range
        plot_water_levels(i.name , dates , levels)
        counter = counter + 1
        # NOTE(review): with 5 at-risk stations counter never exceeds 5, so
        # this guard only fires if more than 5 names match — verify intent.
        if counter > 5:
            raise RuntimeError("All of the 5 stations have displayed the outputs")
def plot_water_level_with_fit(station, dates, levels, p):
    """Plot water level data together with a degree-``p`` polynomial fit.

    :param station: station name used as the plot title
    :param dates: sequence of date values (x axis)
    :param levels: water levels (y axis)
    :param p: degree of the least-squares polynomial fit

    NOTE(review): np.polyfit needs numeric x values — assumes ``dates`` are
    already numeric (e.g. matplotlib date numbers), not datetime objects.
    """
    x = dates
    y = levels
    # Least-squares polynomial fit of degree p, wrapped for easy evaluation.
    p_coeff = np.polyfit(x , y , p)
    poly = np.poly1d(p_coeff)
    plt.plot(x , y , '.')
    plt.xlabel("time")
    plt.ylabel("water level")
    plt.xticks(rotation=45);
    plt.title(station)
    # Evaluate the fitted curve on 30 evenly spaced points across the range.
    x1 = np.linspace(x[0] , x[-1] , 30)
    plt.plot(x1 , poly(x1))
    plt.show()
return poly | 28.649351 | 93 | 0.691296 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 143 | 0.064823 |
fab1ae1fd23f251523f0e24d567662098132f1c5 | 232 | py | Python | edx/problem_set_1/bob.py | spradeepv/dive-into-python | ec27d4686b7b007d21f9ba4f85d042be31ee2639 | [
"MIT"
] | null | null | null | edx/problem_set_1/bob.py | spradeepv/dive-into-python | ec27d4686b7b007d21f9ba4f85d042be31ee2639 | [
"MIT"
] | null | null | null | edx/problem_set_1/bob.py | spradeepv/dive-into-python | ec27d4686b7b007d21f9ba4f85d042be31ee2639 | [
"MIT"
] | null | null | null | s = raw_input()
# Count occurrences of 'bob' in the string s read above (Python 2 script).
find = 'bob'
count = 0
index = 0
while index < len(s):
    index = s.find(find, index)
    if index == -1:
        # no further occurrence
        break
    # advance by 2 so overlapping matches (e.g. 'bobob') are still found
    index += 2
    count += 1
print "Number of times bob occurs is:", count | 21.090909 | 45 | 0.521552 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 37 | 0.159483 |
fab1c59f92d41695f6c520dc6fa5d7c7dc7fd893 | 533 | py | Python | documentstore_migracao/main/__init__.py | jamilatta/document-store-migracao | dc0473fb3ccf8c6c5f8d599b353f8c9caf104f55 | [
"BSD-2-Clause"
] | null | null | null | documentstore_migracao/main/__init__.py | jamilatta/document-store-migracao | dc0473fb3ccf8c6c5f8d599b353f8c9caf104f55 | [
"BSD-2-Clause"
] | null | null | null | documentstore_migracao/main/__init__.py | jamilatta/document-store-migracao | dc0473fb3ccf8c6c5f8d599b353f8c9caf104f55 | [
"BSD-2-Clause"
] | null | null | null | """ module to methods to main """
import sys
from .migrate_isis import migrate_isis_parser
from .migrate_articlemeta import migrate_articlemeta_parser
from .tools import tools_parser
def main_migrate_articlemeta():
    """Console-script entry point for the articlemeta migration."""
    cli_args = sys.argv[1:]
    sys.exit(migrate_articlemeta_parser(cli_args))
def main_migrate_isis():
    """Console-script entry point for the ISIS migration."""
    cli_args = sys.argv[1:]
    sys.exit(migrate_isis_parser(cli_args))
def tools():
    """Console-script entry point for the auxiliary tools parser."""
    cli_args = sys.argv[1:]
    sys.exit(tools_parser(cli_args))
# When executed directly, behave like the articlemeta entry point.
if __name__ == "__main__":
    sys.exit(migrate_articlemeta_parser(sys.argv[1:]))
| 21.32 | 59 | 0.739212 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 82 | 0.153846 |
fab2d81b0d580f8beb6589e4107725b0beb05fc3 | 721 | py | Python | zang/domain/fraud_control_rule_element.py | vlastikczech/zang-python | 980f5243071404d6838554500a6955ff7bc2a0c7 | [
"MIT"
] | 1 | 2019-02-18T21:51:58.000Z | 2019-02-18T21:51:58.000Z | zang/domain/fraud_control_rule_element.py | vlastikczech/zang-python | 980f5243071404d6838554500a6955ff7bc2a0c7 | [
"MIT"
] | 6 | 2019-06-26T13:56:22.000Z | 2022-02-17T16:40:48.000Z | zang/domain/fraud_control_rule_element.py | vlastikczech/zang-python | 980f5243071404d6838554500a6955ff7bc2a0c7 | [
"MIT"
] | 6 | 2017-10-17T12:44:32.000Z | 2020-02-07T20:45:00.000Z | # -*- coding: utf-8 -*-
"""
zang.domain.fraud_control_rule_whitelisted
~~~~~~~~~~~~~~~~~~~
`FraudControlRuleElement` model
"""
from zang.domain.base_resource import BaseResource
from zang.domain.fraud_control_rule import FraudControlRule
class FraudControlRuleElement(BaseResource):
    """Container grouping fraud-control rules by category.

    ``_map`` tells :class:`BaseResource` how to deserialize each payload key
    into a :class:`FraudControlRule`; the populated values are exposed via
    the read-only properties below (the underlying ``_blocked`` etc. are
    presumably set by ``BaseResource`` — confirm against its implementation).
    """
    # payload-key -> domain-model mapping used by BaseResource deserialization
    _map = {
        'blocked': FraudControlRule,
        'authorized': FraudControlRule,
        'whitelisted': FraudControlRule,
    }
    def __init__(self):
        super(FraudControlRuleElement, self).__init__()
    @property
    def blocked(self):
        """Rules for blocked destinations."""
        return self._blocked
    @property
    def authorized(self):
        """Rules for authorized destinations."""
        return self._authorized
    @property
    def whitelisted(self):
        """Rules for whitelisted destinations."""
        return self._whitelisted
| 20.6 | 59 | 0.674064 | 478 | 0.662968 | 0 | 0 | 197 | 0.273232 | 0 | 0 | 159 | 0.220527 |
fab458a70a67ab83469b992d3cb10888dc4cb5fd | 6,321 | py | Python | .qt_for_python/uic/installer.py | thomascswalker/bettergameexporter | 4db3683a599d523e28c2f93bdcac889277130153 | [
"MIT"
] | null | null | null | .qt_for_python/uic/installer.py | thomascswalker/bettergameexporter | 4db3683a599d523e28c2f93bdcac889277130153 | [
"MIT"
] | null | null | null | .qt_for_python/uic/installer.py | thomascswalker/bettergameexporter | 4db3683a599d523e28c2f93bdcac889277130153 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
################################################################################
## Form generated from reading UI file 'installer.ui'
##
## Created by: Qt User Interface Compiler version 5.15.1
##
## WARNING! All changes made in this file will be lost when recompiling UI file!
################################################################################
from PySide2.QtCore import *
from PySide2.QtGui import *
from PySide2.QtWidgets import *
class Ui_MainWindow(object):
    """Installer window UI, auto-generated by Qt's uic from installer.ui.

    Per the file header, manual edits are lost when the .ui file is
    recompiled — change installer.ui instead.
    """
    def setupUi(self, MainWindow):
        """Build the widget tree and layouts on ``MainWindow`` (fixed 550x320)."""
        if not MainWindow.objectName():
            MainWindow.setObjectName(u"MainWindow")
        MainWindow.resize(550, 320)
        # Fixed size policy: the window cannot be resized by the user.
        sizePolicy = QSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(MainWindow.sizePolicy().hasHeightForWidth())
        MainWindow.setSizePolicy(sizePolicy)
        MainWindow.setMinimumSize(QSize(550, 320))
        MainWindow.setMaximumSize(QSize(550, 320))
        self.centralwidget = QWidget(MainWindow)
        self.centralwidget.setObjectName(u"centralwidget")
        self.gridLayout = QGridLayout(self.centralwidget)
        self.gridLayout.setObjectName(u"gridLayout")
        self.gridLayout.setContentsMargins(20, 20, 20, 20)
        # Row 1: Max-version combo box + "Open" tool button.
        self.horizontalLayout = QHBoxLayout()
        self.horizontalLayout.setObjectName(u"horizontalLayout")
        self.horizontalLayout.setContentsMargins(-1, 20, -1, -1)
        self.maxVersionList = QComboBox(self.centralwidget)
        self.maxVersionList.setObjectName(u"maxVersionList")
        self.horizontalLayout.addWidget(self.maxVersionList)
        self.maxVersionExplore = QToolButton(self.centralwidget)
        self.maxVersionExplore.setObjectName(u"maxVersionExplore")
        self.horizontalLayout.addWidget(self.maxVersionExplore)
        self.gridLayout.addLayout(self.horizontalLayout, 1, 0, 1, 1)
        # Row 3: right-aligned Uninstall/Install buttons (Uninstall starts disabled).
        self.horizontalLayout_2 = QHBoxLayout()
        self.horizontalLayout_2.setObjectName(u"horizontalLayout_2")
        self.horizontalLayout_2.setContentsMargins(-1, 20, -1, -1)
        self.horizontalSpacer = QSpacerItem(40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum)
        self.horizontalLayout_2.addItem(self.horizontalSpacer)
        self.uninstall = QPushButton(self.centralwidget)
        self.uninstall.setObjectName(u"uninstall")
        self.uninstall.setEnabled(False)
        sizePolicy.setHeightForWidth(self.uninstall.sizePolicy().hasHeightForWidth())
        self.uninstall.setSizePolicy(sizePolicy)
        self.uninstall.setMinimumSize(QSize(120, 32))
        self.horizontalLayout_2.addWidget(self.uninstall)
        self.install = QPushButton(self.centralwidget)
        self.install.setObjectName(u"install")
        sizePolicy.setHeightForWidth(self.install.sizePolicy().hasHeightForWidth())
        self.install.setSizePolicy(sizePolicy)
        self.install.setMinimumSize(QSize(120, 32))
        self.horizontalLayout_2.addWidget(self.install)
        self.gridLayout.addLayout(self.horizontalLayout_2, 3, 0, 1, 1)
        # Row 0: title and description labels.
        self.horizontalLayout_3 = QHBoxLayout()
        self.horizontalLayout_3.setObjectName(u"horizontalLayout_3")
        self.verticalLayout = QVBoxLayout()
        self.verticalLayout.setObjectName(u"verticalLayout")
        self.verticalLayout.setContentsMargins(0, -1, -1, -1)
        self.label = QLabel(self.centralwidget)
        self.label.setObjectName(u"label")
        sizePolicy1 = QSizePolicy(QSizePolicy.Preferred, QSizePolicy.Fixed)
        sizePolicy1.setHorizontalStretch(0)
        sizePolicy1.setVerticalStretch(0)
        sizePolicy1.setHeightForWidth(self.label.sizePolicy().hasHeightForWidth())
        self.label.setSizePolicy(sizePolicy1)
        self.label.setTextFormat(Qt.RichText)
        self.label.setWordWrap(True)
        self.verticalLayout.addWidget(self.label)
        self.label_3 = QLabel(self.centralwidget)
        self.label_3.setObjectName(u"label_3")
        self.label_3.setWordWrap(True)
        self.verticalLayout.addWidget(self.label_3)
        self.horizontalLayout_3.addLayout(self.verticalLayout)
        self.horizontalSpacer_2 = QSpacerItem(40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum)
        self.horizontalLayout_3.addItem(self.horizontalSpacer_2)
        self.gridLayout.addLayout(self.horizontalLayout_3, 0, 0, 1, 1)
        # Row 2: "Install location:" caption and the path label.
        self.horizontalLayout_4 = QHBoxLayout()
        self.horizontalLayout_4.setObjectName(u"horizontalLayout_4")
        self.label_2 = QLabel(self.centralwidget)
        self.label_2.setObjectName(u"label_2")
        self.label_2.setTextFormat(Qt.AutoText)
        self.horizontalLayout_4.addWidget(self.label_2)
        self.installPath = QLabel(self.centralwidget)
        self.installPath.setObjectName(u"installPath")
        self.installPath.setTextFormat(Qt.RichText)
        self.installPath.setWordWrap(False)
        self.horizontalLayout_4.addWidget(self.installPath)
        self.horizontalSpacer_3 = QSpacerItem(40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum)
        self.horizontalLayout_4.addItem(self.horizontalSpacer_3)
        self.gridLayout.addLayout(self.horizontalLayout_4, 2, 0, 1, 1)
        MainWindow.setCentralWidget(self.centralwidget)
        self.retranslateUi(MainWindow)
        QMetaObject.connectSlotsByName(MainWindow)
    # setupUi
    def retranslateUi(self, MainWindow):
        """Set all user-visible strings (kept separate for Qt's translation system)."""
        MainWindow.setWindowTitle(QCoreApplication.translate("MainWindow", u"MainWindow", None))
        self.maxVersionExplore.setText(QCoreApplication.translate("MainWindow", u"Open", None))
        self.uninstall.setText(QCoreApplication.translate("MainWindow", u"Uninstall", None))
        self.install.setText(QCoreApplication.translate("MainWindow", u"Install", None))
        self.label.setText(QCoreApplication.translate("MainWindow", u"<html><head/><body><p><span style=\" font-size:16pt;\">Better Max Tools</span></p></body></html>", None))
        self.label_3.setText(QCoreApplication.translate("MainWindow", u"This will install the better-max-tools package into an environment accessible by 3ds Max.", None))
        self.label_2.setText(QCoreApplication.translate("MainWindow", u"Install location:", None))
        self.installPath.setText("")
    # retranslateUi
| 42.422819 | 175 | 0.701946 | 5,842 | 0.924221 | 0 | 0 | 0 | 0 | 0 | 0 | 981 | 0.155197 |
fab4d1310e9ef911cecc3613e34d95dcfd6c0157 | 404 | py | Python | paginas/migrations/0014_auto_20190206_2343.py | igor-pontes/Dolex | 4d65a288fbf6cdf44628994fea3c821e8af2ea61 | [
"MIT"
] | null | null | null | paginas/migrations/0014_auto_20190206_2343.py | igor-pontes/Dolex | 4d65a288fbf6cdf44628994fea3c821e8af2ea61 | [
"MIT"
] | 4 | 2020-07-14T13:02:48.000Z | 2021-06-10T20:27:09.000Z | paginas/migrations/0014_auto_20190206_2343.py | igor-pontes/Dolex | 4d65a288fbf6cdf44628994fea3c821e8af2ea61 | [
"MIT"
] | null | null | null | # Generated by Django 2.1.5 on 2019-02-06 23:43
from django.db import migrations, models
class Migration(migrations.Migration):
    """Alter ``players_lobby.slot`` to a CharField (max_length=2, default None)."""
    dependencies = [
        ('paginas', '0013_auto_20190206_1739'),
    ]
    operations = [
        migrations.AlterField(
            model_name='players_lobby',
            name='slot',
            field=models.CharField(default=None, max_length=2),
        ),
    ]
| 21.263158 | 63 | 0.608911 | 311 | 0.769802 | 0 | 0 | 0 | 0 | 0 | 0 | 102 | 0.252475 |
fab57ed22c9e496c6b5d1c1f3f59197a8dfcfb21 | 2,231 | py | Python | tap_gleantap/executor.py | Radico/tap-gleantap | 1b2cafe6647224a60f68969c07c603cacca8ccd4 | [
"Apache-2.0"
] | null | null | null | tap_gleantap/executor.py | Radico/tap-gleantap | 1b2cafe6647224a60f68969c07c603cacca8ccd4 | [
"Apache-2.0"
] | null | null | null | tap_gleantap/executor.py | Radico/tap-gleantap | 1b2cafe6647224a60f68969c07c603cacca8ccd4 | [
"Apache-2.0"
] | null | null | null | import singer
from tap_kit import TapExecutor
from tap_kit.utils import (transform_write_and_count)
LOGGER = singer.get_logger()
class GleanExecutor(TapExecutor):
    """Singer tap executor for the Gleantap External API.

    Extracts full-sync streams by POSTing paginated requests (page size 1000)
    and writing the returned ``members`` records.
    """
    def __init__(self, streams, args, client):
        """
        Args:
            streams (arr[Stream])
            args (dict)
            client (BaseClient)
        """
        super(GleanExecutor, self).__init__(streams, args, client)
        self.url = 'https://api.gleantap.com/v1/ExternalApi'
        # API credentials come from the tap config supplied via the client.
        self.api_key = self.client.config['api_key']
        self.secret_key = self.client.config['secret_key']
    def call_full_stream(self, stream):
        """
        Method to call all fully synced streams
        """
        # 'run' acts as the pagination loop flag consumed by call_stream.
        request_config = {
            'url': self.generate_api_url(stream),
            'headers': self.build_headers(),
            'params': None,
            'run': True
        }
        LOGGER.info("Extracting {s} ".format(s=stream))
        self.call_stream(stream, request_config)
    def call_stream(self, stream, request_config):
        """Page through the API 1000 records at a time and write each page.

        Stops when a page comes back with fewer than 1000 records (assumes
        the API returns full pages until exhausted — TODO confirm).
        """
        offset = 0
        while request_config['run']:
            # request body carries credentials plus limit/offset pagination
            text = self.build_body(1000, offset)
            res = self.client.make_request(request_config, body=text, method='POST')
            records = res.json().get('members')
            if not records:
                records = []
            elif not isinstance(records, list):
                # subsequent methods are expecting a list
                records = [records]
            transform_write_and_count(stream, records)
            LOGGER.info('Received {n} records with offset {b}'.format(n=len(records),
                                                                     b=text['offset']))
            # a short page means we have reached the end of the stream
            if len(records) < 1000:
                request_config['run'] = False
            offset += 1000
    def build_body(self, limit, offset):
        """Return the POST body: credentials plus limit/offset pagination."""
        return {
            "api_key": self.api_key,
            "secret_key": self.secret_key,
            "limit": limit,
            "offset": offset,
        }
    def build_headers(self):
        """
        Included in all API calls
        """
        return {
            "Accept": "application/json;charset=UTF-8", # necessary for returning JSON
        }
| 28.974026 | 88 | 0.543702 | 2,097 | 0.939937 | 0 | 0 | 0 | 0 | 0 | 0 | 555 | 0.248767 |
fab71b81531484d1696ade3aa93eda9a02495ebe | 4,028 | py | Python | design_patterns/factory_andri.py | andricampagnaro/documentacoes_e_testes | a12b1348dbb43ad72fe56a6287e228d6c031e36f | [
"MIT"
] | null | null | null | design_patterns/factory_andri.py | andricampagnaro/documentacoes_e_testes | a12b1348dbb43ad72fe56a6287e228d6c031e36f | [
"MIT"
] | 4 | 2021-06-08T21:55:17.000Z | 2022-01-13T02:57:49.000Z | design_patterns/factory_andri.py | andricampagnaro/documentacao_python3 | a12b1348dbb43ad72fe56a6287e228d6c031e36f | [
"MIT"
] | null | null | null | # import logging
# logging.basicConfig(filename='example.log',level=logging.DEBUG)
# logging.debug('This message should go to the log file')
# logging.info('So should this')
# logging.warning('And this, too ã')
class ValidaSmart():
    """Top-level validator: composes the layout validators and runs them in order."""
    def __init__(self):
        self._carrega_modulos_externos()
    def _carrega_modulos_externos(self):
        # Instantiate each layout validator (factory-style composition).
        self.layout_pessoas = LayoutPessoas()
        self.layout_pessoas_enderecos = LayoutPessoasEnderecos()
        self.layout_produtos = LayoutProdutos()
    def executa(self):
        """Run every layout validation, logging progress to stdout."""
        print('[ValidaSmart] Executando...')
        self.layout_pessoas.executa()
        self.layout_pessoas_enderecos.executa()
        self.layout_produtos.executa()
        print('[ValidaSmart] Executado!')
########################################################
class LayoutPessoas():
    """Validator for the 'people' layout (placeholder — no checks yet)."""
    def __init__(self):
        self._carrega_modulos_externos()
    def _carrega_modulos_externos(self):
        # No field validators wired up yet.
        pass
    def executa(self):
        """Run this layout's validation (currently only logs)."""
        print('[LayoutPessoas] Executando...')
        print('[LayoutPessoas] Executado!')
class LayoutPessoasEnderecos():
    """Validator for the 'people addresses' layout (placeholder — no checks yet)."""
    def __init__(self):
        self._carrega_modulos_externos()
    def _carrega_modulos_externos(self):
        # No field validators wired up yet.
        pass
    def executa(self):
        """Run this layout's validation (currently only logs)."""
        print('[LayoutPessoasEnderecos] Executando...')
        print('[LayoutPessoasEnderecos] Executado!')
class LayoutProdutos():
    """Validator for the 'products' layout; currently checks the NCM field."""
    def __init__(self):
        self._carrega_modulos_externos()
    def _carrega_modulos_externos(self):
        # Sample NCM codes used for the demo run.
        self.campo_ncm = CampoNCM([1, 2, 3, 4, 5])
    def executa(self):
        """Run the NCM field validation, logging progress to stdout."""
        print('[LayoutProdutos] Executando...')
        self.campo_ncm.confere_dados()
        print('[LayoutProdutos] Executado!')
########################################################
def busca_ncm(ncm):
    """Print a lookup trace for ``ncm``; only code 1 is reported as valid."""
    print(f'[Funcao Busca NCM] Buscando NCM {ncm}')
    situacao = 'válido' if ncm == 1 else 'inválido'
    print(f'[Funcao Busca NCM] NCM {ncm} é {situacao}')
########################################################
class CampoTipoString():
    """Base validator for string-typed fields.

    Holds the list of records to validate and provides the standard check
    pipeline (type, length, blank characters); subclasses add field-specific
    checks by extending :meth:`confere_dados`.
    """
    def __init__(self, lista_registros):
        # records under validation
        self.lista_registros = lista_registros
        self._quantidade_registros = len(self.lista_registros)
        # subclass name, used as the log prefix
        self._nome_classe = self.__class__.__name__
    @property
    def tipo(self):
        """Field type identifier."""
        return 'string'
    def confere_dados(self):
        """Run the standard string-field checks, logging progress to stdout."""
        print(f'[{self._nome_classe}] Iniciando a conferencia padrão...')
        self._verifica_tipo_campo()
        self._verifica_tamanho_campo()
        self._valida_registros_em_branco()
        print(f'[{self._nome_classe}] Conferência padrão concluída.')
    def _verifica_tipo_campo(self):
        # Placeholder type check (logs only).
        print(f'[{self._nome_classe}] Verificando tipo do campo...')
        print(f'[{self._nome_classe}] Tipo do campo verificado.')
    def _verifica_tamanho_campo(self):
        # Placeholder length check (logs only).
        print(f'[{self._nome_classe}] Verificando tamanho do campo...')
        print(f'[{self._nome_classe}] Tamanho do campo verificado.')
    def _valida_registros_em_branco(self):
        # Placeholder blank-character check (logs only).
        print(f'[{self._nome_classe}] Validando caracteres em branco...')
        print(f'[{self._nome_classe}] Validação de caracteres em branco concluída.')
########################################################
class CampoNCM(CampoTipoString):
    """NCM field validator: standard string checks plus NCM-specific ones."""

    def confere_dados(self):
        # Inherited standard checks first, then the field-specific step.
        super().confere_dados()
        print(f'[{self._nome_classe}] Iniciando a conferencias específicas do campo...')
        self._valida_caracteres_permitidos()
        print(f'[{self._nome_classe}] Conferencias específicas do campo concluídas...')

    def busca_ncms(self):
        """Look up every NCM code held by this field."""
        print('[CampoNCM] Buscando NCMs...')
        for codigo in self.lista_registros:
            busca_ncm(codigo)
        print('[CampoNCM] Busca de NCMs concluída.')

    def _valida_caracteres_permitidos(self):
        print(f'[{self._nome_classe}] Validando caracteres...')
        print(f'[{self._nome_classe}] Caracteres validados.')
if __name__ == '__main__':
    # Demo run. ValidaSmart is defined elsewhere in this module; it appears to
    # orchestrate the layout objects and expose layout_produtos -- confirm.
    valida_smart = ValidaSmart()
    valida_smart.executa()
    print(valida_smart.layout_produtos.campo_ncm.tipo)
fab73276314f39860cdf6b1e49f429594d065a0f | 14,376 | py | Python | codes/models/modules/sftmd_arch.py | Mark-sloan/IKC | f70af607e9434931c22e4971469aaed7683a22a3 | [
"Apache-2.0"
] | 1 | 2020-02-27T09:27:17.000Z | 2020-02-27T09:27:17.000Z | codes/models/modules/sftmd_arch.py | Mark-sloan/IKC | f70af607e9434931c22e4971469aaed7683a22a3 | [
"Apache-2.0"
] | null | null | null | codes/models/modules/sftmd_arch.py | Mark-sloan/IKC | f70af607e9434931c22e4971469aaed7683a22a3 | [
"Apache-2.0"
] | null | null | null | '''
architecture for sftmd
'''
import torch
import torch.nn as nn
import torch.nn.functional as F
class Predictor(nn.Module):
    """Kernel-code predictor: maps an image to a length-``code_len`` vector.

    A stack of 5x5 convolutions (the 4th one downsamples by stride 2, the
    last projects to ``code_len`` channels) followed by global average
    pooling yields one code vector per batch element.
    """
    def __init__(self, input_channel=3, code_len=10, ndf=64, use_bias=True):
        super(Predictor, self).__init__()
        # NOTE(review): the first conv ignores use_bias (always biased) -- confirm intended.
        self.ConvNet = nn.Sequential(*[
            nn.Conv2d(input_channel, ndf, kernel_size=5, stride=1, padding=2),
            nn.LeakyReLU(0.2, True),
            nn.Conv2d(ndf, ndf, kernel_size=5, stride=1, padding=2, bias=use_bias),
            nn.LeakyReLU(0.2, True),
            nn.Conv2d(ndf, ndf, kernel_size=5, stride=1, padding=2, bias=use_bias),
            nn.LeakyReLU(0.2, True),
            nn.Conv2d(ndf, ndf, kernel_size=5, stride=2, padding=2, bias=use_bias),
            nn.LeakyReLU(0.2, True),
            nn.Conv2d(ndf, ndf, kernel_size=5, stride=1, padding=2, bias=use_bias),
            nn.LeakyReLU(0.2, True),
            nn.Conv2d(ndf, code_len, kernel_size=5, stride=1, padding=2, bias=use_bias),
            nn.LeakyReLU(0.2, True),
        ])
        # self.lrelu = nn.LeakyReLU(negative_slope=0.2, inplace=True)
        self.globalPooling = nn.AdaptiveAvgPool2d((1,1))
    def forward(self, input):
        conv = self.ConvNet(input)
        # Average over the spatial dims, then drop the trailing 1x1 dims.
        flat = self.globalPooling(conv)
        return flat.view(flat.size()[:2]) # torch size: [B, code_len]
class Corrector(nn.Module):
    """Kernel-code corrector: predicts a residual update ``Delta_h_p`` for a code.

    Image features from a conv stack (two stride-2 downsamples) are fused
    with the current code (densely embedded, then spatially broadcast), and
    1x1 convolutions + global pooling produce the code residual.
    Assumes ``input`` is the current reconstruction -- TODO confirm against caller.
    """
    def __init__(self, input_channel=3, code_len=10, ndf=64, use_bias=True):
        super(Corrector, self).__init__()
        self.ConvNet = nn.Sequential(*[
            nn.Conv2d(input_channel, ndf, kernel_size=5, stride=1, padding=2, bias=use_bias),
            nn.LeakyReLU(0.2, True),
            nn.Conv2d(ndf, ndf, kernel_size=5, stride=2, padding=2, bias=use_bias),
            nn.LeakyReLU(0.2, True),
            nn.Conv2d(ndf, ndf, kernel_size=5, stride=1, padding=2, bias=use_bias),
            nn.LeakyReLU(0.2, True),
            nn.Conv2d(ndf, ndf, kernel_size=5, stride=2, padding=2, bias=use_bias),
            nn.LeakyReLU(0.2, True),
            nn.Conv2d(ndf, ndf, kernel_size=5, stride=1, padding=2, bias=use_bias),
            nn.LeakyReLU(0.2, True),
            nn.Conv2d(ndf, ndf, kernel_size=5, stride=1, padding=2, bias=use_bias),
            nn.LeakyReLU(0.2, True),
            nn.Conv2d(ndf, ndf, kernel_size=5, stride=1, padding=2, bias=use_bias),
            nn.LeakyReLU(0.2, True),
        ])
        # Embeds the incoming code vector before spatial broadcast.
        self.code_dense = nn.Sequential(*[
            nn.Linear(code_len, ndf, bias=use_bias),
            nn.LeakyReLU(0.2, True),
            nn.Linear(ndf, ndf, bias=use_bias),
            nn.LeakyReLU(0.2, True),
        ])
        # 1x1 convs over concatenated [image features, code features].
        self.global_dense = nn.Sequential(*[
            nn.Conv2d(ndf * 2, ndf * 2, kernel_size=1, stride=1, padding=0, bias=use_bias),
            nn.LeakyReLU(0.2, True),
            nn.Conv2d(ndf * 2, ndf, kernel_size=1, stride=1, padding=0, bias=use_bias),
            nn.LeakyReLU(0.2, True),
            nn.Conv2d(ndf, code_len, kernel_size=1, stride=1, padding=0, bias=use_bias),
        ])
        self.ndf = ndf
        self.globalPooling = nn.AdaptiveAvgPool2d([1, 1])
    def forward(self, input, code, res=False):
        """Return the code residual when ``res`` is True, else code + residual."""
        conv_input = self.ConvNet(input)
        B, C_f, H_f, W_f = conv_input.size() # LR_size
        conv_code = self.code_dense(code).view((B, self.ndf, 1, 1)).expand((B, self.ndf, H_f, W_f)) # h_stretch
        conv_mid = torch.cat((conv_input, conv_code), dim=1)
        code_res = self.global_dense(conv_mid)
        # Delta_h_p
        flat = self.globalPooling(code_res)
        Delta_h_p = flat.view(flat.size()[:2])
        if res:
            return Delta_h_p
        else:
            return Delta_h_p + code
class SFT_Layer(nn.Module):
    """Spatial feature transform: feature_maps * sigmoid(mul(...)) + add(...).

    The multiplicative and additive branches each see the channel-wise
    concatenation of the feature maps and the (spatially stretched)
    parameter maps.
    """
    def __init__(self, ndf=64, para=10):
        super(SFT_Layer, self).__init__()
        self.mul_conv1 = nn.Conv2d(para + ndf, 32, kernel_size=3, stride=1, padding=1)
        self.mul_leaky = nn.LeakyReLU(0.2)
        self.mul_conv2 = nn.Conv2d(32, ndf, kernel_size=3, stride=1, padding=1)
        self.add_conv1 = nn.Conv2d(para + ndf, 32, kernel_size=3, stride=1, padding=1)
        self.add_leaky = nn.LeakyReLU(0.2)
        self.add_conv2 = nn.Conv2d(32, ndf, kernel_size=3, stride=1, padding=1)

    def forward(self, feature_maps, para_maps):
        """feature_maps: (B, ndf, H, W); para_maps: (B, para, H, W)."""
        cat_input = torch.cat((feature_maps, para_maps), dim=1)
        # torch.sigmoid replaces the deprecated F.sigmoid (same numerics).
        mul = torch.sigmoid(self.mul_conv2(self.mul_leaky(self.mul_conv1(cat_input))))
        add = self.add_conv2(self.add_leaky(self.add_conv1(cat_input)))
        return feature_maps * mul + add
class SFT_Residual_Block(nn.Module):
    """Residual block with two SFT layers:
    out = x + conv2(relu(sft2(conv1(relu(sft1(x, p))), p))).
    """
    def __init__(self, ndf=64, para=10):
        super(SFT_Residual_Block, self).__init__()
        self.sft1 = SFT_Layer(ndf=ndf, para=para)
        self.sft2 = SFT_Layer(ndf=ndf, para=para)
        # Bug fix: channel counts were hard-coded to 64, which broke any
        # ndf != 64. Using ndf keeps the ndf=64 default (and its state_dict
        # shapes) unchanged.
        self.conv1 = nn.Conv2d(in_channels=ndf, out_channels=ndf, kernel_size=3, stride=1, padding=1, bias=True)
        self.conv2 = nn.Conv2d(in_channels=ndf, out_channels=ndf, kernel_size=3, stride=1, padding=1, bias=True)

    def forward(self, feature_maps, para_maps):
        """feature_maps: (B, ndf, H, W); para_maps: (B, para, H, W)."""
        fea1 = F.relu(self.sft1(feature_maps, para_maps))
        fea2 = F.relu(self.sft2(self.conv1(fea1), para_maps))
        fea3 = self.conv2(fea2)
        return torch.add(feature_maps, fea3)
class SFTMD(nn.Module):
    """SFT-based SR network conditioned on a kernel code (``ker_code``).

    The code is stretched over the spatial dims and fed to every SFT
    residual block. Output is clamped to [min, max].
    """
    def __init__(self, input_channel=3, input_para=10, scale=4, min=0.0, max=1.0, residuals=16):
        super(SFTMD, self).__init__()
        self.min = min
        self.max = max
        self.para = input_para
        self.num_blocks = residuals
        self.conv1 = nn.Conv2d(input_channel, 64, 3, stride=1, padding=1)
        self.relu_conv1 = nn.LeakyReLU(0.2)
        self.conv2 = nn.Conv2d(64, 64, 3, stride=1, padding=1)
        self.relu_conv2 = nn.LeakyReLU(0.2)
        self.conv3 = nn.Conv2d(64, 64, 3, stride=1, padding=1)
        # NOTE(review): self.sft_branch is built but never used in forward
        # (see the commented line there); it duplicates the residual blocks'
        # parameters. Removing it would change state_dict keys and break
        # existing checkpoints -- confirm before cleaning up.
        sft_branch = []
        for i in range(residuals):
            sft_branch.append(SFT_Residual_Block())
        self.sft_branch = nn.Sequential(*sft_branch)
        # These individually-registered blocks are the ones actually used.
        for i in range(residuals):
            self.add_module('SFT-residual' + str(i + 1), SFT_Residual_Block(ndf=64, para=input_para))
        self.sft = SFT_Layer(ndf=64, para=input_para)
        self.conv_mid = nn.Conv2d(in_channels=64, out_channels=64, kernel_size=3, stride=1, padding=1, bias=True)
        # x4 upscaling is done as two x2 pixel-shuffle stages.
        if scale == 4:
            self.upscale = nn.Sequential(
                nn.Conv2d(in_channels=64, out_channels=64 * scale, kernel_size=3, stride=1, padding=1, bias=True),
                nn.PixelShuffle(scale // 2),
                nn.LeakyReLU(0.2, inplace=True),
                nn.Conv2d(in_channels=64, out_channels=64 * scale, kernel_size=3, stride=1, padding=1, bias=True),
                nn.PixelShuffle(scale // 2),
                nn.LeakyReLU(0.2, inplace=True),
            )
        else:
            self.upscale = nn.Sequential(
                nn.Conv2d(in_channels=64, out_channels=64*scale**2, kernel_size=3, stride=1, padding=1, bias=True),
                nn.PixelShuffle(scale),
                nn.LeakyReLU(0.2, inplace=True),
            )
        self.conv_output = nn.Conv2d(in_channels=64, out_channels=input_channel, kernel_size=9, stride=1, padding=4, bias=True)
    def forward(self, input, ker_code):
        B, C, H, W = input.size() # I_LR batch
        B_h, C_h = ker_code.size() # Batch, Len=10
        ker_code_exp = ker_code.view((B_h, C_h, 1, 1)).expand((B_h, C_h, H, W)) #kernel_map stretch
        fea_bef = self.conv3(self.relu_conv2(self.conv2(self.relu_conv1(self.conv1(input)))))
        fea_in = fea_bef
        # Run the individually-registered SFT residual blocks in order.
        for i in range(self.num_blocks):
            fea_in = self.__getattr__('SFT-residual' + str(i + 1))(fea_in, ker_code_exp)
        fea_mid = fea_in
        #fea_in = self.sft_branch((fea_in, ker_code_exp))
        # Global residual connection around the SFT trunk.
        fea_add = torch.add(fea_mid, fea_bef)
        fea = self.upscale(self.conv_mid(self.sft(fea_add, ker_code_exp)))
        out = self.conv_output(fea)
        return torch.clamp(out, min=self.min, max=self.max)
class Residual_Block(nn.Module):
    """Plain residual block:
    input + conv4(lrelu(conv3(lrelu(conv2(lrelu(conv1(input))))))).
    """
    def __init__(self):
        super(Residual_Block, self).__init__()
        # Register conv1..conv4 with the same names and in the same order as
        # direct attribute assignment would, so state_dict keys and weight
        # initialisation order are unchanged.
        for idx in range(1, 5):
            self.add_module(
                'conv%d' % idx,
                nn.Conv2d(in_channels=64, out_channels=64, kernel_size=3, stride=1, padding=1, bias=True),
            )
        self.lrelu = nn.LeakyReLU(negative_slope=0.2, inplace=True)

    def forward(self, input):
        out = input
        for conv in (self.conv1, self.conv2, self.conv3):
            out = self.lrelu(conv(out))
        # Residual connection around the four-conv stack.
        return input + self.conv4(out)
class SRResNet(nn.Module):
    """Unconditioned SR baseline: conv head, plain residual trunk, pixel-shuffle
    upscale. Output is clamped to [min, max].

    NOTE(review): ``input_para`` is stored but never used (the kernel-code
    conditioning is commented out); kept for signature compatibility.
    """
    def __init__(self, input_channel=3, input_para=32, scale=4, min=0.0, max=1.0, residuals=16):
        super(SRResNet, self).__init__()
        self.min = min
        self.max = max
        self.para = input_para
        self.num_blocks = residuals
        self.conv1 = nn.Conv2d(input_channel, 64, 3, stride=1, padding=1)
        self.relu_conv1 = nn.LeakyReLU(0.2)
        self.conv2 = nn.Conv2d(64, 64, 3, stride=1, padding=1)
        self.relu_conv2 = nn.LeakyReLU(0.2)
        self.conv3 = nn.Conv2d(64, 64, 3, stride=1, padding=1)
        # Despite the name, this trunk holds plain (non-SFT) residual blocks.
        sft_branch = []
        for i in range(residuals):
            sft_branch.append(Residual_Block())
        self.sft_branch = nn.Sequential(*sft_branch)
        #for i in range(residuals):
        #    self.add_module('SFT-residual' + str(i + 1), SFT_Residual_Block(ndf=64, para=input_para))
        #self.sft = SFT_Layer(ndf=64, para=input_para)
        self.mul_conv1 = nn.Conv2d(64, 32, kernel_size=3, stride=1, padding=1)
        self.mul_leaky = nn.LeakyReLU(0.2)
        self.mul_conv2 = nn.Conv2d(32, 64, kernel_size=3, stride=1, padding=1)
        self.conv_mid = nn.Conv2d(in_channels=64, out_channels=64, kernel_size=3, stride=1, padding=1, bias=True)
        # Single pixel-shuffle stage handles any scale (scale**2 channel expansion).
        self.upscale = nn.Sequential(
            nn.Conv2d(in_channels=64, out_channels=64*scale**2, kernel_size=3, stride=1, padding=1, bias=True),
            nn.PixelShuffle(scale),
            nn.LeakyReLU(0.2, inplace=True),
        )
        self.conv_output = nn.Conv2d(in_channels=64, out_channels=input_channel, kernel_size=9, stride=1, padding=4, bias=True)
    def forward(self, input):
        #B, C, H, W = input.size() # I_LR batch
        #B_h, C_h = ker_code.size() # Batch, Len=32
        #ker_code_exp = ker_code.view((B_h, C_h, 1, 1)).expand((B_h, C_h, H, W)) #kernel_map stretch
        fea_bef = self.conv3(self.relu_conv2(self.conv2(self.relu_conv1(self.conv1(input)))))
        fea_in = fea_bef
        fea_mid = self.sft_branch(fea_in)
        # Global residual connection around the trunk.
        fea_add = torch.add(fea_mid, fea_bef)
        fea = self.upscale(self.conv_mid(self.mul_conv2(self.mul_leaky(self.mul_conv1(fea_add)))))
        out = self.conv_output(fea)
        return torch.clamp(out, min=self.min, max=self.max)
class SFTMD_DEMO(nn.Module):
    """Demo SFTMD variant: the kernel code is also concatenated to the input
    of the first conv (hence input_channel + input_para), and clamping is
    optional via the ``clip`` flag in forward.
    """
    def __init__(self, input_channel=3, input_para=10, scala=4, min=0.0, max=1.0, residuals=16):
        super(SFTMD_DEMO, self).__init__()
        self.min = min
        self.max = max
        self.para = input_para
        self.reses = residuals
        self.conv1 = nn.Conv2d(input_channel + input_para, 64, 3, stride=1, padding=1)
        self.relu_conv1 = nn.LeakyReLU(0.2)
        self.conv2 = nn.Conv2d(64, 64, 3, stride=1, padding=1)
        self.relu_conv2 = nn.LeakyReLU(0.2)
        self.conv3 = nn.Conv2d(64, 64, 3, stride=1, padding=1)
        for i in range(residuals):
            self.add_module('SFT-residual' + str(i + 1), SFT_Residual_Block(ndf=64, para=input_para))
        self.sft_mid = SFT_Layer(ndf=64, para=input_para)
        self.conv_mid = nn.Conv2d(in_channels=64, out_channels=64, kernel_size=3, stride=1, padding=1, bias=False)
        self.scala = scala
        # NOTE(review): the scala == 4 branch and the final else branch build
        # identical two-stage x2 pixel-shuffle stacks (any scala other than
        # 2/3/4 therefore upscales x4) -- confirm intended.
        if scala == 4:
            self.upscale = nn.Sequential(
                nn.Conv2d(in_channels=64, out_channels=256, kernel_size=3, stride=1, padding=1, bias=False),
                nn.PixelShuffle(2),
                nn.LeakyReLU(0.2, inplace=True),
                nn.Conv2d(in_channels=64, out_channels=256, kernel_size=3, stride=1, padding=1, bias=False),
                nn.PixelShuffle(2),
                nn.LeakyReLU(0.2, inplace=True),
            )
        elif scala == 3:
            self.upscale = nn.Sequential(
                nn.Conv2d(in_channels=64, out_channels=64*9, kernel_size=3, stride=1, padding=1, bias=False),
                nn.PixelShuffle(3),
                nn.LeakyReLU(0.2, inplace=True),
            )
        elif scala == 2:
            self.upscale = nn.Sequential(
                nn.Conv2d(in_channels=64, out_channels=256, kernel_size=3, stride=1, padding=1, bias=False),
                nn.PixelShuffle(2),
                nn.LeakyReLU(0.2, inplace=True),
            )
        else:
            self.upscale = nn.Sequential(
                nn.Conv2d(in_channels=64, out_channels=256, kernel_size=3, stride=1, padding=1, bias=False),
                nn.PixelShuffle(2),
                nn.LeakyReLU(0.2, inplace=True),
                nn.Conv2d(in_channels=64, out_channels=256, kernel_size=3, stride=1, padding=1, bias=False),
                nn.PixelShuffle(2),
                nn.LeakyReLU(0.2, inplace=True),
            )
        self.conv_output = nn.Conv2d(in_channels=64, out_channels=input_channel, kernel_size=9, stride=1, padding=4, bias=False)
    def forward(self, input, code, clip=False):
        """input: (B, C, H, W); code: (B, para). Clamp output only when clip."""
        B, C, H, W = input.size()
        B, C_l = code.size()
        # Stretch the code spatially and concatenate it to the image channels.
        code_exp = code.view((B, C_l, 1, 1)).expand((B, C_l, H, W))
        input_cat = torch.cat([input, code_exp], dim=1)
        before_res = self.conv3(self.relu_conv2(self.conv2(self.relu_conv1(self.conv1(input_cat)))))
        res = before_res
        for i in range(self.reses):
            res = self.__getattr__('SFT-residual' + str(i + 1))(res, code_exp)
        mid = self.sft_mid(res, code_exp)
        mid = F.relu(mid)
        mid = self.conv_mid(mid)
        # Global residual connection around the SFT trunk.
        befor_up = torch.add(before_res, mid)
        uped = self.upscale(befor_up)
        out = self.conv_output(uped)
        return torch.clamp(out, min=self.min, max=self.max) if clip else out
fab86be6f16580e58ee8836bf4504a1098307651 | 539 | py | Python | server/urls.py | Valchris/AngularJS-Django-Template | 10c90087984dcd9e6d29380eb4380824e65bcecf | [
"MIT"
] | 1 | 2015-07-29T04:28:26.000Z | 2015-07-29T04:28:26.000Z | server/urls.py | Valchris/AngularJS-Django-Template | 10c90087984dcd9e6d29380eb4380824e65bcecf | [
"MIT"
] | null | null | null | server/urls.py | Valchris/AngularJS-Django-Template | 10c90087984dcd9e6d29380eb4380824e65bcecf | [
"MIT"
] | null | null | null | from django.conf.urls import include, url
from django.contrib import admin
from glue.views import *
from glue.api import *
# URL routing: JSON auth API endpoints, the Django admin, and a catch-all
# that hands everything else to the AngularJS single-page app.
urlpatterns = [
    # Examples:
    # url(r'^$', 'server.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),
    url(r'^api/user/data/', view=user_data),
    url(r'^api/user/signout/', view=user_signout),
    url(r'^api/user/signin/', view=user_signin),
    url(r'^api/user/register/', view=user_register),
    # NOTE(review): no trailing slash, so this also matches e.g. /adminfoo --
    # presumably intended as r'^admin/'; confirm before changing.
    url(r'^admin', include(admin.site.urls)),
    # Catch-all must stay last: Angular handles client-side routes.
    url(r'^', AngularView.as_view()),
]
| 26.95 | 52 | 0.64564 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 191 | 0.35436 |
fab9927c333b405b4078ba0c4fbf717659119bd5 | 1,040 | py | Python | Webcam2YouTubeLive.py | JulianLiederer/ffmpeg-youtube-live | 63ce3b642d41c9368a3cf7b370b3ae7ccaaeeaa7 | [
"MIT"
] | 2 | 2020-03-30T13:55:32.000Z | 2020-06-23T21:37:45.000Z | Webcam2YouTubeLive.py | liederer/ffmpeg-youtube-live | 63ce3b642d41c9368a3cf7b370b3ae7ccaaeeaa7 | [
"MIT"
] | null | null | null | Webcam2YouTubeLive.py | liederer/ffmpeg-youtube-live | 63ce3b642d41c9368a3cf7b370b3ae7ccaaeeaa7 | [
"MIT"
] | 2 | 2022-01-02T18:40:06.000Z | 2022-02-19T12:32:19.000Z | #!/usr/bin/env python
"""
LIVE STREAM TO YOUTUBE LIVE using FFMPEG -- from webcam
https://www.scivision.co/youtube-live-ffmpeg-livestream/
https://support.google.com/youtube/answer/2853702
Windows: get DirectShow device list from:
ffmpeg -list_devices true -f dshow -i dummy
"""
from youtubelive_ffmpeg import youtubelive
import sys
#
# Pick the default audio/video capture device names per platform.
if sys.platform.startswith('win'):
    # DirectShow device names; list yours with:
    #   ffmpeg -list_devices true -f dshow -i dummy
    audiochan = 'audio="Internal Microphone"'
    videochan = 'video="Integrated Camera"'
elif sys.platform.startswith('darwin'):
    audiochan = 'default'
    videochan = 'default'
elif sys.platform.startswith('linux'):
    audiochan = 'default'
    videochan = '/dev/video0'
else:
    # Fallback for any other platform; previously these names were left
    # undefined here, causing a NameError when building the config below.
    audiochan = 'default'
    videochan = 'default'
if __name__ == '__main__':
    import signal
    # Restore default SIGINT handling so Ctrl-C stops the stream immediately.
    signal.signal(signal.SIGINT, signal.SIG_DFL)
    from argparse import ArgumentParser
    p = ArgumentParser()
    p.add_argument('-fps',default=30, type=int)
    # Rebinds p from the parser to the parsed namespace.
    p = p.parse_args()
    # Config dict consumed by youtubelive_ffmpeg.youtubelive.
    P = {'fps': p.fps,
         'audiochan': audiochan,
         'videochan': videochan,
         'vidsource': 'camera',
         }
    youtubelive(P)
fabc177219e2e95776351ee5bdc5b7834e86aaf5 | 17,943 | py | Python | nebula/dao/strategy_dao.py | threathunterX/nebula_web | 2e32e6e7b225e0bd87ee8c847c22862f12c51bb1 | [
"Apache-2.0"
] | 2 | 2019-05-01T09:42:32.000Z | 2019-05-31T01:08:37.000Z | nebula/dao/strategy_dao.py | threathunterX/nebula_web | 2e32e6e7b225e0bd87ee8c847c22862f12c51bb1 | [
"Apache-2.0"
] | 1 | 2021-06-01T23:30:04.000Z | 2021-06-01T23:30:04.000Z | nebula/dao/strategy_dao.py | threathunterX/nebula_web | 2e32e6e7b225e0bd87ee8c847c22862f12c51bb1 | [
"Apache-2.0"
] | 5 | 2019-05-14T09:30:12.000Z | 2020-09-29T04:57:26.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import json
import logging
from threathunter_common.util import millis_now
from nebula_meta.model import Strategy
from .base_dao import BaseDao, BaseDefaultDao
from . import cache
from ..models.default import StrategyDefaultModel as Model, StrategyDefaultModel
from ..models import StrategyCustModel as CustModel
logger = logging.getLogger('nebula.dao.strategy')
#TODO more nodes
def is_strategy_weigh_cache_avail():
if cache.Strategy_Weigh_Cache is None:
logger.warn('strategy weigh cache is None')
return False
return True
def add_strategy_weigh_cache(s):
    """Insert the weigh entry derived from strategy ``s`` into the cache."""
    if is_strategy_weigh_cache_avail():
        weigh = get_strategy_weigh(s)
        cache.Strategy_Weigh_Cache[weigh['name']] = weigh
def delete_strategy_weigh_cache(app=None, name=None):
    """Evict weigh-cache entries.

    With ``app`` (and optionally ``name``) only the matching entries are
    removed; without ``app`` the whole cache is cleared.
    """
    if not is_strategy_weigh_cache_avail():
        return
    if not app:
        cache.Strategy_Weigh_Cache = dict()
        return
    if name:
        kept = [w for w in cache.Strategy_Weigh_Cache.values()
                if w['app'] != app or w['name'] != name]
    else:
        kept = [w for w in cache.Strategy_Weigh_Cache.values()
                if w['app'] != app]
    cache.Strategy_Weigh_Cache = dict((w['name'], w) for w in kept)
def get_strategy_weigh(s):
    """Build the weigh dict for strategy ``s``, or return None.

    Scans the strategy's terms for the ``setblacklist`` one and flattens its
    blacklist config together with the strategy metadata. Returns None (after
    logging) when no blacklist term is configured -- previously this fell
    through and crashed on ``blacklist_info.get`` (or NameError for an empty
    terms list); callers such as init_strategy_weigh already skip falsy results.
    """
    blacklist_info = None
    blacklist_term = None
    config = json.loads(s.config)
    terms = config.get('terms', [])
    for term in terms:
        if term['left']['subtype'] == 'setblacklist':
            blacklist_info = term['left']['config']
            # Remember the matching term; 'scope' used to be read from the
            # *last* term of the loop instead of the blacklist term itself.
            blacklist_term = term
    if blacklist_info is None:
        logger.error(u'app:%s, name:%s 的策略没有设置黑名单的配置', s.app, s.name)
        return None
    return {
        'app': s.app,
        'name': s.name,
        'tags': (s.tags or '').split(','),
        'category': s.category,
        'score': s.score,
        'expire': s.endeffect,
        'remark': s.remark,
        'test': True if s.status == 'test' else False,
        'scope': blacklist_term.get('scope', ''),
        'checkpoints': blacklist_info.get('checkpoints', ''),
        'checkvalue': blacklist_info.get('checkvalue', ''),
        'checktype': blacklist_info.get('checktype', ''),
        'decision': blacklist_info.get('decision', ''),
        'ttl': blacklist_info.get('ttl', 300)
    }
def update_strategy_weigh_cache(s):
    """Refresh the cached weigh entry for ``s``; identical to adding it."""
    add_strategy_weigh_cache(s)
def init_strategy_weigh():
    """(Re)build the global weigh cache from every stored strategy."""
    weighs = {}
    for strategy in StrategyCustDao().list_all_strategies_raw():
        weigh = get_strategy_weigh(strategy)
        # Skip strategies without a usable blacklist configuration.
        if weigh:
            weighs[weigh['name']] = weigh
    cache.Strategy_Weigh_Cache = weighs
class StrategyDefaultDao(BaseDefaultDao):
    """DAO over the default (built-in) strategy table.

    NOTE(review): unlike StrategyCustDao.change_status, change_status here
    does not refresh the strategy-weigh cache -- confirm whether intended.
    """
    # Class-level 5-second cache of online strategy names (shared by instances).
    cached_online_strategies = set()
    last_cache_update_ts = 0
    def get_strategy_by_app_and_name(self, app, name):
        """Return the Strategy for (app, name), or None when absent."""
        query = self.session.query(Model)
        result = query.filter(Model.name == name, Model.app == app).first()
        if result:
            return result.to_strategy()
    def _get_model_by_app_and_name(self, app, name):
        # Raw model row (not converted to Strategy), or None.
        query = self.session.query(Model)
        return query.filter(Model.name == name, Model.app == app).first()
    def get_strategy_by_id(self, id):
        """Return the Strategy with the given primary key, or None."""
        query = self.session.query(Model)
        result = query.filter(Model.id == id).first()
        if result:
            return result.to_strategy()
    def list_all_strategies(self):
        """Return every default strategy converted to a Strategy object."""
        query = self.session.query(Model)
        result = query.all() or []
        result = [_.to_strategy() for _ in result]
        return result
    def list_all_strategies_by_status(self, status):
        """Default strategies filtered to the given status."""
        return filter(lambda s: s.status == status, self.list_all_strategies())
    def list_all_strategies_by_app(self, app):
        """Default strategies filtered to the given app."""
        return filter(lambda s: s.app == app, self.list_all_strategies())
    def list_all_strategies_in_effect(self):
        """Default strategies whose effect window contains the current time."""
        now = millis_now()
        result = self.list_all_strategies() or []
        return filter(lambda s: s.start_effect <= now <= s.end_effect, result)
    def list_all_online_strategy_names_in_effect(self):
        """Names of 'online' strategies currently inside their effect window."""
        now = millis_now()
        result = self.list_all_strategies() or []
        result = filter(lambda s: s.start_effect <= now <= s.end_effect and s.status == "online", result)
        result = map(lambda s: s.name, result)
        return result
    def get_cached_online_strategies(self):
        """Online strategy names, cached at class level for 5 seconds."""
        current = millis_now()
        if current - StrategyDefaultDao.last_cache_update_ts< 5000:
            return StrategyDefaultDao.cached_online_strategies
        strategies = self.list_all_online_strategy_names_in_effect()
        StrategyDefaultDao.cached_online_strategies = set(strategies)
        StrategyDefaultDao.last_cache_update_ts = millis_now()
        return StrategyDefaultDao.cached_online_strategies
    def add_strategy(self, s):
        """Upsert strategy ``s`` (keyed by app+name) and refresh the weigh cache."""
        new = StrategyDefaultModel.from_strategy(s)
        new.last_modified = millis_now()
        existing = self._get_model_by_app_and_name(s.app, s.name)
        if existing:
            # update
            new.id = existing.id
            self.session.merge(new)
            update_strategy_weigh_cache(new)
        else:
            # insert
            self.session.add(new)
            add_strategy_weigh_cache(new)
        self.session.commit()
    def change_status(self, app, name, old_status, new_status):
        """Transition old_status -> new_status; no-op when missing or mismatched."""
        result = self._get_model_by_app_and_name(app, name)
        # check whether the internal status is right
        if not result:
            return
        result_strategy = result.to_strategy()
        if result_strategy.status != old_status:
            return
        result_strategy.status = new_status
        new_model = StrategyDefaultModel.from_strategy(result_strategy)
        new_model.id = result.id
        self.session.merge(new_model)
        self.session.commit()
    def delete_strategy_by_app_and_name(self, app, name):
        """Delete the (app, name) row and evict its weigh-cache entry."""
        query = self.session.query(Model)
        query.filter(Model.name == name, Model.app == app).delete()
        self.session.commit()
        delete_strategy_weigh_cache(app=app, name=name)
    def delete_strategy(self, s):
        """Delete the row matching strategy ``s``."""
        self.delete_strategy_by_app_and_name(s.app, s.name)
    def delete_strategy_list_by_app(self, app):
        """Delete all rows for ``app`` (or every row when ``app`` is falsy)."""
        query = self.session.query(Model)
        if app:
            query.filter(Model.app == app).delete()
            delete_strategy_weigh_cache(app=app)
        else:
            query.filter().delete()
            delete_strategy_weigh_cache()
        self.session.commit()
    def clear(self):
        """Delete every default strategy and empty the weigh cache."""
        query = self.session.query(Model)
        query.delete()
        self.session.commit()
        delete_strategy_weigh_cache()
    def count(self):
        """Number of rows in the default strategy table."""
        query = self.session.query(Model)
        return query.count()
class StrategyCustDao(BaseDao):
    """DAO merging the customized strategy table with the default one.

    Reads combine the default table (StrategyDefaultModel) with the
    customized table (StrategyCustModel); a customized row shadows the
    default row with the same (app, name). Writes and deletes only ever
    touch the customized table, so "deleting" a strategy merely reverts
    it to its default definition.
    """
    # Class-level 5-second cache of online strategy names (shared by instances).
    cached_online_strategies = set()
    last_cache_update_ts = 0
    def get_strategy_by_app_and_name(self, app, name):
        """Return the Strategy for (app, name); customized overrides default.

        @keep interface unchanged since v1.0 (lookup now merges both tables)
        """
        result = self._get_model_by_app_and_name(app, name)
        if result:
            return result.to_strategy()
    def _get_model_by_app_and_name(self, app, name):
        """Fetch the model row for (app, name), customized table first.

        @add within v2.0
        """
        query = self.session.query(CustModel).filter(CustModel.app == app, CustModel.name == name)
        cust_strategy = query.first()
        if not cust_strategy:
            query = StrategyDefaultDao().session.query(Model).filter(Model.app == app, Model.name == name)
            return query.first()
        else:
            return cust_strategy
    def _get_cust_model_by_app_name(self, app, name):
        """Fetch only the customized model row for (app, name).

        @add within v2.0
        """
        query = self.session.query(CustModel)
        return query.filter(CustModel.app == app, CustModel.name == name).first()
    def get_strategy_by_id(self, id):
        """Return the model row with the given id; customized table first.

        NOTE(review): unlike get_strategy_by_app_and_name this returns the
        raw model (no .to_strategy()); the unreachable conversion code that
        used to follow the if/else has been removed, behavior unchanged.

        @keep interface unchanged since v1.0 (lookup now merges both tables)
        """
        query = self.session.query(CustModel).filter(CustModel.id == id)
        cust_strategy = query.first()
        if not cust_strategy:
            query = StrategyDefaultDao().session.query(Model).filter(Model.id == id)
            return query.first()
        else:
            return cust_strategy
    def get_cust_strategy_by_id(self, id):
        """Return the customized Strategy with the given id, or None.

        @add
        """
        query = self.session.query(CustModel)
        result = query.filter(CustModel.id == id).first()
        if result:
            return result.to_strategy()
    def list_all_strategies_raw(self):
        """Return merged model rows (not Strategy objects); customized rows win.

        @new v2.0
        """
        default_query = StrategyDefaultDao().session.query(Model)
        strategies = dict(((_.app, _.name), _) for _ in default_query.all())
        # Customized rows override defaults sharing the same (app, name) key.
        cust_query = self.session.query(CustModel)
        for cq in cust_query.all():
            strategies[(cq.app, cq.name)] = cq
        return strategies.values()
    def list_all_strategies(self):
        """Return merged Strategy objects; customized entries override defaults.

        @keep interface unchanged since v1.0 (now merges both tables)
        """
        default_query = StrategyDefaultDao().session.query(Model)
        strategies = dict(((_.app, _.name), _.to_strategy()) for _ in default_query.all())
        cust_query = self.session.query(CustModel)
        for cq in cust_query.all():
            strategies[(cq.app, cq.name)] = cq.to_strategy()
        return strategies.values()
    def list_all_cust_strategies(self):
        """List only the customized strategies.

        @add within v2.0
        """
        query = self.session.query(CustModel)
        result = query.all() or []
        result = [_.to_strategy() for _ in result]
        return result
    def list_all_strategies_by_status(self, status):
        """Merged strategies filtered to the given status.

        @keep interface unchanged since v1.0
        """
        return filter(lambda s: s.status == status, self.list_all_strategies())
    def list_all_strategies_by_app(self, app):
        """Merged strategies filtered to the given app.

        @keep interface unchanged since v1.0
        """
        return filter(lambda s: s.app == app, self.list_all_strategies())
    def list_all_strategies_in_effect(self):
        """Merged strategies whose effect window contains the current time.

        @keep interface unchanged since v1.0
        """
        now = millis_now()
        result = self.list_all_strategies() or []
        return filter(lambda s: s.start_effect <= now <= s.end_effect, result)
    def list_all_online_strategy_names_in_effect(self):
        """Names of 'online' strategies currently inside their effect window.

        @keep interface unchanged since v1.0
        """
        now = millis_now()
        result = self.list_all_strategies() or []
        result = filter(lambda s: s.start_effect <= now <= s.end_effect and s.status == "online", result)
        result = map(lambda s: s.name, result)
        return result
    def get_cached_online_strategies(self):
        """Online strategy names, cached at class level for 5 seconds.

        @keep interface unchanged since v1.0
        """
        current = millis_now()
        if current - StrategyCustDao.last_cache_update_ts < 5000:
            return StrategyCustDao.cached_online_strategies
        strategies = self.list_all_online_strategy_names_in_effect()
        StrategyCustDao.cached_online_strategies = set(strategies)
        StrategyCustDao.last_cache_update_ts = millis_now()
        return StrategyCustDao.cached_online_strategies
    def add_strategy(self, s):
        """Upsert strategy ``s`` into the customized table only.

        A customized row shadows the same-named default row; the default is
        never modified or deleted here.
        @keep interface unchanged since v1.0 (writes now target the cust table)
        """
        new = CustModel.from_strategy(s)
        new.last_modified = millis_now()
        existing = self._get_cust_model_by_app_name(s.app, s.name)
        if existing:
            # Update in place, keeping the row id and group binding.
            new.id = existing.id
            new.group_id = existing.group_id
            self.session.merge(new)
            update_strategy_weigh_cache(new)
        else:
            # insert
            self.session.add(new)
            add_strategy_weigh_cache(new)
        self.session.commit()
    def change_status(self, app, name, old_status, new_status):
        """Transition old_status -> new_status, persisting into the cust table.

        No-op when the strategy is missing or not currently in old_status.
        NOTE(review): when the source row comes from the default table, its id
        is reused for the customized row -- confirm ids cannot collide.
        @changed since v1.0: the result is always stored as a customized row
        """
        result = self._get_model_by_app_and_name(app, name)
        # check whether the internal status is right
        if not result:
            return
        result_strategy = result.to_strategy()
        if result_strategy.status != old_status:
            return
        result_strategy.status = new_status
        new_model = CustModel.from_strategy(result_strategy)
        new_model.id = result.id
        self.session.merge(new_model)
        self.session.commit()
        update_strategy_weigh_cache(new_model)
    def delete_strategy_by_app_and_name(self, app, name):
        """Delete only the customized row for (app, name); the default survives.

        @changed since v1.0: only customized strategies can be deleted
        """
        query = self.session.query(CustModel)
        query.filter(CustModel.name == name, CustModel.app == app).delete()
        self.session.commit()
        delete_strategy_weigh_cache(app=app, name=name)
    def delete_strategy(self, s):
        """Delete the customized row matching strategy ``s`` (default survives).

        @changed since v1.0: only customized strategies can be deleted
        """
        self.delete_strategy_by_app_and_name(s.app, s.name)
    def delete_strategy_list_by_app(self, app):
        """Delete customized rows for ``app`` (or all of them when ``app`` is falsy).

        @changed since v1.0: only customized strategies can be deleted
        """
        query = self.session.query(CustModel)
        if app:
            query.filter(CustModel.app == app).delete()
            delete_strategy_weigh_cache(app=app)
        else:
            query.filter().delete()
            delete_strategy_weigh_cache()
        self.session.commit()
    def clear(self):
        """Remove every customized strategy, i.e. reset everything to defaults.

        @changed since v1.0: defaults are preserved (previously cleared too)
        """
        query = self.session.query(CustModel)
        query.delete()
        self.session.commit()
        delete_strategy_weigh_cache()
    def count(self):
        """Count only the customized strategies.

        @changed since v1.0: defaults are not included in the count
        """
        query = self.session.query(CustModel)
        return query.count()
if __name__ == "__main__":
    # Ad-hoc smoke test (Python 2 print statements): builds a sample strategy
    # from JSON, stores it via the default DAO, and exercises the list APIs.
    js = """{
    "app": "nebula",
    "name": "test_strategy",
    "remark": "test strategy",
    "version": 1430694092730,
    "status": "inedit",
    "createtime": 1430693092730,
    "modifytime": 1430693092730,
    "starteffect": 1430693092730,
    "endeffect": 1431095092730,
    "terms": [
        {
            "left":
            {
                "type": "event",
                "subtype": "",
                "config": {
                    "event": ["nebula", "http_static"],
                    "field": "c_bytes"
                }
            },
            "op": "between",
            "right":
            {
                "type": "constant",
                "subtype": "",
                "config": {
                    "value": "1,200"
                }
            }
        },
        {
            "left":
            {
                "type": "func",
                "subtype": "count",
                "config": {
                    "sourceevent": ["nebula", "http_dynamic"],
                    "condition": [
                        {
                            "left": "method",
                            "op": "==",
                            "right": "get"
                        }
                    ],
                    "interval": 300,
                    "algorithm": "count",
                    "groupby": ["c_ip", "uri_stem"],
                    "trigger": {
                        "event": ["nebula", "http_static"],
                        "keys": ["c_ip","uri_stem"]
                    }
                }
            },
            "op": "<",
            "right":
            {
                "type": "constant",
                "subtype": "",
                "config": {
                    "value": "2"
                }
            }
        }
    ]
    }"""
    dao = StrategyDefaultDao()
    strategy = Strategy.from_json(js)
    print StrategyDefaultModel.from_strategy(strategy)
    dao.add_strategy(strategy)
    for i in dao.list_all_strategies():
        print i
    # Exercise the read APIs (return values intentionally discarded).
    dao.list_all_strategies()
    dao.list_all_strategies_by_status("inedit")
    dao.list_all_strategies_in_effect()
    dao.count()
    # dao.delete_strategy(dao.get_strategy_by_app_and_name("app", "name"))
| 32.68306 | 113 | 0.567687 | 12,848 | 0.6949 | 0 | 0 | 0 | 0 | 0 | 0 | 5,575 | 0.301531 |
fabc961b87da1b6806ebcaaaca91e938753fd2bc | 2,205 | py | Python | src/21_zip.py | TheFlipbook/python_challenge | 21bd42178088bcaafbe02c25a76bc4f2950509b2 | [
"MIT"
] | null | null | null | src/21_zip.py | TheFlipbook/python_challenge | 21bd42178088bcaafbe02c25a76bc4f2950509b2 | [
"MIT"
] | null | null | null | src/21_zip.py | TheFlipbook/python_challenge | 21bd42178088bcaafbe02c25a76bc4f2950509b2 | [
"MIT"
] | null | null | null | # http://www.pythonchallenge.com/pc/bin/hex.html
import bz2
import io
import urllib.request
import urllib.error
import zipfile
import zlib
out_dir = "_out/idiot"
prompt = "http://www.pythonchallenge.com/pc/hex/unreal.jpg"
prompt_top = "http://www.pythonchallenge.com/pc/hex/"
prompt_range = 1152983631
prompt_pass = b"redavni"
username = "butter"
password = "fly"
def open_section(start=None):
    """Fetch ``prompt`` with HTTP basic auth, optionally from byte ``start``.

    A truthy ``start`` adds a Range header so only the tail of the resource
    is downloaded. Returns the raw response bytes.
    """
    auth_mgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
    auth_mgr.add_password(None, prompt_top, username, password)
    handler = urllib.request.HTTPBasicAuthHandler(auth_mgr)
    opener = urllib.request.build_opener(handler)
    headers = {"Range": "bytes={}-".format(start)} if start else {}
    return opener.open(urllib.request.Request(prompt, headers=headers)).read()
def main():
    """Solve the challenge: open the zip hidden at a byte offset inside the
    image, then repeatedly decompress/reverse ``package.pack`` until neither
    compression header is recognised.  Returns the open ZipFile.
    """
    # The zip archive starts at byte offset prompt_range inside the image.
    data = open_section(start=prompt_range)
    stream = io.BytesIO(data)
    archive = zipfile.ZipFile(stream)
    # Get Prompt
    with archive.open("readme.txt", pwd=prompt_pass) as readme:
        text = (b"".join(readme.readlines())).decode("ascii")
        print(text)
    # Inspect data
    with archive.open("package.pack", pwd=prompt_pass) as package:
        generation = package.read()
    # Data ping-pongs between compression methods
    # zlib streams begin with b"x" (0x78); bz2 streams begin with b"BZh".
    zlib_header = b"x"
    bz2_header = b"BZh"
    # Reversing twice means we couldn't find a header
    just_reversed = False
    # 2000 iterations is an upper bound; the loop breaks early once a
    # reversal still yields no recognisable header.
    for x in range(2000):
        if generation.startswith(zlib_header):
            print("_", end=" ")
            just_reversed = False
            generation = zlib.decompress(generation)
        elif generation.startswith(bz2_header):
            print("B", end=" ")
            just_reversed = False
            generation = bz2.decompress(generation)
        elif just_reversed:
            break
        else:
            print("f")
            just_reversed = True
            generation = generation[::-1]
    print(generation)
    return archive
# Script entry point: solve the level and print the resulting ZipFile.
if __name__ == "__main__":
    print(main())
# http://www.pythonchallenge.com/pc/hex/copper.html
| 24.5 | 67 | 0.631293 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 433 | 0.196372 |
fabc996394a50d0044d4c07929a9f64d42ea2f3d | 86 | py | Python | awards/signals.py | fgmacedo/django-awards | a5307a96f8d39abdd466eb854049dd0f7b13eaee | [
"MIT"
] | null | null | null | awards/signals.py | fgmacedo/django-awards | a5307a96f8d39abdd466eb854049dd0f7b13eaee | [
"MIT"
] | 305 | 2017-05-16T17:45:58.000Z | 2022-03-18T07:20:22.000Z | awards/signals.py | fgmacedo/django-awards | a5307a96f8d39abdd466eb854049dd0f7b13eaee | [
"MIT"
] | null | null | null | from django.dispatch import Signal
badge_awarded = Signal(providing_args=["badge"])
| 17.2 | 48 | 0.790698 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 | 0.081395 |
fabd8c394c9b5ebf1b3c158c1fcc13c3e5dcf49b | 2,596 | py | Python | tests/test_auth/test_upload_passport.py | peterwade153/flybob | 85fcd401bffed9adb06e7943f0c748be822fac75 | [
"MIT"
] | 1 | 2019-09-09T15:04:07.000Z | 2019-09-09T15:04:07.000Z | tests/test_auth/test_upload_passport.py | peterwade153/flybob | 85fcd401bffed9adb06e7943f0c748be822fac75 | [
"MIT"
] | 26 | 2019-03-27T16:59:26.000Z | 2021-06-01T23:35:27.000Z | tests/test_auth/test_upload_passport.py | peterwade153/flybob | 85fcd401bffed9adb06e7943f0c748be822fac75 | [
"MIT"
] | null | null | null | import unittest
from unittest.mock import patch, Mock
from werkzeug.datastructures import FileStorage
import io
import json
from app import app
from app.models.base import db
from app.models.user import User
from app.auth.views import UserPassportphotoView
from app.auth import views
class AuthUploadPassportPhotoTestCase(unittest.TestCase):
    """Tests for the passport-photo upload endpoint (/api/v1/auth/upload)."""
    def setUp(self):
        # Fresh test client and clean database for every test; one admin
        # user is created so the login endpoint can issue a token.
        self.app = app.test_client()
        app.testing = True
        self.user_data = {
            "username": "john123",
            "email": "john123@john.com",
            "password": "john1234556",
        }
        with app.app_context():
            db.drop_all()
            db.create_all()
            # create admin user
            user = User(
                username="john123",
                email="john123@john.com",
                password="john1234556",
                role=True,
            )
            user.save()
    @patch.object(views.UserPassportphotoView, "post")
    def test_upload_passport_photo(self, mock_post):
        """Happy path: a .jpg upload returns 200."""
        # NOTE(review): this patches the very method it then calls, so the
        # assertion exercises the mock, not the real view -- confirm this
        # test is meant as a smoke test only.
        upload = UserPassportphotoView()
        mock_post.return_value.status_code = 200
        res = upload.post(
            "/api/v1/auth/upload",
            data=dict(file=(io.BytesIO(b"abcdef"), "test.jpg")),
            headers={"Content-Type": "multipart/form-data"},
        )
        self.assertEqual(res.status_code, 200)
    def test_upload_photo_with_non_allowed_ext(self):
        """Uploading a disallowed extension (.txt) must be rejected with 400."""
        res = self.app.post(
            "/api/v1/auth/login",
            data=json.dumps(self.user_data),
            headers={"Content-Type": "application/json"},
        )
        token = json.loads(res.data.decode())["access_token"]
        data = {"file": (io.BytesIO(b'my file contents'), 'hello.txt')}
        result = self.app.post(
            "/api/v1/auth/upload", buffered=True,
            headers={
                "Authorization": token,
                "Content-Type" : 'multipart/form-data',
            },
            data=data,
        )
        self.assertEqual(result.status_code, 400)
    def test_no_photo_upload(self):
        """Posting with no file attached must be rejected with 400."""
        res = self.app.post(
            "/api/v1/auth/login",
            data=json.dumps(self.user_data),
            headers={"Content-Type": "application/json"},
        )
        token = json.loads(res.data.decode())["access_token"]
        result = self.app.post(
            "/api/v1/auth/upload", buffered=True,
            headers={
                "Authorization": token,
                "Content-Type" : 'multipart/form-data',
            },
            data={},
        )
        self.assertEqual(result.status_code, 400)
fabe4605f255d398162a6669d4a99f004a58a723 | 6,380 | py | Python | qchem/tests/test_two_particle.py | theRoughCode/pennylane | 317f82ef00c752beeef7d2412b88119a753467b4 | [
"Apache-2.0"
] | 1 | 2020-10-15T01:09:27.000Z | 2020-10-15T01:09:27.000Z | qchem/tests/test_two_particle.py | theRoughCode/pennylane | 317f82ef00c752beeef7d2412b88119a753467b4 | [
"Apache-2.0"
] | 1 | 2020-10-04T22:45:45.000Z | 2020-10-04T22:45:45.000Z | qchem/tests/test_two_particle.py | theRoughCode/pennylane | 317f82ef00c752beeef7d2412b88119a753467b4 | [
"Apache-2.0"
] | null | null | null | import os
import numpy as np
import pytest
from pennylane import qchem
from openfermion.hamiltonians import MolecularData
ref_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "test_ref_files")
table_1 = np.array(
[
[0.0, 0.0, 0.0, 0.0, 0.68238953],
[0.0, 1.0, 1.0, 0.0, 0.68238953],
[1.0, 0.0, 0.0, 1.0, 0.68238953],
[1.0, 1.0, 1.0, 1.0, 0.68238953],
[0.0, 0.0, 2.0, 2.0, 0.17900058],
[0.0, 1.0, 3.0, 2.0, 0.17900058],
[1.0, 0.0, 2.0, 3.0, 0.17900058],
[1.0, 1.0, 3.0, 3.0, 0.17900058],
[0.0, 2.0, 0.0, 2.0, 0.17900058],
[0.0, 3.0, 1.0, 2.0, 0.17900058],
[1.0, 2.0, 0.0, 3.0, 0.17900058],
[1.0, 3.0, 1.0, 3.0, 0.17900058],
[0.0, 2.0, 2.0, 0.0, 0.67073278],
[0.0, 3.0, 3.0, 0.0, 0.67073278],
[1.0, 2.0, 2.0, 1.0, 0.67073278],
[1.0, 3.0, 3.0, 1.0, 0.67073278],
[2.0, 0.0, 0.0, 2.0, 0.67073278],
[2.0, 1.0, 1.0, 2.0, 0.67073278],
[3.0, 0.0, 0.0, 3.0, 0.67073278],
[3.0, 1.0, 1.0, 3.0, 0.67073278],
[2.0, 0.0, 2.0, 0.0, 0.17900058],
[2.0, 1.0, 3.0, 0.0, 0.17900058],
[3.0, 0.0, 2.0, 1.0, 0.17900058],
[3.0, 1.0, 3.0, 1.0, 0.17900058],
[2.0, 2.0, 0.0, 0.0, 0.17900058],
[2.0, 3.0, 1.0, 0.0, 0.17900058],
[3.0, 2.0, 0.0, 1.0, 0.17900058],
[3.0, 3.0, 1.0, 1.0, 0.17900058],
[2.0, 2.0, 2.0, 2.0, 0.70510563],
[2.0, 3.0, 3.0, 2.0, 0.70510563],
[3.0, 2.0, 2.0, 3.0, 0.70510563],
[3.0, 3.0, 3.0, 3.0, 0.70510563],
]
)
table_2 = np.array(
[
[0.0, 0.0, 0.0, 0.0, 0.70510563],
[0.0, 1.0, 1.0, 0.0, 0.70510563],
[1.0, 0.0, 0.0, 1.0, 0.70510563],
[1.0, 1.0, 1.0, 1.0, 0.70510563],
]
)
table_3 = np.array(
[
[0.0, 0.0, 0.0, 0.0, 0.48731097],
[0.0, 1.0, 1.0, 0.0, 0.48731097],
[1.0, 0.0, 0.0, 1.0, 0.48731097],
[1.0, 1.0, 1.0, 1.0, 0.48731097],
[0.0, 0.0, 0.0, 2.0, -0.04857958],
[0.0, 1.0, 1.0, 2.0, -0.04857958],
[1.0, 0.0, 0.0, 3.0, -0.04857958],
[1.0, 1.0, 1.0, 3.0, -0.04857958],
[0.0, 0.0, 2.0, 0.0, -0.04857958],
[0.0, 1.0, 3.0, 0.0, -0.04857958],
[1.0, 0.0, 2.0, 1.0, -0.04857958],
[1.0, 1.0, 3.0, 1.0, -0.04857958],
[0.0, 0.0, 2.0, 2.0, 0.01306398],
[0.0, 1.0, 3.0, 2.0, 0.01306398],
[1.0, 0.0, 2.0, 3.0, 0.01306398],
[1.0, 1.0, 3.0, 3.0, 0.01306398],
[0.0, 2.0, 0.0, 0.0, -0.04857958],
[0.0, 3.0, 1.0, 0.0, -0.04857958],
[1.0, 2.0, 0.0, 1.0, -0.04857958],
[1.0, 3.0, 1.0, 1.0, -0.04857958],
[0.0, 2.0, 0.0, 2.0, 0.01306398],
[0.0, 3.0, 1.0, 2.0, 0.01306398],
[1.0, 2.0, 0.0, 3.0, 0.01306398],
[1.0, 3.0, 1.0, 3.0, 0.01306398],
[0.0, 2.0, 2.0, 0.0, 0.22361004],
[0.0, 3.0, 3.0, 0.0, 0.22361004],
[1.0, 2.0, 2.0, 1.0, 0.22361004],
[1.0, 3.0, 3.0, 1.0, 0.22361004],
[0.0, 2.0, 2.0, 2.0, 0.00748417],
[0.0, 3.0, 3.0, 2.0, 0.00748417],
[1.0, 2.0, 2.0, 3.0, 0.00748417],
[1.0, 3.0, 3.0, 3.0, 0.00748417],
[2.0, 0.0, 0.0, 0.0, -0.04857958],
[2.0, 1.0, 1.0, 0.0, -0.04857958],
[3.0, 0.0, 0.0, 1.0, -0.04857958],
[3.0, 1.0, 1.0, 1.0, -0.04857958],
[2.0, 0.0, 0.0, 2.0, 0.22361004],
[2.0, 1.0, 1.0, 2.0, 0.22361004],
[3.0, 0.0, 0.0, 3.0, 0.22361004],
[3.0, 1.0, 1.0, 3.0, 0.22361004],
[2.0, 0.0, 2.0, 0.0, 0.01306398],
[2.0, 1.0, 3.0, 0.0, 0.01306398],
[3.0, 0.0, 2.0, 1.0, 0.01306398],
[3.0, 1.0, 3.0, 1.0, 0.01306398],
[2.0, 0.0, 2.0, 2.0, 0.00748417],
[2.0, 1.0, 3.0, 2.0, 0.00748417],
[3.0, 0.0, 2.0, 3.0, 0.00748417],
[3.0, 1.0, 3.0, 3.0, 0.00748417],
[2.0, 2.0, 0.0, 0.0, 0.01306398],
[2.0, 3.0, 1.0, 0.0, 0.01306398],
[3.0, 2.0, 0.0, 1.0, 0.01306398],
[3.0, 3.0, 1.0, 1.0, 0.01306398],
[2.0, 2.0, 0.0, 2.0, 0.00748417],
[2.0, 3.0, 1.0, 2.0, 0.00748417],
[3.0, 2.0, 0.0, 3.0, 0.00748417],
[3.0, 3.0, 1.0, 3.0, 0.00748417],
[2.0, 2.0, 2.0, 0.0, 0.00748417],
[2.0, 3.0, 3.0, 0.0, 0.00748417],
[3.0, 2.0, 2.0, 1.0, 0.00748417],
[3.0, 3.0, 3.0, 1.0, 0.00748417],
[2.0, 2.0, 2.0, 2.0, 0.33788228],
[2.0, 3.0, 3.0, 2.0, 0.33788228],
[3.0, 2.0, 2.0, 3.0, 0.33788228],
[3.0, 3.0, 3.0, 3.0, 0.33788228],
]
)
# Each case: (reference molecule file, core orbital indices, active orbital
# indices, expected two-particle table, expected core-orbital contribution).
@pytest.mark.parametrize(
    ("name", "core", "active", "table_exp", "v_core_exp"),
    [
        ("h2_pyscf", None, None, table_1, 0),
        ("h2_pyscf", [0], None, table_2, 0.6823895331520422),
        ("h2_pyscf", None, [0, 1], table_1, 0),
        ("h2_pyscf", [0], [1], table_2, 0.6823895331520422),
        ("lih", [0], [1, 2], table_3, 1.6585666870874103),
    ],
)
def test_table_two_particle(name, core, active, table_exp, v_core_exp, tol):
    r"""Test the table of two-particle matrix elements and the contribution of core orbitals
    as implemented in the `two_particle` function of the `obs` module"""
    # Load precomputed Hartree-Fock data from the test reference directory.
    hf_data = MolecularData(filename=os.path.join(ref_dir, name))
    table, v_core = qchem.two_particle(hf_data.two_body_integrals, core=core, active=active)
    assert np.allclose(table, table_exp, **tol)
    assert np.allclose(v_core, v_core_exp, **tol)
v_me_1D = np.array([1, 2, 3, 4])
v_me_4D = np.full((2, 2, 2, 2), 0.5)
# Each case: (matrix elements, core indices, active indices, expected error
# message fragment).  v_me_1D has the wrong rank; v_me_4D is a valid 2x2x2x2
# array paired with out-of-range orbital indices.
@pytest.mark.parametrize(
    ("v_me", "core", "active", "msg_match"),
    [
        (v_me_1D, [0], None, "'matrix_elements' must be a 4D array"),
        (v_me_4D, [-1, 0, 1, 2], None, "Indices of core orbitals must be between 0 and"),
        (v_me_4D, [0, 1, 2, 3], None, "Indices of core orbitals must be between 0 and"),
        (v_me_4D, None, [-1, 0], "Indices of active orbitals must be between 0 and"),
        (v_me_4D, None, [2, 6], "Indices of active orbitals must be between 0 and"),
    ],
)
def test_exceptions_two_particle(v_me, core, active, msg_match):
    """Test that the function `'two_particle'` throws an exception
    if the dimension of the matrix elements array is not a 4D array or
    if the indices of core and/or active orbitals are out of range."""
    with pytest.raises(ValueError, match=msg_match):
        qchem.two_particle(v_me, core=core, active=active)
| 37.529412 | 92 | 0.487461 | 0 | 0 | 0 | 0 | 1,770 | 0.277429 | 0 | 0 | 734 | 0.115047 |
fac02a78f618b71b9828bd71e56497f77be5f2b6 | 1,571 | py | Python | example_project/news_with_archive/migrations/0001_initial.py | richardbarran/django-minipub | f6df9b15cf49ba95c5aefed5355a7d3de0241c3f | [
"MIT"
] | 7 | 2016-02-19T12:52:01.000Z | 2021-07-07T05:10:41.000Z | example_project/news_with_archive/migrations/0001_initial.py | richardbarran/django-minipub | f6df9b15cf49ba95c5aefed5355a7d3de0241c3f | [
"MIT"
] | 2 | 2018-05-14T09:28:25.000Z | 2021-05-12T19:21:10.000Z | example_project/news_with_archive/migrations/0001_initial.py | richardbarran/django-minipub | f6df9b15cf49ba95c5aefed5355a7d3de0241c3f | [
"MIT"
] | 1 | 2021-03-24T00:44:22.000Z | 2021-03-24T00:44:22.000Z | # Generated by Django 2.0 on 2018-02-14 13:39
from django.db import migrations, models
import django.utils.timezone
import model_utils.fields
class Migration(migrations.Migration):
    # First migration for this app: creates the Article model with
    # django-model-utils timestamp/status tracking fields and an optional
    # start/end publication window.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Article',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
                ('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
                ('status', model_utils.fields.StatusField(choices=[('draft', 'draft'), ('published', 'published'), ('archived', 'archived')], default='draft', max_length=100, no_check_for_status=True, verbose_name='status')),
                ('status_changed', model_utils.fields.MonitorField(default=django.utils.timezone.now, monitor='status', verbose_name='status changed')),
                ('start', models.DateField(blank=True, null=True, verbose_name='start date')),
                ('end', models.DateField(blank=True, null=True, verbose_name='end date')),
                ('title', models.CharField(max_length=50, unique=True)),
                ('slug', models.SlugField()),
                ('body', models.TextField()),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
| 44.885714 | 225 | 0.621897 | 1,425 | 0.907066 | 0 | 0 | 0 | 0 | 0 | 0 | 282 | 0.179504 |
fac137087e41cae16ef6b6cc8d7e95ccb0632729 | 6,159 | py | Python | recipes/Python/578344_Simple_Finite_State_Machine_class_/recipe-578344.py | tdiprima/code | 61a74f5f93da087d27c70b2efe779ac6bd2a3b4f | [
"MIT"
] | 2,023 | 2017-07-29T09:34:46.000Z | 2022-03-24T08:00:45.000Z | recipes/Python/578344_Simple_Finite_State_Machine_class_/recipe-578344.py | unhacker/code | 73b09edc1b9850c557a79296655f140ce5e853db | [
"MIT"
] | 32 | 2017-09-02T17:20:08.000Z | 2022-02-11T17:49:37.000Z | recipes/Python/578344_Simple_Finite_State_Machine_class_/recipe-578344.py | unhacker/code | 73b09edc1b9850c557a79296655f140ce5e853db | [
"MIT"
] | 780 | 2017-07-28T19:23:28.000Z | 2022-03-25T20:39:41.000Z | #! /usr/bin/env python
""" Generic finite state machine class
Initialise the class with a list of tuples - or by adding transitions
Tony Flury - November 2012
Released under an MIT License - free to use so long as the author and other contributers are credited.
"""
class fsm(object):
""" A simple to use finite state machine class.
Allows definition of multiple states, condition functions from state to state and optional callbacks
"""
def __init__(self, states=[]):
self._states=states
self.currentState = None
def start(self,startState=None):
""" Start the finite state machine
"""
if not startState or not (startState in [x[0] for x in self._states]):
raise ValueError("Not a valid start state")
self.currentState = startState
def stop(self):
""" Stop the finite state machine
"""
# Bug fix 15 Dec 2012 - self.currentState should be reset, not startState - Identified by Holger Waldmann
self.currentState = None
def addTransition(self,fromState, toState, condition, callback=None):
""" Add a state transition to the list, order is irellevant, loops are undetected
Can only add a transition if the state machine isn't started.
"""
if not self.currentState:
raise ValueError("StateMachine already Started - cannot add new transitions")
# add a transition to the state table
self._states.append( (fromState, toState,condition, callback))
def event(self, value):
""" Trigger a transition - return a tuple (<new_state>, <changed>)
Raise an exception if no valid transition exists.
Callee needs to determine if the value will be consumed or re-used
"""
if not self.currentState:
raise ValueError("StateMachine not Started - cannot process event")
# get a list of transitions which are valid
self.nextStates = [ x for x in self._states\
if x[0] == self.currentState \
and (x[2]==True or (callable(x[2]) and x[2](value))) ]
if not self.nextStates:
raise ValueError("No Transition defined from state {0} with value '{1}'".format(self.currentState, value))
elif len(self.nextStates) > 1:
raise ValueError("Ambiguous transitions from state {0} with value '{1}' -> New states defined {2}".format(self.currentState, value, [x[0] for x in self.nextStates]))
else:
if len(self.nextStates[0]) == 4:
current, next, condition, callback = self.nextStates[0]
else:
current, next, condition = self.nextStates[0]
callback = None
self.currentState, changed = (next,True) \
if self.currentState != next else (next, False)
# Execute the callback if defined
if callable(callback):
callback(self, value)
return self.currentState, changed
def CurrentState(self):
""" Return the current State of the finite State machine
"""
return self.currentState
# -------------------------------------------------------------------------------------------------
# Example classes to demonstrate the use of the Finite State Machine Class
# They implement a simple lexical tokeniser.
# These classes are not neccesary for the FSM class to work.
# -------------------------------------------------------------------------------------------------
# Simple storage object for each token
class token(object):
    """Accumulates the characters of a single lexical token.

    ``tokenType`` is the FSM state name that produced the token;
    ``tokenText`` grows one character at a time via addCharacter().
    """
    def __init__(self, type):
        self.tokenType = type
        self.tokenText = ""
    def addCharacter(self, char):
        """Append one character to the token's text."""
        self.tokenText = self.tokenText + char
    def __repr__(self):
        return "%s<%s>" % (self.tokenType, self.tokenText)
# Token list object - demonstrating the definition of state machine callbacks
class tokenList(object):
    """Collects token objects as the FSM fires Start/add/End callbacks.

    Completed tokens accumulate in ``tokenList``; ``currentToken`` holds
    the token currently being built (or None between tokens).
    """
    def __init__(self):
        self.tokenList = []
        self.currentToken = None
    def StartToken(self, fss, value):
        """FSM callback: open a new token typed by the machine's current state."""
        fresh = token(fss.CurrentState())
        fresh.addCharacter(value)
        self.currentToken = fresh
    def addCharacter(self, fss, value):
        """FSM callback: extend the in-progress token by one character."""
        self.currentToken.addCharacter(value)
    def EndToken(self, fss, value):
        """FSM callback: close the in-progress token and store it."""
        self.tokenList.append(self.currentToken)
        self.currentToken = None
# Example code - showing population of the state machine in the constructor
# the Machine could also be constructed by multiple calls to addTransition method
# Example code is a simple tokeniser
# Machine transitions back to the Start state whenever the end of a token is detected
# Demo: tokenise a small expression.  The machine returns to "Start"
# whenever the end of a token is detected.
if __name__ == "__main__":
    t = tokenList()
    fs = fsm( [ ("Start","Start",lambda x: x.isspace() ),
                ("Start","Identifier",str.isalpha, t.StartToken ),
                ("Identifier","Identifier", str.isalnum, t.addCharacter ),
                ("Identifier","Start",lambda x: not x.isalnum(), t.EndToken ),
                ("Start","Operator", lambda x: x in "=+*/-()", t.StartToken ),
                ("Operator","Start", True, t.EndToken),
                ("Start","Number",str.isdigit, t.StartToken ),
                ("Number","Number",lambda x: x.isdigit() or x == ".", t.addCharacter ),
                ("Number","Start",lambda x: not x.isdigit() and x != ".", t.EndToken ),
                ("Start","StartQuote",lambda x: x == "\'"),
                ("StartQuote","String", lambda x: x != "\'", t.StartToken),
                ("String","String",lambda x: x != "\'", t.addCharacter ),
                ("String","EndQuote", lambda x: x == "\'", t.EndToken ),
                ("EndQuote","Start", True ) ] )
    fs.start("Start")
    a = " x123=MyString+123.65-'hello'*value"
    c = 0
    while c < len(a):
        ret = fs.event(a[c])
        # Make sure a transition back to start (from something else) does not consume the character.
        if ret[0] != "Start" or (ret[0] == "Start" and ret[1] == False):
            c += 1
    # Feed a sentinel so any token in progress at end-of-input is flushed.
    ret = fs.event("")
    # BUG FIX: `print t.tokenList` is Python-2-only syntax; the call form
    # prints identically on Python 2 and works on Python 3.
    print(t.tokenList)
| 41.06 | 178 | 0.585485 | 3,683 | 0.597987 | 0 | 0 | 0 | 0 | 0 | 0 | 2,704 | 0.439032 |
fac23535a62e06c8d2d429ebcc6d5025ccbe4a0b | 873 | py | Python | self-citation-check.py | Darkbladecr/pyresearch | e621923e33be234320ea4b7eafc556ff0f6565dc | [
"MIT"
] | 7 | 2016-08-18T16:06:50.000Z | 2021-09-14T23:53:26.000Z | self-citation-check.py | wenxuefeng3930/pyresearch | e621923e33be234320ea4b7eafc556ff0f6565dc | [
"MIT"
] | 1 | 2016-12-20T10:21:35.000Z | 2016-12-24T07:39:04.000Z | self-citation-check.py | wenxuefeng3930/pyresearch | e621923e33be234320ea4b7eafc556ff0f6565dc | [
"MIT"
] | 3 | 2018-05-02T05:13:42.000Z | 2022-03-12T16:07:56.000Z | from __future__ import division
from optparse import OptionParser
import numpy
# Command-line interface: -i/--input is the basename of the citation dump
# (the ".npy" suffix is appended by numpyImport below).
parser = OptionParser()
parser.add_option("-i", "--input", dest="input", help="input file")
(options, args) = parser.parse_args()
def numpyImport(file):
    """Load a dict that was saved to ``<file>.npy`` with ``numpy.save``.

    ``numpy.save`` wraps a Python dict in a 0-d object array, so the value
    is unwrapped with ``.item()``.

    BUG FIX: ``allow_pickle=True`` is required on NumPy >= 1.16.3, where the
    default changed to False and this load would raise.  Unpickling can
    execute arbitrary code, so only use this on .npy files you created.
    """
    output = numpy.load("%s.npy" % file, allow_pickle=True)
    output = output.item()
    return output
# allCites: citation counts including self-citations; exCites: the same
# articles with self-citations excluded (produced by a separate run).
allCites = numpyImport(options.input)
exCites = numpyImport("%s-self_exclude" % options.input)
fake = list()
for scopusId in allCites.keys():
    if allCites[scopusId]['Citations'] > 0 and exCites[scopusId]['Citations'] > 0:
        # Flag the article when self-citations make up more than 40% of
        # its total citations.
        if 1 - (exCites[scopusId]['Citations'] / allCites[scopusId]['Citations']) > 0.4:
            fake.append(scopusId)
            print("%d vs %d" % (allCites[scopusId]['Citations'], exCites[scopusId]['Citations']))
print("%d articles were found to be heavily self-cited" % len(fake))
# Persist the flagged Scopus IDs for downstream analysis.
numpy.save('self-cited.npy', fake)
fac2cf1e6f7f65f78b062d38d2f66e6dbe2d2d4a | 1,631 | py | Python | socialaccount/admin.py | establishment/django-establishment | ad1d04fe9efc748e2fba5b4bc67446d2a4cf12f6 | [
"CC0-1.0"
] | 1 | 2017-04-27T19:35:42.000Z | 2017-04-27T19:35:42.000Z | socialaccount/admin.py | establishment/django-establishment | ad1d04fe9efc748e2fba5b4bc67446d2a4cf12f6 | [
"CC0-1.0"
] | null | null | null | socialaccount/admin.py | establishment/django-establishment | ad1d04fe9efc748e2fba5b4bc67446d2a4cf12f6 | [
"CC0-1.0"
] | null | null | null | from django.contrib import admin
from django import forms
from establishment.accounts.utils import get_user_search_fields
from .models import SocialApp, SocialAccount, SocialToken, SocialProvider
class SocialAppForm(forms.ModelForm):
    """Admin form for SocialApp with widened credential inputs.

    OAuth client ids/keys/secrets are long; the default text-input size
    truncates them visually, so each widget is widened to 100 characters.
    """
    class Meta:
        model = SocialApp
        exclude = []
        widgets = {
            "client_id": forms.TextInput(attrs={"size": "100"}),
            "key": forms.TextInput(attrs={"size": "100"}),
            "secret": forms.TextInput(attrs={"size": "100"})
        }
class SocialAppAdmin(admin.ModelAdmin):
    """Admin for SocialApp using the widened-credentials form."""
    form = SocialAppForm
    list_display = ("name", "provider_instance",)
    # sites is a many-to-many; the horizontal filter widget is easier to use.
    filter_horizontal = ("sites",)
class SocialAccountAdmin(admin.ModelAdmin):
    """Admin for SocialAccount; searchable by the related user's fields."""
    search_fields = []
    raw_id_fields = ("user",)
    list_display = ("user", "uid", "provider_instance")
    list_filter = ("provider_instance",)
    def get_search_fields(self, request):
        # Search through the related User model's configured search fields
        # (e.g. username/email, as defined by the accounts app).
        return ["user__" + attr for attr in get_user_search_fields()]
class SocialTokenAdmin(admin.ModelAdmin):
    """Admin for SocialToken; shows a shortened token preview in lists."""
    raw_id_fields = ("app", "account",)
    list_display = ("app", "account", "truncated_token", "expires_at")
    list_filter = ("app", "app__provider_instance", "expires_at")
    def truncated_token(self, token):
        """Return the token string, clipped to 40 characters for display."""
        limit = 40
        text = token.token
        if len(text) > limit:
            return text[:limit] + "...(truncated)"
        return text
    truncated_token.short_description = "Token"
admin.site.register(SocialApp, SocialAppAdmin)
admin.site.register(SocialToken, SocialTokenAdmin)
admin.site.register(SocialAccount, SocialAccountAdmin)
admin.site.register(SocialProvider)
| 30.203704 | 73 | 0.677498 | 1,230 | 0.754139 | 0 | 0 | 0 | 0 | 0 | 0 | 273 | 0.167382 |
fac3be260753887f939d9f7cefb2d2cf2009eb52 | 1,665 | py | Python | tap_github/tap.py | edgarrmondragon/tap-github | 8eec764884801139651570fb9732fe3bfbe4617c | [
"Apache-2.0"
] | null | null | null | tap_github/tap.py | edgarrmondragon/tap-github | 8eec764884801139651570fb9732fe3bfbe4617c | [
"Apache-2.0"
] | null | null | null | tap_github/tap.py | edgarrmondragon/tap-github | 8eec764884801139651570fb9732fe3bfbe4617c | [
"Apache-2.0"
] | null | null | null | """GitHub tap class."""
from typing import List
from singer_sdk import Tap, Stream
from singer_sdk import typing as th # JSON schema typing helpers
from tap_github.streams import (
CommitsStream,
CommunityProfileStream,
IssueCommentsStream,
IssueEventsStream,
IssuesStream,
PullRequestsStream,
ReadmeStream,
RepositoryStream,
)
class TapGitHub(Tap):
    """GitHub tap class."""
    name = "tap-github"
    # Declarative JSON schema for the tap's settings, built with the
    # singer_sdk typing helpers and converted to a plain dict.
    config_jsonschema = th.PropertiesList(
        th.Property("user_agent", th.StringType),
        th.Property("metrics_log_level", th.StringType),
        # Token used to authenticate against the GitHub API.
        th.Property("auth_token", th.StringType),
        # Saved searches: each entry pairs a display name with a query string.
        th.Property(
            "searches",
            th.ArrayType(
                th.ObjectType(
                    th.Property("name", th.StringType, required=True),
                    th.Property("query", th.StringType, required=True),
                )
            ),
        ),
        th.Property("repositories", th.ArrayType(th.StringType)),
        th.Property("start_date", th.DateTimeType),
        # Standard singer_sdk stream-map configuration passthroughs.
        th.Property("stream_maps", th.ObjectType()),
        th.Property("stream_map_config", th.ObjectType()),
    ).to_dict()
    def discover_streams(self) -> List[Stream]:
        """Return a list of discovered streams."""
        # One stream instance per supported GitHub resource type.
        return [
            CommitsStream(tap=self),
            CommunityProfileStream(tap=self),
            IssueCommentsStream(tap=self),
            IssueEventsStream(tap=self),
            IssuesStream(tap=self),
            PullRequestsStream(tap=self),
            ReadmeStream(tap=self),
            RepositoryStream(tap=self),
        ]
# CLI Execution:
cli = TapGitHub.cli
| 27.295082 | 71 | 0.606006 | 1,258 | 0.755556 | 0 | 0 | 0 | 0 | 0 | 0 | 268 | 0.160961 |
fac3e2d997ccbfcf9bd430f63bf90f4c1c8106fa | 14,513 | py | Python | grand/backends/sqlbackend.py | aplbrain/grand | d85669df17a40834a13478ae200e984e13b41650 | [
"Apache-2.0"
] | 31 | 2020-10-16T16:46:02.000Z | 2022-03-04T20:45:05.000Z | grand/backends/sqlbackend.py | aplbrain/grand | d85669df17a40834a13478ae200e984e13b41650 | [
"Apache-2.0"
] | 15 | 2020-10-15T16:28:49.000Z | 2022-02-10T16:41:32.000Z | grand/backends/sqlbackend.py | aplbrain/grand | d85669df17a40834a13478ae200e984e13b41650 | [
"Apache-2.0"
] | null | null | null | from typing import Hashable, Generator, Optional, Iterable
import time
import pandas as pd
import sqlalchemy
from sqlalchemy.pool import NullPool
from sqlalchemy.sql import select
from sqlalchemy import and_, or_, func
from .backend import Backend
_DEFAULT_SQL_URL = "sqlite:///"
_DEFAULT_SQL_STR_LEN = 64
class SQLBackend(Backend):
"""
A graph datastore that uses a SQL-like store for persistance and queries.
"""
    def __init__(
        self,
        directed: bool = False,
        node_table_name: str = None,
        edge_table_name: str = None,
        db_url: str = _DEFAULT_SQL_URL,
        primary_key: str = "ID",
        sqlalchemy_kwargs: dict = None,
    ) -> None:
        """
        Create a new SQL-backed graph store.
        Arguments:
            directed (bool: False): Whether the graph is directed.
            node_table_name (str: "grand_Nodes"): The name to use for the node
                table in the SQL database.
            edge_table_name (str: "grand_Edges"): The name to use for the edge
                table in the SQL database.
            db_url (str: _DEFAULT_SQL_URL): The URL to use for the SQL db.
            primary_key (str: "ID"): The default primary key to use for the
                tables. Note that this key cannot exist in your metadata dicts.
            sqlalchemy_kwargs (dict: None): Extra keyword arguments forwarded
                to `sqlalchemy.create_engine`.
        """
        self._directed = directed
        self._node_table_name = node_table_name or "grand_Nodes"
        self._edge_table_name = edge_table_name or "grand_Edges"
        self._primary_key = primary_key
        # Column names for the edge endpoints in the edge table.
        self._edge_source_key = "Source"
        self._edge_target_key = "Target"
        sqlalchemy_kwargs = sqlalchemy_kwargs or {}
        self._engine = sqlalchemy.create_engine(db_url, **sqlalchemy_kwargs)
        self._connection = self._engine.connect()
        self._metadata = sqlalchemy.MetaData()
        # Create the node table if it does not exist yet; otherwise reflect
        # the existing schema from the database.
        if not self._engine.dialect.has_table(self._connection, self._node_table_name):
            self._node_table = sqlalchemy.Table(
                self._node_table_name,
                self._metadata,
                sqlalchemy.Column(
                    self._primary_key,
                    sqlalchemy.String(_DEFAULT_SQL_STR_LEN),
                    primary_key=True,
                ),
                # Arbitrary per-node metadata stored as JSON.
                sqlalchemy.Column("_metadata", sqlalchemy.JSON),
            )
            self._node_table.create(self._engine)
        else:
            self._node_table = sqlalchemy.Table(
                self._node_table_name,
                self._metadata,
                autoload=True,
                autoload_with=self._engine,
            )
        # Same create-or-reflect logic for the edge table.
        if not self._engine.dialect.has_table(self._connection, self._edge_table_name):
            self._edge_table = sqlalchemy.Table(
                self._edge_table_name,
                self._metadata,
                sqlalchemy.Column(
                    self._primary_key,
                    sqlalchemy.String(_DEFAULT_SQL_STR_LEN),
                    primary_key=True,
                ),
                sqlalchemy.Column("_metadata", sqlalchemy.JSON),
                sqlalchemy.Column(
                    self._edge_source_key, sqlalchemy.String(_DEFAULT_SQL_STR_LEN)
                ),
                sqlalchemy.Column(
                    self._edge_target_key, sqlalchemy.String(_DEFAULT_SQL_STR_LEN)
                ),
            )
            self._edge_table.create(self._engine)
        else:
            self._edge_table = sqlalchemy.Table(
                self._edge_table_name,
                self._metadata,
                autoload=True,
                autoload_with=self._engine,
            )
# def __del__(self):
# self._connection.close()
def is_directed(self) -> bool:
"""
Return True if the backend graph is directed.
Arguments:
None
Returns:
bool: True if the backend graph is directed.
"""
return self._directed
def teardown(self, yes_i_am_sure: bool = False):
"""
Tear down this graph, deleting all evidence it once was here.
"""
if yes_i_am_sure:
self._node_table.drop(self._engine)
self._edge_table.drop(self._engine)
def add_node(self, node_name: Hashable, metadata: dict) -> Hashable:
"""
Add a new node to the graph.
Insert a new document into the nodes table.
Arguments:
node_name (Hashable): The ID of the node
metadata (dict: None): An optional dictionary of metadata
Returns:
Hashable: The ID of this node, as inserted
"""
self._connection.execute(
self._node_table.insert(),
**{self._primary_key: node_name, "_metadata": metadata},
)
return node_name
def all_nodes_as_iterable(self, include_metadata: bool = False) -> Generator:
"""
Get a generator of all of the nodes in this graph.
Arguments:
include_metadata (bool: False): Whether to include node metadata in
the response
Returns:
Generator: A generator of all nodes (arbitrary sort)
"""
results = self._connection.execute(self._node_table.select()).fetchall()
if include_metadata:
return [(row[self._primary_key], row["_metadata"]) for row in results]
return [row[self._primary_key] for row in results]
def has_node(self, u: Hashable) -> bool:
"""
Return true if the node exists in the graph.
Arguments:
u (Hashable): The ID of the node to check
Returns:
bool: True if the node exists
"""
return len(
self._connection.execute(
self._node_table.select().where(
self._node_table.c[self._primary_key] == u
)
).fetchall()
)
    def add_edge(self, u: Hashable, v: Hashable, metadata: dict):
        """
        Add a new edge to the graph between two nodes.
        If the graph is directed, this edge will start (source) at the `u` node
        and end (target) at the `v` node.
        Missing endpoint nodes are created automatically with empty metadata.
        Arguments:
            u (Hashable): The source node ID
            v (Hashable): The target node ID
            metadata (dict): Optional metadata to associate with the edge
        Returns:
            Hashable: The edge ID, as inserted.
        """
        # Edge primary key encodes both endpoints in order.
        # NOTE(review): for undirected graphs, adding (v, u) after (u, v)
        # produces a different pk and hence a second row; reads still work
        # because get_edge_by_id checks both orderings -- confirm intended.
        pk = f"__{u}__{v}"
        if not self.has_node(u):
            self.add_node(u, {})
        if not self.has_node(v):
            self.add_node(v, {})
        try:
            self._connection.execute(
                self._edge_table.insert(),
                **{
                    self._primary_key: pk,
                    self._edge_source_key: u,
                    self._edge_target_key: v,
                    "_metadata": metadata,
                },
            )
        except sqlalchemy.exc.IntegrityError:
            # Edge already exists
            pass
        return pk
def all_edges_as_iterable(self, include_metadata: bool = False) -> Generator:
"""
Get a list of all edges in this graph, arbitrary sort.
Arguments:
include_metadata (bool: False): Whether to include edge metadata
Returns:
Generator: A generator of all edges (arbitrary sort)
"""
return iter(
[
(e.Source, e.Target, e._metadata)
if include_metadata
else (e.Source, e.Target)
for e in self._connection.execute(self._edge_table.select()).fetchall()
]
)
def get_node_by_id(self, node_name: Hashable):
"""
Return the data associated with a node.
Arguments:
node_name (Hashable): The node ID to look up
Returns:
dict: The metadata associated with this node
"""
res = (
self._connection.execute(
self._node_table.select().where(
self._node_table.c[self._primary_key] == node_name
)
)
.fetchone()
._metadata
)
return res
def get_edge_by_id(self, u: Hashable, v: Hashable):
"""
Get an edge by its source and target IDs.
Arguments:
u (Hashable): The source node ID
v (Hashable): The target node ID
Returns:
dict: Metadata associated with this edge
"""
if self._directed:
pk = f"__{u}__{v}"
return (
self._connection.execute(
self._edge_table.select().where(
self._edge_table.c[self._primary_key] == pk
)
)
.fetchone()
._metadata
)
else:
return (
self._connection.execute(
self._edge_table.select().where(
or_(
(self._edge_table.c[self._primary_key] == f"__{u}__{v}"),
(self._edge_table.c[self._primary_key] == f"__{v}__{u}"),
)
)
)
.fetchone()
._metadata
)
    def get_node_neighbors(
        self, u: Hashable, include_metadata: bool = False
    ) -> Generator:
        """
        Get all downstream nodes from this node.
        For undirected graphs, all edges touching `u` count; for directed
        graphs, only edges whose source is `u`.
        Arguments:
            u (Hashable): The source node ID
            include_metadata (bool: False): Whether to include edge metadata
        Returns:
            An iterator of neighbor IDs, or -- note, despite the annotation --
            a dict of {neighbor ID: edge metadata} when `include_metadata`
            is True.
        """
        if self._directed:
            res = self._connection.execute(
                self._edge_table.select().where(
                    self._edge_table.c[self._edge_source_key] == u
                )
            ).fetchall()
        else:
            res = self._connection.execute(
                self._edge_table.select().where(
                    or_(
                        (self._edge_table.c[self._edge_source_key] == u),
                        (self._edge_table.c[self._edge_target_key] == u),
                    )
                )
            ).fetchall()
        # Project each edge row onto "the endpoint that is not u"; for a
        # directed graph the source always equals u, so this yields targets.
        if include_metadata:
            return {
                (
                    r[self._edge_source_key]
                    if r[self._edge_source_key] != u
                    else r[self._edge_target_key]
                ): r["_metadata"]
                for r in res
            }
        return iter(
            [
                (
                    r[self._edge_source_key]
                    if r[self._edge_source_key] != u
                    else r[self._edge_target_key]
                )
                for r in res
            ]
        )
def get_node_predecessors(
self, u: Hashable, include_metadata: bool = False
) -> Generator:
"""
Get a generator of all upstream nodes from this node.
Arguments:
u (Hashable): The source node ID
Returns:
Generator
"""
if self._directed:
res = self._connection.execute(
self._edge_table.select().where(
self._edge_table.c[self._edge_target_key] == u
)
).fetchall()
else:
res = self._connection.execute(
self._edge_table.select().where(
or_(
(self._edge_table.c[self._edge_target_key] == u),
(self._edge_table.c[self._edge_source_key] == u),
)
)
).fetchall()
if include_metadata:
return {
(
r[self._edge_source_key]
if r[self._edge_source_key] != u
else r[self._edge_target_key]
): r["_metadata"]
for r in res
}
return iter(
[
(
r[self._edge_source_key]
if r[self._edge_source_key] != u
else r[self._edge_target_key]
)
for r in res
]
)
def get_node_count(self) -> Iterable:
"""
Get an integer count of the number of nodes in this graph.
Arguments:
None
Returns:
int: The count of nodes
"""
return self._connection.execute(
select([func.count()]).select_from(self._node_table)
).scalar()
def ingest_from_edgelist_dataframe(
self, edgelist: pd.DataFrame, source_column: str, target_column: str
) -> None:
"""
Ingest an edgelist from a Pandas DataFrame.
"""
# Produce edge list:
edge_tic = time.time()
newlist = edgelist.rename(
columns={
source_column: self._edge_source_key,
target_column: self._edge_target_key,
}
)
newlist[self._primary_key] = edgelist.apply(
lambda x: f"__{x[source_column]}__{x[target_column]}", axis="columns"
)
newlist["_metadata"] = edgelist.apply(
lambda x: {
k: v for k, v in x.items() if k not in [source_column, target_column]
},
axis="columns",
)
newlist[
[
self._edge_source_key,
self._edge_target_key,
self._primary_key,
"_metadata",
]
].to_sql(
self._edge_table_name,
self._engine,
index=False,
if_exists="append",
dtype={"_metadata": sqlalchemy.JSON},
)
edge_toc = time.time() - edge_tic
# now ingest nodes:
node_tic = time.time()
nodes = edgelist[source_column].append(edgelist[target_column]).unique()
pd.DataFrame(
[
{
self._primary_key: node,
# no metadata:
"_metadata": {},
}
for node in nodes
]
).to_sql(
self._node_table_name,
self._engine,
index=False,
if_exists="replace",
dtype={"_metadata": sqlalchemy.JSON},
)
return {
"node_count": len(nodes),
"node_duration": time.time() - node_tic,
"edge_count": len(edgelist),
"edge_duration": edge_toc,
}
| 29.86214 | 87 | 0.506718 | 14,200 | 0.978433 | 0 | 0 | 0 | 0 | 0 | 0 | 3,979 | 0.274168 |
fac3e7ee67811ede3b6d8461b25dcb5790afb786 | 475 | py | Python | Searches/models.py | Sofia190/book_store_app | 3c32f269604948bb4a495802d17794a68188e3a5 | [
"MIT"
] | null | null | null | Searches/models.py | Sofia190/book_store_app | 3c32f269604948bb4a495802d17794a68188e3a5 | [
"MIT"
] | null | null | null | Searches/models.py | Sofia190/book_store_app | 3c32f269604948bb4a495802d17794a68188e3a5 | [
"MIT"
] | null | null | null | from django.db import models
# Create your models here.
from django.conf import settings
from django.db import models
from django.utils import timezone
# Create your models here.
class SearchQuery(models.Model):
    """A single search query, optionally tied to the user who submitted it."""
    # Nullable so anonymous searches can still be recorded.
    user = models.ForeignKey(settings.AUTH_USER_MODEL, blank=True, null=True, on_delete=models.CASCADE)
    query = models.CharField(max_length=570)
    # Pass the callable itself, not ``timezone.now()``: calling it here would
    # evaluate once at import time and bake that date in as a fixed default
    # for every row ever created.
    timestamp = models.DateField(auto_now=False, auto_now_add=False, default=timezone.now)
fac72b976d32fee06945ec2a9773f4034447c6e7 | 2,875 | py | Python | non_semantic_speech_benchmark/distillation/write_savedmodel_to_disk.py | lix2k3/google-research | d0ec3bbce67641d3159a663e00288cbb578eda7d | [
"Apache-2.0"
] | 1 | 2021-04-12T15:50:58.000Z | 2021-04-12T15:50:58.000Z | non_semantic_speech_benchmark/distillation/write_savedmodel_to_disk.py | abbie0214/google-research | d154ffb7a92f7899807496cb8cb665fb5a01e65c | [
"Apache-2.0"
] | null | null | null | non_semantic_speech_benchmark/distillation/write_savedmodel_to_disk.py | abbie0214/google-research | d154ffb7a92f7899807496cb8cb665fb5a01e65c | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""Loads a graph and checkpoint, and writes to disk as a savedmodel.
"""
from absl import app
from absl import flags
from absl import logging
import tensorflow as tf
from non_semantic_speech_benchmark.distillation import models
# Command-line flags: input/output locations.
flags.DEFINE_string('logdir', None, 'Dataset location.')
flags.DEFINE_string('checkpoint_filename', None, 'Optional.')
flags.DEFINE_string('output_directory', None, 'Place to write savedmodel.')
flags.DEFINE_bool('frontend', False, 'Whether to add the frontend.')
# Controls the model. These must match the settings the checkpoint was
# trained with, since they are forwarded to models.get_keras_model.
flags.DEFINE_integer('bottleneck_dimension', None, 'Dimension of bottleneck.')
flags.DEFINE_float('alpha', 1.0, 'Alpha controlling model size.')
flags.DEFINE_string('mobilenet_size', 'small', 'Size of mobilenet')
flags.DEFINE_bool('avg_pool', False, 'Whether to use average pool.')
flags.DEFINE_string('compressor', None,
                    'Whether to use bottleneck compression.')
flags.DEFINE_bool('qat', False, 'Whether to use quantization-aware training.')
FLAGS = flags.FLAGS
def load_and_write_model(keras_model_args, checkpoint_to_load,
                         output_directory):
  """Builds the model, restores a checkpoint into it, and saves a SavedModel.

  Args:
    keras_model_args: Dict of kwargs forwarded to `models.get_keras_model`.
    checkpoint_to_load: Path of the checkpoint to restore.
    output_directory: Destination directory for the SavedModel.
  """
  restored_model = models.get_keras_model(**keras_model_args)
  # `expect_partial` silences warnings about checkpoint variables (e.g.
  # optimizer slots) that have no counterpart in this inference model.
  ckpt = tf.train.Checkpoint(model=restored_model)
  ckpt.restore(checkpoint_to_load).expect_partial()
  tf.keras.models.save_model(restored_model, output_directory)
def main(unused_argv):
  """Restores the latest checkpoint in --logdir and exports a SavedModel."""
  tf.compat.v2.enable_v2_behavior()
  assert tf.executing_eagerly()

  if FLAGS.checkpoint_filename:
    raise ValueError('Implement me.')
  # Only the latest-checkpoint path is supported for now.
  checkpoint_to_load = tf.train.latest_checkpoint(FLAGS.logdir)
  assert FLAGS.logdir

  keras_model_args = dict(
      bottleneck_dimension=FLAGS.bottleneck_dimension,
      output_dimension=None,
      alpha=FLAGS.alpha,
      mobilenet_size=FLAGS.mobilenet_size,
      frontend=FLAGS.frontend,
      avg_pool=FLAGS.avg_pool,
      compressor=FLAGS.compressor,
      qat=FLAGS.qat,
      tflite=False,
  )
  load_and_write_model(keras_model_args, checkpoint_to_load,
                       FLAGS.output_directory)
  assert tf.io.gfile.exists(FLAGS.output_directory)
  logging.info('Successfully wrote to: %s', FLAGS.output_directory)
if __name__ == '__main__':
  # Fail fast if the required flags are missing before entering main().
  flags.mark_flags_as_required(['logdir', 'output_directory'])
  app.run(main)
| 35.060976 | 78 | 0.742261 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,289 | 0.448348 |
fac956be30414f4e9750c5fba11e0bd38288e8e4 | 585 | py | Python | event/migrations/0003_event_org.py | Ortus-Team/Moim | 57bdd94ffb0c3b5d7dc74396264074e2a9a7f84a | [
"MIT"
] | null | null | null | event/migrations/0003_event_org.py | Ortus-Team/Moim | 57bdd94ffb0c3b5d7dc74396264074e2a9a7f84a | [
"MIT"
] | 6 | 2020-06-05T17:44:24.000Z | 2022-02-09T23:15:16.000Z | event/migrations/0003_event_org.py | Ortus-Team/Moim | 57bdd94ffb0c3b5d7dc74396264074e2a9a7f84a | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2018-01-10 08:34
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration: adds the nullable ``org`` FK to Event."""
    dependencies = [
        ('org', '0001_initial'),
        ('event', '0002_auto_20180102_2143'),
    ]
    operations = [
        migrations.AddField(
            model_name='event',
            name='org',
            # Nullable so existing Event rows migrate without a default.
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='org', to='org.Org'),
        ),
    ]
fac9f7ba04b46ffb5f398af944e0de63eaf7216f | 9,806 | py | Python | mcp23008slave.py | notro/cp-smbusslave | 7aed8eedde2b391ef0d1c9fd246ac3531216b628 | [
"MIT"
] | null | null | null | mcp23008slave.py | notro/cp-smbusslave | 7aed8eedde2b391ef0d1c9fd246ac3531216b628 | [
"MIT"
] | null | null | null | mcp23008slave.py | notro/cp-smbusslave | 7aed8eedde2b391ef0d1c9fd246ac3531216b628 | [
"MIT"
] | 1 | 2020-04-23T10:29:11.000Z | 2020-04-23T10:29:11.000Z | import digitalio
import pulseio
from smbusslave import SMBusSlave
IODIR = 0x00
IPOL = 0x01
GPINTEN = 0x02
DEFVAL = 0x03
INTCON = 0x04
IOCON = 0x05
GPPU = 0x06
INTF = 0x07
INTCAP = 0x08
GPIO = 0x09
OLAT = 0x0a
IOCON_SEQOP = 1 << 5
IOCON_ODR = 1 << 2
IOCON_INTPOL = 1 << 1
# Pull up on interrupt pins are not supported
# Interrupts are not working yet, need PulseIn.value
class MCP23008Slave(SMBusSlave):
    """Emulates an MCP23008 8-bit I2C GPIO expander on top of SMBusSlave.

    Each expander pin is backed by a board pin (or a dummy when the slot is
    unpopulated); register reads/writes are translated into digitalio /
    pulseio operations on those pins. ``intpin``, if given, drives the
    emulated INT output.
    """

    def __init__(self, pins, intpin=None):
        # NOTE(review): ``pins`` is mutated in place (extended with None
        # padding) — callers' lists are modified. Confirm this is intended.
        if len(pins) == 0:
            raise ValueError('pins is empty')
        super().__init__()
        pins.extend([None] * (8 - len(pins))) # Fill up with dummies
        self.pins = [Pin(pin, i) for i, pin in enumerate(pins)]
        self.int = None
        if intpin:
            # INT idles high: active level is managed via IOCON_INTPOL.
            self.int = digitalio.DigitalInOut(intpin)
            self.int.switch_to_output(True)
        self.protocol = SMBusSlave.SMBUS_BYTE_SEQ
        self.max_reg = 0x0a
        # Power-on register state: all pins are inputs (IODIR = 0xff).
        self.regs = [0] * (self.max_reg + 1)
        self.regs[IODIR] = 0xff
        self.debug2 = False

    def check_events(self):
        """Poll the pins; latch INTF/INTCAP and raise INT on a new event."""
        prev_intf = self.regs[INTF]
        val = 0
        for i in range(8):
            val |= self.pins[i].interrupt << i
        self.regs[INTF] = val
        # Only a 0 -> nonzero transition of INTF latches a snapshot and
        # asserts the INT line.
        if self.regs[INTF] and not prev_intf:
            val = 0
            for i in range(8):
                val |= self.pins[i].value << i
            val |= self.regs[INTF] # In case we're slow and have lost it. Revisit if IPOL is supported
            self.regs[INTCAP] = val
            self.set_interrupt()

    def readreg(self, reg):
        """Return the value of register ``reg``.

        Reading GPIO samples input pins live (outputs read back from OLAT);
        reading GPIO or INTCAP also clears a pending interrupt.
        """
        if reg == GPIO:
            val = 0
            for i in range(8):
                if self.regs[IODIR] & (1 << i): # Is this an input?
                    val |= self.pins[i].value << i
                else:
                    val |= self.regs[OLAT] & (1 << i)
            if self.regs[INTF]:
                self.regs[INTF] = 0
                self.clear_interrupt()
        elif reg == INTCAP:
            val = self.regs[INTCAP]
            if self.regs[INTF]:
                self.regs[INTF] = 0
                self.clear_interrupt()
        else:
            val = self.regs[reg]
        if self.debug2:
            print("    0x%02x==0x%02x" % (reg, val))
        return val

    def writereg(self, reg, val):
        """Write ``val`` to register ``reg`` and apply its side effects.

        Direction/pull/interrupt-enable writes reconfigure the affected
        pins; unsupported features (IPOL, IOCON:SEQOP) raise
        NotImplementedError; read-only registers are ignored.
        """
        if self.debug2:
            print("    0x%02x=0x%02x" % (reg, val))
        # Bitmask of bits that differ from the currently stored value.
        changed = self.regs[reg] ^ val
        if reg == IODIR:
            self.regs[IODIR] = val
            self.setpinmode(changed)
        elif reg == IPOL:
            if val:
                # Not used by the Linux driver
                raise NotImplementedError('IPOL is not implemented')
        elif reg == GPINTEN:
            self.regs[GPINTEN] = val
            self.setpinmode(changed)
        elif reg == DEFVAL:
            pass
        elif reg == INTCON:
            pass
        elif reg == IOCON:
            # Only bits 1-5 are meaningful on the MCP23008.
            val &= 0b00111110
            if val & IOCON_SEQOP:
                # Not used by the Linux driver
                raise NotImplementedError('IOCON:SEQOP is not implemented')
            if self.int:
                if changed & IOCON_ODR:
                    if val & IOCON_ODR:
                        self.int.drive_mode = digitalio.DriveMode.OPEN_DRAIN
                    else:
                        self.int.drive_mode = digitalio.DriveMode.PUSH_PULL
                if changed & IOCON_INTPOL:
                    # Return INT to its new idle (inactive) level.
                    self.int.value = not val & IOCON_INTPOL
        elif reg == GPPU:
            self.regs[GPPU] = val
            self.setpinmode(changed)
        elif reg == INTF:
            return # Read only
        elif reg == INTCAP:
            return # Read only
        elif reg == GPIO or reg == OLAT:
            # A GPIO write is defined to write the output latch.
            if reg == GPIO:
                self.regs[OLAT] = val
            for i in range(8):
                mask = 1 << i
                if changed & mask and not self.regs[IODIR] & mask: # Changed and not input
                    self.pins[i].value = val & mask
        self.regs[reg] = val

    def setpinmode(self, changed):
        """Reconfigure every pin whose bit is set in the ``changed`` mask."""
        for i in range(8):
            mask = 1 << i
            if changed & mask:
                if self.regs[IODIR] & mask:
                    interrupt = self.regs[GPINTEN] & mask
                    pull = digitalio.Pull.UP if self.regs[GPPU] & mask else None
                    self.pins[i].switch_to_input(pull, interrupt)
                else:
                    # Output: drive the value currently in the output latch.
                    val = self.regs[OLAT] & mask
                    self.pins[i].switch_to_output(val)

    def set_interrupt(self):
        """Drive the INT line to its active level (per IOCON_INTPOL)."""
        if self.debug2:
            print('\nset_interrupt: INTF=%02x INTCAP=%02x\n' % (self.regs[INTF], self.regs[INTCAP]))
        if self.int:
            active = bool(self.regs[IOCON] & IOCON_INTPOL)
            self.int.value = active

    def clear_interrupt(self):
        """Return the INT line to its inactive level (per IOCON_INTPOL)."""
        if self.debug2:
            print('\nclear_interrupt\n')
        if self.int:
            active = bool(self.regs[IOCON] & IOCON_INTPOL)
            self.int.value = not active
# Doubles as a DigitalInOut and PulseIn dummy for the Pin class
class DummyIO:
    """Stand-in for both DigitalInOut and PulseIn on unpopulated pin slots.

    It records the requested configuration without touching hardware;
    ``__len__`` returning 0 mimics an empty PulseIn pulse buffer.
    """

    def __init__(self):
        self.direction = digitalio.Direction.INPUT
        self.drive_mode = digitalio.DriveMode.PUSH_PULL
        self.value = False
        self.pull = None

    def switch_to_output(self, value=False, drive_mode=digitalio.DriveMode.PUSH_PULL):
        """Record an output configuration."""
        self.direction = digitalio.Direction.OUTPUT
        self.drive_mode = drive_mode
        self.value = value

    def switch_to_input(self, pull=None):
        """Record an input configuration; a pull-up reads back as high."""
        self.direction = digitalio.Direction.INPUT
        self.pull = pull
        self.value = pull == digitalio.Pull.UP

    def __len__(self):
        return 0
class Pin:
    """One expander pin backed by a board pin (or a DummyIO placeholder).

    A pin is exactly one of: a digitalio.DigitalInOut (``self.io``) or a
    pulseio.PulseIn (``self.pulseio``) used to detect edges for
    interrupt-on-change inputs. The pulse buffer is decoded in
    ``_get_pulseio_value`` to reconstruct the current logic level.
    """

    def __init__(self, pin, index):
        self.pin = pin
        self.index = index
        self.io = None
        self.pulseio = None
        # Last logic level reconstructed from pulses; None = unknown.
        self.pulseio_val = None
        self.pulseio_maxlen = 10
        self._interrupt = False
        self.debug = False
        if self.pin is None:
            # Unpopulated slot: one DummyIO serves as both io and pulseio.
            self.io = DummyIO()
            self.pulseio = self.io
            self.pulseio_val = False
        else:
            self._ensure_io()

    def switch_to_output(self, value=False, drive_mode=digitalio.DriveMode.PUSH_PULL):
        """Configure the pin as an output driving ``value``."""
        self._ensure_io()
        if self.debug:
            print('%d.switch_to_output(%r)' % (self.index, value,))
        self.io.switch_to_output(value)

    # Edge/level?
    def switch_to_input(self, pull=None, interrupt=False):
        """Configure the pin as an input, edge-monitored if ``interrupt``.

        NOTE(review): when ``interrupt`` is True on a real pin,
        ``_ensure_pulseio`` sets ``self.io`` to None, so the final
        ``self.io.switch_to_input(pull)`` call looks like it would raise
        AttributeError — consistent with the module header's "interrupts
        are not working yet"; confirm before relying on this path.
        """
        if interrupt:
            self._ensure_pulseio()
        else:
            self._ensure_io()
        if self.debug:
            print('%s.switch_to_input(%r)' % (self.index, pull,))
        self.io.switch_to_input(pull)

    @property
    def value(self):
        """Current logic level of the pin (bool)."""
        if self.io is not None:
            val = bool(self.io.value)
            if self.debug and self.pin:
                print('%s.value == %r' % (self.index, val,))
            return val
        if self.pulseio is not None:
            val = self._get_pulseio_value()
            if val is not None:
                if self.debug:
                    print('%s.value == %r (%d)' % (self.index, val, len(self.pulseio)))
                return val
            # Unable to determine value so look at the pin
            self.pulseio.deinit()
            tmp = digitalio.DigitalInOut(self.pin)
            tmp.switch_to_input(None)
            val = tmp.value
            tmp.deinit()
            self.pulseio = None
            self._ensure_pulseio()
            if self.debug:
                print('%s.value(DIG) == %r' % (self.index, val,))
            return val
        raise ValueError('bug: neither io nor pulseio is set')

    @value.setter
    def value(self, val):
        # Only meaningful when configured as a digital output.
        if self.io is None or self.io.direction == digitalio.Direction.INPUT:
            raise AttributeError('Cannot set value when direction is input.')
        val = bool(val)
        self.io.value = val
        if self.debug:
            print('%s.value = %r' % (self.index, val,))

    @property
    def interrupt(self):
        """True once per detected edge event; reading clears the flag."""
        if self.pulseio is None:
            return False
        val = self._interrupt
        self._interrupt = False
        return val

    def _get_pulseio_value(self):
        """Reconstruct the logic level from buffered pulses.

        Returns None when the level cannot be determined (buffer overflowed,
        so the edge count is unreliable).
        """
        pulses = [self.pulseio.popleft() for _ in range(len(self.pulseio))]
        num_pulses = len(pulses)
        if num_pulses == 0:
            return self.pulseio_val
        self._interrupt = True
        if num_pulses == self.pulseio_maxlen:
            return None
        if self.pulseio_val is None:
            self.pulseio_val = False
            num_pulses += 1 # The 'missing' first edge
        # An odd number of edges toggles the level; an even number restores it.
        val = bool(self.pulseio_val ^ bool(num_pulses % 2))
        self.pulseio_val = val
        return val

    def _ensure_io(self):
        """Switch the backing object to DigitalInOut, releasing any PulseIn."""
        if self.pin is None:
            return
        if self.pulseio is not None:
            if self.debug:
                print('%s.PulseIn(%r).deinit()' % (self.index, self.pin,))
            self.pulseio.deinit()
            self.pulseio = None
        if self.io is None:
            if self.debug:
                print('%d = DigitalInOut(%r)' % (self.index, self.pin,))
            self.io = digitalio.DigitalInOut(self.pin)

    def _ensure_pulseio(self):
        """Switch the backing object to PulseIn, releasing any DigitalInOut."""
        if self.pin is None:
            return
        if self.io is not None:
            if self.debug:
                print('%s.DigitalInOut(%r).deinit()' % (self.index, self.pin,))
            self.io.deinit()
            self.io = None
        if self.pulseio is None:
            if self.debug:
                print('%s = PulseIn(%r, maxlen=%d)' % (self.index, self.pin, self.pulseio_maxlen,))
            self.pulseio = pulseio.PulseIn(self.pin, maxlen=self.pulseio_maxlen) # , idle_state=False)
            self.pulseio_val = None
| 31.632258 | 103 | 0.532021 | 9,359 | 0.954416 | 0 | 0 | 1,466 | 0.1495 | 0 | 0 | 955 | 0.097389 |
facc8b55215e84d77bae17017885f0c7c6fa4a14 | 2,084 | py | Python | src/main/python/counts_tools/exec/deviation_analysis.py | cday97/beam | 7e1ab50eecaefafd04daab360f8b12bc7cab559b | [
"BSD-3-Clause-LBNL"
] | 123 | 2017-04-06T20:17:19.000Z | 2022-03-02T13:42:15.000Z | src/main/python/counts_tools/exec/deviation_analysis.py | cday97/beam | 7e1ab50eecaefafd04daab360f8b12bc7cab559b | [
"BSD-3-Clause-LBNL"
] | 2,676 | 2017-04-26T20:27:27.000Z | 2022-03-31T16:39:53.000Z | src/main/python/counts_tools/exec/deviation_analysis.py | cday97/beam | 7e1ab50eecaefafd04daab360f8b12bc7cab559b | [
"BSD-3-Clause-LBNL"
] | 60 | 2017-04-06T20:14:32.000Z | 2022-03-30T20:10:53.000Z | import ConfigParser
from datetime import datetime
import os
import sys
import numpy as np
import pandas as pd
import utils.counts
import utils.counts_deviation
__author__ = 'Andrew A Campbell'
# This script finds the days with the greatest deviation from some reference value (such as hourly means or medians)
if __name__ == '__main__':
    # Usage: deviation_analysis.py <config_path>
    if len(sys.argv) < 2:
        # Exit instead of falling through to a crash on sys.argv[1].
        print('ERROR: need to supply the path to the config file')
        sys.exit(1)
    config_path = sys.argv[1]
    conf = ConfigParser.ConfigParser()
    conf.read(config_path)
    # Paths
    station_TS_dir = conf.get('Paths', 'station_TS_dir')  # Path to station Time Series
    ref_counts_file = conf.get('Paths', 'ref_counts_file')
    out_file = conf.get('Paths', 'out_file')  # Where to write the counts file
    # Parameters
    start_date = conf.get('Params', 'start_date')
    end_date = conf.get('Params', 'end_date')
    days = [int(d.strip()) for d in conf.get('Params', 'days').split(',')]
    measure = conf.get('Params', 'measure')

    # Get target dates
    targ_dates = utils.counts.date_string_list(start_date, end_date, days)

    # Compute the chosen deviation measure for every station with complete data.
    ref = utils.counts.df_from_counts(ref_counts_file)  # DF w/ mean flow for each link
    measures = []
    keepers = []
    for i, stat in enumerate(ref.columns):
        # Get path to stat ts file
        print('Processing station: %s' % str(stat))
        print('Number %d of %d' % (i, ref.shape[1]))
        ts_path = os.path.join(station_TS_dir, str(stat), 'time_series.csv')
        c_dev = utils.counts_deviation.CountsDeviation(ts_path, targ_dates)
        if c_dev.missing:  # if there is missing data, we skip the whole station
            print("Missing data. Skipping station: %s" % str(stat))
            continue
        c_dev.calc_measure(measure, reference=ref[stat])
        measures.append(c_dev.measures[measure])
        keepers.append(stat)
    df = pd.DataFrame(measures).transpose()
    df.columns = keepers
    df.index = targ_dates
    # dropna returns a new frame; the original discarded the result.
    df = df.dropna(axis=1)
    df['Max_Dev'] = df.apply(np.sum, axis=1)
    df.to_csv(out_file)
| 32.5625 | 116 | 0.669866 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 668 | 0.320537 |
facf32ca0ec852661f76a8129eb47cff600e9536 | 104 | py | Python | boltstream/responses.py | geekpii/boltstream | 3a107372ac7d67c18fd257c75769f125c855c5b6 | [
"Apache-2.0"
] | 1,735 | 2020-12-07T02:06:43.000Z | 2022-03-18T01:47:06.000Z | boltstream/responses.py | sonrebmax/boltstream | 10a4398b165cc4440c38ab4cd657a74e00cba38b | [
"Apache-2.0"
] | 10 | 2020-12-07T02:41:23.000Z | 2021-12-22T06:47:56.000Z | boltstream/responses.py | sonrebmax/boltstream | 10a4398b165cc4440c38ab4cd657a74e00cba38b | [
"Apache-2.0"
] | 126 | 2020-12-07T03:17:44.000Z | 2022-03-12T18:06:27.000Z | from django.http import HttpResponse
class HttpResponseNoContent(HttpResponse):
    """HTTP 204 No Content: the request succeeded and there is no body."""
    status_code = 204
| 17.333333 | 42 | 0.807692 | 64 | 0.615385 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
fad2a283163dcc8ae897624ab986731628e63b17 | 5,457 | py | Python | ChessRender/RenderFsmCommon/figure_manage.py | PavelLebed20/chess_classic | 72f7d08cadae8db9c65d61411bcdc8c79bfa04c3 | [
"Apache-2.0"
] | 1 | 2019-06-04T11:08:55.000Z | 2019-06-04T11:08:55.000Z | ChessRender/RenderFsmCommon/figure_manage.py | PavelLebed20/chess_classic | 72f7d08cadae8db9c65d61411bcdc8c79bfa04c3 | [
"Apache-2.0"
] | 115 | 2019-03-02T08:02:50.000Z | 2019-06-02T16:28:00.000Z | ChessRender/RenderFsmCommon/figure_manage.py | PavelLebed20/chess_classic | 72f7d08cadae8db9c65d61411bcdc8c79bfa04c3 | [
"Apache-2.0"
] | null | null | null | ###############################
# MODULE: Object settings #
# AUTHOR: Yangildin Ivan #
# LAST UPDATE: 08/04/2019 #
###############################
import copy
from enum import Enum, IntEnum
from direct.gui.OnscreenText import TransparencyAttrib
# Piece tint colors as RGBA tuples with components in [0, 1].
BLACK = (0.15, 0.15, 0.15, 1)
#BLACK = (0.0, 0.0, 0.0, 1)
WHITE = (0.75, 0.75, 0.75, 1)
class RenderState(Enum):
    """High-level UI states of the render state machine."""
    DEFAULT = -1
    INPUT = 0
    MENU = 1
    GAME = 2
class RenderModels(Enum):
    """Generic (non-figure) render models."""
    PLANE = 0
class RenderObject(IntEnum):
    """Renderable objects.

    Layout matters: black pieces occupy 0-5 and each white piece is its
    black counterpart plus 6 — figure_as_render_object relies on this.
    """
    BLACK_KING = 0
    BLACK_QUEEN = 1
    BLACK_BISHOP = 2
    BLACK_KNIGHT = 3
    BLACK_ROOK = 4
    BLACK_PAWN = 5
    WHITE_KING = 6
    WHITE_QUEEN = 7
    WHITE_BISHOP = 8
    WHITE_KNIGHT = 9
    WHITE_ROOK = 10
    WHITE_PAWN = 11
    PLANE = 12
# FEN-style piece letter -> offset within one color block of RenderObject.
_PIECE_OFFSETS = {'k': 0, 'q': 1, 'b': 2, 'n': 3, 'r': 4, 'p': 5}


def figure_as_render_object(figure_latter):
    """Map a FEN-style piece letter to its RenderObject.

    Lowercase letters are black pieces; uppercase are white, which occupy
    the upper half of the enum (offset +6).

    Arguments:
        figure_latter (str): One of 'k', 'q', 'b', 'n', 'r', 'p', either case.

    Returns:
        RenderObject: The renderable object for that piece.

    Raises:
        KeyError: If the letter is not a chess piece. (The original if-chain
            silently mapped unknown letters to a KING.)
    """
    res = _PIECE_OFFSETS[figure_latter.lower()]
    if figure_latter.isupper():
        res += 6  # white pieces live in the upper half of the enum
    return RenderObject(res)
class FigureMngr:
    """Loads and hands out chess-figure models, textures and skyboxes.

    All assets are loaded eagerly in ``__init__`` from per-side resource
    packs; ``load_*`` methods return deep copies so callers can place and
    tint each instance independently.

    NOTE(review): ``loader`` is not defined in this module — presumably the
    Panda3D global asset loader injected into builtins; confirm.
    """

    def __init__(self, blackside_pack, whiteside_pack):
        # Resource pack roots, e.g. ChessRender/data/chess_figures/<pack>/.
        self.data_path = "ChessRender/data/"
        self.whiteside_pack_name = self.data_path + "chess_figures/" + whiteside_pack + "/"
        self.blackside_pack_name = self.data_path + "chess_figures/" + blackside_pack + "/"
        # 2D sprite textures, keyed by RenderObject.
        self.textures = dict({
            RenderObject.BLACK_KING : loader.loadTexture(self.blackside_pack_name + "bK.png"),
            RenderObject.BLACK_QUEEN : loader.loadTexture(self.blackside_pack_name + "bQ.png"),
            RenderObject.BLACK_BISHOP : loader.loadTexture(self.blackside_pack_name + "bB.png"),
            RenderObject.BLACK_KNIGHT : loader.loadTexture(self.blackside_pack_name + "bN.png"),
            RenderObject.BLACK_ROOK : loader.loadTexture(self.blackside_pack_name + "bR.png"),
            RenderObject.BLACK_PAWN : loader.loadTexture(self.blackside_pack_name + "bP.png"),
            RenderObject.WHITE_KING : loader.loadTexture(self.whiteside_pack_name + "wK.png"),
            RenderObject.WHITE_QUEEN : loader.loadTexture(self.whiteside_pack_name + "wQ.png"),
            RenderObject.WHITE_BISHOP : loader.loadTexture(self.whiteside_pack_name + "wB.png"),
            RenderObject.WHITE_KNIGHT : loader.loadTexture(self.whiteside_pack_name + "wN.png"),
            RenderObject.WHITE_ROOK : loader.loadTexture(self.whiteside_pack_name + "wR.png"),
            RenderObject.WHITE_PAWN : loader.loadTexture(self.whiteside_pack_name + "wP.png"),
        })
        # 3D models, keyed by RenderObject; PLANE is the shared 2D sprite quad.
        self.modeles = dict({
            RenderObject.WHITE_KING: loader.loadModel(self.whiteside_pack_name + "king.egg"),
            RenderObject.WHITE_QUEEN: loader.loadModel(self.whiteside_pack_name + "queen.egg"),
            RenderObject.WHITE_BISHOP: loader.loadModel(self.whiteside_pack_name + "bishop.egg"),
            RenderObject.WHITE_KNIGHT: loader.loadModel(self.whiteside_pack_name + "knight.egg"),
            RenderObject.WHITE_ROOK: loader.loadModel(self.whiteside_pack_name + "rook.egg"),
            RenderObject.WHITE_PAWN: loader.loadModel(self.whiteside_pack_name + "pawn.egg"),
            RenderObject.BLACK_KING: loader.loadModel(self.blackside_pack_name + "king.egg"),
            RenderObject.BLACK_QUEEN: loader.loadModel(self.blackside_pack_name + "queen.egg"),
            RenderObject.BLACK_BISHOP: loader.loadModel(self.blackside_pack_name + "bishop.egg"),
            RenderObject.BLACK_KNIGHT: loader.loadModel(self.blackside_pack_name + "knight.egg"),
            RenderObject.BLACK_ROOK: loader.loadModel(self.blackside_pack_name + "rook.egg"),
            RenderObject.BLACK_PAWN: loader.loadModel(self.blackside_pack_name + "pawn.egg"),
            RenderObject.PLANE: loader.loadModel(self.data_path + "plane.egg")
        })

    def load_figure_model(self, figure_latter):
        """Return a tinted 3D model copy for the given piece letter."""
        render_obj = figure_as_render_object(figure_latter)
        obj = copy.deepcopy(self.modeles[render_obj])
        # Black pieces are the first half of the enum; tint accordingly.
        if RenderObject.BLACK_KING <= RenderObject(render_obj) <= RenderObject.BLACK_PAWN:
            obj.setColor(BLACK)
        else:
            obj.setColor(WHITE)
        return obj

    def load_figure_model_2D(self, figure_latter):
        """Return a textured 2D sprite copy for the given piece letter."""
        render_obj = figure_as_render_object(figure_latter)
        return self.load_plane_object(render_obj)

    def load_plane_object(self, render_object):
        """Return a plane copy textured with the given object's sprite."""
        obj = copy.deepcopy(self.modeles[RenderObject.PLANE])
        texture = copy.deepcopy(self.textures[render_object])
        obj.set_texture(texture)
        obj.setTransparency(TransparencyAttrib.MAlpha)
        return obj

    def load_cube(self):
        """Load and return the generic cube model."""
        self.data_path = "ChessRender/data/"
        obj = loader.loadModel(self.data_path + "cube.egg")
        return obj

    def load_plane_textured(self, texture_path):
        """Return a plane copy with an arbitrary texture (or untextured)."""
        obj = copy.deepcopy(self.modeles[RenderObject.PLANE])
        if texture_path is not None:
            texture = loader.loadTexture(texture_path)
            obj.set_texture(texture)
        obj.setTransparency(TransparencyAttrib.MAlpha)
        return obj

    def load_skybox_white_side(self):
        """Load the skybox used when playing from the white side."""
        return loader.loadModel(self.whiteside_pack_name + "cubemap.bam")

    def load_skybox_black_side(self):
        """Load the skybox used when playing from the black side."""
        return loader.loadModel(self.blackside_pack_name + "cubemap.bam")
fad48c005d43403bbab5e6857a996949ef7c14d5 | 1,202 | py | Python | hostel_project/hostel_webapp/views.py | harsh-ux/hostel-app | 89c341d55116913de81a3c7bd8a08d77729c39cb | [
"MIT"
] | null | null | null | hostel_project/hostel_webapp/views.py | harsh-ux/hostel-app | 89c341d55116913de81a3c7bd8a08d77729c39cb | [
"MIT"
] | null | null | null | hostel_project/hostel_webapp/views.py | harsh-ux/hostel-app | 89c341d55116913de81a3c7bd8a08d77729c39cb | [
"MIT"
] | null | null | null | from django.shortcuts import render
from django.views import generic
from django.shortcuts import redirect
from django.utils import timezone
from .models import Complaint
from .forms import ComplaintForm
class ComplaintList(generic.ListView):
    """List view of all complaints, oldest first."""
    queryset = Complaint.objects.order_by('date')
    template_name = 'complaintlist.html'
    # NOTE(review): this shadows Django's ``ListView.as_view`` classmethod
    # with a plain function taking ``request``, so URLconf entries using
    # ComplaintList.as_view() bypass the ListView machinery (no object_list
    # in the context). Confirm whether a separate function view was intended.
    def as_view(request):
        return render(request, 'complaintlist.html', {})
# Hostel block identifiers known to the app.
hostels = [
    'B1',
    'B2',
    'B5'
]
# Create your views here.
def index(request):
    """Render the landing page."""
    return render(request, 'index.html', {})
def about(request):
    """Render the about page."""
    return render(request, 'about.html', {})
def login(request):
    """Render the login page."""
    return render(request, 'login.html', {})
def complaint(request):
    """Show the complaint form and handle its submission.

    On POST the submitted data is bound to the form; if valid, the complaint
    is stamped with the requesting user and the current time, saved, and the
    user is redirected to its detail page. On GET (or invalid input) the
    form is (re)rendered.
    """
    if request.method == "POST":
        # Bind the posted data. The original built an unbound ComplaintForm()
        # here, so is_valid() was always False and nothing was ever saved.
        form = ComplaintForm(request.POST)
        if form.is_valid():
            new_complaint = form.save(commit=False)
            new_complaint.author = request.user
            new_complaint.date = timezone.now()
            new_complaint.save()
            return redirect('complaint_detail', pk=new_complaint.pk)
    else:
        form = ComplaintForm()
    return render(request, 'complaint.html', {'form': form})
def g4(request):
    """Render the G4 hostel page."""
    return render(request, 'g4.html', {})
| 25.041667 | 64 | 0.655574 | 212 | 0.176373 | 0 | 0 | 0 | 0 | 0 | 0 | 174 | 0.144759 |
fad4a6741d0059fc88b188ba3616c0961edb64af | 4,679 | py | Python | map_crawling/map_crawling_code/Map_crawling.py | yumei86/iRamen_linebot | 5b9f2653076faa04118f281970edf639288d79fd | [
"MIT"
] | 16 | 2020-12-29T11:23:01.000Z | 2022-01-01T01:55:42.000Z | map_crawling/map_crawling_code/Map_crawling.py | yumei86/iRamen_linebot | 5b9f2653076faa04118f281970edf639288d79fd | [
"MIT"
] | null | null | null | map_crawling/map_crawling_code/Map_crawling.py | yumei86/iRamen_linebot | 5b9f2653076faa04118f281970edf639288d79fd | [
"MIT"
] | 4 | 2020-12-27T14:43:35.000Z | 2021-02-26T09:35:03.000Z | from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from time import sleep
import time
import csv
import pyautogui
#------- Run the browser headless (no visible window) ----------
options = webdriver.ChromeOptions()
options.add_argument("--headless")
options.add_experimental_option('excludeSwitches', ['enable-automation'])  # hide Chrome's "controlled by automated software" banner
#options.add_argument("window-size=1920,1080")
#options.add_argument("--start")
driver = webdriver.Chrome(executable_path='chromedriver',options=options)
link = 'https://www.google.com/maps/d/u/0/viewer?fbclid=IwAR3O8PKxMuqtqb2wMKoHKe4cCETwnT2RSCZSpsyPPkFsJ6NpstcrDcjhO2k&mid=1I8nWhKMX1j8I2bUkN4qN3-FSyFCCsCh7&ll=24.807740000000006%2C120.96740199999999&z=8'
driver.get(link)
time.sleep(3)
#------- Close the "opening soon" menu -------
first_cl = driver.find_element_by_xpath("//div[2]/div/div/div[2]/div/div/div[2]/div/div/div/div[3]")
first_cl.click()
#------- Scroll the page down (needed for the more southern counties) -------
#pyautogui.scroll(-10)
# Scraped fields, one parallel list per CSV column.
store_name = []
store_loca = []
store_char = []
store_time = []
store_tran = []
store_refs = []
store_tags = []
# Number of shops in each of the map's menus (one entry per menu/county).
store_number = [167,47,59,68,37,50,12]
#------- Expand the remaining menu items --------
item = 2 # 0..6: index into store_number, i.e. which map menu/county to scrape
#for item in range():
num_1 = str(item + 2)
start_search_btn = driver.find_element_by_xpath("//div["+ num_1 +"]/div/div[3]/div[2]/div/div")
start_search_btn.click()
n = store_number[item]
#------- Open each listing to scrape its info (remember to close it again) -------
for items in range(59): # adjust this count per county: use store_number[item]
    num = str(items + 3)
    driver.find_element_by_xpath("//div["+ num_1 +"]/div/div[3]/div["+ num +"]/div[2]/div").click()
    time.sleep(1)
    s1 = WebDriverWait(driver, 15).until(EC.presence_of_all_elements_located((By.XPATH, '//*[@class="qqvbed-p83tee-lTBxed"]')))
    store_name.append(s1[0].text)
    print(store_name)
    time.sleep(1)
    # The description is one text blob; locate the section headers
    # (features / opening hours / transport / reviews / tags) by line index.
    temp = s1[1].text.split("\n")
    temp_new = []
    for i in range(len(temp)):
        if "▎特色:" == temp[i]:
            one = i
        if "▎營業時間:" == temp[i]:
            two = i
        if "▎鄰近地標或大眾運輸:" == temp[i]:
            three = i
        if "▎社團內參考食記:" == temp[i]:
            four = i
        if "▎標籤:" == temp[i]:
            five = i
    # Features: lines between the "特色" and "營業時間" headers.
    try:
        l = int( two - one )-1
        for k in range(l):
            temp_new.append(temp[one+k+1])
        ans = "".join(temp_new)
        store_char.append(ans)
    except:
        # NOTE(review): this appends the fallback to store_time, not
        # store_char — the parallel lists can drift out of step; confirm.
        store_time.append("無提供特色資訊")
    temp_new = []
    # Opening hours: lines between "營業時間" and "鄰近地標或大眾運輸".
    try:
        l = int( three - two )-1
        for k in range(l):
            temp_new.append(temp[two+k+1])
        ans = "".join(temp_new)
        store_time.append(ans)
    except:
        store_time.append("無提供時間資訊")
    temp_new = []
    # Transport / nearby landmarks: lines between "鄰近..." and "社團內參考食記".
    try:
        l = int( four - three )-1
        for k in range(l):
            temp_new.append(temp[three+k+1])
        ans = "".join(temp_new)
        store_tran.append(ans)
    except:
        store_tran.append("無提供交通資訊")
    temp_new = []
    # Review links: lines between "社團內參考食記" and "標籤".
    try:
        l = int( five - four )-1
        for k in range(l):
            temp_new.append(temp[four+k+1])
        ans = " ".join(temp_new)
        store_refs.append(ans)
    except:
        store_refs.append("無提供評論資訊")
    temp_new = []
    # Tags: the single line following the "標籤" header.
    try:
        l = int( five )
        temp_new.append(temp[five+1])
        ans = "".join(temp_new)
        store_tags.append(ans)
    except:
        store_tags.append("無提供標籤資訊")
    temp_new = []
    try:
        location = WebDriverWait(driver, 15).until(EC.presence_of_all_elements_located((By.XPATH, '//*[@class="fO2voc-jRmmHf-MZArnb-Q7Zjwb"]')))
        #print(location[0].text)
        store_loca.append(location[0].text)
    except:
        store_loca.append("無提供地址資訊")
    time.sleep(1)
    driver.find_element_by_xpath("//div[3]/div/div/span/span/span").click()
    time.sleep(1)
#--------------- create new csv file ------------------
ans = ['','','','','','','']
with open('Ramen_map_TY.csv', 'w', newline='') as csvfile:
    writer = csv.writer(csvfile, delimiter=',') # comma-delimited CSV writer
    writer.writerow(['name','address','characteristics','time','transport','tags','reference'])
    for i in range(len(store_name)):
        ans[0] = store_name[i]
        ans[1] = store_loca[i]
        ans[2] = store_char[i]
        ans[3] = store_time[i]
        ans[4] = store_tran[i]
        ans[5] = store_tags[i]
        ans[6] = store_refs[i]
        writer.writerow([ans[0],ans[1],ans[2],ans[3],ans[4],ans[5],ans[6]])
driver.close() # quit the browser
| 27.046243 | 203 | 0.579397 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,557 | 0.308134 |
fada5720fcd6da7cbf8b1e56a59906ef13697176 | 2,052 | py | Python | blamepipeline/preprocess/match_article_entry.py | jmmaki/BlamePipeline | ca5f1b54ebe62bcbea4b13f70c15b14002133d7c | [
"MIT"
] | null | null | null | blamepipeline/preprocess/match_article_entry.py | jmmaki/BlamePipeline | ca5f1b54ebe62bcbea4b13f70c15b14002133d7c | [
"MIT"
] | null | null | null | blamepipeline/preprocess/match_article_entry.py | jmmaki/BlamePipeline | ca5f1b54ebe62bcbea4b13f70c15b14002133d7c | [
"MIT"
] | null | null | null | # encoding: utf-8
'''
Match articles with annotated data
'''
from collections import defaultdict
import argparse
from blamepipeline.preprocess.dataloader import Dataset
case1, case2 = 0, 0


def match_data(source):
    """Pair annotated blame entries with the articles they came from.

    Articles are bucketed by publication date so each entry is only compared
    against same-day articles; an entry matches an article when its title
    equals, or is contained in, the article's title or subtitle.

    Arguments:
        source (str): Dataset source name (e.g. 'FOX').

    Returns:
        list: (entry, article) pairs, at most one article per entry.
    """
    dataset = Dataset(source)
    articles = dataset.get_articles()
    entries = dataset.get_entries()
    date_articles = defaultdict(list)
    for article in articles:
        date_articles[article['date']].append(article)
    print('{} dates of {} articles loaded.'.format(len(date_articles), len(articles)))
    print('{} entries loaded.'.format(len(entries)))
    title_match = 0
    subtitle_match = 0
    pairs = []

    def matches(entry_title, article_title):
        # Entry titles shorter than 10 chars are too ambiguous to match.
        if not entry_title or len(entry_title) < 10:
            return False
        if not article_title:
            # Guard: article title/subtitle may be missing; the original
            # ``in`` test raised TypeError on a None subtitle.
            return False
        return entry_title == article_title or entry_title in article_title

    for entry in entries:
        for article in date_articles[entry['date']]:
            if matches(entry['title'], article['title']):
                title_match += 1
                pairs.append((entry, article))
                break
            elif matches(entry['title'], article['subtitle']):
                subtitle_match += 1
                pairs.append((entry, article))
                break
    print('title match:', title_match)
    print('subtitle match:', subtitle_match)
    return pairs
def main(args):
if args.source == 'all':
sources = ['FOX']
else:
sources = [args.source.upper()]
for source in sources:
print(source)
pairs = match_data(source)
print('matched pairs:', len(pairs))
print('---')
global case1, case2
print(f'{case1}, {case2}')
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='match articles and entries')
parser.add_argument('--source', type=str, choices=['all', 'fox'], default='all')
args = parser.parse_args()
main(args)
| 27 | 86 | 0.618421 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 299 | 0.145712 |
fadaa73e2ce983f8ee457b6bf67edceb51e25ddc | 197 | py | Python | res/TensorFlowPythonExamples/examples/while/__init__.py | bogus-sudo/ONE-1 | 7052a817eff661ec2854ed2e7ee0de5e8ba82b55 | [
"Apache-2.0"
] | 255 | 2020-05-22T07:45:29.000Z | 2022-03-29T23:58:22.000Z | res/TensorFlowPythonExamples/examples/while/__init__.py | bogus-sudo/ONE-1 | 7052a817eff661ec2854ed2e7ee0de5e8ba82b55 | [
"Apache-2.0"
] | 5,102 | 2020-05-22T07:48:33.000Z | 2022-03-31T23:43:39.000Z | res/TensorFlowPythonExamples/examples/while/__init__.py | bogus-sudo/ONE-1 | 7052a817eff661ec2854ed2e7ee0de5e8ba82b55 | [
"Apache-2.0"
] | 120 | 2020-05-22T07:51:08.000Z | 2022-02-16T19:08:05.000Z | import tensorflow as tf
# Minimal TF1-style while-loop example: keep adding 1 to the counter until it reaches 10.
i = tf.compat.v1.constant(0, name="Hole")              # loop variable, starts at 0
c = lambda i: tf.compat.v1.less(i, 10)                 # loop condition: i < 10
b = lambda i: tf.compat.v1.add(i, 1)                   # loop body: i += 1
r = tf.compat.v1.while_loop(c, b, [i], name="While")   # builds the While op in the graph
| 24.625 | 52 | 0.654822 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 13 | 0.06599 |
fade8de862f192db4db57a6e8e338b8eea806aa8 | 1,628 | py | Python | tests/prosperpy/overlays/test_bollinger_bands.py | CaptainBriot/prosperpy | 831abb9c9e3b730c81895647e33a59854c4e4648 | [
"MIT"
] | 2 | 2018-01-28T06:11:37.000Z | 2018-02-04T16:01:30.000Z | tests/prosperpy/overlays/test_bollinger_bands.py | CaptainBriot/prosperpy | 831abb9c9e3b730c81895647e33a59854c4e4648 | [
"MIT"
] | 1 | 2018-03-20T12:10:40.000Z | 2018-03-21T00:08:04.000Z | tests/prosperpy/overlays/test_bollinger_bands.py | CaptainBriot/prosperpy | 831abb9c9e3b730c81895647e33a59854c4e4648 | [
"MIT"
] | 2 | 2019-04-06T14:33:26.000Z | 2020-06-25T23:34:32.000Z | import unittest
import decimal
import prosperpy
def get_prices():
    """Return the test fixture of 23 closing prices as :class:`decimal.Decimal` values."""
    quotes = ('90.704', '92.900', '92.978', '91.802', '92.665', '92.684', '92.302', '92.773', '92.537', '92.949',
              '93.204', '91.067', '89.832', '89.744', '90.399', '90.739', '88.018', '88.087', '88.844', '90.778',
              '90.542', '91.389', '90.650')
    return [decimal.Decimal(quote) for quote in quotes]
class TestSimpleMovingAverage(unittest.TestCase):
    """Checks BollingerBands values against precomputed reference numbers."""

    def test_simple_moving_average(self):
        prices = get_prices()
        # Expected (moving average, upper band, lower band) after each extra price.
        # The fixture rows carry a fourth (bandwidth) column that is not checked.
        reference = [('91.2422', '94.53214587189516', '87.95225412810484', '6.57989174379032'),
                     ('91.16665', '94.36908071900080', '87.96421928099920', '6.40486143800160'),
                     ('91.05025', '94.14840337741694', '87.95209662258306', '6.19630675483388')]
        reference = [tuple(decimal.Decimal(v) for v in row[:3]) for row in reference]
        bands = prosperpy.overlays.BollingerBands(prices[:20])
        self.assertEqual(bands.moving_average.value, decimal.Decimal('91.2503'))
        self.assertEqual(bands.upper, decimal.Decimal('94.53410225348604'))
        self.assertEqual(bands.lower, decimal.Decimal('87.96649774651396'))
        self.assertEqual(bands.bandwidth, decimal.Decimal('6.56760450697208'))
        for price, expected in zip(prices[20:], reference):
            bands.add(price)
            self.assertEqual(bands.moving_average.value, expected[0])
            self.assertEqual(bands.upper, expected[1])
            self.assertEqual(bands.lower, expected[2])
| 49.333333 | 113 | 0.651106 | 1,228 | 0.7543 | 0 | 0 | 0 | 0 | 0 | 0 | 446 | 0.273956 |
fae0583fbc853b8b77719ed67cdd0c3a13ee094a | 156 | py | Python | projects/course_intro/raw_input.py | ProgressBG-Python-Course/ProgressBG-VC-Python | 37f8be60fb73d558ebd9e3c2700c88dcb64e7f4f | [
"MIT"
] | null | null | null | projects/course_intro/raw_input.py | ProgressBG-Python-Course/ProgressBG-VC-Python | 37f8be60fb73d558ebd9e3c2700c88dcb64e7f4f | [
"MIT"
] | null | null | null | projects/course_intro/raw_input.py | ProgressBG-Python-Course/ProgressBG-VC-Python | 37f8be60fb73d558ebd9e3c2700c88dcb64e7f4f | [
"MIT"
] | null | null | null | machine_number = 43
# raw_input() only exists in Python 2; the rest of the file uses Python-3 style
# print() calls, so use the Python 3 built-in input() instead.
user_number = int(input("enter a number: "))
print(type(user_number))
# Compare the user's guess against the machine's fixed number.
if user_number == machine_number:
    print("Bravo!!!")
| 15.6 | 50 | 0.692308 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 28 | 0.179487 |
fae33f3464d3a11adc1fa5ba692617e0ef8bced7 | 15,377 | py | Python | algorithm.py | James0231/Data-Visualization-PJ | 053a1fdb26e07381e44cfd13cb989631e86b8b82 | [
"MIT"
] | 5 | 2019-01-24T02:53:43.000Z | 2020-10-17T09:01:52.000Z | algorithm.py | James0231/Data-Visualization-PJ | 053a1fdb26e07381e44cfd13cb989631e86b8b82 | [
"MIT"
] | 1 | 2020-02-20T07:55:50.000Z | 2021-11-18T12:31:10.000Z | algorithm.py | James0231/Data-Visualization-PJ | 053a1fdb26e07381e44cfd13cb989631e86b8b82 | [
"MIT"
] | 1 | 2022-03-16T05:49:17.000Z | 2022-03-16T05:49:17.000Z | import copy
import numpy as np
class FFDAlgorithm(object):
    """Free-form deformation (FFD) of a 3-D point cloud on a uniform control lattice.

    A grid of ``cp_num_x * cp_num_y * cp_num_z`` control points is fitted around the
    object's bounding box; displacing control points deforms embedded object points
    through a uniform cubic B-spline basis (see :meth:`B` and :meth:`T_local`).
    """

    def __init__(self, num_x, num_y, num_z, filename, object_points):
        """Store the lattice resolution and the object's vertices.

        :param num_x: number of control points along the x axis.
        :param num_y: number of control points along the y axis.
        :param num_z: number of control points along the z axis.
        :param filename: unused; kept for backward compatibility with callers.
        :param object_points: iterable of [x, y, z] vertices to deform.
        """
        self.cp_num_x = num_x
        self.cp_num_y = num_y
        self.cp_num_z = num_z
        self.object_points_initial = object_points

    def cover_obj(self, initial=True):
        """Build (or rebuild) the control lattice covering the object.

        Computes the bounding box and per-axis grid spacings, and buckets every
        object vertex into its containing grid cell (``self.object_points``).
        When ``initial`` is true, control-point offsets are reset to zero and the
        reference control-point locations are (re)created.
        """
        points = np.array(self.object_points_initial)
        self.min_x, self.min_y, self.min_z = points.min(axis=0)
        self.max_x, self.max_y, self.max_z = points.max(axis=0)
        # Grid spacing per axis.  NOTE(review): a flat object (zero extent on an
        # axis) yields spacing 0 and a ZeroDivisionError below — confirm inputs.
        self.nx = (self.max_x - self.min_x) / (self.cp_num_x - 1)
        self.ny = (self.max_y - self.min_y) / (self.cp_num_y - 1)
        self.nz = (self.max_z - self.min_z) / (self.cp_num_z - 1)
        self.changed = {}
        if initial:
            # Offsets of every control point, all zero initially.
            self.control_points = [
                [
                    [np.array([0.0, 0.0, 0.0]) for z in range(self.cp_num_z)]
                    for y in range(self.cp_num_y)
                ]
                for x in range(self.cp_num_x)
            ]
            # Absolute control-point coordinates on the regular grid.
            self.cp_locations = [
                [
                    [
                        np.array(
                            [
                                self.min_x + x * self.nx,
                                self.min_y + y * self.ny,
                                self.min_z + z * self.nz,
                            ]
                        )
                        for z in range(self.cp_num_z)
                    ]
                    for y in range(self.cp_num_y)
                ]
                for x in range(self.cp_num_x)
            ]
            # Deep copy so the reference locations survive later edits.
            self.init_cp_locations = copy.deepcopy(self.cp_locations)
        try:
            del self.object_points
        except AttributeError:
            # First call: nothing to delete (previously a bare except).
            pass
        self.object_points = {}
        for x in range(self.cp_num_x):
            for y in range(self.cp_num_y):
                for z in range(self.cp_num_z):
                    self.object_points[(x, y, z)] = set()
        for point_index in range(len(self.object_points_initial)):
            [x, y, z] = self.object_points_initial[point_index]
            i = int((x - self.min_x) / self.nx)
            j = int((y - self.min_y) / self.ny)
            k = int((z - self.min_z) / self.nz)
            self.object_points[(i, j, k)].add((point_index, x, y, z))

    def read_ffd(self, path):
        """Parse a ``.ffd`` file of control-point offsets.

        Fills ``self.new_control_points`` (offsets read from the file) and
        ``self.new_cp_locations`` (initial locations plus scaled offsets).
        """
        f = open(path, "r")
        self.new_control_points = copy.deepcopy(self.control_points)
        self.new_cp_locations = copy.deepcopy(self.init_cp_locations)
        begin = False  # True once the offsets section has started
        while True:
            line = f.readline()
            if not begin:
                if line.startswith("#"):
                    if "#dimension#" in line:
                        line = f.readline()
                        self.dimension = int(line.split("\n")[0])
                        continue
                    if "#offsets of the control points#" in line:
                        begin = True
                        x = 0
                        y = 0
                        continue
                    elif "#control grid size#" in line:
                        size = []
                        for _ in range(self.dimension):
                            line = f.readline()
                            size.append(int(line.split("\n")[0]))
                        continue
                    else:
                        continue
                else:
                    continue
            else:
                if line == "\n":
                    # A blank line separates x-slices of the offset grid.
                    x += 1
                    y = 0
                    if x == size[0]:
                        break
                    else:
                        continue
                else:
                    line = line.split("\t")[:-1]
                    for z in range(len(line)):
                        # np.float was removed in NumPy >= 1.20; use builtin float.
                        self.new_control_points[x][y][z] = np.array(
                            [float(i) for i in line[z].split(" ")]
                        )
                    y += 1
        f.close()  # file handle was previously leaked
        for x in range(len(self.new_control_points)):
            for y in range(len(self.new_control_points[x])):
                for z in range(len(self.new_control_points[x][y])):
                    # Offsets are scaled by the average grid spacing
                    # (the literal "* 3 ... / 3" is kept from the original).
                    self.new_cp_locations[x][y][z] += (
                        self.new_control_points[x][y][z]
                        * 3
                        * (self.nx + self.ny + self.nz)
                        / 3
                    )
        return

    def save_cp(self, filename):
        """Write the current control-point offsets to *filename* in .ffd format."""
        f = open(filename, "w")
        f.write("#dimension#\n")
        f.write("3\n")
        f.write("#one to one#\n")
        f.write("1\n")
        f.write("#control grid size#\n")
        f.write(str(self.cp_num_x) + "\n")
        f.write(str(self.cp_num_y) + "\n")
        f.write(str(self.cp_num_z) + "\n")
        f.write("#control grid spacing#\n")
        f.write(str(self.nx) + "\n")
        f.write(str(self.ny) + "\n")
        f.write(str(self.nz) + "\n")
        f.write("#offsets of the control points#\n")
        for x in range(len(self.control_points)):
            for y in range(len(self.control_points[x])):
                for z in range(len(self.control_points[x][y])):
                    f.write(
                        str(self.control_points[x][y][z][0])
                        + " "
                        + str(self.control_points[x][y][z][1])
                        + " "
                        + str(self.control_points[x][y][z][2])
                        + "\t"
                    )
                f.write("\n")
            f.write("\n")
        f.close()
        return

    def B(self, i, u):
        """Uniform cubic B-spline basis function ``B_i(u)`` for ``i`` in 0..3.

        For any ``u`` the four values sum to 1 (partition of unity).
        """
        if i == 0:
            return (1 - u) ** 3 / 6
        elif i == 1:
            return (3 * u ** 3 - 6 * u ** 2 + 4) / 6
        elif i == 2:
            return (-3 * u ** 3 + 3 * u ** 2 + 3 * u + 1) / 6
        elif i == 3:
            return u ** 3 / 6

    def T_local(self, object_point):
        """Deformation displacement at *object_point*.

        Weights the 4x4x4 neighbourhood of control-point offsets by the cubic
        B-spline basis; neighbours outside the lattice are skipped.
        """
        [x, y, z] = object_point
        # Index one cell below the containing cell (start of the 4-point support).
        i = int((x - self.min_x) / self.nx) - 1
        j = int((y - self.min_y) / self.ny) - 1
        k = int((z - self.min_z) / self.nz) - 1
        # Fractional position inside the containing cell.
        u = (x - self.min_x) / self.nx - int((x - self.min_x) / self.nx)
        v = (y - self.min_y) / self.ny - int((y - self.min_y) / self.ny)
        w = (z - self.min_z) / self.nz - int((z - self.min_z) / self.nz)
        result = np.array([0.0, 0.0, 0.0])
        for l in range(4):
            if 0 <= i + l < self.cp_num_x:
                for m in range(4):
                    if 0 <= j + m < self.cp_num_y:
                        for n in range(4):
                            if 0 <= k + n < self.cp_num_z:
                                result = (
                                    result
                                    + self.B(l, u)
                                    * self.B(m, v)
                                    * self.B(n, w)
                                    * self.control_points[i + l][j + m][k + n]
                                )
        return result

    def changed_reset(self):
        """Forget all pending control-point moves."""
        del self.changed
        self.changed = {}

    def changed_update(self, id, location):
        """Record that control point *id* (an (x, y, z) index tuple) moved to *location*."""
        self.changed[id] = location

    def update_control_point(self):
        """Convert the recorded absolute moves into offsets from the reference grid."""
        for (u, v, w), new_location in self.changed.items():
            self.control_points[u][v][w] = (
                new_location - self.cp_locations[u][v][w]
            )
class FFD_Bezier(object):
    """Free-form deformation of a 3-D point cloud using a Bezier (Bernstein) basis.

    Identical to :class:`FFDAlgorithm` except that :meth:`Bezier` uses the degree-3
    Bernstein polynomials instead of the uniform cubic B-spline basis.
    """

    def __init__(self, num_x, num_y, num_z, filename, object_points):
        """Store the lattice resolution and the object's vertices.

        :param num_x: number of control points along the x axis.
        :param num_y: number of control points along the y axis.
        :param num_z: number of control points along the z axis.
        :param filename: unused; kept for backward compatibility with callers.
        :param object_points: iterable of [x, y, z] vertices to deform.
        """
        self.cp_num_x = num_x
        self.cp_num_y = num_y
        self.cp_num_z = num_z
        self.object_points_initial = object_points

    def cover_obj(self, initial=True):
        """Build (or rebuild) the control lattice covering the object.

        Computes the bounding box and per-axis grid spacings, and buckets every
        object vertex into its containing grid cell (``self.object_points``).
        When ``initial`` is true, control-point offsets are reset to zero and the
        reference control-point locations are (re)created.
        """
        points = np.array(self.object_points_initial)
        self.min_x, self.min_y, self.min_z = points.min(axis=0)
        self.max_x, self.max_y, self.max_z = points.max(axis=0)
        # Grid spacing per axis.  NOTE(review): zero extent on an axis yields a
        # ZeroDivisionError below — confirm inputs span all three axes.
        self.nx = (self.max_x - self.min_x) / (self.cp_num_x - 1)
        self.ny = (self.max_y - self.min_y) / (self.cp_num_y - 1)
        self.nz = (self.max_z - self.min_z) / (self.cp_num_z - 1)
        self.changed = {}
        if initial:
            # Offsets of every control point, all zero initially.
            self.control_points = [
                [
                    [np.array([0.0, 0.0, 0.0]) for z in range(self.cp_num_z)]
                    for y in range(self.cp_num_y)
                ]
                for x in range(self.cp_num_x)
            ]
            # Absolute control-point coordinates on the regular grid.
            self.cp_locations = [
                [
                    [
                        np.array(
                            [
                                self.min_x + x * self.nx,
                                self.min_y + y * self.ny,
                                self.min_z + z * self.nz,
                            ]
                        )
                        for z in range(self.cp_num_z)
                    ]
                    for y in range(self.cp_num_y)
                ]
                for x in range(self.cp_num_x)
            ]
            # Deep copy so the reference locations survive later edits.
            self.init_cp_locations = copy.deepcopy(self.cp_locations)
        try:
            del self.object_points
        except AttributeError:
            # First call: nothing to delete (previously a bare except).
            pass
        self.object_points = {}
        for x in range(self.cp_num_x):
            for y in range(self.cp_num_y):
                for z in range(self.cp_num_z):
                    self.object_points[(x, y, z)] = set()
        for point_index in range(len(self.object_points_initial)):
            [x, y, z] = self.object_points_initial[point_index]
            i = int((x - self.min_x) / self.nx)
            j = int((y - self.min_y) / self.ny)
            k = int((z - self.min_z) / self.nz)
            self.object_points[(i, j, k)].add((point_index, x, y, z))

    def read_ffd(self, path):
        """Parse a ``.ffd`` file of control-point offsets.

        Fills ``self.new_control_points`` (offsets read from the file) and
        ``self.new_cp_locations`` (initial locations plus scaled offsets).
        """
        f = open(path, "r")
        self.new_control_points = copy.deepcopy(self.control_points)
        self.new_cp_locations = copy.deepcopy(self.init_cp_locations)
        begin = False  # True once the offsets section has started
        while True:
            line = f.readline()
            if not begin:
                if line.startswith("#"):
                    if "#dimension#" in line:
                        line = f.readline()
                        self.dimension = int(line.split("\n")[0])
                        continue
                    if "#offsets of the control points#" in line:
                        begin = True
                        x = 0
                        y = 0
                        continue
                    elif "#control grid size#" in line:
                        size = []
                        for _ in range(self.dimension):
                            line = f.readline()
                            size.append(int(line.split("\n")[0]))
                        continue
                    else:
                        continue
                else:
                    continue
            else:
                if line == "\n":
                    # A blank line separates x-slices of the offset grid.
                    x += 1
                    y = 0
                    if x == size[0]:
                        break
                    else:
                        continue
                else:
                    line = line.split("\t")[:-1]
                    for z in range(len(line)):
                        # np.float was removed in NumPy >= 1.20; use builtin float.
                        self.new_control_points[x][y][z] = np.array(
                            [float(i) for i in line[z].split(" ")]
                        )
                    y += 1
        f.close()  # file handle was previously leaked
        for x in range(len(self.new_control_points)):
            for y in range(len(self.new_control_points[x])):
                for z in range(len(self.new_control_points[x][y])):
                    # Offsets are scaled by the average grid spacing
                    # (the literal "* 3 ... / 3" is kept from the original).
                    self.new_cp_locations[x][y][z] += (
                        self.new_control_points[x][y][z]
                        * 3
                        * (self.nx + self.ny + self.nz)
                        / 3
                    )
        return

    def save_cp(self, filename):
        """Write the current control-point offsets to *filename* in .ffd format."""
        f = open(filename, "w")
        f.write("#dimension#\n")
        f.write("3\n")
        f.write("#one to one#\n")
        f.write("1\n")
        f.write("#control grid size#\n")
        f.write(str(self.cp_num_x) + "\n")
        f.write(str(self.cp_num_y) + "\n")
        f.write(str(self.cp_num_z) + "\n")
        f.write("#control grid spacing#\n")
        f.write(str(self.nx) + "\n")
        f.write(str(self.ny) + "\n")
        f.write(str(self.nz) + "\n")
        f.write("#offsets of the control points#\n")
        for x in range(len(self.control_points)):
            for y in range(len(self.control_points[x])):
                for z in range(len(self.control_points[x][y])):
                    f.write(
                        str(self.control_points[x][y][z][0])
                        + " "
                        + str(self.control_points[x][y][z][1])
                        + " "
                        + str(self.control_points[x][y][z][2])
                        + "\t"
                    )
                f.write("\n")
            f.write("\n")
        f.close()
        return

    def Bezier(self, i, u):
        """Degree-3 Bernstein polynomial ``B_i(u)`` for ``i`` in 0..3.

        For any ``u`` the four values sum to 1 (partition of unity).
        """
        if i == 0:
            return (1 - u) ** 3
        elif i == 1:
            return 3 * u ** 3 - 6 * u ** 2 + 3 * u
        elif i == 2:
            return -3 * u ** 3 + 3 * u ** 2
        elif i == 3:
            return u ** 3

    def T_local(self, object_point):
        """Deformation displacement at *object_point*.

        Weights the 4x4x4 neighbourhood of control-point offsets by the Bernstein
        basis; neighbours outside the lattice are skipped.
        """
        [x, y, z] = object_point
        # Index one cell below the containing cell (start of the 4-point support).
        i = int((x - self.min_x) / self.nx) - 1
        j = int((y - self.min_y) / self.ny) - 1
        k = int((z - self.min_z) / self.nz) - 1
        # Fractional position inside the containing cell.
        u = (x - self.min_x) / self.nx - int((x - self.min_x) / self.nx)
        v = (y - self.min_y) / self.ny - int((y - self.min_y) / self.ny)
        w = (z - self.min_z) / self.nz - int((z - self.min_z) / self.nz)
        result = np.array([0.0, 0.0, 0.0])
        for l in range(4):
            if 0 <= i + l < self.cp_num_x:
                for m in range(4):
                    if 0 <= j + m < self.cp_num_y:
                        for n in range(4):
                            if 0 <= k + n < self.cp_num_z:
                                result = (
                                    result
                                    + self.Bezier(l, u)
                                    * self.Bezier(m, v)
                                    * self.Bezier(n, w)
                                    * self.control_points[i + l][j + m][k + n]
                                )
        return result

    def changed_reset(self):
        """Forget all pending control-point moves."""
        del self.changed
        self.changed = {}

    def changed_update(self, id, location):
        """Record that control point *id* (an (x, y, z) index tuple) moved to *location*."""
        self.changed[id] = location

    def update_control_point(self):
        """Convert the recorded absolute moves into offsets from the reference grid."""
        for (u, v, w), new_location in self.changed.items():
            self.control_points[u][v][w] = (
                new_location - self.cp_locations[u][v][w]
            )
| 38.929114 | 83 | 0.409703 | 15,828 | 0.997668 | 0 | 0 | 0 | 0 | 0 | 0 | 1,313 | 0.082761 |
fae347d92c6318856795474c7e5ee0e6b5c0295e | 31 | py | Python | src/DeepReinforcementLearning/__init__.py | Quantum56/AlphaZero-AI | 504522feb4e67211d5fb592f4b14a2cb8271d015 | [
"MIT"
] | 1 | 2019-11-12T01:55:36.000Z | 2019-11-12T01:55:36.000Z | src/DeepReinforcementLearning/__init__.py | Quantum56/AlphaZero-AI | 504522feb4e67211d5fb592f4b14a2cb8271d015 | [
"MIT"
] | 14 | 2019-11-12T00:09:26.000Z | 2022-02-10T00:46:30.000Z | src/DeepReinforcementLearning/__init__.py | Quantum56/AlphaZero-AI | 504522feb4e67211d5fb592f4b14a2cb8271d015 | [
"MIT"
] | null | null | null | # from AlphaZero-AI import game | 31 | 31 | 0.806452 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 31 | 1 |
fae38456a099876d2c62dc6bca6f8fac9beb8f54 | 721 | py | Python | utilities/tests/testTwitterAPI.py | nikhilsajwan/uhackathon | 7bc4a6ac86959a8f9a639bf8aa984582d14b639f | [
"MIT"
] | 4 | 2019-07-02T22:59:48.000Z | 2021-03-03T21:14:09.000Z | utilities/tests/testTwitterAPI.py | SCCapstone/Social-Monomania | 61f35d59960e2695820b9b072b7663e156b05232 | [
"MIT"
] | 3 | 2020-06-06T00:48:24.000Z | 2021-06-02T01:06:20.000Z | utilities/tests/testTwitterAPI.py | SCCapstone/Social-Monomania | 61f35d59960e2695820b9b072b7663e156b05232 | [
"MIT"
] | null | null | null | from django.test import TestCase
from ..twitterAPI import search
class TwitterTestCase(TestCase):
    """Unit test to ensure that search string is found in 100 new reddit posts returned from API"""

    def setUp(self):
        # Query the live API once; results are reused by the test method.
        self.queryString = "Police"
        self.submissions = search(self.queryString)

    def test_submissions_contain_query(self):
        # Only the first returned status is checked, as in the original test.
        for status in self.submissions.get('statuses'):
            self.assertTrue(self.queryString in status.get('text'))
            break
fae3fe1eca892e27465fc00f29bc82ac27f5a740 | 2,130 | py | Python | recipes/mio/all/conanfile.py | rockandsalt/conan-center-index | d739adcec3e4dd4c250eff559ceb738e420673dd | [
"MIT"
] | 562 | 2019-09-04T12:23:43.000Z | 2022-03-29T16:41:43.000Z | recipes/mio/all/conanfile.py | rockandsalt/conan-center-index | d739adcec3e4dd4c250eff559ceb738e420673dd | [
"MIT"
] | 9,799 | 2019-09-04T12:02:11.000Z | 2022-03-31T23:55:45.000Z | recipes/mio/all/conanfile.py | rockandsalt/conan-center-index | d739adcec3e4dd4c250eff559ceb738e420673dd | [
"MIT"
] | 1,126 | 2019-09-04T11:57:46.000Z | 2022-03-31T16:43:38.000Z | from conans import ConanFile, tools
import os
required_conan_version = ">=1.33.0"
class MioConan(ConanFile):
    """Conan recipe for mio, a header-only C++11 memory-mapped file IO library."""
    name = "mio"
    description = "Cross-platform C++11 header-only library for memory mapped file IO."
    license = "MIT"
    topics = ("mio", "mmap", "memory-mapping", "fileviewer")
    homepage = "https://github.com/mandreyel/mio"
    url = "https://github.com/conan-io/conan-center-index"
    settings = "os", "compiler"
    exports_sources = "patches/**"

    @property
    def _source_subfolder(self):
        # Folder the upstream sources are extracted into.
        return "source_subfolder"

    def validate(self):
        # mio requires C++11; enforce it only when the profile pins a cppstd.
        if self.settings.compiler.get_safe("cppstd"):
            tools.check_min_cppstd(self, 11)

    def package_id(self):
        # Header-only library: a single package id for all settings/options.
        self.info.header_only()

    def source(self):
        tools.get(**self.conan_data["sources"][self.version],
                  destination=self._source_subfolder, strip_root=True)

    def build(self):
        # Apply any version-specific patches declared in conandata.yml.
        for patch in self.conan_data.get("patches", {}).get(self.version, []):
            tools.patch(**patch)

    def package(self):
        self.copy("LICENSE", dst="licenses", src=self._source_subfolder)
        self.copy("*pp", dst="include", src=os.path.join(self._source_subfolder, "include"))

    def package_info(self):
        # Component names mirror upstream's CMake targets (mio-headers and,
        # on Windows, the full/min WinAPI variants).
        self.cpp_info.names["cmake_find_package"] = "mio"
        self.cpp_info.names["cmake_find_package_multi"] = "mio"
        self.cpp_info.components["mio-headers"].names["cmake_find_package"] = "mio-headers"
        self.cpp_info.components["mio-headers"].names["cmake_find_package_multi"] = "mio-headers"
        if self.settings.os == "Windows":
            self.cpp_info.components["mio_full_winapi"].names["cmake_find_package"] = "mio_full_winapi"
            self.cpp_info.components["mio_full_winapi"].names["cmake_find_package_multi"] = "mio_full_winapi"
            self.cpp_info.components["mio_min_winapi"].names["cmake_find_package"] = "mio_min_winapi"
            self.cpp_info.components["mio_min_winapi"].names["cmake_find_package_multi"] = "mio_min_winapi"
            self.cpp_info.components["mio_min_winapi"].defines = ["WIN32_LEAN_AND_MEAN", "NOMINMAX"]
| 39.444444 | 109 | 0.668075 | 2,044 | 0.959624 | 0 | 0 | 76 | 0.035681 | 0 | 0 | 756 | 0.35493 |
fae455410ed9ea4b0f8b98a27a1ac01a68948851 | 3,330 | py | Python | auto-test/tools/tcpreplay_tools.py | asterfusion/Tapplet | 917020fce2aaa2678c36a91fb91f60b36142ad9e | [
"Apache-2.0"
] | 1 | 2019-12-30T11:49:35.000Z | 2019-12-30T11:49:35.000Z | auto-test/tools/tcpreplay_tools.py | asterfusion/Tapplet | 917020fce2aaa2678c36a91fb91f60b36142ad9e | [
"Apache-2.0"
] | null | null | null | auto-test/tools/tcpreplay_tools.py | asterfusion/Tapplet | 917020fce2aaa2678c36a91fb91f60b36142ad9e | [
"Apache-2.0"
] | null | null | null |
import os
import math
from scapy.all import *
from pytest_main import eth_config
from pytest_main import dump_eth_config
def getstatusoutput(cmd):
    """Run *cmd* in a shell with stderr merged into stdout.

    Returns (status, output); status is 0 on success and the trailing newline
    is stripped from the output.
    """
    stream = os.popen(cmd + " 2>&1", 'r')
    output = stream.read()
    status = stream.close()  # None on success
    if status is None:
        status = 0
    if output.endswith("\n"):
        output = output[:-1]
    return status, output
def getoutput(cmd):
    """Return only the (stderr-merged) output of *cmd*, discarding the status."""
    _, output = getstatusoutput(cmd)
    return output
def send_all_pkts(pkts_dir, pkts_list):
    """Replay every pcap in *pkts_list* (under *pkts_dir*) via the send helper script."""
    for pkt_name in pkts_list:
        output = getoutput("/bin/bash ./tools/send_pkt_raw.sh " + eth_config + " " + pkts_dir + pkt_name)
        # The helper script prints its exit code on the last line; 0 means success.
        assert int(output.split('\n')[-1]) == 0
# Capture file written by the tcpdump helper scripts below.
tcpdump_output="./result/tresult.pcap"
def send_pkts_with_tcpdump(pkts_dir, pkt_name, waittime=1):
    """Replay one pcap while capturing the DUT's output interface with tcpdump."""
    cmd = ("/bin/bash ./tools/send_pkt_with_tcpdump.sh {0} {1} {2} {3} {4} 2>&1; "
           "echo send_pkt_with_tcpdump_result:$?").format(
        eth_config, tcpdump_output, pkts_dir + pkt_name, dump_eth_config, waittime)
    print(cmd)
    output = getoutput(cmd)
    # The shell echoes the script's exit code after a known marker; scan for it.
    status = -1
    for line in output.split('\n'):
        if "send_pkt_with_tcpdump_result" in line:
            status = int(line.split(":")[1])
    assert status == 0
#this function user tcpdump options -Q so:tcpdump version must >4.9.1
def send_pkts_with_tcpdump_with_direction(pkts_dir, pkt_name, direction="inout", waittime=1):
    """Replay one pcap with a direction-filtered tcpdump capture.

    Uses tcpdump's -Q option, so tcpdump must be >= 4.9.1.
    """
    cmd = ("/bin/bash ./tools/send_pkt_with_tcpdump_with_direction.sh {0} {1} {2} {3} {4} {5} 2>&1; "
           "echo send_pkt_with_tcpdump_result_with_direction:$?").format(
        eth_config, tcpdump_output, pkts_dir + pkt_name, dump_eth_config, direction, waittime)
    print(cmd)
    output = getoutput(cmd)
    # Scan for the echoed exit-code marker.
    status = -1
    for line in output.split('\n'):
        if "send_pkt_with_tcpdump_result_with_direction" in line:
            status = int(line.split(":")[1])
    assert status == 0
def pkt_tcpdump_count(count=1):
    """Capture *count* packets on the dump interface and check the helper's exit code."""
    cmd = "/bin/bash ./tools/pkt_tcpdump.sh {0} {1} {2} 2>&1; echo pkt_tcp_dump_result:$?".format(
        dump_eth_config, tcpdump_output, count)
    print(cmd)
    output = getoutput(cmd)
    # Scan for the echoed exit-code marker.
    status = -1
    for line in output.split('\n'):
        if "pkt_tcp_dump_result" in line:
            status = int(line.split(":")[1])
    assert status == 0
def check_pkt_content(captured_pkt_list, check_pkts):
    """Assert the captured packets match the expected packets byte-for-byte."""
    print(captured_pkt_list)
    print(check_pkts)
    assert len(captured_pkt_list) == len(check_pkts)
    for captured, expected in zip(captured_pkt_list, check_pkts):
        assert raw(captured) == raw(expected)
# editcap_num : New packages are generated from the <editcap_num> package
def editcap_pkt_num(pkt, begin_editcap_num, end_edtcap_num=None):
    """Keep only packets begin..end (or just begin) of *pkt*, rewriting it in place via editcap -r."""
    if end_edtcap_num is None:
        cmd = "editcap -r {0} {0} {1}".format(pkt, begin_editcap_num)
    else:
        cmd = "editcap -r {0} {0} {1}-{2}".format(pkt, begin_editcap_num, end_edtcap_num)
    result = getstatusoutput(cmd)
    assert result[0] >= 0
| 37.41573 | 158 | 0.657057 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 805 | 0.241742 |
fae463c351e42ad7cf6fcfb323f650cd6cc418ae | 304 | py | Python | tests/test_statebus.py | invisible-college/tightrope | f0c96dd6702e9d4b730cffac70829b56f76077b6 | [
"MIT"
] | 1 | 2021-08-22T05:09:05.000Z | 2021-08-22T05:09:05.000Z | tests/test_statebus.py | invisible-college/tightrope | f0c96dd6702e9d4b730cffac70829b56f76077b6 | [
"MIT"
] | 3 | 2017-09-18T01:45:44.000Z | 2017-10-17T23:26:22.000Z | tests/test_statebus.py | invisible-college/tightrope | f0c96dd6702e9d4b730cffac70829b56f76077b6 | [
"MIT"
] | null | null | null | // Test calls to statebus server
// NOTE(review): this file is JavaScript despite being stored with a .py path.
// Create a statebus instance and bridge it to the remote bus over websockets.
var bus = require('statebus/server')();
bus.ws_client("/*", "ws://aws.local-box.org:45678");
// Fetch the shared state object at /paul/code and log its current contents.
x = bus.fetch("/paul/code");
console.log(JSON.stringify(x));
if (!x.written) {
  // First run: mark the object so subsequent runs can detect it.
  console.log("No member .written found, setting it now");
  x.written = "here it is";
}
save(x);  // persist the (possibly modified) state back to the bus
| 25.333333 | 58 | 0.654605 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 117 | 0.384868 |
fae48fbff8a06a587206ef5fa49056a7f5046d73 | 1,205 | py | Python | src/pyams_utils/interfaces/intids.py | Py-AMS/pyams-utils | 65b166596a8b9f66fb092a69ce5d53ac6675685e | [
"ZPL-2.1"
] | null | null | null | src/pyams_utils/interfaces/intids.py | Py-AMS/pyams-utils | 65b166596a8b9f66fb092a69ce5d53ac6675685e | [
"ZPL-2.1"
] | null | null | null | src/pyams_utils/interfaces/intids.py | Py-AMS/pyams-utils | 65b166596a8b9f66fb092a69ce5d53ac6675685e | [
"ZPL-2.1"
] | null | null | null | #
# Copyright (c) 2008-2015 Thierry Florac <tflorac AT ulthar.net>
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
"""PyAMS_utils.interfaces.intids module
Small set of interfaces used by IIntIds utilities.
"""
from zope.interface import Interface
from zope.schema import Int, TextLine
__docformat__ = 'restructuredtext'
from pyams_utils import _
#
# Generic interfaces
#
class IIndexLength(Interface):
    """Index length interface"""

    # Total number of elements currently held by the index.
    count = Int(title=_("Indexed elements count"),
                readonly=True)
class IUniqueID(Interface):
    """Interface used to get unique ID of an object"""

    # Stable, globally unique identifier; suitable as a link target.
    oid = TextLine(title="Unique ID",
                   description="Globally unique identifier of this object can be used to create "
                               "internal links",
                   readonly=True)
| 26.777778 | 97 | 0.699585 | 448 | 0.371784 | 0 | 0 | 0 | 0 | 0 | 0 | 807 | 0.66971 |
fae717b2d4db53ea73a947ac37133ff735c46b9c | 282 | py | Python | reo/migrations/0083_merge_20201207_1317.py | akuam1/REopt_Lite_API | fb5a88ee52351b725fda5c15712b617f6e97ddca | [
"BSD-3-Clause"
] | 41 | 2020-02-21T08:25:17.000Z | 2022-01-14T23:06:42.000Z | reo/migrations/0083_merge_20201207_1317.py | akuam1/REopt_Lite_API | fb5a88ee52351b725fda5c15712b617f6e97ddca | [
"BSD-3-Clause"
] | 167 | 2020-02-17T17:26:47.000Z | 2022-01-20T20:36:54.000Z | reo/migrations/0083_merge_20201207_1317.py | akuam1/REopt_Lite_API | fb5a88ee52351b725fda5c15712b617f6e97ddca | [
"BSD-3-Clause"
] | 31 | 2020-02-20T00:22:51.000Z | 2021-12-10T05:48:08.000Z | # Generated by Django 2.2.13 on 2020-12-07 13:17
from django.db import migrations
class Migration(migrations.Migration):
    """Merge migration: unifies the 0075 and 0082 branches; performs no schema changes."""

    dependencies = [
        ('reo', '0075_auto_20201125_1947'),
        ('reo', '0082_chpmodel_chp_unavailability_hourly'),
    ]

    # Intentionally empty: a merge migration only reconciles migration history.
    operations = [
    ]
| 18.8 | 59 | 0.659574 | 196 | 0.695035 | 0 | 0 | 0 | 0 | 0 | 0 | 124 | 0.439716 |
fae76c8d74e47c824e067609790c8f64a424bb8f | 4,020 | py | Python | source/cifar/defense/base.py | DingfanChen/RelaxLoss | d21c82dee7016cd0cb6688a408104eeb0d832790 | [
"MIT"
] | 4 | 2022-02-08T06:39:13.000Z | 2022-03-27T13:57:13.000Z | source/cifar/defense/base.py | DingfanChen/RelaxLoss | d21c82dee7016cd0cb6688a408104eeb0d832790 | [
"MIT"
] | 1 | 2022-02-22T10:17:49.000Z | 2022-02-22T22:10:52.000Z | source/cifar/defense/base.py | DingfanChen/RelaxLoss | d21c82dee7016cd0cb6688a408104eeb0d832790 | [
"MIT"
] | null | null | null | import os
import sys
import time
import torch
import torch.nn as nn
import random
import numpy as np
import torchvision.transforms as transforms
FILE_DIR = os.path.dirname(os.path.abspath(__file__))
DATA_ROOT = os.path.join(FILE_DIR, '../../../data')
sys.path.append(os.path.join(FILE_DIR, '../'))
sys.path.append(os.path.join(FILE_DIR, '../../'))
from dataset import CIFAR10, CIFAR100
from utils import BaseTrainer, Partition
class CIFARTrainer(BaseTrainer):
    """Trainer base for CIFAR-10/100: wires up transforms, target/shadow splits and dataloaders."""

    def set_dataloader(self):
        """Configure the dataset class, transforms, data partition and train/test loaders.

        Raises:
            ValueError: if ``args.dataset`` or ``args.partition`` has an unsupported
                value (previously this fell through silently and failed later with
                an AttributeError on the missing attributes).
        """
        if self.args.dataset == 'CIFAR10':
            self.dataset = CIFAR10
            self.num_classes = 10
            self.dataset_size = 60000
        elif self.args.dataset == 'CIFAR100':
            self.dataset = CIFAR100
            self.num_classes = 100
            self.dataset_size = 60000
        else:
            raise ValueError('Unsupported dataset: %s' % self.args.dataset)

        # Standard CIFAR channel statistics, shared by train and test transforms.
        normalize = transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010))
        if self.args.if_data_augmentation:
            print('With data augmentation')
            transform_train = transforms.Compose([transforms.RandomCrop(32, padding=4),
                                                  transforms.RandomHorizontalFlip(),
                                                  transforms.ToTensor(),
                                                  normalize])
        else:
            print('Without data augmentation')
            transform_train = transforms.Compose([transforms.ToTensor(), normalize])
        transform_test = transforms.Compose([transforms.ToTensor(), normalize])
        self.transform_train = transform_train
        self.transform_test = transform_test

        ### Set partition: the target model shuffles a fresh index permutation;
        ### a shadow model reuses the target's permutation when it can be found.
        if self.args.partition == 'target':
            indices = np.arange(self.dataset_size).astype(int)
            np.random.shuffle(indices)
            np.save(os.path.join(self.save_dir, 'full_idx'), indices)
            partition = Partition(dataset_size=self.dataset_size, indices=indices)
            self.partition = partition
            self.trainset_idx, self.testset_idx = partition.get_target_indices()
        elif self.args.partition == 'shadow':
            try:
                target_path = os.path.join(self.save_dir.replace("shadow", ""), 'full_idx.npy')
                indices = np.load(target_path)
                print('Load indices from target model:', target_path)
            except (OSError, ValueError):  # missing or unreadable file (was a bare except)
                print('Cannot find target model, reinitialize indices')
                indices = np.arange(self.dataset_size).astype(int)
                np.random.shuffle(indices)
                np.save(os.path.join(self.save_dir, 'full_idx'), indices)
            partition = Partition(dataset_size=self.dataset_size, indices=indices)
            self.partition = partition
            self.trainset_idx, self.testset_idx = partition.get_shadow_indices()
        else:
            raise ValueError('Unsupported partition: %s' % self.args.partition)

        ## Set dataloader
        trainset = self.dataset(root=self.data_root, indices=self.trainset_idx,
                                download=True, transform=self.transform_train)
        testset = self.dataset(root=self.data_root, indices=self.testset_idx,
                               download=True, transform=self.transform_test)
        trainloader = torch.utils.data.DataLoader(trainset, batch_size=self.args.train_batchsize,
                                                  shuffle=True, num_workers=self.args.num_workers)
        testloader = torch.utils.data.DataLoader(testset, batch_size=self.args.test_batchsize,
                                                 shuffle=False, num_workers=self.args.num_workers)
        self.trainset = trainset
        self.trainloader = trainloader
        self.testset = testset
        self.testloader = testloader
| 48.433735 | 119 | 0.582338 | 3,589 | 0.892786 | 0 | 0 | 0 | 0 | 0 | 0 | 321 | 0.079851 |
fae774df4ad53722905069cd6e306f9144ad0112 | 4,462 | py | Python | hangman.py | maritzapott/hangmangame | 773232abf8095ceef5ccfe38189c316064fc6f4b | [
"MIT"
] | null | null | null | hangman.py | maritzapott/hangmangame | 773232abf8095ceef5ccfe38189c316064fc6f4b | [
"MIT"
] | null | null | null | hangman.py | maritzapott/hangmangame | 773232abf8095ceef5ccfe38189c316064fc6f4b | [
"MIT"
] | 1 | 2022-02-19T04:20:47.000Z | 2022-02-19T04:20:47.000Z | # HANGMAN GAME
from collections import namedtuple
import main
# NOTE(review): this namedtuple *class* is used as a plain namespace — board,
# mistakes, letters and status are assigned as class attributes at runtime and
# no instance is ever created.
game_board = namedtuple('game_board', ['board', 'mistakes', 'letters', 'status'])
def welcome():
    """Starts the game: picks a word and runs the guess loop until win, loss or quit."""
    print("Welcome")
    word = main._choose_word()
    _print_start_game()
    _print_start_spaces(word)
    game_board.letters = []
    # Starts at -1 so the first wrong guess draws board stage 0.
    game_board.mistakes = -1
    game_board.status = True
    while game_board.status:
        user_input = input("Guess a letter. To get a hint, type hint. To quit, type QUIT: \n").lower()
        # The input is lower-cased above, so compare against lower-case keywords.
        # (The original compared against 'QUIT', which could never match, making
        # quitting impossible; it also tested for "\n", which input() never returns.)
        if user_input == 'quit':
            print("Thanks for playing!")
            game_board.status = False
            print("Your word was: ", word)
            print('GAME OVER')
        elif user_input == 'hint':
            hint = main._hint(word)
            print(hint.upper())
            _print_board()
            _update_blank_spaces(user_input, word)
        else:
            print('You guessed:', user_input, '\n')
            _check_input(user_input, word)
            _update_blank_spaces(user_input, word)
def _print_start_game() -> None:
    """Prints the starting game board."""
    # Gallows pieces, top to bottom; no body parts drawn yet.
    # NOTE(review): the spacing inside these strings looks collapsed relative to
    # typical hangman ASCII art — confirm the intended alignment.
    top = ' _____\n'
    hang1 = '| |\n'
    hang2 = '| |\n'
    leg1 = ' |\n'
    leg2 = ' |\n'
    leg3 = ' |\n'
    stand = '______\n'
    # The board is stored as a single-element list of one multi-line string.
    game_board.board = [top + hang1 + hang2 + leg1 + leg2 + leg3 + stand]
    _print_board()
def _print_start_spaces(word) -> None:
    """Prints one blank ('_ ') per letter of *word*, leaving word gaps as spaces."""
    for ch in word:
        print(" " if ch == " " else "_ ", end='')
    print()
    print()
def _check_input(user_input: str, word: str):
    """Checks if there is or isn't a wrong answer."""
    # Count how many positions in the word match the guessed letter.
    hits = sum(1 for ch in word if ch == user_input)
    if hits > 1:
        print('You guessed correctly:', hits, 'letters\n')
    else:
        print('You guessed correctly:', hits, 'letter\n')
    if hits == 0:
        _wrong_answers()
    else:
        # Remember the correct guess so _update_blank_spaces can reveal it.
        game_board.letters.append(user_input)
        _print_board()
def _wrong_answers() -> None:
    """Prints the man on the hangman board."""
    # Each wrong guess increments the mistake counter (starts at -1 in
    # welcome(), so the first wrong guess lands on 0).
    game_board.mistakes += 1
    top = ' ____\n'
    hang1 = ' | |\n'
    hang2 = ' | |\n'
    top_body = top + hang1 + hang2
    # Progressive body parts: head, torso, one arm, both arms, one leg,
    # both legs, base. NOTE(review): '\|' and '/ \ ' are invalid escape
    # sequences in a normal string (DeprecationWarning); raw strings
    # would be safer — confirm the intended ASCII art before changing.
    wrong_answers = [' o |\n',' | |\n', '\| |\n', '\|/ |\n', '/ |\n', '/ \ |\n', ' _____\n']
    rest_of_body = [' |\n', ' |\n', ' _____\n']
    if game_board.mistakes == 0:
        game_board.board = [top_body + wrong_answers[0] + rest_of_body[0] + rest_of_body[1] + rest_of_body[2]]
        _print_board()
    elif game_board.mistakes == 1:
        game_board.board = [top_body + wrong_answers[0] + wrong_answers[1]+ rest_of_body[1] + rest_of_body[2]]
        _print_board()
    elif game_board.mistakes == 2:
        game_board.board = [top_body + wrong_answers[0] + wrong_answers[2] + rest_of_body[1] + rest_of_body[2]]
        _print_board()
    elif game_board.mistakes == 3:
        game_board.board = [top_body + wrong_answers[0] + wrong_answers[3] + rest_of_body[1] + rest_of_body[2]]
        _print_board()
    elif game_board.mistakes == 4:
        game_board.board = [top_body + wrong_answers[0] + wrong_answers[3] + wrong_answers[4] + rest_of_body[2]]
        _print_board()
    elif game_board.mistakes == 5:
        # Sixth wrong guess: full figure drawn, game ends.
        game_board.board = [top_body + wrong_answers[0] + wrong_answers[3] + wrong_answers[5] + rest_of_body[2]]
        _print_board()
        _game_over()
def _update_blank_spaces(user_input, word) -> None:
    """Prints out the letter spaces."""
    # Reveal the just-guessed letter, any previously guessed letters, and
    # word-internal spaces; everything else stays masked as "_ ".
    for letter in word:
        if letter == user_input:
            print(letter, end='')
        elif letter in game_board.letters:
            print(letter, end='')
        elif letter == ' ':
            print(" ", end='')
        else:
            print('_ ', end='')
    print()
    print()
    # Side effect: may flip game_board.status to False on a full reveal.
    _check_winner(word)
def _print_board() -> None:
    """Write every stored board segment to stdout, one per print call."""
    segments = list(game_board.board)
    for segment in segments:
        print(segment)
def _check_winner(word) -> None:
    """Declare a win when every character of *word* has been guessed
    (spaces count as already revealed)."""
    # Note: a character that is both a guessed letter and a space would be
    # counted twice, matching the original's tallying exactly.
    revealed = sum(
        (letter in game_board.letters) + (letter == " ")
        for letter in word
    )
    if revealed == len(word):
        print('WINNER')
        game_board.status = False
def _game_over() -> None:
    """Ends game."""
    # Flipping the flag terminates the main loop in welcome().
    game_board.status = False
# Entry point: start the game only when run as a script, not on import.
if __name__ == "__main__":
    welcome()
| 30.561644 | 112 | 0.571717 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 819 | 0.18355 |
fae78fe389b664367fb4e390660faee2c1fa36ea | 159 | py | Python | shows/Text.py | fenceFoil/canopto | 2692d19282c8c7090d7e4201716627789d38a821 | [
"BSD-3-Clause"
] | null | null | null | shows/Text.py | fenceFoil/canopto | 2692d19282c8c7090d7e4201716627789d38a821 | [
"BSD-3-Clause"
] | null | null | null | shows/Text.py | fenceFoil/canopto | 2692d19282c8c7090d7e4201716627789d38a821 | [
"BSD-3-Clause"
] | null | null | null | #!/bin/python3
import Canopto
import pygame
from pygame.locals import *
import time
import random
import colorsys
c = Canopto(2, 8, previewEnabled = True)
| 12.230769 | 40 | 0.761006 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 14 | 0.08805 |
faeb54d9605b6182b7e92333f3846926f9dfc119 | 8,246 | py | Python | ls/joyous/models/one_off_events.py | tjwalch/ls.joyous | 0ee50d3af71c066bddb2310948b02f74b52ee253 | [
"BSD-3-Clause"
] | 72 | 2018-03-16T16:35:08.000Z | 2022-03-23T08:09:33.000Z | polrev/ls/joyous/models/one_off_events.py | polrev-github/polrev-django | 99108ace1a5307b14c3eccb424a9f9616e8c02ae | [
"MIT"
] | 41 | 2018-03-25T20:36:52.000Z | 2022-03-10T08:59:27.000Z | polrev/ls/joyous/models/one_off_events.py | polrev-github/polrev-django | 99108ace1a5307b14c3eccb424a9f9616e8c02ae | [
"MIT"
] | 28 | 2018-08-13T22:36:09.000Z | 2022-03-17T12:24:15.000Z | # ------------------------------------------------------------------------------
# Joyous events models
# ------------------------------------------------------------------------------
import datetime as dt
from django.db import models
from django.db.models.query import ModelIterable
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
from wagtail.core.models import Page
from wagtail.admin.edit_handlers import FieldPanel
from wagtail.images.edit_handlers import ImageChooserPanel
from ..utils.telltime import (todayUtc, getAwareDatetime, getLocalDatetime,
getLocalDate, getLocalTime)
from ..utils.telltime import timeFormat
from ..edit_handlers import TimePanel
from ..forms import FormDefender
from .groups import get_group_model_string
from .event_base import (ThisEvent, EventsByDayList,
EventManager, EventQuerySet, EventPageForm, EventBase)
# ------------------------------------------------------------------------------
# Helper types and constants
# ------------------------------------------------------------------------------
# Padding windows used by the querysets below to widen date filters, so
# events whose local-time date differs from UTC are not missed.
_1day = dt.timedelta(days=1)
_2days = dt.timedelta(days=2)
# ------------------------------------------------------------------------------
# Event models
# ------------------------------------------------------------------------------
class SimpleEventQuerySet(EventQuerySet):
    """Queryset for single-day events, with date-window helpers."""

    def current(self):
        # Events dated from yesterday (UTC) onwards count as current;
        # the 1-day pad covers time-zone offsets.
        qs = super().current()
        return qs.filter(date__gte = todayUtc() - _1day)

    def future(self):
        qs = super().future()
        return qs.filter(date__gte = todayUtc() - _1day)

    def past(self):
        qs = super().past()
        return qs.filter(date__lte = todayUtc() + _1day)

    def byDay(self, fromDate, toDate):
        """Return the events grouped into an EventsByDayList for the
        inclusive local-date range [fromDate, toDate]."""
        request = self.request

        class ByDayIterable(ModelIterable):
            # Custom iterable: instead of yielding pages, bucket each page
            # into per-day lists using its local from/to dates.
            def __iter__(self):
                evods = EventsByDayList(fromDate, toDate)
                for page in super().__iter__():
                    pageFromDate = getLocalDate(page.date,
                                                page.time_from, page.tz)
                    pageToDate = getLocalDate(page.date,
                                              page.time_to, page.tz)
                    thisEvent = ThisEvent(page, url=page.get_url(request))
                    evods.add(thisEvent, pageFromDate, pageToDate)
                yield from evods

        qs = self._clone()
        qs._iterable_class = ByDayIterable
        # 2-day pad: an event near the range boundary may shift across a
        # date line when converted to local time.
        return qs.filter(date__range=(fromDate - _2days, toDate + _2days))
class SimpleEventPage(EventBase, Page, metaclass=FormDefender):
    """A non-recurring event that takes place on a single date."""

    events = EventManager.from_queryset(SimpleEventQuerySet)()

    class Meta:
        verbose_name = _("event page")
        verbose_name_plural = _("event pages")
        default_manager_name = "objects"

    parent_page_types = ["joyous.CalendarPage",
                         "joyous.SpecificCalendarPage",
                         "joyous.GeneralCalendarPage",
                         get_group_model_string()]
    subpage_types = []
    base_form_class = EventPageForm

    # The single calendar date of the event (time fields come from EventBase).
    date = models.DateField(_("date"), default=dt.date.today)

    content_panels = Page.content_panels + [
        FieldPanel('category'),
        ImageChooserPanel('image'),
        FieldPanel('date'),
        TimePanel('time_from'),
        TimePanel('time_to'),
        FieldPanel('tz'),
    ] + EventBase.content_panels1

    # Anything inheriting from models.Model needs its own __init__ or
    # modeltranslation patch_constructor may break it
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    @property
    def when(self):
        """
        A string describing when the event occurs (in the local time zone).
        """
        return self._getLocalWhen(self.date)

    def _getFromTime(self, atDate=None):
        """
        Time that the event starts (in the local time zone).
        """
        return getLocalTime(self.date, self.time_from, self.tz)

    def _getFromDt(self):
        """
        Datetime that the event starts (in the local time zone).
        """
        return getLocalDatetime(self.date, self.time_from, self.tz)

    def _getToDt(self):
        """
        Datetime that the event ends (in the local time zone).
        """
        return getLocalDatetime(self.date, self.time_to, self.tz)
# ------------------------------------------------------------------------------
class MultidayEventQuerySet(EventQuerySet):
    """Queryset for events spanning a date range (date_from..date_to)."""

    def current(self):
        # Current = not yet finished (1-day pad for time-zone offsets).
        qs = super().current()
        return qs.filter(date_to__gte = todayUtc() - _1day)

    def future(self):
        qs = super().future()
        return qs.filter(date_from__gte = todayUtc() - _1day)

    def past(self):
        qs = super().past()
        return qs.filter(date_from__lte = todayUtc() + _1day)

    def byDay(self, fromDate, toDate):
        """Return the events grouped into an EventsByDayList for the
        inclusive local-date range [fromDate, toDate]."""
        request = self.request

        class ByDayIterable(ModelIterable):
            # Bucket each multiday page into per-day lists spanning its
            # local start and end dates.
            def __iter__(self):
                evods = EventsByDayList(fromDate, toDate)
                for page in super().__iter__():
                    pageFromDate = getLocalDate(page.date_from,
                                                page.time_from, page.tz)
                    pageToDate = getLocalDate(page.date_to,
                                              page.time_to, page.tz)
                    thisEvent = ThisEvent(page, url=page.get_url(request))
                    evods.add(thisEvent, pageFromDate, pageToDate)
                yield from evods

        qs = self._clone()
        qs._iterable_class = ByDayIterable
        # Keep any event that overlaps the (padded) requested range.
        return qs.filter(date_to__gte = fromDate - _2days) \
                 .filter(date_from__lte = toDate + _2days)
class MultidayEventPageForm(EventPageForm):
    """Form for multiday events; validates the date/time ordering."""

    def _checkStartBeforeEnd(self, cleaned_data):
        # Missing dates default to the extreme values so a single missing
        # field cannot trigger a spurious ordering error.
        startDate = cleaned_data.get('date_from', dt.date.min)
        endDate = cleaned_data.get('date_to', dt.date.max)
        if startDate > endDate:
            self.add_error('date_to', _("Event cannot end before it starts"))
        elif startDate == endDate:
            # Same-day event: defer to the base class's time comparison.
            super()._checkStartBeforeEnd(cleaned_data)
class MultidayEventPage(EventBase, Page, metaclass=FormDefender):
    """A non-recurring event spanning a range of dates."""

    events = EventManager.from_queryset(MultidayEventQuerySet)()

    class Meta:
        verbose_name = _("multiday event page")
        verbose_name_plural = _("multiday event pages")
        default_manager_name = "objects"

    parent_page_types = ["joyous.CalendarPage",
                         "joyous.SpecificCalendarPage",
                         "joyous.GeneralCalendarPage",
                         get_group_model_string()]
    subpage_types = []
    base_form_class = MultidayEventPageForm

    # Inclusive start and end dates of the event.
    date_from = models.DateField(_("start date"), default=dt.date.today)
    date_to = models.DateField(_("end date"), default=dt.date.today)

    content_panels = Page.content_panels + [
        FieldPanel('category'),
        ImageChooserPanel('image'),
        FieldPanel('date_from'),
        TimePanel('time_from'),
        FieldPanel('date_to'),
        TimePanel('time_to'),
        FieldPanel('tz'),
    ] + EventBase.content_panels1

    # Explicit __init__ kept for modeltranslation compatibility (see
    # SimpleEventPage above for the rationale).
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    @property
    def when(self):
        """
        A string describing when the event occurs (in the local time zone).
        """
        return self._getLocalWhen(self.date_from, self.date_to)

    def _getFromTime(self, atDate=None):
        """
        Time that the event starts (in the local time zone).
        """
        return getLocalTime(self.date_from, self.time_from, self.tz)

    def _getFromDt(self):
        """
        Datetime that the event starts (in the local time zone).
        """
        return getLocalDatetime(self.date_from, self.time_from, self.tz)

    def _getToDt(self):
        """
        Datetime that the event ends (in the local time zone).
        """
        return getLocalDatetime(self.date_to, self.time_to, self.tz)
# ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------
| 37.652968 | 80 | 0.557361 | 6,578 | 0.79772 | 1,717 | 0.208222 | 367 | 0.044506 | 0 | 0 | 2,070 | 0.251031 |
faec8aa99a18a6be10e65d9e1fdc912f5b1a463a | 2,534 | py | Python | examples/titanic/assets/dataset/opener.py | eliaskousk/substra | 00b9cb6e8adfa4cffacc5e1df7f9d64ce1f3dde8 | [
"Apache-2.0"
] | 119 | 2019-10-25T13:30:58.000Z | 2021-06-02T12:13:04.000Z | examples/titanic/assets/dataset/opener.py | eliaskousk/substra | 00b9cb6e8adfa4cffacc5e1df7f9d64ce1f3dde8 | [
"Apache-2.0"
] | 182 | 2019-10-29T17:27:22.000Z | 2021-06-02T11:54:34.000Z | examples/titanic/assets/dataset/opener.py | eliaskousk/substra | 00b9cb6e8adfa4cffacc5e1df7f9d64ce1f3dde8 | [
"Apache-2.0"
] | 22 | 2019-10-25T13:45:41.000Z | 2021-05-11T20:55:22.000Z | import os
import pandas as pd
import random
import string
import numpy as np
import substratools as tools
class TitanicOpener(tools.Opener):
    """substratools Opener for the Titanic dataset stored as CSV files."""

    def get_X(self, folders):
        """Return the feature columns of every CSV found in *folders*."""
        data = self._get_data(folders)
        return self._get_X(data)

    def get_y(self, folders):
        """Return the 'Survived' label column of every CSV in *folders*."""
        data = self._get_data(folders)
        return self._get_y(data)

    def save_predictions(self, y_pred, path):
        """Write the predictions DataFrame to *path* as CSV (no index)."""
        with open(path, 'w') as f:
            y_pred.to_csv(f, index=False)

    def get_predictions(self, path):
        """Load a predictions CSV previously written by save_predictions."""
        return pd.read_csv(path)

    def fake_X(self, n_samples=None):
        """Return randomly generated fake features (for dry runs)."""
        data = self._fake_data(n_samples)
        return self._get_X(data)

    def fake_y(self, n_samples=None):
        """Return randomly generated fake labels (for dry runs)."""
        data = self._fake_data(n_samples)
        return self._get_y(data)

    @classmethod
    def _get_X(cls, data):
        # Features are every column except the label.
        return data.drop(columns=['Survived'])

    @classmethod
    def _get_y(cls, data):
        # Single-column label frame.
        return pd.DataFrame(data=data.get('Survived'), columns=['Survived'])

    @classmethod
    def _fake_data(cls, n_samples=None):
        """Build a random DataFrame shaped like the Titanic CSVs
        (at most 100 rows)."""
        N_SAMPLES = n_samples if n_samples and n_samples <= 100 else 100
        data = {
            'PassengerId': list(range(N_SAMPLES)),
            'Survived': [random.choice([True, False]) for k in range(N_SAMPLES)],
            'Pclass': [random.choice([1, 2, 3]) for k in range(N_SAMPLES)],
            'Name': ["".join(random.sample(string.ascii_letters, 10)) for k in range(N_SAMPLES)],
            'Sex': [random.choice(['male', 'female']) for k in range(N_SAMPLES)],
            'Age': [random.choice(range(7, 77)) for k in range(N_SAMPLES)],
            'SibSp': [random.choice(range(4)) for k in range(N_SAMPLES)],
            'Parch': [random.choice(range(4)) for k in range(N_SAMPLES)],
            'Ticket': ["".join(random.sample(string.ascii_letters, 10)) for k in range(N_SAMPLES)],
            'Fare': [random.choice(np.arange(15, 150, 0.01)) for k in range(N_SAMPLES)],
            'Cabin': ["".join(random.sample(string.ascii_letters, 3)) for k in range(N_SAMPLES)],
            'Embarked': [random.choice(['C', 'S', 'Q']) for k in range(N_SAMPLES)],
        }
        return pd.DataFrame(data)

    @classmethod
    def _get_data(cls, folders):
        """Concatenate every *.csv file found directly inside *folders*."""
        # find csv files
        paths = []
        for folder in folders:
            paths += [os.path.join(folder, f) for f in os.listdir(folder) if f[-4:] == '.csv']
        # BUG FIX: the original used DataFrame.append in a loop, which was
        # deprecated in pandas 1.4 and removed in pandas 2.0. Build the
        # frame in one pd.concat call (same default index behaviour).
        if not paths:
            return pd.DataFrame()
        return pd.concat([pd.read_csv(path) for path in paths])
faedb7593b3fda8461621c6c98b5de97e2ad88e0 | 1,064 | py | Python | test/integration/test_natural_language_understanding_v1.py | jsstylos/waston-developer-cloud-python-sdk | 97de097b8c86622ab2f30f5386bb74321d28addf | [
"Apache-2.0"
] | 1,579 | 2015-10-08T14:02:17.000Z | 2022-02-28T10:49:21.000Z | test/integration/test_natural_language_understanding_v1.py | jsstylos/waston-developer-cloud-python-sdk | 97de097b8c86622ab2f30f5386bb74321d28addf | [
"Apache-2.0"
] | 749 | 2015-10-08T20:00:24.000Z | 2022-03-21T21:33:17.000Z | test/integration/test_natural_language_understanding_v1.py | jsstylos/waston-developer-cloud-python-sdk | 97de097b8c86622ab2f30f5386bb74321d28addf | [
"Apache-2.0"
] | 1,006 | 2015-10-24T06:30:58.000Z | 2022-03-23T07:10:04.000Z | # coding: utf-8
from unittest import TestCase
import os
import ibm_watson
import pytest
import json
import time
from ibm_watson.natural_language_understanding_v1 import Features, EntitiesOptions, KeywordsOptions
# Integration test: skipped entirely unless an API key is present in the
# environment, since it calls the live Watson NLU service.
@pytest.mark.skipif(os.getenv('NATURAL_LANGUAGE_UNDERSTANDING_APIKEY') is None,
                    reason='requires NATURAL_LANGUAGE_UNDERSTANDING_APIKEY')
class TestNaturalLanguageUnderstandingV1(TestCase):
    """Smoke test for the ibm_watson NLU v1 client."""

    def setUp(self):
        self.natural_language_understanding = ibm_watson.NaturalLanguageUnderstandingV1(version='2018-03-16')
        # Opt out of data collection and mark requests as test traffic.
        self.natural_language_understanding.set_default_headers({
            'X-Watson-Learning-Opt-Out': '1',
            'X-Watson-Test': '1'
        })

    def test_analyze(self):
        # Only asserts that the service returns *something*; the content of
        # the analysis is not validated here.
        response = self.natural_language_understanding.analyze(
            text='Bruce Banner is the Hulk and Bruce Wayne is BATMAN! '
                 'Superman fears not Banner, but Wayne.',
            features=Features(entities=EntitiesOptions(), keywords=KeywordsOptions())).get_result()
        assert response is not None
faeec08412c17e1886d0f4332b15cb71403f5016 | 1,337 | py | Python | project/RealEstateMarketPlace/views/ListConversationsView.py | Mihaaai/RealEstateMarketplace | 9b9fa1376436801303e1ed0207ef09845a7d827e | [
"Apache-2.0"
] | null | null | null | project/RealEstateMarketPlace/views/ListConversationsView.py | Mihaaai/RealEstateMarketplace | 9b9fa1376436801303e1ed0207ef09845a7d827e | [
"Apache-2.0"
] | null | null | null | project/RealEstateMarketPlace/views/ListConversationsView.py | Mihaaai/RealEstateMarketplace | 9b9fa1376436801303e1ed0207ef09845a7d827e | [
"Apache-2.0"
] | null | null | null | from django.views.generic import ListView
from rest_framework import authentication, permissions
from ..models import Message,Listing,User
from django.db.models import Q
class ListConversationsView(ListView):
    """List, per listing, the users the logged-in user has exchanged
    messages with."""

    authentication_classes = (authentication.SessionAuthentication,)
    permission_classes = (permissions.IsAuthenticated,)
    template_name = 'list_conversations_template.html'
    context_object_name = 'conversations'

    def get_queryset(self):
        # NOTE(review): despite the name, this returns a dict mapping
        # Listing -> queryset of Users, not a QuerySet. ListView tolerates
        # any iterable, but confirm the template expects this shape.
        #get each listing for which there is at least a message by/from logged user
        _listings = Listing.objects.filter(pk__in = Message.objects.filter(Q(receiver_id=self.request.user)|Q(sender_id=self.request.user)).values('listing_id').distinct())
        conversations = {}
        #for each listing, find all users whom which logged user talked to
        for listing in _listings:
            sender_id_list = Message.objects.filter(receiver_id=self.request.user).filter(listing_id = listing).values('sender_id').distinct()
            receiver_id_list = Message.objects.filter(sender_id=self.request.user).filter(listing_id = listing).values('receiver_id').distinct()
            users = User.objects.filter(Q(pk__in = sender_id_list)| Q(pk__in = receiver_id_list)).distinct()
            conversations[listing] = users
        return conversations
faef976b28e6bb519f13efc13c691a451b2b70dc | 778 | py | Python | image/src/binarization.py | Wookhwang/Tensorflow_by_wook | 1388e2856564e93fa2e56b79530a7aaa3cec5b30 | [
"Apache-2.0"
] | null | null | null | image/src/binarization.py | Wookhwang/Tensorflow_by_wook | 1388e2856564e93fa2e56b79530a7aaa3cec5b30 | [
"Apache-2.0"
] | null | null | null | image/src/binarization.py | Wookhwang/Tensorflow_by_wook | 1388e2856564e93fa2e56b79530a7aaa3cec5b30 | [
"Apache-2.0"
] | null | null | null | import cv2 as cv
def nothing(x):
pass
cv.namedWindow('Binary')
cv.createTrackbar('threshold', 'Binary', 0, 255, nothing)
cv.setTrackbarPos('threshold', 'Binary', 167)
img_color = cv.imread('wallpaper-2994965.jpg', cv.IMREAD_COLOR)
cv.imshow('Color', img_color)
cv.waitKey(0)
img_gray = cv.cvtColor(img_color, cv.COLOR_BGR2GRAY)
cv.imshow('Gray', img_gray)
cv.waitKey(0)
while True:
low = cv.getTrackbarPos('threshold', 'Binary')
ret, img_binary = cv.threshold(img_gray, low, 255, cv.THRESH_BINARY_INV)
cv.imshow('Binary', img_binary)
img_result = cv.bitwise_and(img_color, img_color, mask = img_binary)
cv.imshow('result', img_result)
if cv.waitKey(1) & 0xFF == 27:
break
cv.destroyAllWindows()
| 22.228571 | 77 | 0.672237 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 117 | 0.150386 |
faf23c7862d37bd1a9a0c7bec5cf1daca9c2d615 | 1,210 | py | Python | member_join.py | zStartKiller/BotDiscordPython | 06283f03231c542fc72a64f43860d9d89b1f5c58 | [
"MIT"
] | 4 | 2021-12-03T18:23:13.000Z | 2021-12-31T00:18:29.000Z | member_join.py | zStartKiller/BotDiscordPython | 06283f03231c542fc72a64f43860d9d89b1f5c58 | [
"MIT"
] | null | null | null | member_join.py | zStartKiller/BotDiscordPython | 06283f03231c542fc72a64f43860d9d89b1f5c58 | [
"MIT"
] | 1 | 2021-12-03T18:29:57.000Z | 2021-12-03T18:29:57.000Z | import asyncio
import discord
from discord import channel
from discord.ext import commands
import random
from main import bot
class member_joining(commands.Cog):
    """Cog announcing member joins/leaves in a fixed channel."""

    def __init__(self, bot):
        self.bot = bot

    @commands.Cog.listener()
    async def on_member_join(self, member):
        # NOTE(review): uses the module-level `bot` imported from main, not
        # self.bot — works, but confirm this indirection is intended.
        channel = bot.get_channel(912380810995335271)
        embed=discord.Embed(title=":white_check_mark: │ Um novo membro entrou!", color=0xff0000)
        embed.add_field(name=f"Temos um novo membro em nossa comunidade: ", value=f"{member.mention}", inline=False)
        embed.set_footer(text=f"Todos os direitos reservados à: ...")
        await channel.send(embed=embed)

    @commands.Cog.listener()
    async def on_member_remove(self, member):
        # Mirror of on_member_join for departures; same hard-coded channel.
        channel = bot.get_channel(912380810995335271)
        embed=discord.Embed(title=":x: │ Um membro saiu ;(", color=0xff0000)
        embed.add_field(name=f"Um membro saiu de nossa comunidade: ", value=f"{member.mention}", inline=False)
        embed.set_footer(text=f"Todos os direitos reservados à: ...")
        await channel.send(embed=embed)
def setup(bot):
    # discord.py extension entry point: register the cog on load.
    bot.add_cog(member_joining(bot))
    print("Member | Carregado!")
faf418ba43d0d5638a18bcd19474ba97144519be | 4,020 | py | Python | instauto/api/actions/structs/friendships.py | marvic2409/instauto | 6a36a7369e687b8eaf3bf9fee0cc42f6d9703734 | [
"MIT"
] | null | null | null | instauto/api/actions/structs/friendships.py | marvic2409/instauto | 6a36a7369e687b8eaf3bf9fee0cc42f6d9703734 | [
"MIT"
] | null | null | null | instauto/api/actions/structs/friendships.py | marvic2409/instauto | 6a36a7369e687b8eaf3bf9fee0cc42f6d9703734 | [
"MIT"
] | null | null | null | from . import common as cmmn
import logging
import uuid
from typing import Optional
from instauto.api.structs import Surface
logger = logging.getLogger(__name__)
class _Base(cmmn.Base):
    """Shared fields for friendship (follow) API request structs."""

    # Defaults filled in from the client session via
    # _enable_datapoint_from_client in __init__.
    _csrftoken: str = None
    radio_type: str = 'wifi-none'
    device_id: str = None
    _uid: str = None
    _uuid: str = None
    user_id: str = None

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self._enable_datapoint_from_client('_csrftoken')
        self._enable_datapoint_from_client('device_id')
        self._enable_datapoint_from_client('_uid')
        self._enable_datapoint_from_client('_uuid')
        # Declare which datapoints subclasses must / may provide.
        self._custom_data['uuid'] = self.State.required
        self._custom_data['user_id'] = self.State.required
        self._custom_data['endpoint'] = self.State.required
        self._custom_data['surface'] = self.State.optional
class Create(_Base):
    def __init__(self, user_id: str, **kwargs):
        """Use this to create a friendship, i.e. follow a user."""
        super().__init__(**kwargs)
        self._data['endpoint'] = 'create'
        self._data['user_id'] = user_id
class Destroy(_Base):
    def __init__(self, user_id: str, surface: Optional[Surface] = None, **kwargs):
        """Use this to 'destroy' a friendship, i.e. unfollow.

        Args:
            user_id: id of the account to unfollow.
            surface: app surface the action originates from; when omitted
                the profile surface is used as the default.
        """
        super().__init__(**kwargs)
        self._data['endpoint'] = 'destroy'
        self._data['user_id'] = user_id
        self._data['surface'] = surface
        # BUG FIX: the original read `surface.profile`, which raises
        # AttributeError whenever `surface` is left as its default (None).
        # The intended fallback is the Surface enum's `profile` member.
        self._defaults['surface'] = Surface.profile
class Remove(_Base):
    """Remove *user_id* from the account's followers."""

    def __init__(self, user_id: str, **kwargs):
        super().__init__(**kwargs)
        self._data['endpoint'] = 'remove_follower'
        self._data['user_id'] = user_id
class Show(cmmn.Base):
    """Retrieves the following information for a friendship:
    {
        "blocking": False,
        "followed_by": False,
        "following": False,
        "incoming_request": False,
        "is_bestie": False,
        "is_blocking_reel": False,
        "is_muting_reel": False,
        "is_private": False,
        "is_restricted": False,
        "muting": False,
        "outgoing_request": False,
        "status": "ok"
    }
    """
    def __init__(self, user_id: str, **kwargs):
        super().__init__(**kwargs)
        self._custom_data['user_id'] = self.State.required
        self._custom_data['endpoint'] = self.State.required
        self._data['user_id'] = user_id
        # NOTE(review): this stores the State.required *sentinel* as the
        # endpoint value instead of an endpoint string — looks unintended;
        # compare with the other structs, which assign a literal endpoint.
        self._data['endpoint'] = cmmn.Base.State.required
class GetBase(cmmn.Base):
    """Paged retrieval of a user's follower/following list."""

    def __init__(self, user_id: str, surface: Optional[Surface] = None, **kwargs):
        super().__init__(**kwargs)
        self._custom_data['user_id'] = self.State.required
        self._custom_data['rank_token'] = self.State.required
        self._custom_data['search_surface'] = self.State.required
        self._custom_data['max_id'] = self.State.required
        self._custom_data['page'] = self.State.required

        self._data['user_id'] = user_id
        self._data['search_surface'] = surface

        # Defaults: fresh rank token per request, start at page 0.
        self._defaults['search_surface'] = Surface.follow_list
        self._defaults['rank_token'] = uuid.uuid4()
        self._defaults['max_id'] = None
        self._defaults['page'] = 0


# The requests for getting followers and your following, look exactly the same
# but we want to keep them in seperate structs for clarity.
GetFollowers = GetFollowing = GetBase
class PendingRequests:
    """Marker struct for retrieving pending follow requests (no payload)."""

    def __init__(self):
        pass
class ApproveRequest(cmmn.Base):
    """Approve a pending follow request from *user_id*."""

    def __init__(self, user_id: str, **kwargs):
        super().__init__(**kwargs)
        self._enable_datapoint_from_client('_csrftoken')
        self._enable_datapoint_from_client('_uid')
        self._enable_datapoint_from_client('_uuid')
        self._custom_data['radio_type'] = self.State.required
        self._custom_data['surface'] = self.State.required
        self._custom_data['user_id'] = self.State.required
        self._data['user_id'] = user_id
        self._defaults['surface'] = Surface.follow_requests
        self._defaults['radio_type'] = 'wifi-none'
| 32.16 | 82 | 0.651741 | 3,653 | 0.908706 | 0 | 0 | 0 | 0 | 0 | 0 | 1,105 | 0.274876 |
faf43a4433695f53b0308cbe7ff6a2053f4f2929 | 955 | py | Python | InteractiveBreak_Filtering/showAnswerFiltering.py | JoeGreiner/MorphologicalOperatorsDemo | fd5ba06548887629515964fc30d402d87630d597 | [
"BSD-3-Clause"
] | null | null | null | InteractiveBreak_Filtering/showAnswerFiltering.py | JoeGreiner/MorphologicalOperatorsDemo | fd5ba06548887629515964fc30d402d87630d597 | [
"BSD-3-Clause"
] | null | null | null | InteractiveBreak_Filtering/showAnswerFiltering.py | JoeGreiner/MorphologicalOperatorsDemo | fd5ba06548887629515964fc30d402d87630d597 | [
"BSD-3-Clause"
] | null | null | null | from IPython.display import display
from ipywidgets import widgets
# Jupyter widget: a one-shot "reveal the answer" button for the filtering
# exercise. The answer text is printed into the Output area on first click.
button = widgets.Button(description="Click for answer")
output = widgets.Output()
display(button, output)
already_clicked = False

def on_button_clicked(b):
    # Only react to the first click; later clicks are ignored.
    global already_clicked, button
    if not already_clicked:
        already_clicked = True
        button.description = 'Answer:'
        with output:
            print('We added Salt & Pepper noise (minimum value – pepper – black pixels;'
                  ' maximum value – salt – white pixels.\n\n'
                  'The median filter will sort the elements within the mask'
                  ' and then return the center value of the sorted elements.\n\n'
                  'Therefore, by construction, it is unlikely that the median filter chooses a noisy pixel,\nas noisy pixels'
                  ' are likely at the very begin/end of the sorted elements; and not a center pixel.')

button.on_click(on_button_clicked)
faf66d5e9e6ff74d2a82b5b0fd5dc4b83c98d750 | 426 | py | Python | respostas/migrations/0016_resposta_materia.py | Samio-Santos/Sistema_Questoes_Django | 415c28b386ac7848fdd244ba51c20239b730f4ae | [
"MIT"
] | null | null | null | respostas/migrations/0016_resposta_materia.py | Samio-Santos/Sistema_Questoes_Django | 415c28b386ac7848fdd244ba51c20239b730f4ae | [
"MIT"
] | null | null | null | respostas/migrations/0016_resposta_materia.py | Samio-Santos/Sistema_Questoes_Django | 415c28b386ac7848fdd244ba51c20239b730f4ae | [
"MIT"
] | null | null | null | # Generated by Django 3.2 on 2021-07-02 21:43
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('respostas', '0015_alter_resposta_banca'),
]
operations = [
migrations.AddField(
model_name='resposta',
name='materia',
field=models.CharField(blank=True, default=None, max_length=20, null=True),
),
]
| 22.421053 | 87 | 0.617371 | 335 | 0.786385 | 0 | 0 | 0 | 0 | 0 | 0 | 102 | 0.239437 |
faf67b2c9d286ee2d83587f71a298be32213ce3a | 523 | py | Python | libs/menus/menus.py | MilianoJunior/appSalva | d1ad23d06c57aa4b6d380ad637847b6842b68ccd | [
"MIT"
] | null | null | null | libs/menus/menus.py | MilianoJunior/appSalva | d1ad23d06c57aa4b6d380ad637847b6842b68ccd | [
"MIT"
] | null | null | null | libs/menus/menus.py | MilianoJunior/appSalva | d1ad23d06c57aa4b6d380ad637847b6842b68ccd | [
"MIT"
] | null | null | null | from kivymd.uix.boxlayout import MDBoxLayout
from kivymd.uix.toolbar import MDToolbar
class Menus():
    """Factory for the app's main vertical layout (toolbar on top)."""

    def __init__(self, *args, **kwargs):
        # NOTE(review): forwarding *args/**kwargs to object.__init__ raises
        # TypeError if any arguments are actually passed — confirm callers
        # always construct Menus() with no arguments.
        super().__init__(*args, **kwargs)

    def __call__(self):
        box_central = MDBoxLayout(orientation='vertical')
        # create components
        toolbar = MDToolbar(title='App Salva')
        # navigation = NavegationMenu()()
        # add components
        box_central.add_widget(toolbar)
        # box_central.add_widget(navigation)
        return box_central
faf6934e3cb37291d228f183808eb0c338d26479 | 1,342 | py | Python | setup.py | t-ceccarini/deep-b-spline-approximation | 9e48b593717486bbdac9bf0269a5645830d76082 | [
"MIT"
] | null | null | null | setup.py | t-ceccarini/deep-b-spline-approximation | 9e48b593717486bbdac9bf0269a5645830d76082 | [
"MIT"
] | null | null | null | setup.py | t-ceccarini/deep-b-spline-approximation | 9e48b593717486bbdac9bf0269a5645830d76082 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Thu Jan 13 23:29:22 2022
@author: Tommaso
"""
from setuptools import setup
VERSION = '0.2.8'
DESCRIPTION = 'A python package for bspline curve approximation using deep learning'

# Setting up
# NOTE(review): long_description_content_type is set but no long_description
# argument is passed, and author_email includes literal angle brackets —
# both look unintended; confirm against the PyPI listing.
setup(
    name='deep-b-spline-approximation',
    packages=['deep_b_spline_approximation'],
    version=VERSION,
    author="Tommaso Ceccarini",
    author_email="<tceccarini93@gmail.com>",
    description=DESCRIPTION,
    long_description_content_type="text/markdown",
    url='https://github.com/t-ceccarini/deep-b-spline-approximation',
    download_url='https://github.com/t-ceccarini/deep-b-spline-approximation/archive/refs/tags/v_0.2.8.tar.gz',
    install_requires=['torch','prettytable','numpy','scipy','matplotlib'],
    keywords=['python', 'deep learning', 'mlp', 'cnn', 'cagd', 'bspline', 'bezier'],
    classifiers=[
        "Development Status :: 1 - Planning",
        "Intended Audience :: Developers",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Operating System :: Unix",
        "Operating System :: MacOS :: MacOS X",
        "Operating System :: Microsoft :: Windows",
    ]
)
faf73cd0b10c574ff66ce3351fe78b6258b32478 | 1,697 | py | Python | source/db_api/crud/crud_documents.py | JungeAlexander/kbase_db_api | f3ec5e8b9ae509f9e8d962183efef21be61ef425 | [
"MIT"
] | 1 | 2021-09-19T14:31:44.000Z | 2021-09-19T14:31:44.000Z | source/db_api/crud/crud_documents.py | JungeAlexander/kbase_db_api | f3ec5e8b9ae509f9e8d962183efef21be61ef425 | [
"MIT"
] | 4 | 2020-10-13T08:41:49.000Z | 2021-04-29T18:05:40.000Z | source/db_api/crud/crud_documents.py | JungeAlexander/kbase_db_api | f3ec5e8b9ae509f9e8d962183efef21be61ef425 | [
"MIT"
] | null | null | null | from datetime import date
from typing import Iterable
from sqlalchemy.orm import Session
from db_api import models, schemas
def get_document(db: Session, document_id: str) -> models.Document:
    """Fetch a single Document by primary key (None if not found)."""
    return db.query(models.Document).filter(models.Document.id == document_id).first()
def get_documents_by_publication_date(
    db: Session, document_date: date
) -> Iterable[models.Document]:
    """Return all Documents published exactly on *document_date*."""
    return (
        db.query(models.Document)
        .filter(models.Document.publication_date == document_date)
        .all()
    )
def get_documents(
    db: Session, skip: int = 0, limit: int = 100
) -> Iterable[models.Document]:
    """Return a page of Documents (offset *skip*, at most *limit* rows)."""
    return db.query(models.Document).offset(skip).limit(limit).all()
def get_document_ids(db: Session, skip: int = 0, limit: int = 100):
    """Return a page of Document ids only (cheaper than full rows)."""
    return db.query(models.Document.id).offset(skip).limit(limit).all()
def search_document_summary(
    db: Session, query: str = "query"
) -> Iterable[models.Document]:
    """Case-insensitive substring search of *query* in Document.summary."""
    pattern = f"%{query}%"
    matches = db.query(models.Document).filter(
        models.Document.summary.ilike(pattern)  # type: ignore
    )
    return matches.all()
def create_document(db: Session, document: schemas.DocumentCreate) -> models.Document:
    """Insert a new Document and return the refreshed DB row."""
    db_document = models.Document(**document.dict())
    db.add(db_document)
    db.commit()
    # Refresh to pick up DB-generated fields (defaults, timestamps).
    db.refresh(db_document)
    return db_document
def update_document(db: Session, document: schemas.DocumentUpdate) -> models.Document:
    """Replace an existing Document row with *document*'s data.

    Implemented as delete-then-insert, so any column not present in the
    update payload is lost rather than preserved.
    """
    # TODO does not seem to update modified_date
    new_document = models.Document(**document.dict())
    old_document = get_document(db, new_document.id)
    db.delete(old_document)
    db.add(new_document)
    db.commit()
    db.refresh(new_document)
    return new_document
| 28.762712 | 101 | 0.70772 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 71 | 0.041839 |
faf7d9ad8af45e7f843f96215575e918e08bca9f | 1,571 | py | Python | displ/wannier/update_dis.py | tflovorn/displ | 094c194c54f02d463353075c6ca82f457f1247fa | [
"MIT"
] | 4 | 2018-04-09T20:39:24.000Z | 2021-06-19T12:21:52.000Z | displ/wannier/update_dis.py | tflovorn/displ | 094c194c54f02d463353075c6ca82f457f1247fa | [
"MIT"
] | null | null | null | displ/wannier/update_dis.py | tflovorn/displ | 094c194c54f02d463353075c6ca82f457f1247fa | [
"MIT"
] | 4 | 2018-04-09T20:39:41.000Z | 2021-06-19T12:21:53.000Z | from argparse import ArgumentParser
import os
from displ.pwscf.parseScf import fermi_from_scf
from displ.wannier.wannier_util import global_config
from displ.wannier.build import Update_Disentanglement
def _main():
    """CLI entry point: rewrite a system's W90 disentanglement window.

    Reads E_Fermi from the wannier scf output and passes outer/inner windows
    (given as offsets around E_Fermi) to Update_Disentanglement().
    """
    parser = ArgumentParser(description="Update disentanglement window in W90 input")
    parser.add_argument('--subdir', type=str, default=None,
            help="Subdirectory under work_base for all job dirs")
    parser.add_argument('prefix', type=str,
            help="Prefix of system to update")
    # The four window bounds are positional floats, all relative to E_F.
    window_args = [('outer_min', 'below', 'start', 'outer'),
                   ('outer_max', 'above', 'stop', 'outer'),
                   ('inner_min', 'below', 'start', 'inner'),
                   ('inner_max', 'above', 'stop', 'inner')]
    for arg_name, rel, verb, window in window_args:
        parser.add_argument(arg_name, type=float,
                help="Distance {} E_F to {} {} window".format(rel, verb, window))
    args = parser.parse_args()
    gconf = global_config()
    base_path = os.path.expandvars(gconf["work_base"])
    if args.subdir is not None:
        base_path = os.path.join(base_path, args.subdir)
    wandir = os.path.join(base_path, args.prefix, "wannier")
    # Fermi level comes from the self-consistent calculation's output.
    E_Fermi = fermi_from_scf(os.path.join(wandir, "scf.out"))
    outer_window = [args.outer_min, args.outer_max]
    inner_window = [args.inner_min, args.inner_max]
    win_path = os.path.join(wandir, "{}.win".format(args.prefix))
    Update_Disentanglement(win_path, E_Fermi, outer_window, inner_window)
if __name__ == "__main__":
_main()
| 39.275 | 85 | 0.701464 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 394 | 0.250796 |
faf7efb34d352ee908682f16b6b86e45d3b88d2e | 942 | py | Python | setup.py | maoterodapena/pysouliss | 6dd3022d629505b44e2f7e41c16e4b48a4302c75 | [
"MIT"
] | 9 | 2017-08-03T19:19:55.000Z | 2019-02-13T12:06:42.000Z | setup.py | maoterodapena/pysouliss | 6dd3022d629505b44e2f7e41c16e4b48a4302c75 | [
"MIT"
] | null | null | null | setup.py | maoterodapena/pysouliss | 6dd3022d629505b44e2f7e41c16e4b48a4302c75 | [
"MIT"
] | 4 | 2017-08-15T18:07:53.000Z | 2019-05-22T18:39:13.000Z | """Setup file for souliss package."""
import os
from setuptools import setup, find_packages
if os.path.exists('README.rst'):
README = open('README.rst').read()
else:
README = ''
setup(
name='pysouliss',
version='0.0.5',
description='Python API for talking to a Souliss gateway gateway',
long_description=README,
url='https://github.com/maoterodapena/pysouliss',
author='Miguel Otero',
author_email='maoterodapena@gmail.com',
license='MIT License',
install_requires=['paho-mqtt'],
packages=find_packages(exclude=['tests', 'tests.*']),
keywords=['sensor', 'actuator', 'IoT', 'DYI'],
zip_safe=True,
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.6',
'Topic :: Home Automation',
])
| 30.387097 | 70 | 0.638004 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 488 | 0.518047 |
faf8407971775c6567b8c1eefdb799ea80949760 | 3,485 | py | Python | scriptlib/scriptfile.py | Orbtial/scriptlib | 9c3d1754da1eb875ecd89a6b6213457b9aa221d6 | [
"MIT"
] | null | null | null | scriptlib/scriptfile.py | Orbtial/scriptlib | 9c3d1754da1eb875ecd89a6b6213457b9aa221d6 | [
"MIT"
] | null | null | null | scriptlib/scriptfile.py | Orbtial/scriptlib | 9c3d1754da1eb875ecd89a6b6213457b9aa221d6 | [
"MIT"
] | null | null | null | '''
ScriptFile
File Manager for the scriptlib package
Developed by Orbtial
'''
#Custom Imports
from . import scriptui
#Standard Imports
import os
def initPTPDIR(filePathAttr):
    """
    Returns a string representation of the path to the file's parent directory.
    Should be initialised and stored before using any other file-handling
    function from this library.
    :param filePathAttr: The __file__ attribute of the module calling this function.
    :return: String representing the path to the file's parent directory.
    """
    resolved = os.path.realpath(filePathAttr)
    return os.path.dirname(resolved)
def goToPath(ptpdir, path):
    """
    Changes the current working directory to ``path`` resolved under ``ptpdir``.
    :param ptpdir: String generated from initPTPDIR().
    :param path: String path to move to, relative to ptpdir.
    """
    target = ptpdir + "/{}".format(path)
    os.chdir(target)
def wFileData(ptpdir, path, filename, data, isOverwrite):
    """
    Writes text to the given file, either appending or overwriting.
    :param ptpdir: String generated from initPTPDIR().
    :param path: String path to the file's directory, relative to the parent directory.
    :param filename: Name of the target file, including its filetype extension.
    :param data: Text to write or append to the file.
    :param isOverwrite: When True the file is overwritten instead of appended to.
    """
    goToPath(ptpdir, path)
    file_mode = "w" if isOverwrite else "a"
    with open(filename, file_mode) as handle:
        handle.write(data)
def rFileData(ptpdir, path, filename):
    """
    Returns the full text content of the specified file.
    :param ptpdir: String generated from initPTPDIR().
    :param path: String path to the file's directory, relative to the parent directory.
    :param filename: Name of the file to read, including its filetype extension.
    :return: String with the file's entire contents.
    """
    goToPath(ptpdir, path)
    with open(filename, "r") as handle:
        contents = handle.read()
    return contents
def gInternalFile(ptpdir, path, question):
    """
    UI element that lets the user pick a file from a directory; entries whose
    name starts with "." are hidden from the list.
    :param ptpdir: String generated from initPTPDIR().
    :param path: String path to the directory, relative to the parent directory.
    :param question: Prompt shown to the user.
    :return: Name of the file chosen by the user.
    """
    goToPath(ptpdir, path)
    visible = [entry for entry in os.listdir() if entry[0] != "."]
    filename = scriptui.gList(question, visible, False)
    print(filename)
    return filename
def mFile(ptpdir, path, data, fileType):
    """
    UI element that prompts for a new file name and creates that file with
    ``data``; keeps prompting until a name that does not already exist is given.
    :param ptpdir: String generated from initPTPDIR().
    :param path: Directory for the new file, relative to the parent directory.
    :param data: Initial text content of the created file.
    :param fileType: Filetype extension appended to the entered name.
    """
    goToPath(ptpdir, path)
    while True:
        scriptui.refresh()
        filename = input("Name of new file (exclude extension): ")
        if not os.path.exists(filename + fileType):
            break
        scriptui.errorMessage("That file already exists!")
    wFileData(ptpdir, path, filename + fileType, data, True)
| 36.684211 | 122 | 0.760689 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,523 | 0.72396 |
fafa3983ded0e46039f34e45b4c99afb412de3eb | 6,823 | py | Python | egg5_config.py | eastgenomics/egg5_dias_CEN_config | d59765e44c43f0044417bf66a4d174b41c6a671b | [
"MIT"
] | null | null | null | egg5_config.py | eastgenomics/egg5_dias_CEN_config | d59765e44c43f0044417bf66a4d174b41c6a671b | [
"MIT"
] | 3 | 2021-11-08T13:32:42.000Z | 2022-01-07T09:16:00.000Z | egg5_config.py | eastgenomics/egg5_dias_CEN_config | d59765e44c43f0044417bf66a4d174b41c6a671b | [
"MIT"
] | null | null | null | import sys
sys.path.append("/mnt/storage/apps/software/dias_config")
from dias_dynamic_files import (
genes2transcripts,
bioinformatic_manifest,
genepanels_file,
)
assay_name = "CEN" # Core Endo Neuro
assay_version = "v1.1.4"
ref_project_id = "project-Fkb6Gkj433GVVvj73J7x8KbV"
# Single workflow
ss_workflow_id = "{}:workflow-G5gzKx8433GYp8x7FkjV1J2j".format(ref_project_id)
sentieon_stage_id = "stage-Fy6fpk040vZZPPbq96Jb2KfK"
sentieon_R1_input_stage = "{}.reads_fastqgzs".format(sentieon_stage_id)
sentieon_R2_input_stage = "{}.reads2_fastqgzs".format(sentieon_stage_id)
sentieon_sample_input_stage = "{}.sample".format(sentieon_stage_id)
fastqc_fastqs_input_stage = "stage-Fy6fpV840vZZ0v6J8qBQYqZF.fastqs"
ss_beds_inputs = {
# vcf_qc
"stage-Fy6fqy040vZV3Gj24vppvJgZ.bed_file ID": "file-Fpz2X0Q433GVK5xxPvzqvVPB",
"stage-Fy6fqy040vZV3Gj24vppvJgZ.bed_file": "",
# region coverage
"stage-G21GzGj433Gky42j42Q5bJkf.input_bed ID": "file-Fpz2X0Q433GVK5xxPvzqvVPB",
"stage-G21GzGj433Gky42j42Q5bJkf.input_bed": "",
# mosdepth
"stage-Fy6fvYQ40vZV1y8p9GYKPYyQ.bed ID": "file-Fpz2X0Q433GVK5xxPvzqvVPB",
"stage-Fy6fvYQ40vZV1y8p9GYKPYyQ.bed": "",
# picard
"stage-Fy6fx2Q40vZbFVxZ283xXGVY.bedfile ID": "file-G5jjzG0433GgkQ093K2p8PxQ", # CEN Capture Bed
"stage-Fy6fx2Q40vZbFVxZ283xXGVY.bedfile": ""
}
# Multi workflow
happy_stage_id = "stage-Fq1BPKj433Gx3K4Y8J35j0fv"
happy_stage_prefix = "{}.prefix".format(happy_stage_id)
happy_stage_bed = {
"{}.panel_bed ID".format(happy_stage_id): "file-G620390433GYGY34Jq6Zq1Xf",
"{}.panel_bed".format(happy_stage_id): "file-G620390433GYGY34Jq6Zq1Xf"
}
female_threshold = 3
male_threshold = 1
somalier_relate_stage_id = "stage-G5j1jJj433GpFY3v0JZQ2ZZ0"
multi_stage_input_dict = {
"stage-Fybykxj433GV7vJKFGf3yVkK.SampleSheet": {
"app": None, "subdir": "", "pattern": "SampleSheet.csv$",
},
"{}.query_vcf".format(happy_stage_id): {
"app": "sentieon-dnaseq", "subdir": "",
"pattern": "^NA12878-.*-EGG5_markdup_recalibrated_Haplotyper.vcf.gz$",
},
"{}.somalier_extract_file".format(somalier_relate_stage_id): {
"app": "somalier_extract", "subdir": "",
"pattern": "-E '(.*).somalier$'"
},
}
ms_workflow_id = "{}:workflow-G5j1j28433GYkv4gPpPG8g11".format(ref_project_id)
# MultiQC
mqc_applet_id = "app-G6FyybQ4f4xqqpFfGqg34y2Y"
mqc_config_file = "{}:file-G82027Q433Gfx69zGvjq7PqQ".format(ref_project_id)
# Reports
xlsx_flanks = 495
cds_file = "{}:file-GB7p8x8433GVkY6F36Qx06z2".format(ref_project_id)
cds_file_for_athena = "{}:file-GB7pXZ0433GX1YJKF5382bbX".format(ref_project_id)
vep_config = "{}:file-GB69B1j43Vx9f0ZYGbKf9xQ1".format(ref_project_id)
generate_bed_vep_stage_id = "stage-G9P8p104vyJJGy6y86FQBxkv"
vep_stage_id = "stage-G9Q0jzQ4vyJ3x37X4KBKXZ5v"
generate_workbook_stage_id = "stage-G9P8VQj4vyJBJ0kg50vzVPxY"
generate_bed_athena_stage_id = "stage-Fyq5yy0433GXxz691bKyvjPJ"
athena_stage_id = "stage-Fyq5z18433GfYZbp3vX1KqjB"
rpt_workflow_id = "{}:workflow-GBQ985Q433GYJjv0379PJqqg".format(ref_project_id)
rpt_stage_input_dict = {
# generate_bed
"{}.sample_file".format(generate_bed_athena_stage_id): {
"app": "mosdepth", "subdir": "",
"pattern": "-E '{}(.*).per-base.bed.gz.csi$'"
},
"{}.sample_file".format(generate_bed_vep_stage_id): {
"app": "mosdepth", "subdir": "",
"pattern": "-E '{}(.*).per-base.bed.gz.csi$'"
},
# vep
"{}.vcf".format(vep_stage_id): {
"app": "sentieon-dnaseq", "subdir": "",
"pattern": "-E '{}(.*)[^g].vcf.gz$'"
},
# athena
"{}.mosdepth_files".format(athena_stage_id): {
"app": "mosdepth", "subdir": "",
# athena requires both per-base files and reference files
"pattern": "-E '{}(.*)(per-base.bed.gz$|reference)'"
},
}
rpt_dynamic_files = {
# inputs for generate bed for vep
"{}.exons_nirvana ID".format(generate_bed_vep_stage_id): cds_file,
"{}.exons_nirvana".format(generate_bed_vep_stage_id): "",
"{}.nirvana_genes2transcripts ID".format(generate_bed_vep_stage_id): genes2transcripts,
"{}.nirvana_genes2transcripts".format(generate_bed_vep_stage_id): "",
"{}.gene_panels ID".format(generate_bed_vep_stage_id): genepanels_file,
"{}.gene_panels".format(generate_bed_vep_stage_id): "",
"{}.manifest ID".format(generate_bed_vep_stage_id): bioinformatic_manifest,
"{}.manifest".format(generate_bed_vep_stage_id): "",
# inputs for generate bed for athena
"{}.exons_nirvana ID".format(generate_bed_athena_stage_id): cds_file,
"{}.exons_nirvana".format(generate_bed_athena_stage_id): "",
"{}.nirvana_genes2transcripts ID".format(generate_bed_athena_stage_id): genes2transcripts,
"{}.nirvana_genes2transcripts".format(generate_bed_athena_stage_id): "",
"{}.gene_panels ID".format(generate_bed_athena_stage_id): genepanels_file,
"{}.gene_panels".format(generate_bed_athena_stage_id): "",
"{}.manifest ID".format(generate_bed_athena_stage_id): bioinformatic_manifest,
"{}.manifest".format(generate_bed_athena_stage_id): "",
# inputs for athena
"{}.exons_file ID".format(athena_stage_id): cds_file_for_athena,
"{}.exons_file".format(athena_stage_id): ""
}
# reanalysis
rea_stage_input_dict = {
# vep
"{}.vcf".format(vep_stage_id): {
"app": "sentieon-dnaseq", "subdir": "",
"pattern": "-E '{}(.*)[^g].vcf.gz$'"
},
# athena
"{}.mosdepth_files".format(athena_stage_id): {
"app": "mosdepth", "subdir": "",
# athena requires both per-base files and reference files
"pattern": "-E '{}(.*)(per-base.bed.gz$|reference)'"
},
}
rea_dynamic_files = {
# inputs for generate bed for vep
"{}.exons_nirvana ID".format(generate_bed_vep_stage_id): cds_file,
"{}.exons_nirvana".format(generate_bed_vep_stage_id): "",
"{}.nirvana_genes2transcripts ID".format(generate_bed_vep_stage_id): genes2transcripts,
"{}.nirvana_genes2transcripts".format(generate_bed_vep_stage_id): "",
"{}.gene_panels ID".format(generate_bed_vep_stage_id): genepanels_file,
"{}.gene_panels".format(generate_bed_vep_stage_id): "",
# inputs for generate bed for athena
"{}.exons_nirvana ID".format(generate_bed_athena_stage_id): cds_file,
"{}.exons_nirvana".format(generate_bed_athena_stage_id): "",
"{}.nirvana_genes2transcripts ID".format(generate_bed_athena_stage_id): genes2transcripts,
"{}.nirvana_genes2transcripts".format(generate_bed_athena_stage_id): "",
"{}.gene_panels ID".format(generate_bed_athena_stage_id): genepanels_file,
"{}.gene_panels".format(generate_bed_athena_stage_id): "",
# inputs for athena
"{}.exons_file ID".format(athena_stage_id): cds_file_for_athena,
"{}.exons_file".format(athena_stage_id): ""
} | 38.988571 | 100 | 0.716254 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,244 | 0.475451 |
fafa474f05d327bb0527aea85277fe69e94fc722 | 105 | py | Python | axiom/__init__.py | schehata/axiom-py | a2e34ec54606359a1c06b5ec12b0a4898ade66ee | [
"MIT"
] | 4 | 2021-03-27T12:24:04.000Z | 2021-10-21T07:04:21.000Z | axiom/__init__.py | schehata/axiom-py | a2e34ec54606359a1c06b5ec12b0a4898ade66ee | [
"MIT"
] | 3 | 2021-04-01T21:19:34.000Z | 2022-01-10T13:58:01.000Z | axiom/__init__.py | schehata/axiom-py | a2e34ec54606359a1c06b5ec12b0a4898ade66ee | [
"MIT"
] | 4 | 2021-07-30T12:27:39.000Z | 2022-01-28T23:38:17.000Z | """
Axiom Python Client
"""
__version__ = "0.1.0-beta.2"
from .client import *
from .datasets import *
| 11.666667 | 28 | 0.666667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 41 | 0.390476 |
fafb8734f2572ef0c2b5074b37edc6e0b8a53374 | 1,070 | py | Python | test_credentialData.py | mireille1999/Credential-Locker | fc7e8c9035a3d89c889ab7db603eec4ec3d62522 | [
"Unlicense"
] | null | null | null | test_credentialData.py | mireille1999/Credential-Locker | fc7e8c9035a3d89c889ab7db603eec4ec3d62522 | [
"Unlicense"
] | null | null | null | test_credentialData.py | mireille1999/Credential-Locker | fc7e8c9035a3d89c889ab7db603eec4ec3d62522 | [
"Unlicense"
] | null | null | null | import unittest
from credentialData import CredentialData
class TestCredentials(unittest.TestCase):
    """Unit tests for the CredentialData model."""
    def setUp(self):
        """
        Create a fresh sample credential before each test.
        """
        self.new_credential = CredentialData("Instagram", "mimi", "mireille")
    def test_init(self):
        """
        The constructor should store platform, username and password as given.
        """
        self.assertEqual(self.new_credential.platform, "Instagram")
        self.assertEqual(self.new_credential.username, "mimi")
        self.assertEqual(self.new_credential.password, "mireille")
    def tearDown(self):
        # Reset the class-level store so each test starts from an empty list.
        CredentialData.credentials = []
    def test_save_credential(self):
        """
        save_credential() should append the instance to the class-level list.
        """
        self.new_credential.save_credential()
        self.assertEqual(len(CredentialData.credentials), 1)
    def test_display_credentials(self):
        """
        display_credentials() should return the class-level credentials list.
        """
        self.assertEqual(CredentialData.display_credentials(),CredentialData.credentials)
if __name__ == '__main__':
unittest.main()
| 26.75 | 89 | 0.648598 | 960 | 0.897196 | 0 | 0 | 0 | 0 | 0 | 0 | 276 | 0.257944 |
fafbbbaa0af64eaf97c6dbad155b8d4576676728 | 1,130 | py | Python | scripts/study_case/ID_13/torch_geometric/transforms/random_rotate.py | kzbnb/numerical_bugs | bc22e72bcc06df6ce7889a25e0aeed027bde910b | [
"Apache-2.0"
] | 8 | 2021-06-30T06:55:14.000Z | 2022-03-18T01:57:14.000Z | scripts/study_case/ID_13/torch_geometric/transforms/random_rotate.py | kzbnb/numerical_bugs | bc22e72bcc06df6ce7889a25e0aeed027bde910b | [
"Apache-2.0"
] | 1 | 2021-06-30T03:08:15.000Z | 2021-06-30T03:08:15.000Z | scripts/study_case/ID_13/torch_geometric/transforms/random_rotate.py | kzbnb/numerical_bugs | bc22e72bcc06df6ce7889a25e0aeed027bde910b | [
"Apache-2.0"
] | 2 | 2021-11-17T11:19:48.000Z | 2021-11-18T03:05:58.000Z | import numbers
import random
import math
import torch
from scripts.study_case.ID_13.torch_geometric.transforms import LinearTransformation
class RandomRotate(object):
    """Rotate node positions by an angle sampled uniformly from ``degrees``.

    3D positions are rotated about ``axis`` (0, 1 or 2); 2D positions are
    always rotated in the plane.
    """

    def __init__(self, degrees, axis=0):
        # A single number d is interpreted as the symmetric interval (-|d|, |d|).
        if isinstance(degrees, numbers.Number):
            degrees = (-abs(degrees), abs(degrees))
        assert isinstance(degrees, (tuple, list)) and len(degrees) == 2
        self.degrees = degrees
        self.axis = axis

    def __call__(self, data):
        # Sample an angle in radians and build the matching rotation matrix.
        angle = math.pi * random.uniform(*self.degrees) / 180.0
        s, c = math.sin(angle), math.cos(angle)
        if data.pos.size(1) == 2:
            rotation = [[c, s], [-s, c]]
        elif self.axis == 0:
            rotation = [[1, 0, 0], [0, c, s], [0, -s, c]]
        elif self.axis == 1:
            rotation = [[c, 0, -s], [0, 1, 0], [s, 0, c]]
        else:
            rotation = [[c, s, 0], [-s, c, 0], [0, 0, 1]]
        return LinearTransformation(torch.tensor(rotation))(data)

    def __repr__(self):
        return '{}({})'.format(self.__class__.__name__, self.degrees)
| 33.235294 | 84 | 0.565487 | 987 | 0.873451 | 0 | 0 | 0 | 0 | 0 | 0 | 8 | 0.00708 |
fafc611515c806788d4e3c897fee66ebe01e6548 | 4,606 | py | Python | hsmodels/schemas/base_models.py | hydroshare/hsmodels | ebd834aa9ac82b5775e83378443e86dafa2395f0 | [
"BSD-3-Clause"
] | null | null | null | hsmodels/schemas/base_models.py | hydroshare/hsmodels | ebd834aa9ac82b5775e83378443e86dafa2395f0 | [
"BSD-3-Clause"
] | 7 | 2021-10-15T20:30:49.000Z | 2022-03-07T14:56:39.000Z | hsmodels/schemas/base_models.py | hydroshare/hsmodels | ebd834aa9ac82b5775e83378443e86dafa2395f0 | [
"BSD-3-Clause"
] | 1 | 2021-09-16T17:51:19.000Z | 2021-09-16T17:51:19.000Z | from datetime import datetime
from typing import Any, Callable, Dict, Optional, Union
from pydantic import BaseModel
class BaseMetadata(BaseModel):
    """Base pydantic model that defaults to excluding None fields and can
    flatten configured dict fields into key/value pair lists for RDF/schema use."""
    def dict(
        self,
        *,
        include: Union['AbstractSetIntStr', 'MappingIntStrAny'] = None,
        exclude: Union['AbstractSetIntStr', 'MappingIntStrAny'] = None,
        by_alias: bool = False,
        skip_defaults: bool = None,
        exclude_unset: bool = False,
        exclude_defaults: bool = False,
        exclude_none: bool = True,
        to_rdf: bool = False,
    ) -> 'DictStrAny':
        """
        Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
        When ``to_rdf`` is True, checks the Config for a schema_config "dictionary_field"
        and converts each such dict field to a list of {"key": ..., "value": ...} pairs —
        the same shape described by the json schema emitted in the schema_extra staticmethod.
        Overrides the default of exclude_none to True.
        """
        d = super().dict(
            include=include,
            exclude=exclude,
            by_alias=by_alias,
            skip_defaults=skip_defaults,
            exclude_unset=exclude_unset,
            exclude_defaults=exclude_defaults,
            exclude_none=exclude_none,
        )
        if to_rdf and hasattr(self.Config, "schema_config"):
            schema_config = self.Config.schema_config
            if "dictionary_field" in schema_config:
                # Flatten {k: v} into [{"key": k, "value": v}, ...] for each configured field.
                for field in schema_config["dictionary_field"]:
                    field_value = d[field]
                    d[field] = [{"key": key, "value": value} for key, value in field_value.items()]
        return d
    def json(
        self,
        *,
        include: Union['AbstractSetIntStr', 'MappingIntStrAny'] = None,
        exclude: Union['AbstractSetIntStr', 'MappingIntStrAny'] = None,
        by_alias: bool = False,
        skip_defaults: bool = None,
        exclude_unset: bool = False,
        exclude_defaults: bool = False,
        exclude_none: bool = True,
        encoder: Optional[Callable[[Any], Any]] = None,
        **dumps_kwargs: Any,
    ) -> str:
        """
        Generate a JSON representation of the model, `include` and `exclude` arguments as per `dict()`.
        `encoder` is an optional function to supply as `default` to json.dumps(), other arguments as per `json.dumps()`.
        Overrides the default of exclude_none to True.
        """
        return super().json(
            include=include,
            exclude=exclude,
            by_alias=by_alias,
            skip_defaults=skip_defaults,
            exclude_unset=exclude_unset,
            exclude_defaults=exclude_defaults,
            exclude_none=exclude_none,
            encoder=encoder,
            **dumps_kwargs,
        )
    class Config:
        validate_assignment = True
        @staticmethod
        def schema_extra(schema: Dict[str, Any], model) -> None:
            # Post-process the generated json schema using the model's optional
            # schema_config: mark read-only fields and describe dict fields as
            # arrays of key/value objects (matching dict(to_rdf=True) output).
            if hasattr(model.Config, "schema_config"):
                schema_config = model.Config.schema_config
                if "read_only" in schema_config:
                    # set readOnly in json schema
                    for field in schema_config["read_only"]:
                        if field in schema['properties']:  # ignore unknown properties for inheritance
                            schema['properties'][field]['readOnly'] = True
                if "dictionary_field" in schema_config:
                    for field in schema_config["dictionary_field"]:
                        if field in schema['properties']:  # ignore unknown properties for inheritance
                            prop = schema["properties"][field]
                            prop.pop('default', None)
                            prop.pop('additionalProperties', None)
                            prop['type'] = "array"
                            prop['items'] = {
                                "type": "object",
                                "title": "Key-Value",
                                "description": "A key-value pair",
                                "default": [],
                                "properties": {"key": {"type": "string"}, "value": {"type": "string"}}
                            }
class BaseCoverage(BaseMetadata):
    """Coverage metadata base with a 'key=value; key=value' string rendering."""
    def __str__(self):
        # Render every truthy field except 'type' as key=value (datetimes in
        # ISO-8601), joined with '; '.
        parts = []
        for key, val in self.__dict__.items():
            if key == "type" or not val:
                continue
            rendered = val.isoformat() if isinstance(val, datetime) else str(val)
            parts.append("=".join([key, rendered]))
        return "; ".join(parts)
| 39.706897 | 120 | 0.550803 | 4,482 | 0.973079 | 0 | 0 | 1,389 | 0.301563 | 0 | 0 | 1,414 | 0.306991 |
fafd772a07cdd650ed253ad18b37cc59015b77a9 | 594 | py | Python | overlay_camera/Server.py | toakarsky/OverlayCamera | 19ff8b616f38c41c68723f7b6abc1e893f9f2488 | [
"MIT"
] | null | null | null | overlay_camera/Server.py | toakarsky/OverlayCamera | 19ff8b616f38c41c68723f7b6abc1e893f9f2488 | [
"MIT"
] | null | null | null | overlay_camera/Server.py | toakarsky/OverlayCamera | 19ff8b616f38c41c68723f7b6abc1e893f9f2488 | [
"MIT"
] | null | null | null | from flask import Flask, render_template, Response
from .OverlayCamera import OverlayCamera
from .settings import ROUTE
app = Flask(__name__)
def gen(camera):
    """Yield an endless multipart/x-mixed-replace stream of JPEG frames.

    :param camera: Any object exposing ``get_frame()`` that returns JPEG bytes.
    """
    part_header = b'--frame\r\nContent-Type: image/jpeg\r\n\r\n'
    while True:
        yield part_header + camera.get_frame() + b'\r\n'
@app.route(ROUTE)
def video_feed():
    """Video streaming route. Put this in the src attribute of an img tag.

    Streams frames from a fresh OverlayCamera as a motion-JPEG response
    (multipart/x-mixed-replace), one part per frame via gen().
    """
    return Response(gen(OverlayCamera()),
                    mimetype='multipart/x-mixed-replace; boundary=frame')
| 24.75 | 77 | 0.653199 | 0 | 0 | 213 | 0.358586 | 229 | 0.385522 | 0 | 0 | 213 | 0.358586 |
fafec615b08143a3fef967cae3cb798ebe890c40 | 8,098 | py | Python | backend/shrunk/client/search.py | kevinmonisit/shrunk | 55106356735c3491f8c8c0774f5ae500ba1c970a | [
"MIT"
] | 13 | 2015-05-08T00:26:23.000Z | 2021-07-28T15:42:10.000Z | backend/shrunk/client/search.py | kevinmonisit/shrunk | 55106356735c3491f8c8c0774f5ae500ba1c970a | [
"MIT"
] | 68 | 2015-01-12T20:27:44.000Z | 2021-05-17T19:08:05.000Z | backend/shrunk/client/search.py | kevinmonisit/shrunk | 55106356735c3491f8c8c0774f5ae500ba1c970a | [
"MIT"
] | 7 | 2015-08-05T20:31:20.000Z | 2022-01-28T21:14:06.000Z | """Implements the :py:class:`SearchClient` class."""
from typing import Any, List
from datetime import datetime, timezone
from pymongo.collation import Collation
import pymongo
__all__ = ['SearchClient']
class SearchClient:
"""This class executes search queries."""
    def __init__(self, *, db: pymongo.database.Database, client: Any):
        """
        :param db: MongoDB database holding the ``urls`` and ``organizations``
            collections that search queries run against.
        :param client: parent client object; used to check per-link edit
            permissions (``client.links.may_edit``).
        """
        self.db = db
        self.client = client
def execute(self, user_netid: str, query: Any) -> Any: # pylint: disable=too-many-branches,too-many-statements
"""Execute a search query
:param user_netid: The NetID of the user performing the search
:param query: The search query. See :py:mod:`shrunk.api.search` for
the search query format
"""
# We're going to build up an aggregation pipeline based on the submitted query.
# This pipeline will be executed on the organizations collection if set.set == 'org',
# or on the urls collection otherwise.
pipeline: List[Any] = []
# Filter based on search string, if provided.
if 'query' in query and query['query'] != '' and query['set']['set'] != 'shared':
pipeline += [
{'$match': {'$text': {'$search': query['query']}}},
{'$addFields': {'text_search_score': {'$meta': 'textScore'}}},
]
# Filter the appropriate links set.
if query['set']['set'] == 'user': # search within `user_netid`'s links
pipeline.append({'$match': {'netid': user_netid}})
elif query['set']['set'] == 'shared':
# If the set is 'shared', the pipeline will be executed against the 'organizations'
# collection instead of the 'urls' collection.
if 'query' in query and query['query'] != '':
pipeline += [
{'$match': {'members.netid': user_netid}},
{'$lookup': {
'from': 'urls',
'let': {'org_id':'$_id'},
'pipeline' : [
{'$match': {'$text': {'$search': query['query']}}},
{'$addFields': {'text_search_score': {'$meta': 'textScore'}}},
{'$unwind': '$viewers'},
{'$match': {'$expr':{'$eq':['$viewers._id','$$org_id']}}},
{'$match': {'text_search_score': {'$gt': 0.5}}},
],
'as': 'shared_urls',
}},
{'$unwind': '$shared_urls'},
{'$replaceRoot': {'newRoot': '$shared_urls'}},
{'$unionWith': {
'coll': 'urls',
'pipeline': [{'$match': {'$text': {'$search': query['query']}}},
{'$addFields': {'text_search_score': {'$meta': 'textScore'}}},
{'$match': {'viewers._id': user_netid}},
{'$match': {'text_search_score': {'$gt': 0.5}}}]
}}]
else:
pipeline += [
{'$match': {'members.netid': user_netid}},
{'$lookup': {
'from': 'urls',
'localField': '_id',
'foreignField': 'viewers._id',
'as': 'shared_urls',
}},
{'$unwind': '$shared_urls'},
{'$replaceRoot': {'newRoot': '$shared_urls'}},
{'$unionWith': {
'coll': 'urls',
'pipeline': [{'$match': {'viewers._id': user_netid}}]
}}]
elif query['set']['set'] == 'org': # search within the given org
pipeline.append({'$match': {'viewers.type': 'org', 'viewers._id': query['set']['org']}})
# Sort results.
sort_order = 1 if query['sort']['order'] == 'ascending' else -1
if query['sort']['key'] == 'created_time':
sort_key = 'timeCreated'
elif query['sort']['key'] == 'title':
sort_key = 'title'
elif query['sort']['key'] == 'visits':
sort_key = 'visits'
elif query['sort']['key'] == 'relevance':
sort_key = 'text_search_score'
else:
# This should never happen
raise RuntimeError(f'Bad sort key {query["sort"]["key"]}')
pipeline.append({'$sort': {sort_key: sort_order, '_id': sort_order}})
# Add is_expired field
now = datetime.now(timezone.utc)
pipeline.append({
'$addFields': {
'is_expired': {
'$and': [
{'$toBool': '$expiration_time'},
{'$gte': [now, '$expiration_time']},
],
},
},
})
if not query.get('show_deleted_links', False):
pipeline.append({'$match': {'deleted': {'$ne': True}}})
if not query.get('show_expired_links', False):
pipeline.append({'$match': {'is_expired': False}})
if 'begin_time' in query:
pipeline.append({'$match': {'timeCreated': {'$gte': query['begin_time']}}})
if 'end_time' in query:
pipeline.append({'$match': {'timeCreated': {'$lte': query['end_time']}}})
# Pagination.
facet = {
'count': [{'$count': 'count'}],
'result': [{'$skip': 0}],
}
if 'pagination' in query:
facet['result'] = [
{'$skip': query['pagination']['skip']},
{'$limit': query['pagination']['limit']},
]
pipeline.append({'$facet': facet})
# Execute the query. Make sure we use the 'en' collation so strings
# are sorted properly (e.g. wrt case and punctuation).
if query['set']['set'] == 'shared':
cursor = self.db.organizations.aggregate(pipeline, collation=Collation('en'))
else:
cursor = self.db.urls.aggregate(pipeline, collation=Collation('en'))
def prepare_result(res: Any) -> Any:
"""Turn a result from the DB into something than can be JSON-serialized."""
def is_alias_visible(alias: Any) -> bool:
if query.get('show_deleted_links', False):
return True
return not alias['deleted']
if res.get('expiration_time'):
expiration_time = res['expiration_time']
else:
expiration_time = None
prepared = {
'id': res['_id'],
'title': res['title'],
'long_url': res['long_url'],
'created_time': res['timeCreated'],
'expiration_time': expiration_time,
'visits': res['visits'],
'unique_visits': res.get('unique_visits', 0),
'owner': res['netid'],
'aliases': [alias for alias in res['aliases'] if is_alias_visible(alias)],
'is_expired': res['is_expired'],
'may_edit': self.client.links.may_edit(res['_id'], user_netid),
}
if res.get('deleted'):
prepared['deletion_info'] = {
'deleted_by': res['deleted_by'],
'deleted_time': res['deleted_time'],
}
return prepared
result = next(cursor)
count = result['count'][0]['count'] if result['count'] else 0
results = [prepare_result(res) for res in result['result']]
# Remove possible duplicates in results and update total count
unique = { each['id'] : each for each in results}.values()
unique_results = list(unique)
diff = len(results) - len(unique_results)
count = count - diff
return {
'count': count,
'results': unique_results,
} | 41.958549 | 115 | 0.470487 | 7,889 | 0.974191 | 0 | 0 | 0 | 0 | 0 | 0 | 3,148 | 0.388738 |
faff6b99f938954c9a5b7a6e6ff1a0945884e179 | 4,110 | py | Python | tests/test_pinblock.py | adelosa/cardutil | fa31223aaac1f0749d50368bb639a311d98e279a | [
"MIT"
] | null | null | null | tests/test_pinblock.py | adelosa/cardutil | fa31223aaac1f0749d50368bb639a311d98e279a | [
"MIT"
] | 1 | 2022-03-25T20:15:24.000Z | 2022-03-30T09:20:34.000Z | tests/test_pinblock.py | adelosa/cardutil | fa31223aaac1f0749d50368bb639a311d98e279a | [
"MIT"
] | null | null | null | import unittest
from cardutil import pinblock
class PinblockTestCase(unittest.TestCase):
    """Tests for cardutil.pinblock: PIN block formats, encryption and Visa PVV."""
    def test_pin_block_Iso0(self):
        """
        Iso0PinBlock.to_bytes() should produce the expected 8-byte clear block.
        """
        self.assertEqual(
            b'\x04\x12\x26\xcb\xa9\x87\x6f\xed',
            pinblock.Iso0PinBlock(pin='1234', card_number='4441234567890123').to_bytes())
    def test_visa_pvv(self):
        """
        calculate_pvv() should derive the expected 4-digit Visa PVV.
        """
        test_key = '5CA64B3C22BEC347CA7E6609904BAAED'
        self.assertEqual(
            '3856', pinblock.calculate_pvv(pin='2205', card_number='4564320000980369', pvv_key=test_key, key_index=1))
    def test_visa_pvv_more_values_required(self):
        """
        check when additional digits for pvv required
        """
        test_key = '5CA64B3C22BEC347CA7E6609904BAAED'
        self.assertEqual(
            '0885', pinblock.calculate_pvv(pin='0654', card_number='4564320000980369', pvv_key=test_key, key_index=1))
    def test_visa_pvv_mixin(self):
        """
        to_pvv() on a block class lacking a card_number property should raise
        ValueError unless the card_number keyword is supplied.
        """
        # use pin block without card_number property
        MyPinBlock = type('MyPinBlock', (pinblock.Iso4PinBlock, pinblock.VisaPVVPinBlockMixin), {})
        pb = MyPinBlock(pin='6666')
        with self.assertRaises(ValueError):
            pb.to_pvv(pvv_key='00' * 8)
        self.assertEqual(pb.to_pvv(pvv_key='00' * 8, card_number='1111222233334444'), '1703')
    def test_pin_block_Iso0TDESPinBlockWithVisaPVV(self):
        """
        Round-trip an ISO-0 TDES pin block through clear bytes, encrypted
        bytes and PVV generation.
        """
        pb1 = pinblock.Iso0TDESPinBlockWithVisaPVV(pin='1234', card_number='1111222233334444')
        self.assertEqual(pb1.pin, '1234')
        self.assertEqual(pb1.card_number, '1111222233334444')
        self.assertEqual(pb1.to_bytes(), b'\x04\x12&\xdd\xdc\xcc\xcb\xbb')
        self.assertEqual(pb1.to_enc_bytes(key='00' * 16), b'L\t\x06\xd1\x03\x08\x87\x1a')
        self.assertEqual(pb1.to_pvv(pvv_key='00' * 16), '6264')
        pb2 = pinblock.Iso0TDESPinBlockWithVisaPVV.from_bytes(
            pin_block=pb1.to_bytes(), card_number='1111222233334444')
        self.assertEqual(pb2.pin, '1234')
        pb3 = pinblock.Iso0TDESPinBlockWithVisaPVV.from_enc_bytes(
            enc_pin_block=pb1.to_enc_bytes('00' * 16), card_number='1111222233334444', key='00' * 16)
        self.assertEqual(pb3.pin, '1234')
    def test_pin_block_Iso4AESPinBlockWithVisaPVV(self):
        """
        Round-trip an ISO-4 AES pin block constructed with a fixed fill value.
        """
        pb1 = pinblock.Iso4AESPinBlockWithVisaPVV(pin='1234', random_value=14932500169729639426)
        self.assertEqual(pb1.pin, '1234')
        self.assertEqual(pb1.to_bytes()[0:8], b'\x44\x12\x34\xaa\xaa\xaa\xaa\xaa')
        self.assertEqual(pb1.to_enc_bytes(key='00' * 16), b',4yaY\xbf\x10j\xf6\xf5\xd2;Y\xfd\xe2\xfe')
        self.assertEqual(pb1.to_pvv(pvv_key='00' * 16, card_number='1111222233334444'), '6264')
        pb2 = pinblock.Iso4AESPinBlockWithVisaPVV.from_bytes(pin_block=pb1.to_bytes())
        self.assertEqual(pb2.pin, '1234')
        pb3 = pinblock.Iso4AESPinBlockWithVisaPVV.from_enc_bytes(
            enc_pin_block=pb1.to_enc_bytes('00' * 16), key='00' * 16)
        self.assertEqual(pb3.pin, '1234')
    def test_pinblock_operations(self):
        """
        The same operations should behave identically across mixin-composed
        and pre-built TDES/AES pin block classes.
        """
        tdes_key = '2222111122221111'
        aes_key = tdes_key * 2
        key = aes_key
        pin_blocks = [
            type(
                'TDESPinBlock',
                (pinblock.Iso0PinBlock, pinblock.TdesEncryptedPinBlockMixin, pinblock.VisaPVVPinBlockMixin), {}),
            type(
                'AESPinBlock',
                (pinblock.Iso4PinBlock, pinblock.AESEncryptedPinBlockMixin, pinblock.VisaPVVPinBlockMixin), {}),
            pinblock.Iso0TDESPinBlockWithVisaPVV,
            pinblock.Iso4AESPinBlockWithVisaPVV
        ]
        for pb in pin_blocks:
            print(pb)
            pb = pb(pin='1234', card_number='1111222233334444')
            self.assertEqual(pb.pin, '1234')
            self.assertEqual(pb.to_pvv(pvv_key='1111222211112222', key_index=1, card_number='1111222233334444'), '9595')
            pb2 = pb.from_bytes(pb.to_bytes(), card_number='1111222233334444')
            self.assertEqual(pb2.pin, '1234')
            pb2 = pb.from_enc_bytes(pb.to_enc_bytes(key=key), key=key, card_number='1111222233334444')
            self.assertEqual(pb2.pin, '1234')
# Allow running this test module directly with ``python <file>``.
if __name__ == '__main__':
    unittest.main()
| 45.164835 | 120 | 0.662287 | 4,011 | 0.975912 | 0 | 0 | 0 | 0 | 0 | 0 | 847 | 0.206083 |
faff7d52f28537ddf92bfe1718201e8d45c2ed7c | 12,300 | py | Python | tests/test_internal_devices.py | NotBobTheBuilder/gpiozero | aeb9d30056ec97e6bf896152e71a870bd0099b4e | [
"BSD-3-Clause"
] | 1 | 2021-01-27T21:46:52.000Z | 2021-01-27T21:46:52.000Z | tests/test_internal_devices.py | NotBobTheBuilder/gpiozero | aeb9d30056ec97e6bf896152e71a870bd0099b4e | [
"BSD-3-Clause"
] | null | null | null | tests/test_internal_devices.py | NotBobTheBuilder/gpiozero | aeb9d30056ec97e6bf896152e71a870bd0099b4e | [
"BSD-3-Clause"
] | 1 | 2020-09-12T09:21:46.000Z | 2020-09-12T09:21:46.000Z | # GPIO Zero: a library for controlling the Raspberry Pi's GPIO pins
# Copyright (c) 2019 Jeevan M R <14.jeevan@gmail.com>
# Copyright (c) 2019 Dave Jones <dave@waveform.org.uk>
# Copyright (c) 2019 Ben Nuttall <ben@bennuttall.com>
# Copyright (c) 2018 SteveAmor <steveamor@noreply.users.github.com>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import (
unicode_literals,
absolute_import,
print_function,
division,
)
str = type('')
import io
import errno
import warnings
from posix import statvfs_result
from subprocess import CalledProcessError
import pytest
from mock import patch
from gpiozero import *
from datetime import datetime, time
# Canned failure objects reused by the tests below: a missing sensor file
# and a failing ping subprocess.
file_not_found = IOError(errno.ENOENT, 'File not found')
bad_ping = CalledProcessError(1, 'returned non-zero exit status 1')
def test_timeofday_bad_init(mock_factory):
    """TimeOfDay rejects missing, malformed and zero-length intervals."""
    bad_calls = [
        (TypeError, ()),                              # no arguments at all
        (ValueError, (7, 12)),                        # bare ints are not times
        (TypeError, (time(7),)),                      # end time missing
        (ValueError, (time(7), time(7))),             # zero-length interval
        (ValueError, (time(7), time(7))),
        (ValueError, ('7:00', '8:00')),               # strings are not times
        (ValueError, (7.00, 8.00)),                   # floats are not times
        (ValueError, (datetime(2019, 1, 24, 19), time(19))),  # lurch edge case
    ]
    for exc_type, args in bad_calls:
        with pytest.raises(exc_type):
            TimeOfDay(*args)
def test_timeofday_init(mock_factory):
    """TimeOfDay accepts valid intervals, including midnight wrap-around."""
    good_calls = [
        ((time(7), time(8)), {'utc': False}),
        ((time(7), time(8)), {'utc': True}),
        ((time(0), time(23, 59)), {}),
        ((time(0), time(23, 59)), {}),
        ((time(12, 30), time(13, 30)), {}),
        ((time(23), time(1)), {}),                    # crosses midnight
        ((time(6), time(18)), {}),
        ((time(18), time(6)), {}),                    # crosses midnight
        ((datetime(2019, 1, 24, 19), time(19, 1)), {}),  # lurch edge case
    ]
    for args, kwargs in good_calls:
        TimeOfDay(*args, **kwargs)
def test_timeofday_value(mock_factory):
    """is_active over local-time and UTC windows, including windows that
    wrap past midnight; gpiozero's datetime is patched so the tests
    control what "now" is.
    """
    # local-time window 07:00-08:00 (checked via datetime.now)
    with TimeOfDay(time(7), time(8), utc=False) as tod:
        assert repr(tod).startswith('<gpiozero.TimeOfDay object')
        assert tod.start_time == time(7)
        assert tod.end_time == time(8)
        assert not tod.utc
        with patch('gpiozero.internal_devices.datetime') as dt:
            # one minute before the window opens
            dt.now.return_value = datetime(2018, 1, 1, 6, 59, 0)
            assert not tod.is_active
            # both endpoints count as inside the window
            dt.now.return_value = datetime(2018, 1, 1, 7, 0, 0)
            assert tod.is_active
            dt.now.return_value = datetime(2018, 1, 2, 8, 0, 0)
            assert tod.is_active
            # one minute after the window closes
            dt.now.return_value = datetime(2018, 1, 2, 8, 1, 0)
            assert not tod.is_active
    # default is UTC (checked via datetime.utcnow)
    with TimeOfDay(time(1, 30), time(23, 30)) as tod:
        assert tod.start_time == time(1, 30)
        assert tod.end_time == time(23, 30)
        assert tod.utc
        with patch('gpiozero.internal_devices.datetime') as dt:
            dt.utcnow.return_value = datetime(2018, 1, 1, 1, 29, 0)
            assert not tod.is_active
            dt.utcnow.return_value = datetime(2018, 1, 1, 1, 30, 0)
            assert tod.is_active
            dt.utcnow.return_value = datetime(2018, 1, 1, 12, 30, 0)
            assert tod.is_active
            dt.utcnow.return_value = datetime(2018, 1, 1, 23, 30, 0)
            assert tod.is_active
            dt.utcnow.return_value = datetime(2018, 1, 1, 23, 31, 0)
            assert not tod.is_active
    # window crossing midnight: 23:00 through 01:00 the next day
    with TimeOfDay(time(23), time(1)) as tod:
        with patch('gpiozero.internal_devices.datetime') as dt:
            dt.utcnow.return_value = datetime(2018, 1, 1, 22, 59, 0)
            assert not tod.is_active
            dt.utcnow.return_value = datetime(2018, 1, 1, 23, 0, 0)
            assert tod.is_active
            dt.utcnow.return_value = datetime(2018, 1, 2, 1, 0, 0)
            assert tod.is_active
            dt.utcnow.return_value = datetime(2018, 1, 2, 1, 1, 0)
            assert not tod.is_active
            dt.utcnow.return_value = datetime(2018, 1, 3, 12, 0, 0)
            assert not tod.is_active
    # nearly-all-day window, 06:00 to 05:00 the next day
    with TimeOfDay(time(6), time(5)) as tod:
        with patch('gpiozero.internal_devices.datetime') as dt:
            dt.utcnow.return_value = datetime(2018, 1, 1, 5, 30, 0)
            assert not tod.is_active
            dt.utcnow.return_value = datetime(2018, 1, 1, 5, 59, 0)
            assert not tod.is_active
            dt.utcnow.return_value = datetime(2018, 1, 1, 6, 0, 0)
            assert tod.is_active
            dt.utcnow.return_value = datetime(2018, 1, 1, 18, 0, 0)
            assert tod.is_active
            dt.utcnow.return_value = datetime(2018, 1, 1, 5, 0, 0)
            assert tod.is_active
            dt.utcnow.return_value = datetime(2018, 1, 2, 5, 1, 0)
            assert not tod.is_active
            dt.utcnow.return_value = datetime(2018, 1, 2, 5, 30, 0)
            assert not tod.is_active
            dt.utcnow.return_value = datetime(2018, 1, 2, 5, 59, 0)
            assert not tod.is_active
            dt.utcnow.return_value = datetime(2018, 1, 2, 6, 0, 0)
            assert tod.is_active
def test_pingserver_bad_init(mock_factory):
    """Constructing a PingServer without a host must raise TypeError."""
    pytest.raises(TypeError, PingServer)
def test_pingserver_init(mock_factory):
    """PingServer stores whatever host string it is given (names, IPv4,
    IPv6); the ping subprocess itself is mocked out.
    """
    with patch('gpiozero.internal_devices.subprocess') as sp:
        sp.check_call.return_value = True
        hosts = ['example.com', '192.168.1.10', '8.8.8.8', '2001:4860:4860::8888']
        for index, host in enumerate(hosts):
            with PingServer(host) as server:
                if index == 0:
                    assert repr(server).startswith('<gpiozero.PingServer object')
                assert server.host == host
def test_pingserver_value(mock_factory):
    """is_active tracks whether the underlying ping subprocess succeeds."""
    with patch('gpiozero.internal_devices.subprocess.check_call') as check_call:
        with PingServer('example.com') as server:
            # success -> failure -> recovery
            for side_effect, expected in [(None, True), (bad_ping, False), (None, True)]:
                check_call.side_effect = side_effect
                assert bool(server.is_active) == expected
def test_cputemperature_bad_init(mock_factory):
    """Unreadable sensor paths raise IOError; inconsistent temperature
    ranges raise ValueError.
    """
    with patch('io.open') as mocked_open:
        # simulate a missing sysfs file: reading the value propagates IOError
        mocked_open.return_value.__enter__.side_effect = file_not_found
        for bad_path in ('', 'badfile'):
            with pytest.raises(IOError):
                with CPUTemperature(bad_path) as temp:
                    temp.value
        mocked_open.return_value.__enter__.return_value.readline.return_value = '37000'
        # min/max range validation
        for bad_kwargs in (
                {'min_temp': 100},
                {'min_temp': 10, 'max_temp': 10},
                {'min_temp': 20, 'max_temp': 10}):
            with pytest.raises(ValueError):
                CPUTemperature(**bad_kwargs)
def test_cputemperature(mock_factory):
    """Temperature scaling, the out-of-range-threshold warning, and
    threshold-based activation, with the sysfs file mocked to 37000
    (i.e. 37.0 deg C).
    """
    with patch('io.open') as mocked_open:
        mocked_open.return_value.__enter__.return_value.readline.return_value = '37000'
        with CPUTemperature() as sensor:
            assert repr(sensor).startswith('<gpiozero.CPUTemperature object')
            assert sensor.temperature == 37.0
            assert sensor.value == 0.37
        with warnings.catch_warnings(record=True) as caught:
            warnings.resetwarnings()
            # the default threshold lies outside the 30..40 range -> warning
            with CPUTemperature(min_temp=30, max_temp=40) as sensor:
                assert sensor.value == 0.7
                assert not sensor.is_active
            assert len(caught) == 1
            assert caught[0].category == ThresholdOutOfRange
            assert sensor.temperature == 37.0
        # an in-range threshold below the reading makes the device active
        with CPUTemperature(min_temp=30, max_temp=40, threshold=35) as sensor:
            assert sensor.is_active
def test_loadaverage_bad_init(mock_factory):
    """Unreadable /proc paths raise IOError; bad load-average ranges and
    unsupported minutes values raise ValueError.
    """
    with patch('io.open') as mocked_open:
        enter = mocked_open.return_value.__enter__
        # missing load-average file: reading the value propagates IOError
        enter.side_effect = file_not_found
        for bad_path in ('', 'badfile'):
            with pytest.raises(IOError):
                with LoadAverage(bad_path) as load:
                    load.value
        enter.return_value.readline.return_value = '0.09 0.10 0.09 1/292 20758'
        # range/minutes validation
        for bad_kwargs in (
                {'min_load_average': 1},
                {'min_load_average': 0.5, 'max_load_average': 0.5},
                {'min_load_average': 1, 'max_load_average': 0.5},
                {'minutes': 0},
                {'minutes': 10}):
            with pytest.raises(ValueError):
                LoadAverage(**bad_kwargs)
def test_loadaverage(mock_factory):
    """Load readings, scaling between min/max, and the out-of-range
    threshold warning, with /proc/loadavg content mocked.
    """
    with patch('io.open') as mocked_open:
        enter = mocked_open.return_value.__enter__
        enter.return_value.readline.return_value = '0.09 0.10 0.09 1/292 20758'
        # defaults: range 0..1, threshold 0.8; reading picks 0.10
        with LoadAverage() as load:
            assert repr(load).startswith('<gpiozero.LoadAverage object')
            assert load.min_load_average == 0
            assert load.max_load_average == 1
            assert load.threshold == 0.8
            assert load.load_average == 0.1
            assert load.value == 0.1
            assert not load.is_active
        enter.return_value.readline.return_value = '1.72 1.40 1.31 3/457 23102'
        # custom range 0.5..2 scales 1.40 to (1.40 - 0.5) / 1.5 == 0.6
        with LoadAverage(min_load_average=0.5, max_load_average=2,
                         threshold=1, minutes=5) as load:
            assert load.min_load_average == 0.5
            assert load.max_load_average == 2
            assert load.threshold == 1
            assert load.load_average == 1.4
            assert load.value == 0.6
            assert load.is_active
        with warnings.catch_warnings(record=True) as caught:
            warnings.resetwarnings()
            # threshold 0.8 lies below min_load_average=1 -> warning raised
            with LoadAverage(min_load_average=1, max_load_average=2,
                             threshold=0.8, minutes=5) as load:
                assert len(caught) == 1
                assert caught[0].category == ThresholdOutOfRange
                assert load.load_average == 1.4
def test_diskusage_bad_init(mock_factory):
    """A nonexistent filesystem path is rejected with OSError."""
    pytest.raises(OSError, DiskUsage, filesystem='badfilesystem')
def test_diskusage(mock_factory):
    """Usage percentage, threshold-based activation, and the out-of-range
    threshold warning, with os.statvfs mocked.
    """
    with patch('os.statvfs') as statvfs:
        # 100000 blocks total, 48000 free -> 52% used
        statvfs.return_value = statvfs_result((
            4096, 4096, 100000, 48000, 48000, 0, 0, 0, 0, 255))
        with DiskUsage() as disk:
            assert repr(disk).startswith('<gpiozero.DiskUsage object')
            assert disk.filesystem == '/'
            assert disk.usage == 52.0
            assert disk.is_active == False
            assert disk.value == 0.52
        # a threshold of 50% is exceeded by 52% usage
        with DiskUsage(threshold=50.0) as disk:
            assert disk.is_active == True
        with warnings.catch_warnings(record=True) as caught:
            warnings.resetwarnings()
            # usage is a percentage, so a threshold of 125 is out of range
            with DiskUsage(threshold=125) as disk:
                assert disk.threshold == 125
                assert not disk.is_active
            assert len(caught) == 1
            assert caught[0].category == ThresholdOutOfRange
            assert disk.usage == 52.0
| 42.857143 | 80 | 0.637805 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,542 | 0.206667 |
faffe937b8eebc8fe07359a13eb9bfdaff6bf554 | 506 | py | Python | ejercicio.py | inacayal/reservas | b74bd26eae3830ebc3329e43e50ebb86ca12ddb7 | [
"MIT"
] | null | null | null | ejercicio.py | inacayal/reservas | b74bd26eae3830ebc3329e43e50ebb86ca12ddb7 | [
"MIT"
] | 7 | 2020-06-12T22:19:36.000Z | 2022-02-26T11:42:58.000Z | ejercicio.py | inacayal/reservas | b74bd26eae3830ebc3329e43e50ebb86ca12ddb7 | [
"MIT"
def pedir_entero(mensaje, min, max):
    """Prompt for an integer strictly between *min* and *max*.

    *mensaje* is a format string that receives the two bounds.  The user
    is re-prompted while the number is out of range.  Returns the integer,
    or the error-message string if the input is not a valid integer.

    NOTE: *min*/*max* shadow the builtins; the names are kept for
    interface compatibility with existing callers.
    """
    # BUG FIX: input() returns a str in Python 3, so the original
    # ``type(numero) == int`` test could never succeed (and the range
    # comparison would raise TypeError); convert explicitly instead.
    try:
        numero = int(input(mensaje.format(min, max)))
    except ValueError:
        return "debes introducir un entero"
    while numero <= min or numero >= max:
        try:
            numero = int(input("el numero debe estar entre {:d} y {:d} ".format(min, max)))
        except ValueError:
            return "debes introducir un entero"
    return numero
# Demo: ask the user for a favourite number and report the outcome.
valido = pedir_entero("Cual es tu numero favorito entre {:d} y {:d}? ", -25, 25)
if isinstance(valido, int):
    print("Has introducido un numero valido: {:d}".format(valido))
else:
    print(valido)
4f002ef133ac9afe0b1bdd85f9600fa414911444 | 2,806 | py | Python | src/thirdparty/yaml/utils.py | third-party-dev/yaml | 545d63f350268028b6df2446f8e14284b0119ca3 | [
"MIT"
] | 1 | 2020-12-03T23:11:45.000Z | 2020-12-03T23:11:45.000Z | src/thirdparty/yaml/utils.py | third-party-dev/yaml | 545d63f350268028b6df2446f8e14284b0119ca3 | [
"MIT"
] | null | null | null | src/thirdparty/yaml/utils.py | third-party-dev/yaml | 545d63f350268028b6df2446f8e14284b0119ca3 | [
"MIT"
] | null | null | null | # from collections.abc import MutableMapping, MutableSequence, MutableSet
from collections.abc import Sequence, Container # , Mapping, Set
from collections import OrderedDict
import contextlib
from ruamel.yaml.constructor import ConstructorError
from ruamel.yaml.nodes import MappingNode, SequenceNode
def _is_container(value):
# chenz: What about numbers and booleans?
if isinstance(value, str) or isinstance(value, bytes):
return False
if isinstance(value, Container):
return True
return False
def _is_sequence(value):
if isinstance(value, str):
return False
if isinstance(value, Sequence):
return True
return False
def _check_order(dst, src):
dst_is_ordered = isinstance(dst, OrderedDict)
src_is_ordered = isinstance(src, OrderedDict)
if dst_is_ordered and not src_is_ordered:
raise ConstructorError(
problem="expected an ordered dictionary, but found %r" % src
)
if not dst_is_ordered and src_is_ordered:
raise ConstructorError(
problem="expected an unordered dictionary, but found %r" % src
)
@contextlib.contextmanager
def _with_context(context_managers):
    """Enter the first manager in *context_managers* (consumed via pop)
    and yield a nested _with_context generator for the remainder.

    When the list is exhausted, yields None.  NOTE(review): the yielded
    value is itself a context manager for the remaining managers, so the
    caller is expected to enter it recursively; the list argument is
    mutated in the process.
    """
    try:
        # pop mutates the caller's list: each nesting level consumes one
        context_manager = context_managers.pop(0)
    except IndexError:
        # no managers left: plain pass-through
        yield
    else:
        with context_manager:
            # hand back a manager wrapping the rest of the list
            yield _with_context(context_managers)
def _iter_mapping_nodes(node):
    """Yield (key_node, value_node) pairs from a YAML mapping node.

    Raises ConstructorError (on first iteration, since this is a
    generator) if *node* is not a MappingNode.
    """
    if not isinstance(node, MappingNode):
        raise ConstructorError(
            problem="expected a mapping node, but found %s" % node.id,
            problem_mark=node.start_mark,
        )
    for pair in node.value:
        key_node, value_node = pair
        yield key_node, value_node
def _iter_sequence_nodes(node):
    """Yield the sub-nodes of a YAML sequence node.

    Raises ConstructorError (on first iteration, since this is a
    generator) if *node* is not a SequenceNode.
    """
    if not isinstance(node, SequenceNode):
        raise ConstructorError(
            problem="expected a sequence node, but found %s" % node.id,
            problem_mark=node.start_mark,
        )
    yield from node.value
def _iter_pairs_nodes(node):
    """Yield (key_node, value_node) pairs from a sequence of single-entry
    mappings (the YAML "pairs" style: ``- key: value``).

    Raises ConstructorError if *node* is not a sequence, if an element is
    not a mapping, or if a mapping holds more than one entry.
    """
    # iterate over sequence sub-nodes
    for seq_subnode in _iter_sequence_nodes(node):
        # get mapping sub-nodes
        map_subnodes = list(_iter_mapping_nodes(seq_subnode))
        # verify there is one mapping sub-node
        if len(map_subnodes) != 1:
            raise ConstructorError(
                problem="expected a single mapping item,"
                " but found %d items" % len(map_subnodes),
                problem_mark=seq_subnode.start_mark,
            )
        # ###### ORDERING ISSUE #####
        # NOTE(review): the original author flagged an ordering issue here,
        # presumably about relying on the entry order of the single-item
        # mapping -- confirm before depending on pair order.
        # extract key:value nodes
        key_node, value_node = map_subnodes[0]
        yield key_node, value_node
| 28.06 | 74 | 0.665716 | 0 | 0 | 1,625 | 0.579116 | 249 | 0.088738 | 0 | 0 | 627 | 0.22345 |
4f00e3c5d6d2a2a2f904f8badd0a0b8379b14bc2 | 2,482 | py | Python | ooobuild/dyn/rendering/repaint_result.py | Amourspirit/ooo_uno_tmpl | 64e0c86fd68f24794acc22d63d8d32ae05dd12b8 | [
"Apache-2.0"
] | null | null | null | ooobuild/dyn/rendering/repaint_result.py | Amourspirit/ooo_uno_tmpl | 64e0c86fd68f24794acc22d63d8d32ae05dd12b8 | [
"Apache-2.0"
] | null | null | null | ooobuild/dyn/rendering/repaint_result.py | Amourspirit/ooo_uno_tmpl | 64e0c86fd68f24794acc22d63d8d32ae05dd12b8 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
#
# Copyright 2022 :Barry-Thomas-Paul: Moss
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http: // www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Const Class
# this is a auto generated file generated by Cheetah
# Libre Office Version: 7.3
# Namespace: com.sun.star.rendering
from enum import IntEnum
from typing import TYPE_CHECKING
from ooo.oenv.env_const import UNO_ENVIRONMENT, UNO_RUNTIME
# _DYNAMIC is True only when running inside a live UNO (office) runtime
# and not under a static type checker; it selects which RepaintResult
# implementation is bound below.
_DYNAMIC = False
if (not TYPE_CHECKING) and UNO_RUNTIME and UNO_ENVIRONMENT:
    _DYNAMIC = True
if not TYPE_CHECKING and _DYNAMIC:
    # Live office runtime: bind the real UNO const class and tag it with
    # namespace metadata so tooling can identify its origin.
    from com.sun.star.rendering import RepaintResult as RepaintResult
    if hasattr(RepaintResult, '_constants') and isinstance(RepaintResult._constants, dict):
        RepaintResult._constants['__ooo_ns__'] = 'com.sun.star.rendering'
        RepaintResult._constants['__ooo_full_ns__'] = 'com.sun.star.rendering.RepaintResult'
        RepaintResult._constants['__ooo_type_name__'] = 'const'

    def build_enum():
        # Mirror the runtime const class's public, non-callable attributes
        # into an IntEnum bound to the module-level name RepaintResultEnum.
        global RepaintResultEnum
        ls = [f for f in dir(RepaintResult) if not callable(getattr(RepaintResult, f)) and not f.startswith('__')]
        _dict = {}
        for name in ls:
            _dict[name] = getattr(RepaintResult, name)
        RepaintResultEnum = IntEnum('RepaintResultEnum', _dict)
    build_enum()
else:
    # No office runtime: fall back to the static stub definition.
    from ...lo.rendering.repaint_result import RepaintResult as RepaintResult
class RepaintResultEnum(IntEnum):
    """
    Enum of Const Class RepaintResult

    These constants specify the result of the XCachedPrimitive render operation.

    **since**

        OOo 2.0
    """
    REDRAWN = RepaintResult.REDRAWN
    """
    Repaint succeeded, primitive has been exactly reproduced.
    """
    DRAFTED = RepaintResult.DRAFTED
    """
    Repaint succeeded, primitive has been reproduced in preview quality.
    """
    FAILED = RepaintResult.FAILED
    """
    Repaint failed altogether.
    """

# Public API of this module.
__all__ = ['RepaintResult', 'RepaintResultEnum']
| 35.971014 | 114 | 0.695407 | 609 | 0.245367 | 0 | 0 | 0 | 0 | 0 | 0 | 1,328 | 0.535052 |
4f0149cb740276e036ceef0f9256600992cdaaba | 338 | py | Python | timetest.py | schaibo/gpio- | 67843aa3869662ad87e9c23914bc0c1ff9f4e2bb | [
"Apache-2.0"
] | null | null | null | timetest.py | schaibo/gpio- | 67843aa3869662ad87e9c23914bc0c1ff9f4e2bb | [
"Apache-2.0"
] | null | null | null | timetest.py | schaibo/gpio- | 67843aa3869662ad87e9c23914bc0c1ff9f4e2bb | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Sun Jan 26 16:01:59 2014
@author: pi
"""
import time
def delay(i):
    """Busy-wait by incrementing a counter *i* times (a crude software
    delay used by the timing script below)."""
    counter = 0
    for _ in range(i):
        counter += 1
# Benchmark three ways of burning time, printing elapsed seconds for each:
# (1) a single assignment between two timer reads (timer overhead),
# (2) an inline counting loop of n iterations,
# (3) the delay() helper with the same n.
n=5000
j=0
a=time.time()
i=1
c=time.time()
d=c-a
# elapsed time of the single assignment above
print d
a=time.time()
for i in range(n):
    j+=1
c=time.time()
d=c-a
# elapsed time of the inline counting loop
print d
a=time.time()
delay(n)
c=time.time()
d=c-a
# elapsed time of the delay() helper
print d
| 9.941176 | 35 | 0.56213 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 80 | 0.236686 |
4f01cb78d8c650937562fd3ca942f32e2ccc0647 | 1,716 | py | Python | pySIC/reader.py | edoaxyz/image_to_hOCR | 20b96fd758e9994ce94b311fbc69b413d635004e | [
"MIT"
] | null | null | null | pySIC/reader.py | edoaxyz/image_to_hOCR | 20b96fd758e9994ce94b311fbc69b413d635004e | [
"MIT"
] | null | null | null | pySIC/reader.py | edoaxyz/image_to_hOCR | 20b96fd758e9994ce94b311fbc69b413d635004e | [
"MIT"
] | null | null | null | ############################################################
import pytesseract
import os
from reportlab.lib.pagesizes import letter
from reportlab.pdfgen import canvas
from reportlab.pdfbase import pdfmetrics
from reportlab.pdfbase.ttfonts import TTFont
#############################################################
# Fraction of input pages OCR'd so far by Read(); polled via get_percentage().
R_percentage = 0

# Allow overriding the tesseract executable location via the environment.
if "TESSERACT_EXEC" in os.environ:
    pytesseract.pytesseract.tesseract_cmd = os.environ["TESSERACT_EXEC"]
#############################################################
def real(t):
    """Strip *t* down to its alphanumeric characters."""
    return "".join(c for c in t if c.isalnum())
def Read(abs_folder_in, abs_folder_out, abs_folder_out_pdf, lang, debug, name):
    """OCR every numbered .jpg in *abs_folder_in* with tesseract, build a
    searchable PDF from the hOCR output, and move the .hocr files into
    *abs_folder_out*.

    abs_folder_in      -- directory of <number>.jpg page images
    abs_folder_out     -- destination directory for the .hocr files
    abs_folder_out_pdf -- destination directory for <name>.pdf
    lang               -- tesseract language code
    debug              -- when truthy, print progress to stdout
    name               -- basename (without extension) of the output PDF

    Side effect: updates the module-level R_percentage progress value,
    resetting it to 0 on completion.
    """
    global R_percentage
    # NOTE(review): app_folder is computed but never used
    app_folder = os.path.dirname(__file__)
    # page images are named <number>.jpg; process them in numeric order
    s = sorted([int(i[:-4]) for i in os.listdir(abs_folder_in) if i.endswith(".jpg")])
    images_list = [os.path.join(abs_folder_in, str(i) + ".jpg") for i in s]
    for c, img_name in enumerate(images_list, 0):
        if debug: print("Creating hOCR")
        # writes <number>.hocr next to the input image
        pytesseract.pytesseract.run_tesseract(img_name, os.path.join(abs_folder_in, str(s[c])), lang = lang, extension = "", config = "hocr")
        if debug: print("Done ", c+1, " of ", len(images_list))
        R_percentage += 1 / len(images_list)
    if debug: print("Creating Pdf from Hocr and images")
    # shell out to the hocr-pdf tool to merge images + hOCR into one PDF
    os.system("hocr-pdf --savefile " + os.path.join(abs_folder_out_pdf, name + ".pdf" ) + " " + abs_folder_in)
    if debug: print("Moving the hocr to their folder")
    for i, n in zip(images_list, s):
        os.rename(i[:-4]+".hocr", os.path.join(abs_folder_out, str(n)+".hocr"))
    R_percentage = 0
def get_percentage():
    """Return the module-level progress value maintained by Read()
    (fraction of pages processed; 0 when idle)."""
    return R_percentage
###############################################################
| 49.028571 | 141 | 0.593823 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 440 | 0.25641 |
4f029f9744d9cffb7aa60df64e098f556aa1867c | 12,921 | py | Python | sciflo/event/pdict.py | hysds/sciflo | f706288405c8eee59a2f883bab3dcb5229615367 | [
"Apache-2.0"
] | null | null | null | sciflo/event/pdict.py | hysds/sciflo | f706288405c8eee59a2f883bab3dcb5229615367 | [
"Apache-2.0"
] | null | null | null | sciflo/event/pdict.py | hysds/sciflo | f706288405c8eee59a2f883bab3dcb5229615367 | [
"Apache-2.0"
] | 1 | 2019-02-07T01:08:34.000Z | 2019-02-07T01:08:34.000Z | """
pdict.py -- Implement a (remote) persistent dictionary that is accessed over
a socket. The backend dictionary could be dbshelve, BSDDB, or
even a relational table of (key, blob).
*** The purpose is to have a bullet-proof, separate-process, persistent dictionary
that is very fast, globally shared by many processes, and can't be harmed by
process segfaults.
***
"""
from twisted.internet import reactor
from twisted.internet.protocol import ServerFactory
from twisted.protocols.basic import LineReceiver
from twisted.python import log
import os
import sys
import socket
from bsddb3 import dbshelve
import pickle as pickle
# Python 2 compatibility: UserDict.DictMixin does not exist on Python 3,
# so fall back to an empty stand-in base class there.
try:
    from UserDict import DictMixin
except ImportError:
    class DictMixin:
        pass
# retrieve work unit cache dir and file from user configuration
from sciflo.utils import ScifloConfigParser, validateDirectory
# Read work-unit cache location/port settings from the user's sciflo config.
scp = ScifloConfigParser()
WorkUnitCacheDir = scp.getParameter("cacheHome")
WorkUnitCacheFile = scp.getParameter("cacheDb")
WorkUnitCachePort = int(scp.getParameter("cachePort"))
WorkUnitCache = os.path.join(WorkUnitCacheDir, WorkUnitCacheFile)
# server log lives under <sys.prefix>/log/<cache-db-basename>.log
WorkUnitCacheLog = os.path.join(sys.prefix, 'log', '%s.log' %
                                os.path.splitext(WorkUnitCacheFile)[0])
# module-wide verbose tracing switch for client and server
DEBUG = False

# Registry of named (shareable) dictionaries: maps a public name to its
# backing db file, TCP port, and server log file.
NamedDicts = {'WorkUnitCache':
              {'dbFile': WorkUnitCache, 'port': WorkUnitCachePort,
               'logFile': WorkUnitCacheLog},
              'EventStore':
              {'dbFile': '/tmp/EventStore/eventStore.db', 'port': 8002,
               'logFile': 'eventStoreServer.log'},
              'Test':
              {'dbFile': None, 'port': 8009, 'logFile': '/tmp/Test.log'},
              }

# String constants for client/server protocol across wire
NNL = '\r\n'  # network newline
MsgPrefix = '#!#'
OkMsg = MsgPrefix + 'ok'          # success acknowledgement
NoneMsg = MsgPrefix + 'None'      # "key not found" marker
ErrorMsg = MsgPrefix + 'error: '
EndMsg = MsgPrefix + 'end'        # end-of-value sentinel
EndToken = EndMsg + NNL

# in-memory dictionary backing the 'Test' registry entry (not persistent)
_TestDict = {'foo': 'bar', 'bush': 'sucks', 'fool': 'no money'}
class PersistentDictProtocol(LineReceiver):
    """A twisted server to allow access to a persistent dictionary (e.g. bsddb)
    from multiple remote clients. The line-oriented protocol accepts the commands:

      - ping<NNL>                           : see if the server is up on a given port
      - get<NNL>key<NNL>                    : get the string value of a string key
      - delete<NNL>key<NNL>                 : delete a key/value pair from the dictionary
      - insert<NNL>key<NNL>val<EndMsg><NNL> : insert a multi-line string value under that key
      - length<NNL>                         : return number of keys in dict
                                              (**CURRENTLY BROKEN**: hard-coded reply, see below)

    Notes:
      - Keys cannot contain network newlines, NNL = '\r\n'.
      - Values can be multi-line strings (python pickles or XML).
      - Newlines are used to separate parts of the commands so that the cmd can be
        parsed using LineReceiver.
    """

    def __init__(self, state='start'):
        # state of FSM = 'start', 'get', 'delete', 'insert', or 'getval'
        self.state = state
        self.key = None  # key to insert value under
        self.val = None  # value to insert

    def connectionMade(self):
        """Called by twisted when a client connects; logs only in DEBUG mode."""
        if DEBUG:
            print('PersistentDict: connection made.')

    def lineReceived(self, line):
        """Simple finite state machine to process the four possible commands.
        """
        dic = self.factory.dict  # get dictionary opened in factory init()
        if DEBUG:
            print(('**', line, '**'))
        if self.state == 'start':
            # awaiting a command verb
            if line == 'ping':
                print('ping')
                self.sendline(OkMsg)
            elif line == 'length':
                print('length')
                # NOTE(review): hard-coded placeholder; the real len() call
                # is commented out below, so 'length' always reports 1.
                self.sendline('1')
                # self.sendline( str(len(dic)) )
            elif line in ('get', 'delete', 'insert'):
                if DEBUG:
                    print(('Change state to', line))
                self.state = line
        elif self.state == 'get':
            # this line is the key to look up; missing keys reply NoneMsg
            print(('get', line))
            val = dic.get(line, NoneMsg)
            self.sendline(val + EndMsg)
            self.state = 'start'
        elif self.state == 'delete':
            # this line is the key to remove (deleting a missing key is ok)
            print(('delete', line))
            if line in dic:
                del dic[line]
            self.sendline(OkMsg)
            self.state = 'start'
        elif self.state == 'insert':
            # this line is the key; the value follows on subsequent lines
            print(('insert', line))
            self.key = line
            self.val = ''
            self.state = 'getval'
        elif self.state == 'getval':
            # accumulate value lines until the end-of-message marker arrives
            if DEBUG:
                print(('Adding to val:', line))
            self.val += line
            if line.endswith(EndMsg):
                val = self.val[:-len(EndMsg)]
                dic[self.key] = val
                if DEBUG:
                    print('Inserted:')
                if DEBUG:
                    print(val)
                self.sendline(OkMsg)
                self.state = 'start'

    def sendline(self, line):
        """Write *line* plus the network newline back to the client."""
        self.transport.write(line + NNL)
class PersistentDictFactoryException(RuntimeError):
    """Raised when a PersistentDictFactory cannot be configured (e.g. an
    unknown dictionary name)."""
class PersistentDictFactory(ServerFactory):
    protocol = PersistentDictProtocol

    def __init__(self, dictName, dictRegistry=NamedDicts):
        """Set up for the protocol by opening the named persistent dictionary.

        Looks up *dictName* in *dictRegistry* for the backing db file, TCP
        port and log file, creates the db directory if needed, starts
        twisted logging, then opens the shelve (or the in-memory test dict).
        """
        self.dictName = dictName
        try:
            self.dbFile = dictRegistry[dictName]['dbFile']
            self.port = dictRegistry[dictName]['port']
            if self.dbFile:
                dbHome = os.path.split(self.dbFile)[0]
                if not os.path.exists(dbHome):
                    os.makedirs(dbHome, 0o777)
                self.dbHome = dbHome
            logFile = dictRegistry[dictName]['logFile']
            if not logFile.startswith('/'):
                # relative log paths land next to the db file
                # NOTE(review): if dbFile is None and logFile is relative,
                # dbHome is unbound here -- all current registry entries
                # avoid that combination.
                logFile = os.path.join(dbHome, logFile)
            self.logFile = logFile
        except:
            raise PersistentDictFactoryException(
                'Error, no dict of that name: %s' % dictName)
        validateDirectory(os.path.dirname(self.logFile))
        log.startLogging(open(self.logFile, 'w'))
        if dictName == 'Test':
            # special in-memory dict for testing (not persistent)
            self.dict = _TestDict
        else:
            self.dict = dbshelve.open(self.dbFile)
            os.chmod(self.dbFile, 0o666)
class PersistentDictClientException(RuntimeError):
    """Raised when a PersistentDictClient cannot reach or talk to the
    dictionary server."""
class PersistentDictClient:
    """A simple client to call a persistent dictionary (e.g. bsddb) across a socket.

    The client only has four useful methods: ping, get, delete, insert
    (plus length, which queries the server's key count).

    NOTE(review): commands and values travel as ``str`` objects; under
    Python 3, ``socket.sendall`` requires ``bytes``, so a deployment may
    need an encoding layer at both ends -- confirm against the server.
    """

    def __init__(self, dictName, dictRegistry=NamedDicts, pickleVals=False, timeout=3.0, bufsize=4096):
        """Connect to the server for *dictName* and verify it answers a ping.

        dictName     -- key into *dictRegistry* (e.g. 'WorkUnitCache')
        dictRegistry -- maps names to {'dbFile', 'port', 'logFile'} settings
        pickleVals   -- if True, values are pickled on insert and unpickled on get
        timeout      -- socket timeout in seconds
        bufsize      -- receive buffer size in bytes
        """
        self.dictName = dictName
        self.pickleVals = pickleVals
        self.timeout = timeout
        self.bufsize = bufsize
        try:
            self.port = dictRegistry[dictName]['port']
        except KeyError:  # was a bare except; only the lookup can fail here
            raise PersistentDictClientException(
                'Error, no dict of that name: %s' % dictName)
        self.soc = self._openLocalSocket(self.port)
        if not self.ping():
            raise PersistentDictClientException(
                'Error, server for %s on port %s does not return ping' % (dictName, self.port))

    def close(self):
        """Close the socket connection to the server."""
        self.soc.close()
        if DEBUG:
            print(('PersistentDictClient: Closed socket connection to dictName, port: %s, %d' % (
                self.dictName, self.port)))

    def ping(self):
        """Ping server to ensure it's alive; returns truthy on success."""
        try:
            return self._sendCmd('ping')
        except Exception:
            return False

    def get(self, key, default=None):
        """Get value of a string key, or *default* value if missing."""
        soc = self.soc
        cmd = 'get' + NNL + key + NNL
        try:
            soc.sendall(cmd)
        except socket.error as msg:
            soc.close()
            raise PersistentDictClientException(
                'Error, cannot send to socket: %s' % cmd)
        data = ''
        firstTry = True
        # values may exceed one recv() buffer: accumulate until the
        # end-of-value sentinel arrives
        while not data.endswith(EndToken):
            try:
                data += soc.recv(self.bufsize)
                if DEBUG:
                    print(('Got data:', data))
            except socket.error as msg:
                soc.close()
                raise PersistentDictClientException(
                    'Error, no data received from socket, sent: %s' % cmd)
            # missing key (server sent the None marker), or the connection
            # yielded nothing at all on the first read
            if data.startswith(NoneMsg) or (firstTry and len(data) == 0):
                return default
            firstTry = False
        data = data[:-len(EndToken)]
        if self.pickleVals:
            return pickle.loads(data)
        else:
            return data

    def delete(self, key):
        """Delete a key and its value from persistent dict; True on success."""
        cmd = 'delete' + NNL + key + NNL
        try:
            return self._sendCmd(cmd)
        except Exception:
            return False

    def insert(self, key, val):
        """Insert or change the value of a key; True on success."""
        if self.pickleVals:
            val = pickle.dumps(val)
        cmd = 'insert' + NNL + key + NNL + val + EndToken
        try:
            return self._sendCmd(cmd)
        except Exception:
            return False

    def length(self):
        """Return number of keys in dict (0 if the server cannot be queried)."""
        try:
            return int(self._sendCmd('length'))
        except Exception:
            return 0

    def _openLocalSocket(self, port):
        """Open a TCP connection to *port* on localhost and return the socket."""
        # create the socket before the try block so the error path can
        # always close it (the original could hit an unbound local there)
        soc = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            soc.connect(('127.0.0.1', port))
            soc.settimeout(self.timeout)
        except socket.error as e:
            soc.close()
            print(
                ('PersistentDictClient: Error, cannot connect socket to local port: %s' % port))
            raise e
        return soc

    def _sendCmd(self, cmd):
        """Send a command; return True for an 'ok' reply, else the reply text.

        BUG FIX: the reply used to be truncated with ``data[-len(NNL):]``,
        which kept only the trailing newline and threw the payload away --
        so the OkMsg comparison never matched and length() always fell
        back to 0 (the "CURRENTLY BROKEN" note in the protocol docstring).
        Strip the trailing network newline instead.
        """
        soc = self.soc
        if cmd[-2:] != NNL:
            cmd += NNL
        try:
            soc.sendall(cmd)
        except socket.error as msg:
            soc.close()
            raise RuntimeError(
                'PersistentDictClient: Error, cannot send to socket: %s' % cmd)
        try:
            data = soc.recv(self.bufsize)
        except socket.error as e:
            soc.close()
            print(
                ('PersistentDictClient: Error, no data received from socket, sent: %s' % cmd))
            raise e
        if data.endswith(NNL):
            data = data[:-len(NNL)]
        if data == OkMsg:
            data = True
        return data
class PersistentDictException(RuntimeError):
    """Raised by PersistentDict for unsupported dictionary operations."""
class PersistentDict(DictMixin):
    """Presents the usual dict interface, accessing a *named*, shared, persistent dictionary,
    and hides the (socket) client and (twisted) server classes from view.
    """

    def __init__(self, dictName, pickleVals=False):
        self.dictName = dictName
        # set to None first so __del__ is safe even if the client
        # connection below fails to open
        self.db = None
        self.db = PersistentDictClient(dictName, pickleVals=pickleVals)

    def __del__(self):
        # close the socket when the wrapper is garbage collected
        if self.db:
            self.db.close()

    def __getattr__(self, name):
        """Many methods we can just pass through to the DB object."""
        return getattr(self.db, name)

    # dictionary access methods
    def __len__(self):
        return self.db.length()

    def __getitem__(self, key):
        return self.db.get(key)

    def __setitem__(self, key, val):
        self.db.insert(key, val)

    def __delitem__(self, key):
        self.db.delete(key)

    # key/item/value enumeration is deliberately unsupported over the wire
    def keys(self, txn=None):
        raise PersistentDictException(
            'Error, class does not implement keys() method.')

    def items(self, txn=None):
        raise PersistentDictException(
            'Error, class does not implement items() method.')

    def values(self, txn=None):
        raise PersistentDictException(
            'Error, class does not implement values() method.')
def startPersistentDictServer():
    """This code belongs in a twisted tac file (at toplevel).

    Builds a twisted Application wrapping a TCP service that runs a
    PersistentDictFactory for the 'EventStore' registry entry.
    """
    from .pdict import NamedDicts, PersistentDictFactory
    from twisted.application import internet, service
    namedDict = "EventStore"
    port = NamedDicts[namedDict]['port']
    application = service.Application("pdict")
    factory = PersistentDictFactory(namedDict)
    pdictService = internet.TCPServer(port, factory)
    pdictService.setServiceParent(service.IServiceCollection(application))
def testClientSimple():
    """Minimal round-trip against the in-memory 'Test' dictionary server."""
    cache = PersistentDict("Test")
    print(cache['foo'])
    del cache['foo']
    cache['you'] = 'tube'
    print(cache['you'])
    del cache
def testClient():
    """Round-trip several keys against the 'EventStore' dictionary server."""
    cache = PersistentDict("EventStore")
    print(len(cache))
    print(cache['foo'])
    cache['foo'] = 'bar'
    cache['bush'] = 'sucks'
    cache['fool'] = 'no money'
    print(cache['foo'])
    del cache['foo']
    cache['you'] = 'tube'
    print(cache['you'])
    print(len(cache))
def main():
testClient()
if __name__ == '__main__':
main()
| 32.711392 | 103 | 0.582153 | 9,854 | 0.762634 | 0 | 0 | 0 | 0 | 0 | 0 | 3,947 | 0.305472 |
4f03405a3316d902d0f6702b629b6f2aae600c70 | 633 | py | Python | tests/functional_tests.py | Ecotrust/OPCDB | f639408c9cfdfa392a9233042f40e116c703fff1 | [
"MIT"
] | null | null | null | tests/functional_tests.py | Ecotrust/OPCDB | f639408c9cfdfa392a9233042f40e116c703fff1 | [
"MIT"
] | 7 | 2021-03-19T02:36:29.000Z | 2022-01-21T23:51:38.000Z | tests/functional_tests.py | Ecotrust/OPCDB | f639408c9cfdfa392a9233042f40e116c703fff1 | [
"MIT"
] | null | null | null | from selenium import webdriver
import unittest
class FirefoxTest(unittest.TestCase):
def setUp(self):
self.browser = webdriver.Firefox()
def tearDown(self):
self.browser.quit()
def test_page(self): #test method names must start with 'test'
self.browser.get('http://localhost:8000')
self.assertIn('Database', self.browser.title)
# self.fail('Finish the test!')
if __name__ == '__main__':
unittest.main() #call unittest.main(), which launches
# the unittest test runner, which will automatically find test classes and
# methods in the file and run them
| 31.65 | 80 | 0.669826 | 373 | 0.589258 | 0 | 0 | 0 | 0 | 0 | 0 | 264 | 0.417062 |
4f0410491333c2d185cc0abeb6504f625457135d | 204 | py | Python | python-stuff/performance_timing.py | mnky9800n/python-data-analysis-tools | 298dc6d9271c72f61285c19ff0112d755fb0c83c | [
"MIT"
] | 2 | 2020-09-11T12:50:15.000Z | 2021-05-12T07:10:04.000Z | python-stuff/performance_timing.py | mnky9800n/python-data-analysis-tools | 298dc6d9271c72f61285c19ff0112d755fb0c83c | [
"MIT"
] | null | null | null | python-stuff/performance_timing.py | mnky9800n/python-data-analysis-tools | 298dc6d9271c72f61285c19ff0112d755fb0c83c | [
"MIT"
] | 3 | 2020-03-06T15:35:46.000Z | 2021-12-11T07:36:08.000Z | import timeit
# bro, probably could just use %timeit if
# you are on ipython. :-P
starttime = timeit.default_timer()
"""
your code here
"""
endtime = timeit.default_timer()
print(endtime - starttime)
| 14.571429 | 41 | 0.715686 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 88 | 0.431373 |
4f0442ce4e58fffb400b80c3f2ed1670944947b9 | 336 | py | Python | hortiradar/database/restart_workers.py | mctenthij/big-tu-top10 | d551f944aa364728d97bb2b672276a97f8019749 | [
"Apache-2.0",
"BSD-2-Clause"
] | 7 | 2019-04-21T15:25:29.000Z | 2021-11-07T23:20:17.000Z | hortiradar/database/restart_workers.py | mctenthij/big-tu-top10 | d551f944aa364728d97bb2b672276a97f8019749 | [
"Apache-2.0",
"BSD-2-Clause"
] | null | null | null | hortiradar/database/restart_workers.py | mctenthij/big-tu-top10 | d551f944aa364728d97bb2b672276a97f8019749 | [
"Apache-2.0",
"BSD-2-Clause"
] | 2 | 2019-04-21T15:25:30.000Z | 2022-01-01T20:49:36.000Z | import os
import re
from subprocess import call
from time import sleep
supervisor_dir = "/etc/supervisor/conf.d/"
_, _, files = next(os.walk(supervisor_dir))
for f in files:
m = re.match("(hortiradar-worker\d)\.conf", f)
if m:
worker = m.group(1)
call(["supervisorctl", "restart", worker])
sleep(60)
| 19.764706 | 50 | 0.642857 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 78 | 0.232143 |
4f0474aa94a2859bafafd22b10610ff8ce47631c | 687 | py | Python | service/main.py | release-depot/rtls | e3e8af0a41def60a13e46bef1b276d5bdd215b8d | [
"MIT"
] | null | null | null | service/main.py | release-depot/rtls | e3e8af0a41def60a13e46bef1b276d5bdd215b8d | [
"MIT"
] | 4 | 2020-07-29T13:55:22.000Z | 2021-01-07T14:51:24.000Z | service/main.py | release-depot/rtls | e3e8af0a41def60a13e46bef1b276d5bdd215b8d | [
"MIT"
] | 3 | 2017-11-15T18:51:12.000Z | 2020-12-08T18:37:39.000Z | #!/usr/bin/env python
""" Main runner for the service """
from flask import Flask
from flask_restful import Api
from resources.rtls import Rtls
def create_app():
app = Flask(__name__)
api = Api(app)
api.add_resource(Rtls, '/rtls/',
'/rtls/<string:package>/<string:change_id>',
resource_class_kwargs={'logger': app.logger})
return app
def run():
from logging import INFO
app = create_app()
app.logger.setLevel(INFO)
app.run(port=8080)
def run_debug():
from logging import DEBUG
app = create_app()
app.logger.setLevel(DEBUG)
app.run(port=8080)
if __name__ == '__main__':
run_debug()
| 18.567568 | 66 | 0.631732 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 125 | 0.181951 |
4f0510a9836b3985958ce8cc21d77fb1c29fbe31 | 3,205 | py | Python | ykdl/extractors/mgtv.py | 163ui/ykdl | aea70f47cbe06cc500120c6b89c16cd0905ca19f | [
"MIT"
] | 3 | 2018-09-04T09:33:51.000Z | 2021-11-01T09:03:27.000Z | ykdl/extractors/mgtv.py | 163ui/ykdl | aea70f47cbe06cc500120c6b89c16cd0905ca19f | [
"MIT"
] | null | null | null | ykdl/extractors/mgtv.py | 163ui/ykdl | aea70f47cbe06cc500120c6b89c16cd0905ca19f | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from ykdl.util.html import default_proxy_handler, get_content
from ykdl.util.match import match1, matchall
from ykdl.extractor import VideoExtractor
from ykdl.videoinfo import VideoInfo
from ykdl.compact import install_opener, build_opener, HTTPCookieProcessor
import json
import sys
import base64
import uuid
import time
py3 = sys.version_info[0] == 3
if py3:
maketrans = bytes.maketrans
bytearray2str = bytearray.decode
else:
from string import maketrans
bytearray2str = str
encode_translation = maketrans(b'+/=', b'_~-')
def generate_did_tk2():
did = str(uuid.uuid4())
s = 'pno=1000|ver=0.3.0001|did={}|clit={}'.format(did, int(time.time()))
if not isinstance(s, bytes):
s = s.encode()
e = bytearray(base64.b64encode(s).translate(encode_translation))
e.reverse()
return did, bytearray2str(e)
class Hunantv(VideoExtractor):
name = u"芒果TV (HunanTV)"
supported_stream_profile = [ u'蓝光', u'超清', u'高清', u'标清' ]
supported_stream_types = [ 'BD', 'TD', 'HD', 'SD' ]
profile_2_types = { u'蓝光': 'BD', u'超清': 'TD', u'高清': 'HD', u'标清': 'SD' }
def prepare(self):
handlers = [HTTPCookieProcessor()]
if default_proxy_handler:
handlers += default_proxy_handler
install_opener(build_opener(*handlers))
info = VideoInfo(self.name)
if self.url and not self.vid:
self.vid = match1(self.url, 'https?://www.mgtv.com/b/\d+/(\d+).html')
if self.vid is None:
html = get_content(self.url)
self.vid = match1(html, 'vid=(\d+)', 'vid=\"(\d+)', 'vid: (\d+)')
did, tk2 = generate_did_tk2()
api_info_url = 'https://pcweb.api.mgtv.com/player/video?video_id={}&did={}&tk2={}'.format(self.vid, did, tk2)
meta = json.loads(get_content(api_info_url))
assert meta['code'] == 200, '[failed] code: {}, msg: {}'.format(meta['code'], meta['msg'])
assert meta['data'], '[Failed] Video info not found.'
pm2 = meta['data']['atc']['pm2']
info.title = meta['data']['info']['title']
api_source_url = 'https://pcweb.api.mgtv.com/player/getSource?video_id={}&did={}&pm2={}&tk2={}'.format(self.vid, did, pm2, tk2)
meta = json.loads(get_content(api_source_url))
assert meta['code'] == 200, '[failed] code: {}, msg: {}'.format(meta['code'], meta['msg'])
assert meta['data'], '[Failed] Video source not found.'
data = meta['data']
domain = data['stream_domain'][0]
for lstream in data['stream']:
if lstream['url']:
url = json.loads(get_content(domain + lstream['url']))['info']
info.streams[self.profile_2_types[lstream['name']]] = {'container': 'm3u8', 'video_profile': lstream['name'], 'src' : [url]}
info.stream_types.append(self.profile_2_types[lstream['name']])
info.stream_types= sorted(info.stream_types, key = self.supported_stream_types.index)
return info
def prepare_list(self):
html = get_content(self.url, headers={})
return matchall(html, ['"a-pic-play" href="([^"]+)"'])
site = Hunantv()
| 36.420455 | 140 | 0.613417 | 2,326 | 0.71768 | 0 | 0 | 0 | 0 | 0 | 0 | 772 | 0.238198 |