hexsha
stringlengths
40
40
size
int64
7
1.04M
ext
stringclasses
10 values
lang
stringclasses
1 value
max_stars_repo_path
stringlengths
4
247
max_stars_repo_name
stringlengths
4
125
max_stars_repo_head_hexsha
stringlengths
40
78
max_stars_repo_licenses
listlengths
1
10
max_stars_count
int64
1
368k
max_stars_repo_stars_event_min_datetime
stringlengths
24
24
max_stars_repo_stars_event_max_datetime
stringlengths
24
24
max_issues_repo_path
stringlengths
4
247
max_issues_repo_name
stringlengths
4
125
max_issues_repo_head_hexsha
stringlengths
40
78
max_issues_repo_licenses
listlengths
1
10
max_issues_count
int64
1
116k
max_issues_repo_issues_event_min_datetime
stringlengths
24
24
max_issues_repo_issues_event_max_datetime
stringlengths
24
24
max_forks_repo_path
stringlengths
4
247
max_forks_repo_name
stringlengths
4
125
max_forks_repo_head_hexsha
stringlengths
40
78
max_forks_repo_licenses
listlengths
1
10
max_forks_count
int64
1
105k
max_forks_repo_forks_event_min_datetime
stringlengths
24
24
max_forks_repo_forks_event_max_datetime
stringlengths
24
24
content
stringlengths
1
1.04M
avg_line_length
float64
1.77
618k
max_line_length
int64
1
1.02M
alphanum_fraction
float64
0
1
original_content
stringlengths
7
1.04M
filtered:remove_function_no_docstring
int64
-102
942k
filtered:remove_class_no_docstring
int64
-354
977k
filtered:remove_delete_markers
int64
0
60.1k
3499eeff8038172939193acef20b3fe68d410fc5
589
py
Python
Miscellaneous/GuessTheNo.py
spirulinax/PyHub
ae92c249224013f86c06b4d6cec3c6caa2e18c4d
[ "MIT" ]
1
2020-11-09T14:13:45.000Z
2020-11-09T14:13:45.000Z
Miscellaneous/GuessTheNo.py
spirulinax/PyHub
ae92c249224013f86c06b4d6cec3c6caa2e18c4d
[ "MIT" ]
null
null
null
Miscellaneous/GuessTheNo.py
spirulinax/PyHub
ae92c249224013f86c06b4d6cec3c6caa2e18c4d
[ "MIT" ]
null
null
null
# Guess The No n = 45 attempt = 0 NoOfGuess = 9 while NoOfGuess > 0: attempt += 1 print("---------------------------------") No = int(input("Enter a No:-")) if No == n: print("Congratulations You won!") print("Total Attempt Taken:-", attempt) break elif No < n: print("Given No is Smaller") else: print("Given No is Greater") NoOfGuess -= 1 print("---------------------------------") if NoOfGuess == 0: print("Game Over! The Number was ",n) else: print("No of Guesses left: ", NoOfGuess)
21.035714
48
0.47708
# Guess The No n = 45 attempt = 0 NoOfGuess = 9 while NoOfGuess > 0: attempt += 1 print("---------------------------------") No = int(input("Enter a No:-")) if No == n: print("Congratulations You won!") print("Total Attempt Taken:-", attempt) break elif No < n: print("Given No is Smaller") else: print("Given No is Greater") NoOfGuess -= 1 print("---------------------------------") if NoOfGuess == 0: print("Game Over! The Number was ",n) else: print("No of Guesses left: ", NoOfGuess)
0
0
0
8ac4088faa49f24247ab4e52803e3dbe165157c3
4,231
py
Python
examples/gui/gui_common.py
charbeljc/pymodbus
f0edef3071d500b7e3a987ee196c38bb79f59518
[ "W3C" ]
null
null
null
examples/gui/gui_common.py
charbeljc/pymodbus
f0edef3071d500b7e3a987ee196c38bb79f59518
[ "W3C" ]
null
null
null
examples/gui/gui_common.py
charbeljc/pymodbus
f0edef3071d500b7e3a987ee196c38bb79f59518
[ "W3C" ]
2
2021-05-02T06:37:33.000Z
2022-02-06T16:08:47.000Z
#!/usr/bin/env python # -------------------------------------------------------------------------- # # System # -------------------------------------------------------------------------- # import os import getpass import pickle from threading import Thread # -------------------------------------------------------------------------- # # SNMP Simulator # -------------------------------------------------------------------------- # from twisted.internet import reactor from twisted.internet import error as twisted_error from pymodbus.server.async import ModbusServerFactory from pymodbus.datastore import ModbusServerContext,ModbusSlaveContext # -------------------------------------------------------------------------- # # Logging # -------------------------------------------------------------------------- # import logging log = logging.getLogger("pymodbus") # -------------------------------------------------------------------------- # # Application Error # -------------------------------------------------------------------------- # class ConfigurationException(Exception): """ Exception for configuration error """ pass # -------------------------------------------------------------------------- # # Extra Global Functions # -------------------------------------------------------------------------- # # These are extra helper functions that don't belong in a class # -------------------------------------------------------------------------- # def root_test(): """ Simple test to see if we are running as root """ return getpass.getuser() == "root" # -------------------------------------------------------------------------- # # Simulator Class # -------------------------------------------------------------------------- # class Simulator(object): """ Class used to parse configuration file and create and modbus datastore. The format of the configuration file is actually just a python pickle, which is a compressed memory dump from the scraper. 
""" def __init__(self, config): """ Trys to load a configuration file, lets the file not found exception fall through :param config: The pickled datastore """ try: self.file = open(config, "r") except Exception: raise ConfigurationException("File not found %s" % config) def _parse(self): """ Parses the config file and creates a server context """ try: handle = pickle.load(self.file) dsd = handle['di'] csd = handle['ci'] hsd = handle['hr'] isd = handle['ir'] except KeyError: raise ConfigurationException("Invalid Configuration") slave = ModbusSlaveContext(d=dsd, c=csd, h=hsd, i=isd) return ModbusServerContext(slaves=slave) def _simulator(self): """ Starts the snmp simulator """ ports = [502]+range(20000,25000) for port in ports: try: reactor.listenTCP(port, ModbusServerFactory(self._parse())) log.debug('listening on port %d' % port) return port except twisted_error.CannotListenError: pass def run(self): """ Used to run the simulator """ log.debug('simulator started') reactor.callWhenRunning(self._simulator) # -------------------------------------------------------------------------- # # Network reset thread # -------------------------------------------------------------------------- # # This is linux only, maybe I should make a base class that can be filled # in for linux(debian/redhat)/windows/nix # -------------------------------------------------------------------------- # class NetworkReset(Thread): """ This class is simply a daemon that is spun off at the end of the program to call the network restart function (an easy way to remove all the virtual interfaces) """ def __init__(self): """ Initialize a new network reset thread """ Thread.__init__(self) self.setDaemon(True) def run(self): """ Run the network reset """ os.system("/etc/init.d/networking restart")
34.398374
78
0.45143
#!/usr/bin/env python # -------------------------------------------------------------------------- # # System # -------------------------------------------------------------------------- # import os import getpass import pickle from threading import Thread # -------------------------------------------------------------------------- # # SNMP Simulator # -------------------------------------------------------------------------- # from twisted.internet import reactor from twisted.internet import error as twisted_error from pymodbus.server.async import ModbusServerFactory from pymodbus.datastore import ModbusServerContext,ModbusSlaveContext # -------------------------------------------------------------------------- # # Logging # -------------------------------------------------------------------------- # import logging log = logging.getLogger("pymodbus") # -------------------------------------------------------------------------- # # Application Error # -------------------------------------------------------------------------- # class ConfigurationException(Exception): """ Exception for configuration error """ pass # -------------------------------------------------------------------------- # # Extra Global Functions # -------------------------------------------------------------------------- # # These are extra helper functions that don't belong in a class # -------------------------------------------------------------------------- # def root_test(): """ Simple test to see if we are running as root """ return getpass.getuser() == "root" # -------------------------------------------------------------------------- # # Simulator Class # -------------------------------------------------------------------------- # class Simulator(object): """ Class used to parse configuration file and create and modbus datastore. The format of the configuration file is actually just a python pickle, which is a compressed memory dump from the scraper. 
""" def __init__(self, config): """ Trys to load a configuration file, lets the file not found exception fall through :param config: The pickled datastore """ try: self.file = open(config, "r") except Exception: raise ConfigurationException("File not found %s" % config) def _parse(self): """ Parses the config file and creates a server context """ try: handle = pickle.load(self.file) dsd = handle['di'] csd = handle['ci'] hsd = handle['hr'] isd = handle['ir'] except KeyError: raise ConfigurationException("Invalid Configuration") slave = ModbusSlaveContext(d=dsd, c=csd, h=hsd, i=isd) return ModbusServerContext(slaves=slave) def _simulator(self): """ Starts the snmp simulator """ ports = [502]+range(20000,25000) for port in ports: try: reactor.listenTCP(port, ModbusServerFactory(self._parse())) log.debug('listening on port %d' % port) return port except twisted_error.CannotListenError: pass def run(self): """ Used to run the simulator """ log.debug('simulator started') reactor.callWhenRunning(self._simulator) # -------------------------------------------------------------------------- # # Network reset thread # -------------------------------------------------------------------------- # # This is linux only, maybe I should make a base class that can be filled # in for linux(debian/redhat)/windows/nix # -------------------------------------------------------------------------- # class NetworkReset(Thread): """ This class is simply a daemon that is spun off at the end of the program to call the network restart function (an easy way to remove all the virtual interfaces) """ def __init__(self): """ Initialize a new network reset thread """ Thread.__init__(self) self.setDaemon(True) def run(self): """ Run the network reset """ os.system("/etc/init.d/networking restart")
0
0
0
99737a47300033ec91b5e8f9c0d5fbaea99d17dc
2,124
py
Python
abc_graphene_sqlalchemy/tests/test_fields.py
maquino1985/graphene-sqlalchemy
89e11fbbb7513bb07fb87ed805cad6dcd98d41c2
[ "MIT" ]
1
2021-03-18T10:35:33.000Z
2021-03-18T10:35:33.000Z
abc_graphene_sqlalchemy/tests/test_fields.py
maquino1985/graphene-sqlalchemy
89e11fbbb7513bb07fb87ed805cad6dcd98d41c2
[ "MIT" ]
null
null
null
abc_graphene_sqlalchemy/tests/test_fields.py
maquino1985/graphene-sqlalchemy
89e11fbbb7513bb07fb87ed805cad6dcd98d41c2
[ "MIT" ]
null
null
null
import logging import pytest from promise import Promise from graphene import InputObjectType from graphene.relay import Connection from sqlalchemy import inspect from .models import Editor as EditorModel from .models import Pet as PetModel from ..fields import SQLAlchemyConnectionField, SQLAlchemyFilteredConnectionField from ..types import SQLAlchemyObjectType log = logging.getLogger(__name__)
27.947368
111
0.750471
import logging import pytest from promise import Promise from graphene import InputObjectType from graphene.relay import Connection from sqlalchemy import inspect from .models import Editor as EditorModel from .models import Pet as PetModel from ..fields import SQLAlchemyConnectionField, SQLAlchemyFilteredConnectionField from ..types import SQLAlchemyObjectType log = logging.getLogger(__name__) class Pet(SQLAlchemyObjectType): class Meta: model = PetModel class Editor(SQLAlchemyObjectType): class Meta: model = EditorModel class PetConnection(Connection): class Meta: node = Pet def test_promise_connection_resolver(): def resolver(_obj, _info): return Promise.resolve([]) result = SQLAlchemyConnectionField.connection_resolver( resolver, PetConnection, Pet, None, None ) assert isinstance(result, Promise) def test_where_filter_added(): field = SQLAlchemyFilteredConnectionField(Pet) assert "where" in field.args assert issubclass(field.args['where']._type, InputObjectType) filter_fields = field.args['where']._type._meta.fields log.info(filter_fields) filter_column_names = [column.name for column in inspect(Pet._meta.model).columns.values()] + ['and', 'or'] for field_name, value in filter_fields.items(): assert field_name in filter_column_names def test_init_raises(): with pytest.raises(TypeError, match="Cannot create sort"): SQLAlchemyFilteredConnectionField(Connection) def test_sort_added_by_default(): field = SQLAlchemyConnectionField(PetConnection) assert "sort" in field.args assert field.args["sort"] == Pet.sort_argument() def test_sort_can_be_removed(): field = SQLAlchemyConnectionField(PetConnection, sort=None) assert "sort" not in field.args def test_custom_sort(): field = SQLAlchemyConnectionField(PetConnection, sort=Editor.sort_argument()) assert field.args["sort"] == Editor.sort_argument() def test_init_raises(): with pytest.raises(TypeError, match="Cannot create sort"): SQLAlchemyConnectionField(Connection)
1,328
156
229
d434cf747e0d00bccee8b50a8bf0bf43a33096bb
155
py
Python
module2/Bot/test.py
2B5/ia-3B5
40a54eb3c6b3d4f536ca1e93b317e2ea9c9a327e
[ "MIT" ]
null
null
null
module2/Bot/test.py
2B5/ia-3B5
40a54eb3c6b3d4f536ca1e93b317e2ea9c9a327e
[ "MIT" ]
2
2017-01-17T17:25:15.000Z
2021-02-16T23:08:25.000Z
module2/Bot/test.py
2B5/ia-3B5
40a54eb3c6b3d4f536ca1e93b317e2ea9c9a327e
[ "MIT" ]
2
2016-12-06T08:30:38.000Z
2016-12-22T01:06:27.000Z
import aiml kernel = aiml.Kernel() kernel.learn("startup.xml") kernel.respond("load aiml") while True: print(kernel.respond(raw_input("Message >>")))
19.375
50
0.716129
import aiml kernel = aiml.Kernel() kernel.learn("startup.xml") kernel.respond("load aiml") while True: print(kernel.respond(raw_input("Message >>")))
0
0
0
a3dfba0ecffb4a8e77fef62ce979de407e8dc97d
1,522
py
Python
semantic_segmentation/src/models/segformer.py
no-name-xiaosheng/PaddleViT
50226a3be5095b3727d3c62d2eab23ef1e9612ec
[ "Apache-2.0" ]
2
2021-11-23T02:01:52.000Z
2021-11-23T02:02:03.000Z
semantic_segmentation/src/models/segformer.py
Dongsheng-Bi/PaddleViT
c90a6c8dc3787e69cef3a37b9a260bd59eeff1f7
[ "Apache-2.0" ]
null
null
null
semantic_segmentation/src/models/segformer.py
Dongsheng-Bi/PaddleViT
c90a6c8dc3787e69cef3a37b9a260bd59eeff1f7
[ "Apache-2.0" ]
null
null
null
import paddle.nn as nn from .backbones.mix_transformer import MixVisionTransformer from .decoders.segformer_head import SegformerHead class Segformer(nn.Layer): """Segformer model implementation """
40.052632
63
0.676741
import paddle.nn as nn from .backbones.mix_transformer import MixVisionTransformer from .decoders.segformer_head import SegformerHead class Segformer(nn.Layer): """Segformer model implementation """ def __init__(self, config): super(Segformer, self).__init__() self.backbone = MixVisionTransformer( in_channels=config.MODEL.TRANS.IN_CHANNELS, embed_dims=config.MODEL.TRANS.EMBED_DIM, num_stages=config.MODEL.TRANS.NUM_STAGES, num_layers=config.MODEL.TRANS.NUM_LAYERS, num_heads=config.MODEL.TRANS.NUM_HEADS, patch_sizes=config.MODEL.TRANS.PATCH_SIZE, strides=config.MODEL.TRANS.STRIDES, sr_ratios=config.MODEL.TRANS.SR_RATIOS, out_indices=config.MODEL.ENCODER.OUT_INDICES, mlp_ratio=config.MODEL.TRANS.MLP_RATIO, qkv_bias=config.MODEL.TRANS.QKV_BIAS, drop_rate=config.MODEL.DROPOUT, attn_drop_rate=config.MODEL.ATTENTION_DROPOUT, drop_path_rate=config.MODEL.DROP_PATH, pretrained=config.MODEL.PRETRAINED) self.decode_head = SegformerHead( in_channels=config.MODEL.SEGFORMER.IN_CHANNELS, channels=config.MODEL.SEGFORMER.CHANNELS, num_classes=config.DATA.NUM_CLASSES, align_corners=config.MODEL.SEGFORMER.ALIGN_CORNERS) def forward(self, inputs): features = self.backbone(inputs) out = self.decode_head(features) return out
1,255
0
53
88d0e040eb0d18aebce1104ffd1bde81bddf0d0b
8,176
py
Python
siuba/siu/dispatchers.py
sethvargo/siuba
90172e7888b93eb4a42187a5aea947dbcbbc05ad
[ "MIT" ]
null
null
null
siuba/siu/dispatchers.py
sethvargo/siuba
90172e7888b93eb4a42187a5aea947dbcbbc05ad
[ "MIT" ]
null
null
null
siuba/siu/dispatchers.py
sethvargo/siuba
90172e7888b93eb4a42187a5aea947dbcbbc05ad
[ "MIT" ]
null
null
null
# symbolic dispatch wrapper --------------------------------------------------- from functools import singledispatch, update_wrapper, wraps import inspect from .calls import Call, FuncArg, MetaArg, Lazy from .symbolic import Symbolic, create_sym_call, strip_symbolic def symbolic_dispatch(f = None, cls = object): """Return a generic dispatch function with symbolic data implementations. The function dispatches (Call or Symbolic) -> FuncArg. Parameters ---------- cls : A class to dispatch on. f : A function to call if no classes match while dispatching. Examples -------- Here is an example of running separate add functions on integers and strings. >>> @symbolic_dispatch(cls = int) ... def add1(x): return x + 1 >>> @add1.register(str) ... def _add1_str(x): return int(x) + 1 >>> add1(1) 2 >>> add1("1") 2 Note that passing a symbolic causes it to return a symbolic, so you can continue creating expressions. >>> from siuba.siu import _ >>> type(add1(_.a.b) + _.c.d) <class 'siuba.siu.symbolic.Symbolic'> symbolic dispatch raises a NotImplementedError by default if it no function ``f`` is passed. However, you can override the default as follows: >>> @symbolic_dispatch ... 
def my_func(x): raise NotImplementedError("some error message") """ if f is None: return lambda f: symbolic_dispatch(f, cls) # TODO: don't use singledispatch if it has already been done dispatch_func = singledispatch(f) if cls is not object: dispatch_func.register(cls, f) dispatch_func.register(object, _dispatch_not_impl(dispatch_func.__name__)) @dispatch_func.register(Symbolic) @dispatch_func.register(Call) return dispatch_func # Verb dispatch ============================================================== def pipe_no_args(f): """Register a concrete function that handles when a verb received no arguments.""" @f.register(NoArgs) return f def register_pipe(f, cls): """Register a concrete function that returns a Pipeable when called.""" @f.register(cls) return f def register_pipe_call(f): """Register a concrete function that .""" @f.register(Call) return f # option: no args, custom dispatch (e.g. register NoArgs) # strips symbols def verb_dispatch(cls, f = None): """Wrap singledispatch. Making sure to keep its attributes on the wrapper. This wrapper has three jobs: 1. strip symbols off of calls 2. pass NoArgs instance for calls like some_func(), so dispatcher can handle 3. return a Pipeable when the first arg of a call is a symbol Parameters ---------- cls : A class to dispatch on. f : A function to call if no classes match while dispatching. 
""" # classic way of allowing args to a decorator if f is None: return lambda f: verb_dispatch(cls, f) # initially registers func for object, so need to change to pd.DataFrame dispatch_func = singledispatch(f) if isinstance(cls, tuple): for c in cls: dispatch_func.register(c, f) else: dispatch_func.register(cls, f) # then, set the default object dispatcher to create a pipe register_pipe(dispatch_func, object) # register dispatcher for Call, and NoArgs register_pipe_call(dispatch_func) pipe_no_args(dispatch_func) @wraps(dispatch_func) return wrapper # TODO: deprecate / remove singledispatch2 singledispatch2 = verb_dispatch # Pipe ======================================================================== class Pipeable: """Enable function composition through the right bitshift (>>) operator. Parameters ---------- f : A function to be called. calls : sequence, optional A list-like of functions to be called, with each result chained into the next. Examples -------- >>> f = lambda x: x + 1 Eager evaluation: >>> 1 >> Pipeable(f) 2 Defer to a pipe: >>> p = Pipeable(f) >> Pipeable(f) >>> 1 >> p 3 >>> p_undo = p >> (lambda x: x - 3) >>> 1 >> p_undo 0 >>> from siuba.siu import _ >>> p_undo_sym = p >> (_ - 3) >>> 1 >> p_undo_sym 0 """ def __rshift__(self, x) -> "Pipeable": """Defer evaluation when pipe is on the left (lazy piping).""" if isinstance(x, Pipeable): return Pipeable(calls = self.calls + x.calls) elif isinstance(x, (Symbolic, Call)): call = strip_symbolic(x) return Pipeable(calls = self.calls + [call]) elif callable(x): return Pipeable(calls = self.calls + [x]) raise Exception() def __rrshift__(self, x): """Potentially evaluate result when pipe is on the right (eager piping). 
This function handles two cases: * callable >> pipe -> pipe * otherewise, evaluate the pipe """ if isinstance(x, (Symbolic, Call)): call = strip_symbolic(x) return Pipeable(calls = [call] + self.calls) elif callable(x): return Pipeable(calls = [x] + self.calls) return self(x) def __call__(self, x): """Evaluate a pipe and return the result. Parameters ---------- x : An object to be passed into the first function in the pipe. """ res = x for f in self.calls: res = f(res) return res def create_pipe_call(obj, *args, **kwargs) -> Pipeable: """Return a Call of a function on its args and kwargs, wrapped in a Pipeable.""" first, *rest = args return Pipeable(Call( "__call__", strip_symbolic(obj), strip_symbolic(first), *(Lazy(strip_symbolic(x)) for x in rest), **{k: Lazy(strip_symbolic(v)) for k,v in kwargs.items()} ))
29.516245
95
0.597725
# symbolic dispatch wrapper --------------------------------------------------- from functools import singledispatch, update_wrapper, wraps import inspect from .calls import Call, FuncArg, MetaArg, Lazy from .symbolic import Symbolic, create_sym_call, strip_symbolic def _dispatch_not_impl(func_name): def f(x, *args, **kwargs): raise TypeError("singledispatch function {func_name} not implemented for type {type}" .format(func_name = func_name, type = type(x)) ) return f def symbolic_dispatch(f = None, cls = object): """Return a generic dispatch function with symbolic data implementations. The function dispatches (Call or Symbolic) -> FuncArg. Parameters ---------- cls : A class to dispatch on. f : A function to call if no classes match while dispatching. Examples -------- Here is an example of running separate add functions on integers and strings. >>> @symbolic_dispatch(cls = int) ... def add1(x): return x + 1 >>> @add1.register(str) ... def _add1_str(x): return int(x) + 1 >>> add1(1) 2 >>> add1("1") 2 Note that passing a symbolic causes it to return a symbolic, so you can continue creating expressions. >>> from siuba.siu import _ >>> type(add1(_.a.b) + _.c.d) <class 'siuba.siu.symbolic.Symbolic'> symbolic dispatch raises a NotImplementedError by default if it no function ``f`` is passed. However, you can override the default as follows: >>> @symbolic_dispatch ... 
def my_func(x): raise NotImplementedError("some error message") """ if f is None: return lambda f: symbolic_dispatch(f, cls) # TODO: don't use singledispatch if it has already been done dispatch_func = singledispatch(f) if cls is not object: dispatch_func.register(cls, f) dispatch_func.register(object, _dispatch_not_impl(dispatch_func.__name__)) @dispatch_func.register(Symbolic) def _dispatch_symbol(__data, *args, **kwargs): return create_sym_call(FuncArg(dispatch_func), strip_symbolic(__data), *args, **kwargs) @dispatch_func.register(Call) def _dispatch_call(__data, *args, **kwargs): # TODO: want to just create call, for now use hack of creating a symbolic # call and getting the source off of it... return strip_symbolic(create_sym_call(FuncArg(dispatch_func), __data, *args, **kwargs)) return dispatch_func # Verb dispatch ============================================================== class NoArgs: pass def pipe_no_args(f): """Register a concrete function that handles when a verb received no arguments.""" @f.register(NoArgs) def wrapper(__data, *args, **kwargs): # e.g. head() -> Pipeable(_ -> head(_)) return create_pipe_call(f, MetaArg("_"), *args, **kwargs) return f def register_pipe(f, cls): """Register a concrete function that returns a Pipeable when called.""" @f.register(cls) def wrapper(*args, **kwargs): return create_pipe_call(f, MetaArg("_"), *args, **kwargs) return f def register_pipe_call(f): """Register a concrete function that .""" @f.register(Call) def f_dispatch(__data, *args, **kwargs): call = __data if isinstance(call, MetaArg): # single _ passed as first arg to function # e.g. mutate(_, _.id) -> Pipeable(_ -> mutate(_, _.id)) return create_pipe_call(f, call, *args, **kwargs) else: # more complex _ expr passed as first arg to function # e.g. mutate(_.id) -> Pipeable(_ -> mutate(_, _.id)) return create_pipe_call(f, MetaArg("_"), call, *args, **kwargs) return f # option: no args, custom dispatch (e.g. 
register NoArgs) # strips symbols def verb_dispatch(cls, f = None): """Wrap singledispatch. Making sure to keep its attributes on the wrapper. This wrapper has three jobs: 1. strip symbols off of calls 2. pass NoArgs instance for calls like some_func(), so dispatcher can handle 3. return a Pipeable when the first arg of a call is a symbol Parameters ---------- cls : A class to dispatch on. f : A function to call if no classes match while dispatching. """ # classic way of allowing args to a decorator if f is None: return lambda f: verb_dispatch(cls, f) # initially registers func for object, so need to change to pd.DataFrame dispatch_func = singledispatch(f) if isinstance(cls, tuple): for c in cls: dispatch_func.register(c, f) else: dispatch_func.register(cls, f) # then, set the default object dispatcher to create a pipe register_pipe(dispatch_func, object) # register dispatcher for Call, and NoArgs register_pipe_call(dispatch_func) pipe_no_args(dispatch_func) @wraps(dispatch_func) def wrapper(*args, **kwargs): strip_args = map(strip_symbolic, args) strip_kwargs = {k: strip_symbolic(v) for k,v in kwargs.items()} if not args: return dispatch_func(NoArgs(), **strip_kwargs) return dispatch_func(*strip_args, **strip_kwargs) return wrapper # TODO: deprecate / remove singledispatch2 singledispatch2 = verb_dispatch # Pipe ======================================================================== class Pipeable: """Enable function composition through the right bitshift (>>) operator. Parameters ---------- f : A function to be called. calls : sequence, optional A list-like of functions to be called, with each result chained into the next. 
Examples -------- >>> f = lambda x: x + 1 Eager evaluation: >>> 1 >> Pipeable(f) 2 Defer to a pipe: >>> p = Pipeable(f) >> Pipeable(f) >>> 1 >> p 3 >>> p_undo = p >> (lambda x: x - 3) >>> 1 >> p_undo 0 >>> from siuba.siu import _ >>> p_undo_sym = p >> (_ - 3) >>> 1 >> p_undo_sym 0 """ def __init__(self, f = None, calls = None): # symbolics like _.some_attr need to be stripped down to a call, because # calling _.some_attr() returns another symbolic. f = strip_symbolic(f) if f is not None: if calls is not None: raise Exception() self.calls = [f] elif calls is not None: self.calls = calls def __rshift__(self, x) -> "Pipeable": """Defer evaluation when pipe is on the left (lazy piping).""" if isinstance(x, Pipeable): return Pipeable(calls = self.calls + x.calls) elif isinstance(x, (Symbolic, Call)): call = strip_symbolic(x) return Pipeable(calls = self.calls + [call]) elif callable(x): return Pipeable(calls = self.calls + [x]) raise Exception() def __rrshift__(self, x): """Potentially evaluate result when pipe is on the right (eager piping). This function handles two cases: * callable >> pipe -> pipe * otherewise, evaluate the pipe """ if isinstance(x, (Symbolic, Call)): call = strip_symbolic(x) return Pipeable(calls = [call] + self.calls) elif callable(x): return Pipeable(calls = [x] + self.calls) return self(x) def __call__(self, x): """Evaluate a pipe and return the result. Parameters ---------- x : An object to be passed into the first function in the pipe. """ res = x for f in self.calls: res = f(res) return res def create_pipe_call(obj, *args, **kwargs) -> Pipeable: """Return a Call of a function on its args and kwargs, wrapped in a Pipeable.""" first, *rest = args return Pipeable(Call( "__call__", strip_symbolic(obj), strip_symbolic(first), *(Lazy(strip_symbolic(x)) for x in rest), **{k: Lazy(strip_symbolic(v)) for k,v in kwargs.items()} ))
1,956
-3
229
fa1316628e51b9c7e416adbda3bf277df8f62831
3,899
py
Python
Ptr_Net_TSPTW/critic.py
samsvp/neural-combinatorial-optimization-rl-tensorflow
add7f1cbb46f363499bc17c085e42825d8e929d1
[ "MIT" ]
223
2017-09-25T11:09:33.000Z
2022-03-30T12:04:25.000Z
Ptr_Net_TSPTW/critic.py
samsvp/neural-combinatorial-optimization-rl-tensorflow
add7f1cbb46f363499bc17c085e42825d8e929d1
[ "MIT" ]
6
2017-11-16T11:01:06.000Z
2020-07-22T07:46:56.000Z
Ptr_Net_TSPTW/critic.py
samsvp/neural-combinatorial-optimization-rl-tensorflow
add7f1cbb46f363499bc17c085e42825d8e929d1
[ "MIT" ]
68
2017-11-17T19:55:39.000Z
2022-03-06T12:45:50.000Z
import tensorflow as tf from tensorflow.contrib.rnn import LSTMCell, MultiRNNCell, DropoutWrapper import numpy as np from tqdm import tqdm import matplotlib.pyplot as plt
53.410959
146
0.637599
import tensorflow as tf from tensorflow.contrib.rnn import LSTMCell, MultiRNNCell, DropoutWrapper import numpy as np from tqdm import tqdm import matplotlib.pyplot as plt class Critic(object): def __init__(self, config): self.config=config # Data config self.batch_size = config.batch_size # batch size self.max_length = config.max_length # input sequence length (number of cities) self.input_dimension = config.input_dimension # dimension of a city (coordinates) # Network config self.input_embed = config.input_embed # dimension of embedding space self.num_neurons = config.hidden_dim # dimension of hidden states (LSTM cell) self.initializer = tf.contrib.layers.xavier_initializer() # variables initializer # Baseline setup self.init_baseline = 7. #self.max_length/2 # good initial baseline for TSP # Training config self.is_training = not config.inference_mode def predict_rewards(self,input_): with tf.variable_scope("encoder"): with tf.variable_scope("embedding"): # Embed input sequence W_embed =tf.get_variable("weights", [1,self.input_dimension+2, self.input_embed], initializer=self.initializer) embedded_input = tf.nn.conv1d(input_, W_embed, 1, "VALID", name="embedded_input") # Batch Normalization embedded_input = tf.layers.batch_normalization(embedded_input, axis=2, training=self.is_training, name='layer_norm', reuse=None) with tf.variable_scope("dynamic_rnn"): # Encode input sequence cell1 = LSTMCell(self.num_neurons, initializer=self.initializer) # Return the output activations [Batch size, Sequence Length, Num_neurons] and last hidden state (c,h) as tensors. 
encoder_output, encoder_state = tf.nn.dynamic_rnn(cell1, embedded_input, dtype=tf.float32) #frame = tf.reduce_mean(encoder_output, 1) # [Batch size, Sequence Length, Num_neurons] to [Batch size, Num_neurons] frame = encoder_state[0] # [Batch size, Num_neurons] # Glimpse with tf.variable_scope("glimpse"): self.W_ref_g =tf.get_variable("W_ref_g",[1,self.num_neurons,self.num_neurons],initializer=self.initializer) self.W_q_g =tf.get_variable("W_q_g",[self.num_neurons,self.num_neurons],initializer=self.initializer) self.v_g =tf.get_variable("v_g",[self.num_neurons],initializer=self.initializer) # Attending mechanism encoded_ref_g = tf.nn.conv1d(encoder_output, self.W_ref_g, 1, "VALID", name="encoded_ref_g") # [Batch size, seq_length, n_hidden] encoded_query_g = tf.expand_dims(tf.matmul(frame, self.W_q_g, name="encoded_query_g"), 1) # [Batch size, 1, n_hidden] scores_g = tf.reduce_sum(self.v_g * tf.tanh(encoded_ref_g + encoded_query_g), [-1], name="scores_g") # [Batch size, seq_length] attention_g = tf.nn.softmax(scores_g, name="attention_g") # 1 glimpse = Linear combination of reference vectors (defines new query vector) glimpse = tf.multiply(encoder_output, tf.expand_dims(attention_g,2)) glimpse = tf.reduce_sum(glimpse,1) with tf.variable_scope("ffn"): # ffn 1 h0 = tf.layers.dense(glimpse, self.num_neurons, activation=tf.nn.relu, kernel_initializer=self.initializer) # ffn 2 w1 =tf.get_variable("w1", [self.num_neurons, 1], initializer=self.initializer) b1 = tf.Variable(self.init_baseline, name="b1") self.predictions = tf.squeeze(tf.matmul(h0, w1)+b1)
3,636
0
82
76f6d739cb23629b57aa94b4e728c30211cd2955
4,709
py
Python
woudc_data_registry/registry.py
ahurka/woudc-data-registry
29ce092076193f2730d488de5412d93a7b78b987
[ "MIT" ]
null
null
null
woudc_data_registry/registry.py
ahurka/woudc-data-registry
29ce092076193f2730d488de5412d93a7b78b987
[ "MIT" ]
null
null
null
woudc_data_registry/registry.py
ahurka/woudc-data-registry
29ce092076193f2730d488de5412d93a7b78b987
[ "MIT" ]
null
null
null
# ================================================================= # # Terms and Conditions of Use # # Unless otherwise noted, computer program source code of this # distribution # is covered under Crown Copyright, Government of # Canada, and is distributed under the MIT License. # # The Canada wordmark and related graphics associated with this # distribution are protected under trademark law and copyright law. # No permission is granted to use them outside the parameters of # the Government of Canada's corporate identity program. For # more information, see # http://www.tbs-sct.gc.ca/fip-pcim/index-eng.asp # # Copyright title to all 3rd party software distributed with this # software is held by the respective copyright holders as noted in # those files. Users are asked to read the 3rd Party Licenses # referenced with those assets. # # Copyright (c) 2019 Government of Canada # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation # files (the "Software"), to deal in the Software without # restriction, including without limitation the rights to use, # copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the # Software is furnished to do so, subject to the following # conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES # OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR # OTHER DEALINGS IN THE SOFTWARE. 
# # ================================================================= import logging from sqlalchemy import create_engine from sqlalchemy.exc import DataError from sqlalchemy.orm import sessionmaker from woudc_data_registry import config LOGGER = logging.getLogger(__name__) class Registry(object): """registry""" def __init__(self): """constructor""" LOGGER.debug('Creating SQLAlchemy connection') engine = create_engine(config.WDR_DATABASE_URL, echo=config.WDR_DB_DEBUG) Session = sessionmaker(bind=engine, expire_on_commit=False) self.session = Session() def query_distinct(self, domain): """ queries for distinct values :param domain: domain to be queried :returns: list of distinct values """ LOGGER.debug('Querying distinct values for {}'.format(domain)) values = [v[0] for v in self.session.query(domain).distinct()] return values def query_by_field(self, obj, obj_instance, by): """ query data by field :param obj: object (field) to be queried :param obj_instance: object instance to be queried :param by: value to be queried :returns: query results """ field = getattr(obj, by) value = getattr(obj_instance, by) LOGGER.debug('Querying for {}={}'.format(field, value)) results = self.session.query(obj).filter(field == value).all() return results def query_multiple_fields(self, table, values, fields=None): """ query a table by multiple fields :param table: table to be queried :param instance: dictionary with query values :param fields: fields to be filtered by :returns: query results """ conditions = [] if fields is None: for field in values: conditions.append(getattr(table, field) == values[field]) else: for field in fields: conditions.append(getattr(table, field) == values[field]) results = self.session.query(table).filter(*conditions).first() return results def save(self, obj=None): """ helper function to save object to registry :param obj: object to save (defualt None) :returns: void """ try: LOGGER.debug('Saving') if obj is not None: self.session.add(obj) # 
self.session.merge(obj) self.session.commit() self.session.close() except DataError as err: LOGGER.error('Failed to save to registry: {}'.format(err)) self.session.rollback()
32.475862
73
0.651094
# ================================================================= # # Terms and Conditions of Use # # Unless otherwise noted, computer program source code of this # distribution # is covered under Crown Copyright, Government of # Canada, and is distributed under the MIT License. # # The Canada wordmark and related graphics associated with this # distribution are protected under trademark law and copyright law. # No permission is granted to use them outside the parameters of # the Government of Canada's corporate identity program. For # more information, see # http://www.tbs-sct.gc.ca/fip-pcim/index-eng.asp # # Copyright title to all 3rd party software distributed with this # software is held by the respective copyright holders as noted in # those files. Users are asked to read the 3rd Party Licenses # referenced with those assets. # # Copyright (c) 2019 Government of Canada # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation # files (the "Software"), to deal in the Software without # restriction, including without limitation the rights to use, # copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the # Software is furnished to do so, subject to the following # conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES # OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR # OTHER DEALINGS IN THE SOFTWARE. 
# # ================================================================= import logging from sqlalchemy import create_engine from sqlalchemy.exc import DataError from sqlalchemy.orm import sessionmaker from woudc_data_registry import config LOGGER = logging.getLogger(__name__) class Registry(object): """registry""" def __init__(self): """constructor""" LOGGER.debug('Creating SQLAlchemy connection') engine = create_engine(config.WDR_DATABASE_URL, echo=config.WDR_DB_DEBUG) Session = sessionmaker(bind=engine, expire_on_commit=False) self.session = Session() def query_distinct(self, domain): """ queries for distinct values :param domain: domain to be queried :returns: list of distinct values """ LOGGER.debug('Querying distinct values for {}'.format(domain)) values = [v[0] for v in self.session.query(domain).distinct()] return values def query_by_field(self, obj, obj_instance, by): """ query data by field :param obj: object (field) to be queried :param obj_instance: object instance to be queried :param by: value to be queried :returns: query results """ field = getattr(obj, by) value = getattr(obj_instance, by) LOGGER.debug('Querying for {}={}'.format(field, value)) results = self.session.query(obj).filter(field == value).all() return results def query_multiple_fields(self, table, values, fields=None): """ query a table by multiple fields :param table: table to be queried :param instance: dictionary with query values :param fields: fields to be filtered by :returns: query results """ conditions = [] if fields is None: for field in values: conditions.append(getattr(table, field) == values[field]) else: for field in fields: conditions.append(getattr(table, field) == values[field]) results = self.session.query(table).filter(*conditions).first() return results def save(self, obj=None): """ helper function to save object to registry :param obj: object to save (defualt None) :returns: void """ try: LOGGER.debug('Saving') if obj is not None: self.session.add(obj) # 
self.session.merge(obj) self.session.commit() self.session.close() except DataError as err: LOGGER.error('Failed to save to registry: {}'.format(err)) self.session.rollback()
0
0
0
54b57db99bbed5cda2f6832054fc944284bdef27
322
py
Python
leadreader/str.py
raindrift/leadreader
c360630a75a149e20c9cf64bcb2c14646effb9ae
[ "MIT" ]
null
null
null
leadreader/str.py
raindrift/leadreader
c360630a75a149e20c9cf64bcb2c14646effb9ae
[ "MIT" ]
null
null
null
leadreader/str.py
raindrift/leadreader
c360630a75a149e20c9cf64bcb2c14646effb9ae
[ "MIT" ]
null
null
null
""" String utilities. """ def camelize(value): """ Return the camel-cased version of a string. Used for analysis class names. """ c = _camelcase() return "".join(next(c)(x) if x else '_' for x in value.split("_"))
21.466667
70
0.586957
""" String utilities. """ def camelize(value): """ Return the camel-cased version of a string. Used for analysis class names. """ def _camelcase(): while True: yield type(value).capitalize c = _camelcase() return "".join(next(c)(x) if x else '_' for x in value.split("_"))
57
0
26
130d68694e0cf1462e5ccacca80e498aa25a58b7
24,266
py
Python
Scripts/simulation/holidays/holiday_tradition.py
velocist/TS4CheatsInfo
b59ea7e5f4bd01d3b3bd7603843d525a9c179867
[ "Apache-2.0" ]
null
null
null
Scripts/simulation/holidays/holiday_tradition.py
velocist/TS4CheatsInfo
b59ea7e5f4bd01d3b3bd7603843d525a9c179867
[ "Apache-2.0" ]
null
null
null
Scripts/simulation/holidays/holiday_tradition.py
velocist/TS4CheatsInfo
b59ea7e5f4bd01d3b3bd7603843d525a9c179867
[ "Apache-2.0" ]
null
null
null
# uncompyle6 version 3.7.4 # Python bytecode 3.7 (3394) # Decompiled from: Python 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC v.1900 64 bit (AMD64)] # Embedded file name: T:\InGame\Gameplay\Scripts\Server\holidays\holiday_tradition.py # Compiled at: 2020-06-15 20:09:55 # Size of source mod 2**32: 25532 bytes from _collections import defaultdict from buffs.tunable import TunableBuffReference from business.business_enums import BusinessType from event_testing.resolver import SingleSimResolver, GlobalResolver from holidays.holiday_globals import HolidayState, TraditionPreference from interactions import ParticipantType from interactions.utils.display_mixin import get_display_mixin from relationships.relationship_tests import TunableRelationshipTest from sims.sim_info_tests import SimInfoTest, TraitTest from sims4.localization import TunableLocalizedString from sims4.tuning.instances import HashedTunedInstanceMetaclass from sims4.tuning.tunable import HasTunableReference, TunableReference, TunableList, Tunable, TunableTuple, TunableEnumEntry, TunableVariant, OptionalTunable, AutoFactoryInit, HasTunableSingletonFactory, TunableEnumSet, TunableSingletonFactory, TunableMapping, TunableRange from sims4.tuning.tunable_base import GroupNames, ExportModes from situations.service_npcs.modify_lot_items_tuning import ModifyAllLotItems from situations.situation_curve import SituationCurve from situations.situation_guest_list import SituationGuestList from situations.tunable import DestroySituationsByTagsMixin from tag import TunableTags from tunable_time import TunableTimeOfDay import alarms, elements, enum, event_testing, services, sims4.log, sims4.resources, travel_group.travel_group_tests, world.world_tests logger = sims4.log.Logger('Holiday', default_owner='jjacobson') StartSituation = TunableSingletonFactory.create_auto_factory(_StartSituation, 'StartSituation') ModifyAllItems = TunableSingletonFactory.create_auto_factory(_ModifyAllItems, 'ModifyAllItems') 
DestroySituations = TunableSingletonFactory.create_auto_factory(_DestroySituations, 'DestroySituations') HolidayTraditionDisplayMixin = get_display_mixin(has_description=True, has_icon=True, has_tooltip=True, export_modes=(ExportModes.All)) START_SITUATION = 0 MODIFY_ITEMS = 1 L. 418 0 LOAD_FAST 'self' 2 LOAD_ATTR _state 4 LOAD_GLOBAL HolidayState 6 LOAD_ATTR PRE_DAY 8 COMPARE_OP == 10 POP_JUMP_IF_FALSE 82 'to 82' L. 421 12 LOAD_FAST 'sim' 14 LOAD_ATTR is_npc 16 POP_JUMP_IF_FALSE 22 'to 22' L. 422 18 LOAD_CONST None 20 RETURN_VALUE 22_0 COME_FROM 16 '16' L. 423 22 SETUP_LOOP 152 'to 152' 24 LOAD_FAST 'self' 26 LOAD_ATTR pre_holiday_buffs 28 GET_ITER 30_0 COME_FROM 56 '56' 30 FOR_ITER 78 'to 78' 32 STORE_FAST 'buff' L. 424 34 LOAD_FAST 'sim' 36 LOAD_ATTR add_buff 38 LOAD_FAST 'buff' 40 LOAD_FAST 'self' 42 LOAD_ATTR pre_holiday_buff_reason 44 LOAD_CONST ('buff_reason',) 46 CALL_FUNCTION_KW_2 2 '2 total positional and keyword args' 48 STORE_FAST 'buff_handle' L. 425 50 LOAD_FAST 'buff_handle' 52 LOAD_CONST None 54 COMPARE_OP is-not 56 POP_JUMP_IF_FALSE 30 'to 30' L. 426 58 LOAD_FAST 'self' 60 LOAD_ATTR _buffs_added 62 LOAD_FAST 'sim' 64 LOAD_ATTR sim_id 66 BINARY_SUBSCR 68 LOAD_METHOD append 70 LOAD_FAST 'buff_handle' 72 CALL_METHOD_1 1 '1 positional argument' 74 POP_TOP 76 JUMP_BACK 30 'to 30' 78 POP_BLOCK 80 JUMP_FORWARD 152 'to 152' 82_0 COME_FROM 10 '10' L. 427 82 LOAD_FAST 'self' 84 LOAD_ATTR _state 86 LOAD_GLOBAL HolidayState 88 LOAD_ATTR RUNNING 90 COMPARE_OP == 92 POP_JUMP_IF_FALSE 152 'to 152' L. 428 94 SETUP_LOOP 152 'to 152' 96 LOAD_FAST 'self' 98 LOAD_ATTR holiday_buffs 100 GET_ITER 102_0 COME_FROM 128 '128' 102 FOR_ITER 150 'to 150' 104 STORE_FAST 'buff' L. 429 106 LOAD_FAST 'sim' 108 LOAD_ATTR add_buff 110 LOAD_FAST 'buff' 112 LOAD_FAST 'self' 114 LOAD_ATTR holiday_buff_reason 116 LOAD_CONST ('buff_reason',) 118 CALL_FUNCTION_KW_2 2 '2 total positional and keyword args' 120 STORE_FAST 'buff_handle' L. 
430 122 LOAD_FAST 'buff_handle' 124 LOAD_CONST None 126 COMPARE_OP is-not 128 POP_JUMP_IF_FALSE 102 'to 102' L. 431 130 LOAD_FAST 'self' 132 LOAD_ATTR _buffs_added 134 LOAD_FAST 'sim' 136 LOAD_ATTR sim_id 138 BINARY_SUBSCR 140 LOAD_METHOD append 142 LOAD_FAST 'buff_handle' 144 CALL_METHOD_1 1 '1 positional argument' 146 POP_TOP 148 JUMP_BACK 102 'to 102' 150 POP_BLOCK 152_0 COME_FROM_LOOP 94 '94' 152_1 COME_FROM 92 '92' 152_2 COME_FROM 80 '80' 152_3 COME_FROM_LOOP 22 '22' Parse error at or near `COME_FROM' instruction at offset 152_2
55.783908
401
0.623424
# uncompyle6 version 3.7.4 # Python bytecode 3.7 (3394) # Decompiled from: Python 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC v.1900 64 bit (AMD64)] # Embedded file name: T:\InGame\Gameplay\Scripts\Server\holidays\holiday_tradition.py # Compiled at: 2020-06-15 20:09:55 # Size of source mod 2**32: 25532 bytes from _collections import defaultdict from buffs.tunable import TunableBuffReference from business.business_enums import BusinessType from event_testing.resolver import SingleSimResolver, GlobalResolver from holidays.holiday_globals import HolidayState, TraditionPreference from interactions import ParticipantType from interactions.utils.display_mixin import get_display_mixin from relationships.relationship_tests import TunableRelationshipTest from sims.sim_info_tests import SimInfoTest, TraitTest from sims4.localization import TunableLocalizedString from sims4.tuning.instances import HashedTunedInstanceMetaclass from sims4.tuning.tunable import HasTunableReference, TunableReference, TunableList, Tunable, TunableTuple, TunableEnumEntry, TunableVariant, OptionalTunable, AutoFactoryInit, HasTunableSingletonFactory, TunableEnumSet, TunableSingletonFactory, TunableMapping, TunableRange from sims4.tuning.tunable_base import GroupNames, ExportModes from situations.service_npcs.modify_lot_items_tuning import ModifyAllLotItems from situations.situation_curve import SituationCurve from situations.situation_guest_list import SituationGuestList from situations.tunable import DestroySituationsByTagsMixin from tag import TunableTags from tunable_time import TunableTimeOfDay import alarms, elements, enum, event_testing, services, sims4.log, sims4.resources, travel_group.travel_group_tests, world.world_tests logger = sims4.log.Logger('Holiday', default_owner='jjacobson') class TraditionActivationEvent(enum.Int): HOLIDAY_ACTIVATE = 0 HOLIDAY_DEACTIVATE = 1 TRADITION_ADD = 2 TRADITION_REMOVE = 3 class TunablePreferenceTestVariant(TunableVariant): def __init__(self, 
description='A single tunable test.', **kwargs): (super().__init__)(relationship=TunableRelationshipTest(locked_args={'subject':ParticipantType.Actor, 'target_sim':ParticipantType.AllRelationships, 'test_event':0, 'tooltip':None}), sim_info=SimInfoTest.TunableFactory(locked_args={'tooltip': None}), trait=TraitTest.TunableFactory(locked_args={'tooltip': None}), location=world.world_tests.LocationTest.TunableFactory(locked_args={'subject':ParticipantType.Actor, 'tooltip':None}), travel_group=travel_group.travel_group_tests.TravelGroupTest.TunableFactory(locked_args={'tooltip': None}), default='sim_info', description=description, **kwargs) class TunablePreferenceTestList(event_testing.tests.TestListLoadingMixin): DEFAULT_LIST = event_testing.tests.TestList() def __init__(self, description=None): if description is None: description = 'A list of tests. All tests must succeed to pass the TestSet.' super().__init__(description=description, tunable=(TunablePreferenceTestVariant())) class _StartSituation(AutoFactoryInit): FACTORY_TUNABLES = {'situation': TunableReference(description='\n The situation to start.\n ', manager=(services.get_instance_manager(sims4.resources.Types.SITUATION)))} def perform(self, resolver): guest_list = self.situation.get_predefined_guest_list() if guest_list is None: guest_list = SituationGuestList(invite_only=True) services.get_zone_situation_manager().create_situation((self.situation), guest_list=guest_list, user_facing=False) StartSituation = TunableSingletonFactory.create_auto_factory(_StartSituation, 'StartSituation') class _ModifyAllItems(AutoFactoryInit): FACTORY_TUNABLES = {'item_modifications': ModifyAllLotItems.TunableFactory(description='\n Modify objects on the active lot.\n ')} def perform(self, resolver): self.item_modifications().modify_objects() ModifyAllItems = TunableSingletonFactory.create_auto_factory(_ModifyAllItems, 'ModifyAllItems') class _DestroySituations(DestroySituationsByTagsMixin, AutoFactoryInit): def perform(self, 
resolver): self._destroy_situations_by_tags(resolver) DestroySituations = TunableSingletonFactory.create_auto_factory(_DestroySituations, 'DestroySituations') class TraditionActionVariant(TunableVariant): def __init__(self, *args, **kwargs): (super().__init__)(start_situation=StartSituation(), destroy_situations=DestroySituations(), default='start_situation', **kwargs) class TraditionActions(HasTunableSingletonFactory, AutoFactoryInit): FACTORY_TUNABLES = {'trigger_on_events':TunableEnumSet(description='\n Event that would trigger these actions.\n ', enum_type=TraditionActivationEvent, enum_default=TraditionActivationEvent.HOLIDAY_ACTIVATE), 'actions_to_apply':TunableList(description='\n Actions to apply for this event.\n ', tunable=TraditionActionVariant())} def try_perform(self, resolver, activation_event): if activation_event in self.trigger_on_events: for action in self.actions_to_apply: action.perform(resolver) HolidayTraditionDisplayMixin = get_display_mixin(has_description=True, has_icon=True, has_tooltip=True, export_modes=(ExportModes.All)) START_SITUATION = 0 MODIFY_ITEMS = 1 class HolidayTradition(HasTunableReference, HolidayTraditionDisplayMixin, metaclass=HashedTunedInstanceMetaclass, manager=services.get_instance_manager(sims4.resources.Types.HOLIDAY_TRADITION)): INSTANCE_TUNABLES = {'situation_goal':TunableReference(description='\n This is the situation goal that will be offered when this tradition\n is active.\n ', manager=services.get_instance_manager(sims4.resources.Types.SITUATION_GOAL)), 'pre_holiday_buffs':TunableList(description='\n A list of buffs that will be given out to all of the player Sims\n during the pre-holiday period of each holiday.\n ', tunable=TunableReference(description='\n A buff that is given to all of the player Sims when it is the\n pre-holiday period.\n ', manager=(services.get_instance_manager(sims4.resources.Types.BUFF))), unique_entries=True), 'pre_holiday_buff_reason':OptionalTunable(description='\n If set, specify a 
reason why the buff was added.\n ', tunable=TunableLocalizedString(description='\n The reason the buff was added. This will be displayed in the\n buff tooltip.\n ')), 'holiday_buffs':TunableList(description='\n A list of buffs that will be given out to all Sims during each\n holiday.\n ', tunable=TunableReference(description='\n A buff that is given to all Sims during the holiday.\n ', manager=(services.get_instance_manager(sims4.resources.Types.BUFF))), unique_entries=True), 'holiday_buff_reason':OptionalTunable(description='\n If set, specify a reason why the buff was added.\n ', tunable=TunableLocalizedString(description='\n The reason the buff was added. This will be displayed in the\n buff tooltip.\n ')), 'drama_nodes_to_score':TunableList(description='\n Drama nodes that we will attempt to schedule and score when this\n tradition becomes active.\n ', tunable=TunableReference(description='\n A drama node that we will put in the scoring pass when this\n tradition becomes active.\n ', manager=(services.get_instance_manager(sims4.resources.Types.DRAMA_NODE))), unique_entries=True), 'drama_nodes_to_run':TunableList(description='\n Drama nodes that will be run when the tradition is activated.\n ', tunable=TunableReference(description='\n A drama node that we will run when the holiday becomes active.\n ', manager=(services.get_instance_manager(sims4.resources.Types.DRAMA_NODE))), unique_entries=True), 'additional_walkbys':SituationCurve.TunableFactory(description='\n An additional walkby schedule that will be added onto the walkby\n schedule when the tradition is active.\n ', get_create_params={'user_facing': False}), 'preference':TunableList(description='\n A list of pairs of preference categories and tests. To determine\n what a Sim feels about a tradition each set of tests in this list\n will be run in order. When one of the test sets passes then we\n will set that as the preference. 
If none of them pass we will\n default to LIKES.\n ', tunable=TunableTuple(description='\n A pair of preference and test set.\n ', preference=TunableEnumEntry(description='\n The preference that the Sim will have to this tradition if\n the test set passes.\n ', tunable_type=TraditionPreference, default=(TraditionPreference.LIKES)), tests=TunablePreferenceTestList(description='\n A set of tests that need to pass for the Sim to have the\n tuned preference.\n '), reason=OptionalTunable(description='\n If enabled then we will also give this reason as to why the\n preference is the way it is.\n ', tunable=TunableLocalizedString(description='\n The reason that the Sim has this preference.\n ')))), 'preference_reward_buff':OptionalTunable(description='\n If enabled then if the Sim loves this tradition when the holiday is\n completed they will get a special buff if they completed the\n tradition.\n ', tunable=TunableBuffReference(description='\n The buff given if this Sim loves the tradition and has completed\n it at the end of the holiday.\n ')), 'selectable':Tunable(description='\n If checked then this tradition will appear in the tradition\n selection.\n ', tunable_type=bool, default=True, tuning_group=GroupNames.UI, export_modes=ExportModes.All), 'lifecycle_actions':TunableList(description='\n Actions that occur as a result of the tradition activation/de-activation.\n ', tunable=TraditionActions.TunableFactory()), 'events':TunableList(description='\n A list of times and things we want to happen at that time.\n ', tunable=TunableTuple(description='\n A pair of a time of day and event of something that we want\n to occur.\n ', time=TunableTimeOfDay(description='\n The time of day this event will occur.\n '), event=TunableVariant(description='\n What we want to occur at this time.\n ', modify_items=(ModifyAllItems()), start_situation=(StartSituation()), default='start_situation'))), 'core_object_tags':TunableTags(description='\n Tags of all the core objects used in this 
tradition.\n ', filter_prefixes=('func', ), tuning_group=GroupNames.UI, export_modes=ExportModes.All), 'deco_object_tags':TunableTags(description='\n Tags of all the deco objects used in this tradition.\n ', filter_prefixes=('func', ), tuning_group=GroupNames.UI, export_modes=ExportModes.All), 'business_cost_multiplier':TunableMapping(description='\n A mapping between the business type and the cost multiplier that\n we want to use if this tradition is active.\n ', key_type=TunableEnumEntry(description='\n The type of business that we want to apply this price modifier\n on.\n ', tunable_type=BusinessType, default=(BusinessType.INVALID), invalid_enums=( BusinessType.INVALID,)), value_type=TunableRange(description='\n The value of the multiplier to use.\n ', tunable_type=float, default=1.0, minimum=0.0))} @classmethod def _verify_tuning_callback(cls): if cls._display_data.instance_display_description is None: logger.error('Tradition {} missing display description', cls) if cls._display_data.instance_display_icon is None: logger.error('Tradition {} missing display icon', cls) if cls._display_data.instance_display_name is None: logger.error('Tradition {} missing display name', cls) def __init__(self): self._state = HolidayState.INITIALIZED self._buffs_added = defaultdict(list) self._event_alarm_handles = {} self._drama_node_processor = None @property def state(self): return self._state @classmethod def get_buiness_multiplier(cls, business_type): return cls.business_cost_multiplier.get(business_type, 1.0) @classmethod def get_sim_preference(cls, sim_info): resolver = SingleSimResolver(sim_info) for possible_preference in cls.preference: if possible_preference.tests.run_tests(resolver): return ( possible_preference.preference, possible_preference.reason) return ( TraditionPreference.LIKES, None) def on_sim_spawned--- This code section failed: --- L. 
418 0 LOAD_FAST 'self' 2 LOAD_ATTR _state 4 LOAD_GLOBAL HolidayState 6 LOAD_ATTR PRE_DAY 8 COMPARE_OP == 10 POP_JUMP_IF_FALSE 82 'to 82' L. 421 12 LOAD_FAST 'sim' 14 LOAD_ATTR is_npc 16 POP_JUMP_IF_FALSE 22 'to 22' L. 422 18 LOAD_CONST None 20 RETURN_VALUE 22_0 COME_FROM 16 '16' L. 423 22 SETUP_LOOP 152 'to 152' 24 LOAD_FAST 'self' 26 LOAD_ATTR pre_holiday_buffs 28 GET_ITER 30_0 COME_FROM 56 '56' 30 FOR_ITER 78 'to 78' 32 STORE_FAST 'buff' L. 424 34 LOAD_FAST 'sim' 36 LOAD_ATTR add_buff 38 LOAD_FAST 'buff' 40 LOAD_FAST 'self' 42 LOAD_ATTR pre_holiday_buff_reason 44 LOAD_CONST ('buff_reason',) 46 CALL_FUNCTION_KW_2 2 '2 total positional and keyword args' 48 STORE_FAST 'buff_handle' L. 425 50 LOAD_FAST 'buff_handle' 52 LOAD_CONST None 54 COMPARE_OP is-not 56 POP_JUMP_IF_FALSE 30 'to 30' L. 426 58 LOAD_FAST 'self' 60 LOAD_ATTR _buffs_added 62 LOAD_FAST 'sim' 64 LOAD_ATTR sim_id 66 BINARY_SUBSCR 68 LOAD_METHOD append 70 LOAD_FAST 'buff_handle' 72 CALL_METHOD_1 1 '1 positional argument' 74 POP_TOP 76 JUMP_BACK 30 'to 30' 78 POP_BLOCK 80 JUMP_FORWARD 152 'to 152' 82_0 COME_FROM 10 '10' L. 427 82 LOAD_FAST 'self' 84 LOAD_ATTR _state 86 LOAD_GLOBAL HolidayState 88 LOAD_ATTR RUNNING 90 COMPARE_OP == 92 POP_JUMP_IF_FALSE 152 'to 152' L. 428 94 SETUP_LOOP 152 'to 152' 96 LOAD_FAST 'self' 98 LOAD_ATTR holiday_buffs 100 GET_ITER 102_0 COME_FROM 128 '128' 102 FOR_ITER 150 'to 150' 104 STORE_FAST 'buff' L. 429 106 LOAD_FAST 'sim' 108 LOAD_ATTR add_buff 110 LOAD_FAST 'buff' 112 LOAD_FAST 'self' 114 LOAD_ATTR holiday_buff_reason 116 LOAD_CONST ('buff_reason',) 118 CALL_FUNCTION_KW_2 2 '2 total positional and keyword args' 120 STORE_FAST 'buff_handle' L. 430 122 LOAD_FAST 'buff_handle' 124 LOAD_CONST None 126 COMPARE_OP is-not 128 POP_JUMP_IF_FALSE 102 'to 102' L. 
431 130 LOAD_FAST 'self' 132 LOAD_ATTR _buffs_added 134 LOAD_FAST 'sim' 136 LOAD_ATTR sim_id 138 BINARY_SUBSCR 140 LOAD_METHOD append 142 LOAD_FAST 'buff_handle' 144 CALL_METHOD_1 1 '1 positional argument' 146 POP_TOP 148 JUMP_BACK 102 'to 102' 150 POP_BLOCK 152_0 COME_FROM_LOOP 94 '94' 152_1 COME_FROM 92 '92' 152_2 COME_FROM 80 '80' 152_3 COME_FROM_LOOP 22 '22' Parse error at or near `COME_FROM' instruction at offset 152_2 def activate_pre_holiday(self): if self._state >= HolidayState.PRE_DAY: logger.error('Tradition {} is trying to be put into the pre_holiday, but is already in {} which is farther along.', self, self._state) return self._state = HolidayState.PRE_DAY if self.pre_holiday_buffs: services.sim_spawner_service().register_sim_spawned_callback(self.on_sim_spawned) for sim_info in services.active_household().instanced_sims_gen(): for buff in self.pre_holiday_buffs: buff_handle = sim_info.add_buff(buff, buff_reason=(self.pre_holiday_buff_reason)) if buff_handle is not None: self._buffs_added[sim_info.sim_id].append(buff_handle) def _remove_all_buffs(self): sim_info_manager = services.sim_info_manager() for sim_id, buff_handles in self._buffs_added.items(): sim_info = sim_info_manager.get(sim_id) if sim_info is None: continue if sim_info.Buffs is None: continue for buff_handle in buff_handles: sim_info.remove_buff(buff_handle) self._buffs_added.clear() def _deactivate_pre_holiday(self): if self.pre_holiday_buffs: services.sim_spawner_service().unregister_sim_spawned_callback(self.on_sim_spawned) self._remove_all_buffs() def deactivate_pre_holiday(self): if self._state != HolidayState.PRE_DAY: logger.error('Tradition {} is trying to deactivate the preday, but it is in the {} state, not that one.', self, self._state) self._state = HolidayState.SHUTDOWN self._deactivate_pre_holiday() def _create_event_alarm(self, key, event): def callback(_): event.event.perform(GlobalResolver()) del self._event_alarm_handles[key] now = services.time_service().sim_now 
time_to_event = now.time_till_next_day_time(event.time) if key in self._event_alarm_handles: alarms.cancel_alarm(self._event_alarm_handles[key]) self._event_alarm_handles[key] = alarms.add_alarm(self, time_to_event, callback) def _process_scoring_gen(self, timeline): try: try: yield from services.drama_scheduler_service().score_and_schedule_nodes_gen((self.drama_nodes_to_score), 1, timeline=timeline) except GeneratorExit: raise except Exception as exception: try: logger.exception('Exception while scoring DramaNodes: ', exc=exception, level=(sims4.log.LEVEL_ERROR)) finally: exception = None del exception finally: self._drama_node_processor = None if False: yield None def activate_holiday(self, from_load=False, from_customization=False): if self._state >= HolidayState.RUNNING: logger.error('Tradition {} is trying to be put into the Running state, but is already in {} which is farther along.', self, self._state) return self._deactivate_pre_holiday() self._state = HolidayState.RUNNING if self.holiday_buffs: services.sim_spawner_service().register_sim_spawned_callback(self.on_sim_spawned) for sim_info in services.sim_info_manager().instanced_sims_gen(): for buff in self.holiday_buffs: buff_handle = sim_info.add_buff(buff, buff_reason=(self.holiday_buff_reason)) if buff_handle is not None: self._buffs_added[sim_info.sim_id].append(buff_handle) for key, event in enumerate(self.events): self._create_event_alarm(key, event) if not from_load: resolver = GlobalResolver() for actions in self.lifecycle_actions: actions.try_perform(resolver, TraditionActivationEvent.TRADITION_ADD if from_customization else TraditionActivationEvent.HOLIDAY_ACTIVATE) if self.drama_nodes_to_score: sim_timeline = services.time_service().sim_timeline self._drama_node_processor = sim_timeline.schedule(elements.GeneratorElement(self._process_scoring_gen)) drama_scheduler = services.drama_scheduler_service() for drama_node in self.drama_nodes_to_run: drama_scheduler.run_node(drama_node, resolver) def 
deactivate_holiday(self, from_customization=False): if self._state != HolidayState.RUNNING: logger.error('Tradition {} is trying to deactivate the tradition, but it is in the {} state, not that one.', self, self._state) self._state = HolidayState.SHUTDOWN if self.holiday_buffs: services.sim_spawner_service().unregister_sim_spawned_callback(self.on_sim_spawned) self._remove_all_buffs() for alarm in self._event_alarm_handles.values(): alarms.cancel_alarm(alarm) self._event_alarm_handles.clear() resolver = GlobalResolver() for actions in self.lifecycle_actions: actions.try_perform(resolver, TraditionActivationEvent.TRADITION_REMOVE if from_customization else TraditionActivationEvent.HOLIDAY_DEACTIVATE) def get_additional_walkbys(self, predicate=lambda _: True): weighted_situations = self.additional_walkbys.get_weighted_situations(predicate=predicate) if weighted_situations is None: return () return weighted_situations
8,168
8,825
531
fca8a4cdcd60925e50a4fe8be2fd519f54dc6226
594
py
Python
yt_dlp/WS_Extractor/fox.py
evolution-ant/local-youtube-dl
e9be36e8cdc585c0e24a18c74d790b62af6e11a7
[ "Unlicense" ]
null
null
null
yt_dlp/WS_Extractor/fox.py
evolution-ant/local-youtube-dl
e9be36e8cdc585c0e24a18c74d790b62af6e11a7
[ "Unlicense" ]
null
null
null
yt_dlp/WS_Extractor/fox.py
evolution-ant/local-youtube-dl
e9be36e8cdc585c0e24a18c74d790b62af6e11a7
[ "Unlicense" ]
null
null
null
#encoding: utf-8 import re import json from ..extractor.fox import FOXIE as Old from ..utilsEX import download_webPage_by_PYCURL
39.6
163
0.725589
#encoding: utf-8 import re import json from ..extractor.fox import FOXIE as Old from ..utilsEX import download_webPage_by_PYCURL class FoxIE(Old): def _download_webpage(self, url_or_request, video_id, note=None, errnote=None, fatal=True, tries=1, timeout=5, encoding=None, data=None, headers={}, query={}): try: return super(FoxIE, self)._download_webpage(url_or_request, video_id, note, errnote, fatal, tries, timeout, encoding, data, headers, query) except: return download_webPage_by_PYCURL(self, url_or_request, timeout, data, headers, query)
418
-4
49
610597d93ce9bc090b5257f9c8893cbb6e5557aa
213
py
Python
arena/objects/tetrahedron.py
syreal17/ARENA-py
84f73d09ca5bcfec5973b366784ec8385d5e156d
[ "BSD-3-Clause" ]
null
null
null
arena/objects/tetrahedron.py
syreal17/ARENA-py
84f73d09ca5bcfec5973b366784ec8385d5e156d
[ "BSD-3-Clause" ]
null
null
null
arena/objects/tetrahedron.py
syreal17/ARENA-py
84f73d09ca5bcfec5973b366784ec8385d5e156d
[ "BSD-3-Clause" ]
null
null
null
from .arena_object import Object class Tetrahedron(Object): """ Class for Tetrahedron in the ARENA. """
23.666667
61
0.661972
from .arena_object import Object class Tetrahedron(Object): """ Class for Tetrahedron in the ARENA. """ def __init__(self, **kwargs): super().__init__(object_type="tetrahedron", **kwargs)
70
0
26
b513b7325a18107a0d683adb1f74f7b43224fbe7
2,307
py
Python
tronx/modules/__init__.py
beastzx18/Tron
92207b841c80311e484e8f350b96f7df8a76d3b9
[ "MIT" ]
8
2021-08-22T06:43:34.000Z
2022-02-24T17:09:49.000Z
tronx/modules/__init__.py
beastzx18/Tron
92207b841c80311e484e8f350b96f7df8a76d3b9
[ "MIT" ]
61
2021-09-12T11:05:33.000Z
2021-12-07T15:26:18.000Z
tronx/modules/__init__.py
beastzx18/Tron
92207b841c80311e484e8f350b96f7df8a76d3b9
[ "MIT" ]
6
2021-09-08T08:43:04.000Z
2022-02-24T17:09:50.000Z
import os from pyrogram.types import Message from tronx import USER_ID, Config from tronx.database.postgres import dv_sql as dv # ----- MODULES = sorted(__list_all_plugins()) __all__ = MODULES + ["MODULES"] # ----- # types of message # chat type
19.066116
70
0.684439
import os from pyrogram.types import Message from tronx import USER_ID, Config from tronx.database.postgres import dv_sql as dv # ----- def __list_all_plugins(): from os.path import dirname, basename, isfile import glob mod_paths = glob.glob(dirname(__file__) + "/*.py") all_plugins = [ basename(f)[:-3] for f in mod_paths if isfile(f) and f.endswith(".py") and not f.endswith("__init__.py") ] return all_plugins MODULES = sorted(__list_all_plugins()) __all__ = MODULES + ["MODULES"] # ----- # types of message def types(m: Message): reply = m.reply_to_message if reply.text: cast = "text" name = None elif reply.photo: cast = "photo" name = reply.photo.file_name elif reply.video: cast = "video" name = reply.video.file_name elif reply.document: cast = "document" name = reply.document.file_name elif reply.contact: cast = "contact" name = None elif reply.audio: cast = "audio" name = reply.audio.file_name elif reply.sticker: cast = "sticker" name = reply.sticker.file_name elif reply.animation: cast = "animation" name = reply.animation.file_name elif reply.poll: cast = "poll" name = None else: cast = "unknown" name = None return cast, name # chat type def chattype(m: Message): chat = m.chat.type if chat == "supergroup": chat_type = "supergroup" elif chat == "group": chat_type == "group" elif chat == "private": chat_type = "private" else: chat_type = "unknown chat type" return chat_type def get_file_id(m: Message): reply = m.reply_to_message data = False caption = False if reply: if reply.text: data = m.text caption = False elif reply.photo: data = reply.photo.file_id if reply.caption: caption = reply.caption elif reply.video: data = reply.video.file_id if reply.caption: caption = reply.caption elif reply.document: data = reply.document.file_id if reply.caption: caption = reply.caption elif reply.sticker: data = reply.sticker.file_id if reply.caption: caption = reply.caption elif reply.animation: data = reply.animation.file_id if reply.caption: caption = reply.caption elif 
reply.audio: data = reply.audio.file_id if reply.caption: caption = reply.caption return {"data": data, "caption" : caption}
1,953
0
89
7388a16df3e442cbca8c2f4415458d3451d3daa3
2,892
py
Python
backend/socket_chat/tests/group_consumer/chat_events/test_list_group_event.py
nikifkon/ChatApp
1b34e25dca63db7389cec87d7fb1099d95a6308a
[ "MIT" ]
6
2019-09-15T09:35:09.000Z
2020-03-21T04:10:39.000Z
backend/socket_chat/tests/group_consumer/chat_events/test_list_group_event.py
nikifkon/ChatApp
1b34e25dca63db7389cec87d7fb1099d95a6308a
[ "MIT" ]
6
2019-09-14T11:09:19.000Z
2021-06-10T18:49:50.000Z
backend/socket_chat/tests/group_consumer/chat_events/test_list_group_event.py
nikifkon/ChatApp
1b34e25dca63db7389cec87d7fb1099d95a6308a
[ "MIT" ]
2
2019-10-17T21:11:53.000Z
2021-02-04T02:52:29.000Z
from copy import deepcopy import pytest from channels.testing import WebsocketCommunicator from backend.groups.models import ChatGroup, GroupMessage from backend.socket_chat.tests.utils import round_to_minutes pytestmark = [pytest.mark.asyncio, pytest.mark.django_db(transaction=True)] @pytest.fixture @pytest.fixture @pytest.fixture @pytest.fixture @pytest.fixture # The list method with a filter should not return groups without messages that match the filter
37.558442
123
0.755878
from copy import deepcopy import pytest from channels.testing import WebsocketCommunicator from backend.groups.models import ChatGroup, GroupMessage from backend.socket_chat.tests.utils import round_to_minutes pytestmark = [pytest.mark.asyncio, pytest.mark.django_db(transaction=True)] @pytest.fixture def group_last_message(group_message_data: dict) -> dict: return { "sender": group_message_data["sender"]["id"], "text": group_message_data["text"], "date": group_message_data["date"] } @pytest.fixture def request_data(group: ChatGroup) -> dict: return {} @pytest.fixture def successed_response_data(group_data: dict) -> list: return [group_data] @pytest.fixture def filled_data(yml_dataset: dict, group_message: GroupMessage, request_data: dict, successed_response_data: dict) -> dict: data = yml_dataset["test_list_group_event"] data["request"]["data"] = request_data data["successed_response"]["data"] = successed_response_data data["successed_response_without_chats"] = deepcopy(data["successed_response"]) data["successed_response_without_chats"]["data"] = [] return data @pytest.fixture def assert_custom_response(ok_status: str): def do_assert(response, expected_response) -> int: assert response["status"] == ok_status, response["data"] for group in response["data"]: for message in group["messages"]: message["date"] = round_to_minutes(message["date"]) group["last_message"]["date"] = round_to_minutes(group["last_message"]["date"]) assert expected_response == response, response["data"] return do_assert async def test_successed(filled_data: dict, auth_com: WebsocketCommunicator, assert_custom_response): await auth_com.send_json_to(filled_data["request"]) response = await auth_com.receive_json_from() assert_custom_response(response, filled_data["successed_response"]) # The list method with a filter should not return groups without messages that match the filter async def test_stared_filtering(filled_data: dict, auth_com: WebsocketCommunicator, assert_custom_response): 
request_with_stared_filter = filled_data["request"] request_with_stared_filter["data"]["filter"] = "stared" await auth_com.send_json_to(request_with_stared_filter) response = await auth_com.receive_json_from() assert_custom_response(response, filled_data["successed_response_without_chats"]) async def test_unread_filtering(filled_data: dict, auth_com: WebsocketCommunicator, assert_custom_response): request_with_unread_filter = filled_data["request"] request_with_unread_filter["data"]["filter"] = "unread" await auth_com.send_json_to(request_with_unread_filter) response = await auth_com.receive_json_from() assert_custom_response(response, filled_data["successed_response_without_chats"])
2,234
0
178
99e3b201284a27e95b30d576ebb5e398e5e6d326
7,740
py
Python
server.py
dplassgit/starter-snake-python
3264a50663c32e96d608268b8d74b38290cd5322
[ "MIT" ]
null
null
null
server.py
dplassgit/starter-snake-python
3264a50663c32e96d608268b8d74b38290cd5322
[ "MIT" ]
null
null
null
server.py
dplassgit/starter-snake-python
3264a50663c32e96d608268b8d74b38290cd5322
[ "MIT" ]
null
null
null
import os import random import cherrypy """ Dumb battlesnake server in python. For instructions see https://github.com/BattlesnakeOfficial/starter-snake-python/README.md """ move_names = ["up", "down", "left", "right"] moves_dx = [0, 0, -1, 1] moves_dy = [1, -1, 0, 0] """ Snake object. """ # Ideas: # 1. evaluate each direction, pick best # 2. how to evaluate a move: # *. if there's a blockage, -100 OK # *. if it's in the same direction, +1 OK? # *. if it has food and we're > 70 (modulo length?), +1 OK # *. if it has food and we're < 40 +10 OK # *. count "outs" - the more outs, the higher the score OK # 3. food override: if hungry, pick closest food and find the best move towards it. (?) (NOT DONE) # returns int # return a (good) move towards food # Map from key to AnSnake objects snakes = {} if __name__ == "__main__": server = Battlesnake() cherrypy.config.update({"server.socket_host": "0.0.0.0"}) cherrypy.config.update( {"server.socket_port": int(os.environ.get("PORT", "8080")),} ) print("Starting Battlesnake Server...") cherrypy.quickstart(server)
28.666667
108
0.589276
import os import random import cherrypy """ Dumb battlesnake server in python. For instructions see https://github.com/BattlesnakeOfficial/starter-snake-python/README.md """ move_names = ["up", "down", "left", "right"] moves_dx = [0, 0, -1, 1] moves_dy = [1, -1, 0, 0] """ Snake object. """ class AnSnake(object): def __init__(self, data): self.width = data["board"]["width"] self.height = data["board"]["height"] me = data["you"] self.id = me["id"] self.last_move = -1 self.food_target = None def make_board(self, data): board = [['.' for x in range(self.width)] for y in range(self.height)] # A fine foods amount. self.foods = [] for food in data["board"]["food"]: board[food["y"]][food["x"]] = "F" self.foods.append((food["y"], food["x"])) # Mark enemy segments with "e" for snake in data["board"]["snakes"]: body = snake["body"] for segment in body: board[segment["y"]][segment["x"]] = "e" head = snake["head"] # The head # TODO: get length somewhere board[head["y"]][head["x"]] = "E" # Mark my segments with "m" you = data["you"] for segment in you["body"]: board[segment["y"]][segment["x"]] = "m" head = you["head"] board[head["y"]][head["x"]] = "M" return board # Ideas: # 1. evaluate each direction, pick best # 2. how to evaluate a move: # *. if there's a blockage, -100 OK # *. if it's in the same direction, +1 OK? # *. if it has food and we're > 70 (modulo length?), +1 OK # *. if it has food and we're < 40 +10 OK # *. count "outs" - the more outs, the higher the score OK # 3. food override: if hungry, pick closest food and find the best move towards it. (?) (NOT DONE) # returns int def new_move_score(self, try_move_idx): dx = moves_dx[try_move_idx] dy = moves_dy[try_move_idx] movable = self.movable(self.y+dy, self.x+dx) if not movable: return -100 # TODO: if there's a head within this area and # their length is smaller than mine, higher score # TODO: if the LOOKAHEAD has a head, score it. # the new location is free. 
score = 0 if self.board[self.y+dy][self.x+dx] == 'F': if self.health < 40: score += 300 elif self.length < self.width: # Care about food a little more if we're short. score += 100 else: score += 25 outs = self.count_outs_ply(self.y+dy, self.x+dx, 4) print(f'Outs at {self.y+dy}, {self.x+dx}: {outs}') # 0 outs = bad score += outs #if try_move_idx == self.last_move: # score += 10 return score def count_outs_ply(self, y, x, ply): if ply == 0: return 0 outs = 0 temp = self.board[y][x] self.board[y][x] = 'x' # for row in range(self.height - 1, -1, -1): # print(''.join(self.board[row])) # print("") if self.movable(y+1,x): outs += ply outs += self.count_outs_ply(y+1, x, ply-1) if self.movable(y-1,x): outs += ply outs += self.count_outs_ply(y-1, x, ply-1) if self.movable(y,x+1): outs += ply outs += self.count_outs_ply(y, x+1, ply-1) if self.movable(y,x-1): outs += ply outs += self.count_outs_ply(y, x-1, ply-1) self.board[y][x] = temp return outs def movable(self, y, x): movable = True movable = movable and (y >= 0) movable = movable and (x >= 0) movable = movable and (y < self.height) movable = movable and (x < self.width) if not movable: return False return self.board[y][x] in ('.', 'F') # return a (good) move towards food def move_towards_food(self): if not self.foods: self.food_target = None return '' if not self.food_target: # 1. pick random food self.food_target = random.choice(self.foods) print("going towards food", str(self.food_target)) # 2. 
move towards it, if there's a good_move dx = self.food_target[1] - self.x dy = self.food_target[0] - self.y # right if dx > 0 and self.good_move(3): print("trying to go right towards food") return move_names[3] # left if dx < 0 and self.good_move(2): print("trying to go left towards food") return move_names[2] # down if dy < 0 and self.good_move(1): print("trying to go down towards food") return move_names[1] # up if dy > 0 and self.good_move(0): print("trying to go up towards food") return move_names[0] print("no good move towards food target") self.food_target = None # 3. TODO: if no good move, try a different food return '' def move(self, data): self.me = data["you"] self.x = self.me["head"]["x"] self.y = self.me["head"]["y"] self.health = self.me["health"] self.length = self.me["length"] self.board = self.make_board(data) #for row in range(self.height - 1, -1, -1): # print(''.join(self.board[row])) # Pick a direction to move in, unless it's bad if self.last_move == -1: self.last_move = 0 move = '' # if hungry, move towards food #if self.health < 40: #move = self.move_towards_food() if move == '': # Find best move best = -200 best_idx = self.last_move for idx in range(0, 4): score = self.new_move_score(idx) print(f'Eval direction {move_names[idx]} score {score}') if score > best: best = score best_idx = idx move = move_names[best_idx] print(f'Picked direction {move} score {best}') self.last_move = best_idx if move == '': print("Could not find a good move!") move = "up" return move # Map from key to AnSnake objects snakes = {} class Battlesnake(object): @cherrypy.expose @cherrypy.tools.json_out() def index(self): # This function is called when you register your Battlesnake on play.battlesnake.com # It controls your Battlesnake appearance and author permissions. 
# TIP: If you open your Battlesnake URL in browser you should see this data return { "APIVersion": "1", "Author": "dplassgit", "Color": "#0000ff", "Head": "bendr", "Tail": "pixel", } @cherrypy.expose @cherrypy.tools.json_in() def start(self): # This function is called everytime your snake is entered into a game. data = cherrypy.request.json snake = AnSnake(data) # extract id from data snakes[snake.id] = snake print(f"START id {snake.id}") return "ok" @cherrypy.expose @cherrypy.tools.json_in() @cherrypy.tools.json_out() def move(self): data = cherrypy.request.json turn = data["turn"] me = data["you"] snake_id = me["id"] snake = snakes[snake_id] if snake: move = snake.move(data) else: print(f"WTF UNKNOWN SNAKE ID {snake_id} RECEIVED") snake = AnSnake(data) # extract id from data snakes[snake.id] = snake move = snake.move(data) print(f"MOVE: {move} at {snake.x}, {snake.y} health {snake.health} length {snake.length} turn {turn}") return {"move": move} @cherrypy.expose @cherrypy.tools.json_in() def end(self): data = cherrypy.request.json me = data["you"] snake_id = me["id"] del snakes[snake_id] snakes[snake_id] = None print("END") return "ok" if __name__ == "__main__": server = Battlesnake() cherrypy.config.update({"server.socket_host": "0.0.0.0"}) cherrypy.config.update( {"server.socket_port": int(os.environ.get("PORT", "8080")),} ) print("Starting Battlesnake Server...") cherrypy.quickstart(server)
6,073
323
217
d0402f1bee556b094723200b8f549a690fa5a0f4
174
py
Python
modules/jenkins/chuck_module.py
msabramo/django-chuck
bbef6171c9b3738460dc05cb77e65ba88fcc5ad8
[ "BSD-2-Clause" ]
1
2020-05-29T04:27:50.000Z
2020-05-29T04:27:50.000Z
modules/jenkins/chuck_module.py
msabramo/django-chuck
bbef6171c9b3738460dc05cb77e65ba88fcc5ad8
[ "BSD-2-Clause" ]
null
null
null
modules/jenkins/chuck_module.py
msabramo/django-chuck
bbef6171c9b3738460dc05cb77e65ba88fcc5ad8
[ "BSD-2-Clause" ]
null
null
null
depends = ["unittest"] description = """ Plug and play integration with the Jenkins Coninuous Integration server. For more information, visit: http://www.jenkins-ci.org/ """
24.857143
72
0.747126
depends = ["unittest"] description = """ Plug and play integration with the Jenkins Coninuous Integration server. For more information, visit: http://www.jenkins-ci.org/ """
0
0
0
6226e87e91e3d07d4938eccf6653e2af46cf982f
5,595
py
Python
plastic_agent/agent/replay_buffer.py
pedMatias/matias_hfo
6d88e1043a1455f5c1f6cc11b9380869772f4176
[ "MIT" ]
1
2021-06-03T20:03:50.000Z
2021-06-03T20:03:50.000Z
plastic_agent/agent/replay_buffer.py
pedMatias/matias_hfo
6d88e1043a1455f5c1f6cc11b9380869772f4176
[ "MIT" ]
null
null
null
plastic_agent/agent/replay_buffer.py
pedMatias/matias_hfo
6d88e1043a1455f5c1f6cc11b9380869772f4176
[ "MIT" ]
1
2021-03-14T01:22:33.000Z
2021-03-14T01:22:33.000Z
import numpy as np import os import random from copy import copy from typing import List import pickle from agents.plastic_dqn_v1 import config REPLAY_MEMORY_SIZE = 50_000 # How many last steps to keep for model training MIN_REPLAY_MEMORY_SIZE = 1_000 # Minimum number of steps in a memory to class LearnBuffer: """ Buffer used during game. Saves and decides which steps to save """ @classmethod class ExperienceBuffer: """ Contains all the experience the agent retain during training """ @classmethod @classmethod def store_transition(self, curr_st: np.ndarray, action_idx: int, reward: int, new_st: np.ndarray, done: int): """ Adds step's data to a memory replay array (observation space, action, reward, new observation space, done) """ transition = np.array([curr_st, action_idx, reward, new_st, done]) self._data.append(transition) def store_episode(self, transitions: List[Transition]): """ Adds step's data to a memory replay array (observation space, action, reward, new observation space, done) """ if len(transitions) > 0: for transition in transitions: self._data.append(transition.to_tuple())
35.188679
80
0.597319
import numpy as np import os import random from copy import copy from typing import List import pickle from agents.plastic_dqn_v1 import config REPLAY_MEMORY_SIZE = 50_000 # How many last steps to keep for model training MIN_REPLAY_MEMORY_SIZE = 1_000 # Minimum number of steps in a memory to class Transition: def __init__(self, obs: np.ndarray, act: int, reward: int, new_obs: np.ndarray, done: bool, correct_action: bool): self.obs = obs self.act = act self.reward = reward self.new_obs = new_obs self.done = done # Auxiliar var: self.correct_action = correct_action def to_tuple(self) -> tuple: return tuple( [self.obs, self.act, self.reward, self.new_obs, self.done]) class LearnBuffer: """ Buffer used during game. Saves and decides which steps to save """ def __init__(self, buffer: list = None): if buffer is None: buffer = list() self.buffer = buffer @classmethod def load_from_file(cls, file_name: str): with open(file_name, 'rb') as f: data = pickle.load(f) if isinstance(data, LearnBuffer): return data elif isinstance(data, list): return cls(data) else: raise ValueError(f"Unexpected type of {file_name}") def to_list(self): return self.buffer def parse_episode(self, episodes_transitions: List[Transition], verbose: bool = False) -> list: if len(episodes_transitions) == 0: return [] # Remove last actions without ball: last_reward = copy(episodes_transitions[-1].reward) for idx in range(len(episodes_transitions) - 1, -1, -1): # Has ball: if episodes_transitions[idx].obs[5] > 0: episodes_transitions = episodes_transitions[:idx + 1] break # No ball: elif episodes_transitions[idx].obs[5] < 0: pass else: raise ValueError("Features has ball, wrong value!!") else: return [] # selected wrong action?: if episodes_transitions[-1].correct_action is False and last_reward > 0: episodes_transitions[-1].reward = -1 else: episodes_transitions[-1].reward = last_reward episodes_transitions[-1].done = True if verbose and random.random() > 0.99: print("\n ** Transictions:") #for el in 
episodes_transitions: #print_transiction(el.to_tuple(), self.actions, simplex=True) print('**') return episodes_transitions def save_episode(self, episode: List[Transition], verbose: bool = True): parsed_episode = self.parse_episode(episode, verbose) if parsed_episode: self.buffer += parsed_episode def export_to_pickle(self, file_name: str): with open(file_name, 'wb') as f: pickle.dump(self.buffer, f) class ExperienceBuffer: """ Contains all the experience the agent retain during training """ def __init__(self, data: list): self._data = data @classmethod def create_by_merge_files(cls, directory: str, team_name: str): print("[Experience Buffer] Merging smaller files") step = 0 exp_episodes = list() while True: # Check if file exists: experience_file = config.EXPERIENCE_BUFFER_FORMAT.format(step=step) data_file = os.path.join(directory, team_name, experience_file) if not os.path.isfile(data_file): break # Load Learn Buffer: learn_buffer = LearnBuffer.load_from_file(data_file) exp_episodes += learn_buffer.to_list() # Inc step: print(f"Add stage {step} data. 
SIZE={len(learn_buffer.to_list())}") step += 1 experience_buffer = cls(exp_episodes) experience_buffer.save_to_pickle(dir=directory, team_name=team_name) return experience_buffer @classmethod def load(cls, file_path: str): print("[Experience Buffer] Loading") with open(file_path, "rb") as fp: data = pickle.load(fp) if isinstance(data, np.ndarray): data = data.tolist() return cls(data) def to_list(self) -> list: return self._data def to_array(self) -> np.ndarray: return np.array(self._data) def save_to_pickle(self, dir: str, team_name: str): experience_file = config.REPLAY_BUFFER_FORMAT.format( base_dir=dir, team_name=team_name) with open(experience_file, 'wb') as f: pickle.dump(self.to_array(), f) def store_transition(self, curr_st: np.ndarray, action_idx: int, reward: int, new_st: np.ndarray, done: int): """ Adds step's data to a memory replay array (observation space, action, reward, new observation space, done) """ transition = np.array([curr_st, action_idx, reward, new_st, done]) self._data.append(transition) def store_episode(self, transitions: List[Transition]): """ Adds step's data to a memory replay array (observation space, action, reward, new observation space, done) """ if len(transitions) > 0: for transition in transitions: self._data.append(transition.to_tuple())
3,887
-4
435
9666ad76af020f393f10d46a89a360cc326f4eb9
1,312
py
Python
ucvm_v21_10/make_mesh_model.py
SCECcode/ucvm_docker
f472fb0ff512a15c557d88734dcda3ec6a550cc3
[ "BSD-3-Clause" ]
1
2022-03-08T00:12:08.000Z
2022-03-08T00:12:08.000Z
ucvm_v21_10/make_mesh_model.py
SCECcode/ucvm_docker
f472fb0ff512a15c557d88734dcda3ec6a550cc3
[ "BSD-3-Clause" ]
1
2021-12-10T20:35:20.000Z
2021-12-10T20:35:20.000Z
ucvm_v21_10/make_mesh_model.py
SCECcode/ucvm_docker
f472fb0ff512a15c557d88734dcda3ec6a550cc3
[ "BSD-3-Clause" ]
null
null
null
#!/usr/bin/env python3 import sys import os import time # # Generate the master out.grid # Create a 3M point file of lat/lons - and write to ASCII file called out.grd. # This file will be used as input to ucvm_query for medium scale test for images # if not os.path.exists("out.grd"): print("Creating grd.out file.") cmd="/app/ucvm/utilities/makegrid" print(cmd) os.system(cmd) # # valid_model_strings = {"bbp1d":1, "cca":1, "wfcvm":1, "albacore":1, "cvlsu":1, "ivlsu":1, "cvms":1, "cvmh":1, "cvmsi":1, "cvms5":1} # Check model parameter if len (sys.argv) < 2: print("Input format: % make_mesh_model.py cvms") sys.exit() else: model_string = sys.argv[1] # # Check if model is valid print("Model string: {}".format(model_string)) try: valid = valid_model_strings[model_string.strip()] except: print("Unknown model: {}".format(model_string)) for key in valid_model_strings.items(): print(key, valid_model_strings[key]) sys.exit() # # Call each of the installed crustal models and time how # long it takes to populate the models # # # start = time.time() cmd="ucvm_query -f /app/ucvm/conf/ucvm.conf -m %s < out.grd > mesh_%s.out"%(model_string,model_string) print(cmd) os.system(cmd) end = time.time() print("Mesh extraction for model {} : {} seconds".format(model_string,(end-start)))
23.017544
102
0.699695
#!/usr/bin/env python3 import sys import os import time # # Generate the master out.grid # Create a 3M point file of lat/lons - and write to ASCII file called out.grd. # This file will be used as input to ucvm_query for medium scale test for images # if not os.path.exists("out.grd"): print("Creating grd.out file.") cmd="/app/ucvm/utilities/makegrid" print(cmd) os.system(cmd) # # valid_model_strings = {"bbp1d":1, "cca":1, "wfcvm":1, "albacore":1, "cvlsu":1, "ivlsu":1, "cvms":1, "cvmh":1, "cvmsi":1, "cvms5":1} # Check model parameter if len (sys.argv) < 2: print("Input format: % make_mesh_model.py cvms") sys.exit() else: model_string = sys.argv[1] # # Check if model is valid print("Model string: {}".format(model_string)) try: valid = valid_model_strings[model_string.strip()] except: print("Unknown model: {}".format(model_string)) for key in valid_model_strings.items(): print(key, valid_model_strings[key]) sys.exit() # # Call each of the installed crustal models and time how # long it takes to populate the models # # # start = time.time() cmd="ucvm_query -f /app/ucvm/conf/ucvm.conf -m %s < out.grd > mesh_%s.out"%(model_string,model_string) print(cmd) os.system(cmd) end = time.time() print("Mesh extraction for model {} : {} seconds".format(model_string,(end-start)))
0
0
0
b1ac4c296c0679ac6f18ecdf329130a62edd9cd2
1,633
py
Python
lib/pulsemodel/sigproc/scripts/bndspec2spec.py
qingyundou/tacotron_qdou
aca014e8ea73bbab617029b81368cee235f47ce2
[ "MIT" ]
2
2020-12-16T12:53:52.000Z
2021-09-18T06:52:05.000Z
lib/pulsemodel/sigproc/scripts/bndspec2spec.py
qingyundou/tacotron_qdou
aca014e8ea73bbab617029b81368cee235f47ce2
[ "MIT" ]
null
null
null
lib/pulsemodel/sigproc/scripts/bndspec2spec.py
qingyundou/tacotron_qdou
aca014e8ea73bbab617029b81368cee235f47ce2
[ "MIT" ]
1
2021-03-05T03:44:39.000Z
2021-03-05T03:44:39.000Z
#!/usr/bin/python ''' Copyright(C) 2016 Engineering Department, University of Cambridge, UK. License Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. Author Gilles Degottex <gad27@cam.ac.uk> ''' import sys import argparse import numpy as np sys.path.append('/home/degottex/Research/CUED/Code') import sigproc as sp if __name__ == "__main__" : argpar = argparse.ArgumentParser() argpar.add_argument("bndspecfile", default=None, help="Input spectrum file") argpar.add_argument("--nbbands", type=int, help="Number of bands in the warped spectral representation") argpar.add_argument("--dftlen", default=4096, type=int, help="DFT size for the output spectrum") argpar.add_argument("--fs", default=16000, type=int, help="Sampling frequency[Hz]") argpar.add_argument("specfile", default=None, help="Output warped spectrum file") args, unknown = argpar.parse_known_args() BNDSPEC = np.fromfile(args.bndspecfile, dtype=np.float32) BNDSPEC = BNDSPEC.reshape((-1, args.nbbands)) SPEC = np.exp(sp.fwbnd2linbnd(BNDSPEC, args.fs, args.dftlen)) SPEC.astype('float32').tofile(args.specfile)
37.113636
108
0.736681
#!/usr/bin/python ''' Copyright(C) 2016 Engineering Department, University of Cambridge, UK. License Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. Author Gilles Degottex <gad27@cam.ac.uk> ''' import sys import argparse import numpy as np sys.path.append('/home/degottex/Research/CUED/Code') import sigproc as sp if __name__ == "__main__" : argpar = argparse.ArgumentParser() argpar.add_argument("bndspecfile", default=None, help="Input spectrum file") argpar.add_argument("--nbbands", type=int, help="Number of bands in the warped spectral representation") argpar.add_argument("--dftlen", default=4096, type=int, help="DFT size for the output spectrum") argpar.add_argument("--fs", default=16000, type=int, help="Sampling frequency[Hz]") argpar.add_argument("specfile", default=None, help="Output warped spectrum file") args, unknown = argpar.parse_known_args() BNDSPEC = np.fromfile(args.bndspecfile, dtype=np.float32) BNDSPEC = BNDSPEC.reshape((-1, args.nbbands)) SPEC = np.exp(sp.fwbnd2linbnd(BNDSPEC, args.fs, args.dftlen)) SPEC.astype('float32').tofile(args.specfile)
0
0
0
1ae5a32e889489d3ae8d914a5b1ff679ee750c75
750
py
Python
measurements/migrations/0011_testrun_ordering.py
nat64check/zaphod_backend
b92191950825e1a4fd8c34207c8491d587cfb61f
[ "BSD-3-Clause" ]
1
2017-11-14T16:22:38.000Z
2017-11-14T16:22:38.000Z
measurements/migrations/0011_testrun_ordering.py
sjm-steffann/nat64check_zaphod_backend
b92191950825e1a4fd8c34207c8491d587cfb61f
[ "BSD-3-Clause" ]
5
2019-12-03T05:36:07.000Z
2021-06-25T15:20:04.000Z
measurements/migrations/0011_testrun_ordering.py
sjm-steffann/nat64check_zaphod_backend
b92191950825e1a4fd8c34207c8491d587cfb61f
[ "BSD-3-Clause" ]
null
null
null
# •••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••• # Copyright (c) 2018, S.J.M. Steffann. This software is licensed under the BSD # 3-Clause License. Please see the LICENSE file in the project root directory. # •••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••• # Generated by Django 2.0.7 on 2018-07-23 14:47 from django.db import migrations
34.090909
113
0.516
# •••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••• # Copyright (c) 2018, S.J.M. Steffann. This software is licensed under the BSD # 3-Clause License. Please see the LICENSE file in the project root directory. # •••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••• # Generated by Django 2.0.7 on 2018-07-23 14:47 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('measurements', '0010_instancerunmessage_source'), ] operations = [ migrations.AlterModelOptions( name='testrun', options={'ordering': ('requested',), 'verbose_name': 'test run', 'verbose_name_plural': 'test runs'}, ), ]
0
321
23
70e4742cf86e95b61a1b7955657907327064055a
5,915
py
Python
historia/intercept/select.py
thatdevsherry/historia
232026b38479fd855c1d8eff543d95e144bfdfe2
[ "MIT" ]
10
2020-12-12T23:00:27.000Z
2022-02-09T18:01:40.000Z
historia/intercept/select.py
ShehriyarQureshi/historia
feb48e1489563e4770d32d4e6465d3931407a345
[ "MIT" ]
3
2018-11-01T11:36:55.000Z
2018-11-09T04:43:54.000Z
historia/intercept/select.py
ShehriyarQureshi/temporalite
feb48e1489563e4770d32d4e6465d3931407a345
[ "MIT" ]
null
null
null
""" Copyright (c) 2019 Muhammad Shehriyar Qureshi Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ import re
33.607955
131
0.647844
""" Copyright (c) 2019 Muhammad Shehriyar Qureshi Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ import re class TemporalSelectQueryBuilder: def __init__( self, query, temporal_query=None, temporal_clause=None, table_name=None, temporal_table_name=None, selected_column=None, ): self.query = query self.temporal_query = temporal_query self.temporal_clause = temporal_clause self.table_name = table_name self.temporal_table_name = temporal_table_name self.selected_column = selected_column self.set_table_names() self.set_selected_column() self.set_temporal_clause() self.build_temporal_query() def get_regex_match(query, regex): """ Given a string and regex, return the first match. 
""" pattern = re.compile(regex) matches = pattern.finditer(query) for match in matches: return str.upper(match.group(0)) def set_table_names(self): TemporalSelectQueryBuilder.set_original_table_name(self) TemporalSelectQueryBuilder.set_temporal_table_name(self) def set_original_table_name(self): original_query = self.query match = TemporalSelectQueryBuilder.get_regex_match( original_query, r"(?<=from )[^ ]+" ) self.table_name = str.lower(match) def set_temporal_table_name(self): self.temporal_table_name = self.table_name + "_history" def set_selected_column(self): original_query = self.query match = TemporalSelectQueryBuilder.get_regex_match( original_query, r"(?<=select )[^ ]+" ) self.selected_column = str.lower(match) def set_temporal_clause(self): original_query = self.query match = TemporalSelectQueryBuilder.get_regex_match( original_query, r"(as of|to|between|contained in)" ) self.temporal_clause = str.lower(match) def build_temporal_query(self): if self.temporal_clause == "as of": TemporalSelectQueryBuilder.as_of_query_builder(self) elif self.temporal_clause == "to": TemporalSelectQueryBuilder.from_to_query_builder(self) elif self.temporal_clause == "between": TemporalSelectQueryBuilder.between_and_query_builder(self) elif self.temporal_clause == "contained in": TemporalSelectQueryBuilder.contained_in_query_builder(self) else: raise Exception( "You entered a wrong temporal keyword or maybe you didn't enter one. Either way, you weren't supposed to see this." 
) def as_of_query_builder(self): original_query = self.query entered_time = TemporalSelectQueryBuilder.get_regex_match( original_query, r"(?<=as of )[^ ]+" ) column = TemporalSelectQueryBuilder.get_regex_match( original_query, r"(?<=select )[^ ]+" ) self.temporal_query = ( "select {} from {} where valid_from <= {} and valid_to > {}".format( column, self.temporal_table_name, entered_time, entered_time ) ) def from_to_query_builder(self): original_query = self.query start_time = TemporalSelectQueryBuilder.get_regex_match( original_query, r"(?<={} from )(.*)(?= to)".format(self.temporal_table_name) ) end_time = TemporalSelectQueryBuilder.get_regex_match( original_query, r"(?<=to ).*" ) self.temporal_query = ( "select {} from {} where valid_from < {} and valid_to > {}".format( self.selected_column, self.temporal_table_name, end_time, start_time ) ) def between_and_query_builder(self): original_query = self.query start_time = TemporalSelectQueryBuilder.get_regex_match( original_query, r"(?<=between )(.*)(?= and)" ) end_time = TemporalSelectQueryBuilder.get_regex_match( original_query, r"(?<=and )(.*)" ) self.temporal_query = ( "select {} from {} where valid_from <= {} and valid_to > {}".format( self.selected_column, self.temporal_table_name, end_time, start_time ) ) def contained_in_query_builder(self): original_query = self.query start_time = TemporalSelectQueryBuilder.get_regex_match( original_query, r"(?<=\()(.*)(?=,)" ) end_time = TemporalSelectQueryBuilder.get_regex_match( original_query, r"(?<=,)(.*)[^\)]+" ).strip(" ") self.temporal_query = ( "select {} from {} where valid_from >= {} and valid_to <= {}".format( self.selected_column, self.temporal_table_name, start_time, end_time ) )
4,218
585
23
71fc454f62555f12d5b2e0ba7a796fefb4f06480
3,716
py
Python
app/api/views.py
kwahome/pycon-monitoring-workshop
24ff177352fb13b686ab46a96c30066556d2e92d
[ "MIT" ]
7
2018-10-25T08:31:05.000Z
2021-12-31T11:41:50.000Z
app/api/views.py
nknganda/pycon-monitoring-workshop
24ff177352fb13b686ab46a96c30066556d2e92d
[ "MIT" ]
null
null
null
app/api/views.py
nknganda/pycon-monitoring-workshop
24ff177352fb13b686ab46a96c30066556d2e92d
[ "MIT" ]
5
2018-10-24T10:18:24.000Z
2019-09-13T09:15:59.000Z
from annoying.functions import get_object_or_None from app.api.serializers import ( SendMessageRequestSerializer ) from app.core.models import MessageRequest from app.core.routing import ROUTING_REGISTRY from app.core.views import ( BaseAPIView, GET, POST ) from rest_framework import status class HealthCheckView(BaseAPIView): """ View that monitoring services can use to check on the 'aliveness' of a running messaging service. """ allowed_methods = (GET,) operation_tag = 'health_check' class SendMessageView(BaseAPIView): """ Send message API view """ allowed_methods = (POST,) validator = SendMessageRequestSerializer operation_tag = 'send_message'
34.728972
75
0.592842
from annoying.functions import get_object_or_None from app.api.serializers import ( SendMessageRequestSerializer ) from app.core.models import MessageRequest from app.core.routing import ROUTING_REGISTRY from app.core.views import ( BaseAPIView, GET, POST ) from rest_framework import status class HealthCheckView(BaseAPIView): """ View that monitoring services can use to check on the 'aliveness' of a running messaging service. """ allowed_methods = (GET,) operation_tag = 'health_check' def get_handler(self, request, *args, **kwargs): return self.respond() class SendMessageView(BaseAPIView): """ Send message API view """ allowed_methods = (POST,) validator = SendMessageRequestSerializer operation_tag = 'send_message' def post_handler(self, request, *args, **kwargs): self._init_variables() self._init_message_request() duplicate, response = self.duplicate_check() return self.route_task() if not duplicate else response def _init_variables(self): self.message_id = self.req_data['messageId'] self.channel = self.req_data["channel"] self.message_type = self.req_data["messageType"] self.sender_id = self.req_data["senderId"] self.recipients = self.req_data["recipients"] self.message = self.req_data["message"] self.priority = self.req_data["priority"] self.callback = self.req_data.get("callback") def _init_message_request(self): self.message_obj = MessageRequest( message_id=self.message_id, data=dict( message=dict( sender_id=self.sender_id, # store as comma separated string of recipients recipients=",".join(self.recipients), message_type=self.message_type, channel=self.channel, message=self.message, priority=self.priority, ), status=dict(), callback=dict(url=self.callback) ) ) def duplicate_check(self): result = False, None if get_object_or_None(MessageRequest, message_id=self.message_id): message = dict( detail="A message with messageId=`{0}` has already been " "received".format(self.message_id) ) result = True, self.http_conflicting_request(data=message) return result def 
route_task(self): tag = "routing" routing_handler = ROUTING_REGISTRY.get(self.message_type, {}).get( self.channel) if routing_handler is None: error_message = dict( details="channel `{0}` and message type `{1}` not " "supported".format(self.channel, self.message_type) ) self.logger.info( event='{0}_error'.format(tag), message_obj=error_message, handler=self.__class__.__name__, ) response = self.http_bad_request(data=error_message) else: self.message_obj.save() self.logger.info( event='{0}_start'.format(tag), message_id=self.message_id, handler=self.__class__.__name__, ) routing_handler(self.message_obj).route_task() self.logger.info( event='{0}_end'.format(tag), message_id=self.message_id, handler=self.__class__.__name__, ) response = self.http_request_accepted() return response
2,840
0
162
ed185ed349862425c27643f54e86a86df7452b89
643
py
Python
main.py
BdEINSALyon/planning-downloader
6f0875702547d4746c5c9d766ee2922e0b2a26c5
[ "MIT" ]
null
null
null
main.py
BdEINSALyon/planning-downloader
6f0875702547d4746c5c9d766ee2922e0b2a26c5
[ "MIT" ]
null
null
null
main.py
BdEINSALyon/planning-downloader
6f0875702547d4746c5c9d766ee2922e0b2a26c5
[ "MIT" ]
null
null
null
import requests if __name__ == '__main__': main()
23.814815
82
0.491446
import requests def main(): PHPSESSID = 'oflbncfl7e2pk5rhhir703ruf3' URL = 'http://planning.gala.bde-insa-lyon.fr/orga/{id}/print' orgas = [ {'id': 1, 'name': 'SUPER ADMIN'} ] for o in orgas: print(o['name']) r = requests.get(URL.format(id=o['id']), cookies={'PHPSESSID': PHPSESSID}) path = o['name'] + '.pdf' if r.status_code == 200: print('OK') with open(path, 'wb') as f: for chunk in r: f.write(chunk) print('WRITTEN') else: print('ERROR') if __name__ == '__main__': main()
564
0
23
af679ac4a4259365b979a1379db022e999b02ea8
2,038
py
Python
chap08/sentiment.py
wtsnjp/nlp100
8beaabd0e8598e8a0cbd1eca690281ce5d783e93
[ "Unlicense" ]
3
2018-01-18T09:56:39.000Z
2019-03-14T05:54:11.000Z
chap08/sentiment.py
WatsonDNA/nlp100
8beaabd0e8598e8a0cbd1eca690281ce5d783e93
[ "Unlicense" ]
null
null
null
chap08/sentiment.py
WatsonDNA/nlp100
8beaabd0e8598e8a0cbd1eca690281ce5d783e93
[ "Unlicense" ]
1
2020-05-12T14:24:00.000Z
2020-05-12T14:24:00.000Z
# # Class SentimentSentences: sentences for polarity analysis # from nltk import stem from collections import Counter from bisect import bisect_left from sklearn.linear_model import LogisticRegression from sklearn.feature_extraction.text import CountVectorizer
30.878788
73
0.540236
# # Class SentimentSentences: sentences for polarity analysis # from nltk import stem from collections import Counter from bisect import bisect_left from sklearn.linear_model import LogisticRegression from sklearn.feature_extraction.text import CountVectorizer class SentimentSentences: def __init__(self, sentences): sl = list(map(self.__sentence_dict, [s.split() for s in sentences])) self.polar = [s[0] for s in sl] self.src = [s[1] for s in sl] self.vec = CountVectorizer() @staticmethod def __sentence_dict(s): p, src = 0, ' '.join(s[1:]) if s[0][0] == '+': p = 1 return p, src def feature_extraction(self, fr=99.9, uniq=False): """remove stop/unique words and stemming""" sl = [s.split() for s in self.src] stemmer = stem.PorterStemmer() cn = Counter([w for s in sl for w in list(set(s[1:]))]) st = len(sl) * fr / 100 self.stopwords = sorted([k for k, v in cn.items() if v > st]) if uniq: self.uniqwords = sorted([k for k, v in cn.items() if v == 1]) self.feature = [' '.join([stemmer.stem(w) for w in s if self.__is_useful(w)]) for s in sl] def __is_useful(self, w): ss = self.stopwords i, b = bisect_left(ss, w), True if i != len(ss) and ss[i] == w: b = False if hasattr(self, 'uniqwords'): us = self.uniqwords i = bisect_left(us, w) if i != len(us) and us[i] == w: b = False return b def train(self, eta=0.6): lr = LogisticRegression(C=eta) X = self.vec.fit_transform(self.feature) self.model = lr.fit(X, self.polar) def predict(self, text, th=50): vec = self.vec.transform([text]) pr = self.model.predict_proba(vec)[0] if pr[1] > th/100: return '+1', pr[1] else: return '-1', pr[0]
990
762
23
ebc20eb2ee4c18598edb19954bc42f8e40aa130d
196
py
Python
26-50/30.py
tonyyzy/ProjectEuler
f52de2f931ebd4df2020e32d12062866b1586e72
[ "MIT" ]
null
null
null
26-50/30.py
tonyyzy/ProjectEuler
f52de2f931ebd4df2020e32d12062866b1586e72
[ "MIT" ]
null
null
null
26-50/30.py
tonyyzy/ProjectEuler
f52de2f931ebd4df2020e32d12062866b1586e72
[ "MIT" ]
null
null
null
n = 10 result = [] while n < 1000000: a = str(n) temp = 0 for i in a: temp += int(i) ** 5 if temp == n: result.append(n) n += 1 print(sum(result))
15.076923
28
0.428571
n = 10 result = [] while n < 1000000: a = str(n) temp = 0 for i in a: temp += int(i) ** 5 if temp == n: result.append(n) n += 1 print(sum(result))
0
0
0
dec766a3420a14372b2ae35c3a7c1574968c32eb
35
py
Python
ast-transformations-core/src/test/resources/org/jetbrains/research/ml/ast/gumtree/tree/data/loop/for/in_4.py
JetBrains-Research/ast-transformations
0ab408af3275b520cc87a473f418c4b4dfcb0284
[ "MIT" ]
8
2021-01-19T21:15:54.000Z
2022-02-23T19:16:25.000Z
ast-transformations-core/src/test/resources/org/jetbrains/research/ml/ast/gumtree/tree/data/loop/for/in_4.py
JetBrains-Research/ast-transformations
0ab408af3275b520cc87a473f418c4b4dfcb0284
[ "MIT" ]
4
2020-11-17T14:28:25.000Z
2022-02-24T07:54:28.000Z
ast-transformations-core/src/test/resources/org/jetbrains/research/ml/ast/gumtree/tree/data/loop/for/in_4.py
nbirillo/ast-transformations
717706765a2da29087a0de768fc851698886dd65
[ "MIT" ]
1
2022-02-23T19:16:30.000Z
2022-02-23T19:16:30.000Z
for i in range(1, 100, 3): pass
17.5
26
0.571429
for i in range(1, 100, 3): pass
0
0
0
98aa14ecbcd5b5459adc0ed27c86aa5535c9c1d7
3,504
py
Python
django_local_apps/ufs_local_obj.py
weijia/django-local-apps
5befc41c8ac9625e98c1023257a270a57de700dd
[ "BSD-3-Clause" ]
null
null
null
django_local_apps/ufs_local_obj.py
weijia/django-local-apps
5befc41c8ac9625e98c1023257a270a57de700dd
[ "BSD-3-Clause" ]
null
null
null
django_local_apps/ufs_local_obj.py
weijia/django-local-apps
5befc41c8ac9625e98c1023257a270a57de700dd
[ "BSD-3-Clause" ]
null
null
null
import datetime import os from django.utils import timezone from tzlocal import get_localzone from obj_sys import obj_tools from obj_sys.models_ufs_obj import UfsObj from tagging.models import Tag
32.747664
107
0.66895
import datetime import os from django.utils import timezone from tzlocal import get_localzone from obj_sys import obj_tools from obj_sys.models_ufs_obj import UfsObj from tagging.models import Tag class ObjectIsNotAssigned(Exception): pass class UfsObjSaverBase(object): def __init__(self, user): super(UfsObjSaverBase, self).__init__() self.user = user self.source = UfsObj.SOURCE_WEB_POST self.last_modified = None self.parent = None self.create_param = None self.obj = None self.ufs_url = None self.full_path = None self.tag_app = None def filter_or_create(self): obj_filter = self.get_filter() if not obj_filter.exists(): self.obj, is_created = self.get_or_create() else: self.obj = obj_filter[0] return obj_filter def get_or_create(self): obj_filter = self.get_filter() if obj_filter.exists(): self.obj = obj_filter[0] return self.obj, False self.create_param = ({"ufs_url": self.ufs_url, "parent": self.parent, "user": self.user, "full_path": self.full_path, "ufs_obj_type": self.ufs_obj_type, "source": self.source}) if not (self.last_modified is None): self.create_param["last_modified"] = self.last_modified self.obj, is_created = UfsObj.objects.get_or_create(**self.create_param) return self.obj, is_created def append_tags(self, tag_name): if self.obj is None: raise ObjectIsNotAssigned Tag.objects.add_tag(self.obj, tag_name, tag_app=self.tag_app) def add_description(self, description): self.obj.descriptions.add(description) # self.obj.save() class UfsLocalObjSaver(UfsObjSaverBase): def __init__(self, user, ufs_obj_type=UfsObj.TYPE_UFS_OBJ): super(UfsLocalObjSaver, self).__init__(user) self.ufs_obj_type = ufs_obj_type def init_with_qt_url(self, qt_file_url): self.init_with_full_path(obj_tools.get_full_path_for_local_os(qt_file_url)) def init_with_full_path(self, full_path): self.full_path = full_path self.ufs_url = obj_tools.get_ufs_url_for_local_path(self.full_path) tz = get_localzone() self.last_modified = 
tz.localize(datetime.datetime.fromtimestamp(os.path.getmtime(self.full_path))) def get_filter(self): return UfsObj.objects.filter(full_path=self.full_path, user=self.user) def get_or_create(self): obj, is_created = super(UfsLocalObjSaver, self).get_or_create() if os.path.isdir(self.full_path): self.__append_folder_tag() return obj, is_created def __append_folder_tag(self): self.append_tags("folder") def update_from_local_path(self): self.get_filter().update(last_modified=self.last_modified) @staticmethod def get_full_path_from_qt_url(url): return url.replace("file:///", "") @staticmethod def get_qt_url_from_full_path(full_path): return "file:///%s" % full_path class UfsUrlObj(UfsObjSaverBase): def __init__(self, web_url, user, ufs_obj_type=UfsObj.TYPE_UFS_OBJ): super(UfsUrlObj, self).__init__(user) self.full_path = None self.ufs_url = web_url self.ufs_obj_type = ufs_obj_type def get_filter(self): return UfsObj.objects.filter(ufs_url=self.ufs_url, user=self.user)
2,679
343
279
2eda976df95eecd7f8b76221d9aed0bdf80df510
5,946
py
Python
app/debts.py
rkarell/pay-back
1cc91c58bca3b43e75770727c19c5669a46563d9
[ "MIT" ]
null
null
null
app/debts.py
rkarell/pay-back
1cc91c58bca3b43e75770727c19c5669a46563d9
[ "MIT" ]
null
null
null
app/debts.py
rkarell/pay-back
1cc91c58bca3b43e75770727c19c5669a46563d9
[ "MIT" ]
null
null
null
# When merging two debts, if the resulting debt is negative, this function is used to swap the creditor and debtor, so that the sum of debt is always positive
38.61039
186
0.55819
class Debts(object): def __init__(self): self.debts = [] self.participants = [] def addTransactions(self, transactions): for transaction in transactions.transactions: self.addParticipant(transaction.buyer) for participant in transaction.participants: self.addParticipant(participant) self.addDebt(participant, transaction.buyer, transaction.share) def addDebt(self, debtFrom, debtTo, amount): newDebt = Debt(debtFrom, debtTo, amount) debt = self.getDebt(debtFrom, debtTo) if debt is None: self.debts.append(newDebt) else: debt.merge(newDebt) if debt.amount == 0: self.debts.remove(debt) def getDebt(self, debtFrom, debtTo): for debt in self.debts: if ((debt.debtFrom == debtFrom) and (debt.debtTo == debtTo)) or ((debt.debtFrom == debtTo) and (debt.debtTo == debtFrom)): return debt return None def setDebt(self, debtFrom, debtTo, amount): debt = self.getDebt(debtFrom, debtTo) if debt is None: if amount > 0: self.addDebt(debtFrom, debtTo, amount) else: if amount == 0: self.debts.remove(debt) else: debt.debtFrom = debtFrom debt.debtTo = debtTo debt.amount = amount def getDebtsFromParticipant(self, participant): debts = [] for debt in self.debts: if debt.debtFrom == participant: debts.append(debt) return debts def addParticipant(self, participant): if not participant in self.participants: self.participants.append(participant) def optimizeAlgorithm1(self): reduced = True while reduced: reduced = False for debt in self.debts: sequentialDebts = self.getDebtsFromParticipant(debt.debtTo) if len(sequentialDebts) > 0: debt2 = sequentialDebts[0] difference = abs(debt.amount - debt2.amount) if debt.amount < debt2.amount: self.addDebt(debt.debtFrom, debt2.debtTo, debt.amount) self.setDebt(debt2.debtFrom, debt2.debtTo, difference) self.debts.remove(debt) else: self.addDebt(debt.debtFrom, debt2.debtTo, debt2.amount) self.setDebt(debt.debtFrom, debt.debtTo, difference) self.debts.remove(debt2) reduced = True break def optimizeAlgorithm2(self): reduced = True while reduced: reduced = False 
debtorPairs = self.getPairs(self.participants) for debtorPair in debtorPairs: debts1 = self.getDebtsFromParticipant(debtorPair[0]) debts2 = self.getDebtsFromParticipant(debtorPair[1]) creditorPair = self.getMatchingCreditorPair(debts1, debts2) if creditorPair: debts = [] for debtor in debtorPair: for creditor in creditorPair: debts.append(self.getDebt(debtor, creditor)) smallestDebt = sorted(debts, key=lambda x: x.amount)[0] d1 = smallestDebt.debtFrom #d1, d2, c1, c2 are the same as in media/algorithms.png d2 = debtorPair[not debtorPair.index(d1)] #'not' here gives the other index (0->1 or 1->0) c1 = smallestDebt.debtTo c2 = creditorPair[not creditorPair.index(c1)] self.addDebt(d1, c2, smallestDebt.amount) self.addDebt(d2, c1, smallestDebt.amount) self.addDebt(d2, c2, -smallestDebt.amount) self.debts.remove(smallestDebt) reduced = True break def getMatchingCreditorPair(self, debts1, debts2): creditorPairs1 = self.getPairs(self.getCreditors(debts1)) creditorPairs2 = self.getPairs(self.getCreditors(debts2)) for cp in creditorPairs1: if cp in creditorPairs2 or self.swapPair(cp) in creditorPairs2: return(cp) return(None) def getCreditors(self, debts): creditors = [] for debt in debts: creditors.append(debt.debtTo) return(creditors) def getPairs(self, list): pairs = [] if len(list) < 2: return [] for i in range(len(list)-1): for j in range(i+1,len(list)): pairs.append([list[i],list[j]]) return(pairs) def swapPair(self, pair): return [pair[1], pair[0]] def __str__(self): return '\n'.join(map(str, self.debts)) class Debt(): def __init__(self, debtFrom, debtTo, amount): self.debtFrom = debtFrom self.debtTo = debtTo self.amount = amount def __str__(self): return "From " + self.debtFrom + " to " + self.debtTo + ": " + str(round(self.amount,2)) def merge(self, otherDebt): if (self.debtFrom == otherDebt.debtFrom) and (self.debtTo == otherDebt.debtTo): # Merge is always called with the same two people but the direction of the debt is solved here self.amount += otherDebt.amount 
else: self.amount -= otherDebt.amount if self.amount < 0: self.swap() # When merging two debts, if the resulting debt is negative, this function is used to swap the creditor and debtor, so that the sum of debt is always positive def swap(self): temp = self.debtFrom self.debtFrom = self.debtTo self.debtTo = temp self.amount *= -1
5,262
-9
528
c52fa157935331613b97e35158cf26220a4a1a2f
4,645
py
Python
main/forms.py
mattr555/AtYourService
41af372176dc607e97851b2c1e8c8efac392787c
[ "MIT" ]
1
2020-11-05T07:29:46.000Z
2020-11-05T07:29:46.000Z
main/forms.py
mattr555/AtYourService
41af372176dc607e97851b2c1e8c8efac392787c
[ "MIT" ]
null
null
null
main/forms.py
mattr555/AtYourService
41af372176dc607e97851b2c1e8c8efac392787c
[ "MIT" ]
null
null
null
from django import forms from django.contrib.auth.models import User, Group from django.contrib.auth.forms import UserCreationForm from datetime import timedelta from main.models import UserEvent, UserProfile, Organization, Event import pytz
39.364407
117
0.651238
from django import forms from django.contrib.auth.models import User, Group from django.contrib.auth.forms import UserCreationForm from datetime import timedelta from main.models import UserEvent, UserProfile, Organization, Event import pytz class MyUserCreate(UserCreationForm): email = forms.EmailField(required=True) first_name = forms.CharField(required=True) last_name = forms.CharField(required=True) volunteer = forms.BooleanField(required=False) org_admin = forms.BooleanField(required=False) timezone = forms.ChoiceField(required=True, choices=[(i, i) for i in pytz.common_timezones]) class Meta: fields = ('first_name', 'last_name', 'email', 'volunteer', 'org_admin', 'timezone',) model = User def save(self, commit=True): user = User.objects.create_user(self.cleaned_data['username'], self.cleaned_data['email'], self.cleaned_data['password1']) user.first_name = self.cleaned_data['first_name'] user.last_name = self.cleaned_data['last_name'] if self.cleaned_data.get('volunteer'): user.groups.add(Group.objects.get(name="Volunteer")) if self.cleaned_data.get('org_admin'): user.groups.add(Group.objects.get(name="Org_Admin")) user.save() profile = UserProfile(user=user) profile.timezone = self.cleaned_data['timezone'] profile.save() return user def clean_org_admin(self): vol = self.cleaned_data.get('volunteer') org = self.cleaned_data.get('org_admin') if (not vol) and (not org): raise forms.ValidationError('A checkbox is required') class UserEventCreate(forms.ModelForm): date_start = forms.DateTimeField(required=True, widget=forms.DateTimeInput(format='%m/%d/%Y %I:%M %p')) date_end = forms.DateTimeField(widget=forms.DateTimeInput(format='%m/%d/%Y %I:%M %p')) class Meta: model = UserEvent fields = ('name', 'description', 'organization', 'date_start', 'date_end', 'location', 'hours_worked') def __init__(self, user=None, *args, **kwargs): super(UserEventCreate, self).__init__(*args, **kwargs) self._user = user def save(self, commit=True): event = super(UserEventCreate, 
self).save(commit=False) event.user = self._user if self.cleaned_data.get('date_end') is None: event.date_end = event.date_start + timedelta(hours=event.hours_worked) if commit: event.save() return event def clean_date_end(self): date_end = self.cleaned_data.get('date_end') date_start = self.cleaned_data.get('date_start') if date_start > date_end: raise forms.ValidationError("The start date should be before the end date!") return date_end class EventCreate(forms.ModelForm): date_start = forms.DateTimeField(required=True, widget=forms.DateTimeInput(format='%m/%d/%Y %I:%M %p')) date_end = forms.DateTimeField(widget=forms.DateTimeInput(format='%m/%d/%Y %I:%M %p')) class Meta: model = Event fields = ('organization', 'name', 'description', 'location', 'date_start', 'date_end', 'geo_lat', 'geo_lon',) def __init__(self, user=None, *args, **kwargs): super(EventCreate, self).__init__(*args, **kwargs) self._user = user def save(self, commit=True): event = super(EventCreate, self).save(commit=False) event.organizer = self._user event.organization_id = self.cleaned_data.get('organization').id if commit: event.save() return event def clean_organization(self): org = self.cleaned_data.get('organization') if org.admin_id != self._user.id: raise forms.ValidationError("That's not your organization!") return org def clean_date_end(self): date_end = self.cleaned_data.get('date_end') date_start = self.cleaned_data.get('date_start') if date_start > date_end: raise forms.ValidationError("The start date should be before the end date!") return date_end class OrganizationCreate(forms.ModelForm): class Meta: model = Organization fields = ('name', 'description', 'location', 'geo_lat', 'geo_lon') def __init__(self, user=None, *args, **kwargs): super(OrganizationCreate, self).__init__(*args, **kwargs) self._user = user def save(self, commit=True): o = super(OrganizationCreate, self).save(commit=False) o.admin = self._user if commit: o.save() return o
2,646
1,664
92
19a82d6f900b8f0c33e556f95dff04c5745664de
2,334
py
Python
examples/rel_vort_ex.py
dennissergeev/pyveccalc
c5d806e41c13ff3a22e4000764326e491b7d4283
[ "MIT" ]
3
2015-05-11T01:06:17.000Z
2015-12-11T12:16:15.000Z
examples/rel_vort_ex.py
dennissergeev/pyveccalc
c5d806e41c13ff3a22e4000764326e491b7d4283
[ "MIT" ]
2
2015-04-27T08:27:07.000Z
2015-05-16T14:21:35.000Z
examples/rel_vort_ex.py
dennissergeev/pyveccalc
c5d806e41c13ff3a22e4000764326e491b7d4283
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- """ Compute magnitude and relative vorticity """ import numpy as np import matplotlib as mpl mpl.rcParams['mathtext.default'] = 'regular' import matplotlib.pyplot as plt from mpl_toolkits.basemap import Basemap, addcyclic from netCDF4 import Dataset from pyveccalc.standard import WindHorizontal from pyveccalc.tools import prep_data, recover_data example_data_path = './example_data/data.nc' with Dataset(example_data_path) as f: lon = f.variables['longitude'][:] lat = f.variables['latitude'][:] u = f.variables['u'][:] v = f.variables['v'][:] vo = f.variables['vo'][:] uu, info = prep_data(u,'tzyx') vv, _ = prep_data(v,'tzyx') W = WindHorizontal(uu,vv,lon,lat,'lonlat') rel_vort = W.vort_z() rel_vort = recover_data(rel_vort, info) # Pick a level and a moment in time # add a cyclic point (for plotting purposes) rel_vort_c, lon_c = addcyclic(rel_vort[0,0,:,:], lon) vo_c, _ = addcyclic(vo[0,0,:,:], lon) # # Plot the calculated relative vorticity along with relative vorticity from the dataset # fig, (ax1, ax2) = plt.subplots(nrows=2) # Note: excluding high latitudes due to small grid step and large errors in finite differences m = Basemap(ax=ax1, projection='cyl', resolution='c', \ llcrnrlon=0, llcrnrlat=-88, \ urcrnrlon=360.01, urcrnrlat=88) x, y = m(*np.meshgrid(lon_c, lat)) c1 = m.contourf(x, y, vo_c*1e4, cmap=plt.cm.RdBu_r, extend='both') m.drawcoastlines() m.drawparallels((-90, -60, -30, 0, 30, 60, 90), labels=[1,0,0,0]) m.drawmeridians((0, 60, 120, 180, 240, 300, 360), labels=[0,0,0,1]) plt.colorbar(c1, ax=ax1, orientation='horizontal') ax1.set_title('Relative vorticity ($10^{-4}$s$^{-1}$)\n ERA-Interim', fontsize=16) m = Basemap(ax=ax2, projection='cyl', resolution='c', \ llcrnrlon=0, llcrnrlat=-88, \ urcrnrlon=360.01, urcrnrlat=88) x, y = m(*np.meshgrid(lon_c, lat)) c2 = m.contourf(x, y, rel_vort_c*1e4, cmap=plt.cm.RdBu_r, extend='both') m.drawcoastlines() m.drawparallels((-90, -60, -30, 0, 30, 60, 90), labels=[1,0,0,0]) m.drawmeridians((0, 
60, 120, 180, 240, 300, 360), labels=[0,0,0,1]) plt.colorbar(c2, ax=ax2, orientation='horizontal') ax2.set_title('Relative vorticity ($10^{-4}$s$^{-1}$)\n calculated by pyveccalc', fontsize=16) fig.tight_layout() plt.show()
32.873239
94
0.67138
# -*- coding: utf-8 -*- """ Compute magnitude and relative vorticity """ import numpy as np import matplotlib as mpl mpl.rcParams['mathtext.default'] = 'regular' import matplotlib.pyplot as plt from mpl_toolkits.basemap import Basemap, addcyclic from netCDF4 import Dataset from pyveccalc.standard import WindHorizontal from pyveccalc.tools import prep_data, recover_data example_data_path = './example_data/data.nc' with Dataset(example_data_path) as f: lon = f.variables['longitude'][:] lat = f.variables['latitude'][:] u = f.variables['u'][:] v = f.variables['v'][:] vo = f.variables['vo'][:] uu, info = prep_data(u,'tzyx') vv, _ = prep_data(v,'tzyx') W = WindHorizontal(uu,vv,lon,lat,'lonlat') rel_vort = W.vort_z() rel_vort = recover_data(rel_vort, info) # Pick a level and a moment in time # add a cyclic point (for plotting purposes) rel_vort_c, lon_c = addcyclic(rel_vort[0,0,:,:], lon) vo_c, _ = addcyclic(vo[0,0,:,:], lon) # # Plot the calculated relative vorticity along with relative vorticity from the dataset # fig, (ax1, ax2) = plt.subplots(nrows=2) # Note: excluding high latitudes due to small grid step and large errors in finite differences m = Basemap(ax=ax1, projection='cyl', resolution='c', \ llcrnrlon=0, llcrnrlat=-88, \ urcrnrlon=360.01, urcrnrlat=88) x, y = m(*np.meshgrid(lon_c, lat)) c1 = m.contourf(x, y, vo_c*1e4, cmap=plt.cm.RdBu_r, extend='both') m.drawcoastlines() m.drawparallels((-90, -60, -30, 0, 30, 60, 90), labels=[1,0,0,0]) m.drawmeridians((0, 60, 120, 180, 240, 300, 360), labels=[0,0,0,1]) plt.colorbar(c1, ax=ax1, orientation='horizontal') ax1.set_title('Relative vorticity ($10^{-4}$s$^{-1}$)\n ERA-Interim', fontsize=16) m = Basemap(ax=ax2, projection='cyl', resolution='c', \ llcrnrlon=0, llcrnrlat=-88, \ urcrnrlon=360.01, urcrnrlat=88) x, y = m(*np.meshgrid(lon_c, lat)) c2 = m.contourf(x, y, rel_vort_c*1e4, cmap=plt.cm.RdBu_r, extend='both') m.drawcoastlines() m.drawparallels((-90, -60, -30, 0, 30, 60, 90), labels=[1,0,0,0]) m.drawmeridians((0, 
60, 120, 180, 240, 300, 360), labels=[0,0,0,1]) plt.colorbar(c2, ax=ax2, orientation='horizontal') ax2.set_title('Relative vorticity ($10^{-4}$s$^{-1}$)\n calculated by pyveccalc', fontsize=16) fig.tight_layout() plt.show()
0
0
0
c31a933e05468982f256a12e95b017ea0c4b2898
1,984
py
Python
SNA/data/preprocess_ES.py
deepesch/decentralizedtrustscore
406ac219cfa0eb37efa2bf5add484381fd78eb4a
[ "MIT" ]
null
null
null
SNA/data/preprocess_ES.py
deepesch/decentralizedtrustscore
406ac219cfa0eb37efa2bf5add484381fd78eb4a
[ "MIT" ]
null
null
null
SNA/data/preprocess_ES.py
deepesch/decentralizedtrustscore
406ac219cfa0eb37efa2bf5add484381fd78eb4a
[ "MIT" ]
1
2020-09-06T02:53:47.000Z
2020-09-06T02:53:47.000Z
'''Preprocess Epinions and Slashdot data''' '''Take in text file of data, create and save adjacency matrix''' import numpy as np, pickle import scipy.sparse as sp #Preprocess data #Optionally run normally or in test mode (when writing tests) if __name__ == "__main__": preprocess()
34.206897
86
0.677923
'''Preprocess Epinions and Slashdot data''' '''Take in text file of data, create and save adjacency matrix''' import numpy as np, pickle import scipy.sparse as sp #Preprocess data #Optionally run normally or in test mode (when writing tests) def preprocess(mode = "normal"): #FILE_PATH = "Raw Data/soc-sign-epinions.txt" FILE_PATH = "Raw Data/soc-sign-Slashdot090221.txt" #Dataset name (for filename of matrix) #Split off part right before file extension dataset = FILE_PATH.split(".txt")[0].split("-")[-1] with open(FILE_PATH, "rb") as data_file: data_lines = data_file.readlines() #Save components of data in three lists kept in synchrony from_data = list() to_data = list() labels = list() #Data format: each line FROM_ID TO_ID LABEL for line_index in range(4, len(data_lines)): #skip first 4 boilerplate lines data = data_lines[line_index].split() from_data.append(int(data[0])) to_data.append(int(data[1])) labels.append(int(data[2])) #Make a (square) adjacency matrix the size of the number of people #(as given by ID. note: ID starts at 0) max_id = max(max(from_data), max(to_data)) #aka number of people #Create in sparse row-major format #Note: ID starts at 0 so number of people is 1 more than max ID data_matrix = sp.csr_matrix((np.array(labels), (np.array(from_data), np.array(to_data)) ), shape=(max_id + 1, max_id + 1)) #correction to make data matrix symmetric if (data_matrix != data_matrix.transpose()).nnz > 0: #data matrix is not symmetric data_matrix = (data_matrix + data_matrix.transpose()).sign() #test data is a valid symmetric signed matrix if mode == "test": assert data_matrix.min() == -1 assert data_matrix.max() == 1 assert (data_matrix != data_matrix.transpose()).nnz == 0 #Save data np.save("Preprocessed Data/" + dataset + "_csr", data_matrix) if __name__ == "__main__": preprocess()
1,674
0
22
bc141a549fb309092bd3508f3eded2771ae00a62
242
py
Python
foobar-1.py
meetajain11/Neural-Networks
07b0b8c4d8e722b1a8c3763deecff36c53728808
[ "MIT" ]
null
null
null
foobar-1.py
meetajain11/Neural-Networks
07b0b8c4d8e722b1a8c3763deecff36c53728808
[ "MIT" ]
null
null
null
foobar-1.py
meetajain11/Neural-Networks
07b0b8c4d8e722b1a8c3763deecff36c53728808
[ "MIT" ]
null
null
null
demo(15324)
17.285714
28
0.438017
def demo(area): ans = [] while(area >= 4): ar = int(area**0.5) ans.append(ar*ar) area = area-ar*ar while(area > 0): ans.append(1) area = area-1 print(ans) demo(15324)
200
0
23
e24245eab1fca32b7fea62ffd5dfa3c5c2dd631a
10,243
py
Python
custom_components/acthor/acthor/registers.py
jatty/hass-acthor
9d5aaed3f01e9288fef031b47b0808e6e80c22d3
[ "MIT" ]
null
null
null
custom_components/acthor/acthor/registers.py
jatty/hass-acthor
9d5aaed3f01e9288fef031b47b0808e6e80c22d3
[ "MIT" ]
null
null
null
custom_components/acthor/acthor/registers.py
jatty/hass-acthor
9d5aaed3f01e9288fef031b47b0808e6e80c22d3
[ "MIT" ]
null
null
null
import abc import asyncio import datetime import logging from typing import Any, Coroutine, Generic, Iterable, Iterator, Tuple, TypeVar from .abc import ABCModbusProtocol, MultiRegister, SingleRegister logger = logging.getLogger(__name__) T = TypeVar("T") class ACThorRegistersMixin(ABCModbusProtocol, abc.ABC): """ Provides direct access to the registers with some additional helper methods for accessing multi-register values. """ __slots__ = () power = ReadWrite(1000) """W 0-3.000 M1, 0-6.000 M3, 0-9.000 AC•THOR 9s """ temp1 = ReadOnly(1001, 10) """°C""" temp2 = ReadOnly(1030, 10) """°C""" temp3 = ReadOnly(1031, 10) """°C""" temp4 = ReadOnly(1032, 10) """°C""" temp5 = ReadOnly(1033, 10) """°C""" temp6 = ReadOnly(1034, 10) """°C""" temp7 = ReadOnly(1035, 10) """°C""" temp8 = ReadOnly(1036, 10) """°C""" # Sensors 2-8 can be read with a single instruction _temp_range_2_8 = MultiRegister(1030, 7, factor=10) ww1_max = ReadWrite(1002, 10) """°C""" ww2_max = ReadWrite(1037, 10) """°C""" ww3_max = ReadWrite(1038, 10) """°C""" ww1_min = ReadWrite(1006, 10) """°C""" ww2_min = ReadWrite(1039, 10) """°C""" ww3_min = ReadWrite(1040, 10) """°C""" _ww_range_2_3 = MultiRegister(1037, 4) status = ReadOnly(1003) """ 0..... Off 1-8... device start-up 9... 
operation >=200 Error states power stage """ power_timeout = ReadWrite(1004) """sec""" boost_mode = ReadWrite(1005) """0: off, 1: on, 3: relay boost on""" boost_time1_start = ReadWrite(1007) """Hour""" boost_time1_stop = ReadWrite(1008) """Hour""" boost_time2_start = ReadWrite(1026) """Hour""" boost_time2_stop = ReadWrite(1027) """Hour""" _boost_time1_range = MultiRegister(1007, 2) _boost_time2_range = MultiRegister(1026, 2) hour = ReadWrite(1009) minute = ReadWrite(1010) second = ReadWrite(1011) _hms_range = MultiRegister(1009, 3) boost_activate = ReadWrite(1012) number = ReadWrite(1013) max_power = ReadWrite(1014) """500..3000W do not use with 9s """ tempchip = ReadOnly(1015, 10) """°C""" control_firmware_version = ReadOnly(1016) control_firmware_subversion = ReadOnly(1028) control_firmware_update_available = ReadOnly(1029) """ 0 : no new afw available, 1 : new afw available (download not started, fw-version in variable Fwup_actual_version) 2 : download started (ini-file download) 3 : download started (afw.bin-file download) 4 : downloading other files 5 : download interrupted 10: download finished, waiting for installation """ ps_firmware_version = ReadOnly(1017) serial_number = ReadOnlyText(1018, 8) rh1_max = ReadWrite(1041, 10) """°C""" rh2_max = ReadWrite(1042, 10) """°C""" rh3_max = ReadWrite(1043, 10) """°C""" rh1_day_min = ReadWrite(1044, 10) """°C""" rh2_day_min = ReadWrite(1045, 10) """°C""" rh3_day_min = ReadWrite(1046, 10) """°C""" rh1_night_min = ReadWrite(1047, 10) """°C""" rh2_night_min = ReadWrite(1048, 10) """°C""" rh3_night_min = ReadWrite(1049, 10) """°C""" _rhs_max_range = MultiRegister(1041, 3) _rhs_day_min_range = MultiRegister(1044, 3) _rhs_night_min_range = MultiRegister(1047, 3) night_flag = ReadOnly(1050) """0: day, 1: night""" utc_correction = ReadWrite(1051) """0..37""" dst_correction = ReadWrite(1052) """0, 1""" _time_correction_range = MultiRegister(1051, 2) legionella_interval = ReadWrite(1053) """Days""" legionella_start = 
ReadWrite(1054) """Hour""" legionella_temp = ReadWrite(1055) """°C""" legionella_mode = ReadWrite(1056) """0: off, 1: on""" _legionella_range = MultiRegister(1053, 4) stratification_flag = ReadOnly(1057) """0: off, 1: on""" relay1_status = ReadOnly(1058) """0: off, 1: on""" load_state = ReadOnly(1059) """0: off, 1: on""" load_nominal_power = ReadOnly(1060) """W""" u_l1 = ReadOnly(1061) """V""" u_l2 = ReadOnly(1067) """V 9s only, ACTHOR replies 0 """ u_l3 = ReadOnly(1072) """V 9s only, ACTHOR replies 0 """ i_l1 = ReadOnly(1062, 10) """A""" i_l2 = ReadOnly(1068, 10) """A 9s only, ACTHOR replies 0 """ i_l3 = ReadOnly(1073, 10) """A 9s only, ACTHOR replies 0 """ _l1_range = MultiRegister(1061, 2) _l2_range = MultiRegister(1067, 2) _l3_range = MultiRegister(1072, 2) u_out = ReadOnly(1063) """V""" freq = ReadOnly(1064) """mHz""" operation_mode = ReadWrite(1065) """1-8 since version a0010004 """ access_level = ReadWrite(1066) """1-3 since version a0010004 """ meter_power = ReadOnly(1069) """integer, negative is feed in""" control_type = ReadWrite(1070) """ 1 = http 2 = Modbus TCP 3 = Fronius Auto 4 = Fronius Manual 5 = SMA 6 = Steca / Kostal Piko MP 7 = Varta Auto 8 = Varta Manual 9 = my-PV Power Meter Auto 10 = my-PV Power Meter Manual 11 = my-PV Power Meter Direkt (not readable, no network connection) 12 = reserved 13 = Multimode slave 14 = RCT Power Manual 15 = Adjustable Modbus TCP """ pmax_abs = ReadOnly(1071) """ incl. 
Slave-Power in case of multi-unit-mode """ p_out1 = ReadOnly(1074) """W 9s only, ACTHOR replies 0 """ p_out2 = ReadOnly(1075) """W 9s only, ACTHOR replies 0 """ p_out3 = ReadOnly(1076) """W 9s only, ACTHOR replies 0 """ _p_out_range = MultiRegister(1074, 3) operation_state = ReadOnly(1077) """ 0 green tick flashes 1 yellow wave is on 2 yellow wave flashes 3 green tick is on 4 red cross is on 5 red cross flashes """ power_big = ReadWriteMulti(1078, 2) """W Only for large systems with several units (multi-mode) and output specifications greater than 65,535 watts. Power below this value is entered in register 1000. """ power_and_relays = ReadWrite(1080) """W 9s only Allows direct access to the AC•THOR 9s power stage and the relays in Modbus TCP mode. bit 15: relay Out-3 bit 14: relay Out-2 bit 13 and 12: 0 ... power stage off 1 ... power stage to Out-1 2 ... power stage to Out-2 3 ... power stage to Out-3 bit 11 – 0: power stage power 0 – 3.000 (watt) """ async def get_temps(self) -> Tuple[float, float, float, float, float, float, float, float]: """Get the temperatures. Reads all eight temperature sensors with only two instructions. Returns: 8-tuple containing the temperatures in celsius. """ first_temp, other_temps = await asyncio.gather(self.temp1, self._temp_range_2_8.read(self)) return (first_temp, *other_temps) async def get_temp(self, sensor: int) -> float: """Read the value of a temperature sensor. Args: sensor: Sensor number in [1..8]. Returns: Temperature of the sensor. """ if not 1 <= sensor <= 8: raise ValueError("sensor must be in range(1, 9)") return await getattr(self, f"temp{sensor}") async def get_control_firmware_version(self) -> Tuple[int, int]: """Read the full control firmware version. Returns: 2-tuple (major, sub).. """ maj, sub = await asyncio.gather(self.control_firmware_version, self.control_firmware_subversion) return maj, sub
24.622596
116
0.60617
import abc import asyncio import datetime import logging from typing import Any, Coroutine, Generic, Iterable, Iterator, Tuple, TypeVar from .abc import ABCModbusProtocol, MultiRegister, SingleRegister logger = logging.getLogger(__name__) T = TypeVar("T") class ReadOnlyMixin(Generic[T], abc.ABC): __slots__ = () def __get__(self, instance: ABCModbusProtocol, cls=None) -> Coroutine[Any, Any, T]: return self.read(instance) def __set__(self, instance: ABCModbusProtocol, _) -> None: raise AttributeError @abc.abstractmethod async def read(self, protocol: ABCModbusProtocol) -> T: ... class ReadWriteMixin(ReadOnlyMixin[T], abc.ABC): __slots__ = () def __set__(self, instance: ABCModbusProtocol, value: T) -> None: asyncio.create_task(self._write_handle_error(instance, value)) async def _write_handle_error(self, instance: ABCModbusProtocol, value: T) -> None: try: await self.write(instance, value) except Exception: logger.exception("failed to write %s to %s", repr(value), self) @abc.abstractmethod async def write(self, protocol: ABCModbusProtocol, value: T) -> None: ... 
class ReadOnly(SingleRegister, ReadOnlyMixin[int]): __slots__ = () class ReadOnlyText(MultiRegister, ReadOnlyMixin[str]): __slots__ = () async def read(self, protocol: ABCModbusProtocol) -> str: values = await super().read(protocol) return "".join(chr(value >> 8) + chr(value & 0xFF) for value in values) class ReadWrite(SingleRegister, ReadWriteMixin[int]): __slots__ = () def i16s_to_bytes(it: Iterable[int]) -> Iterator[int]: for value in it: yield from value.to_bytes(2, "big") def bytes_to_i16(it: Iterable[int]) -> Iterator[int]: it = iter(it) for high in it: low = next(it) yield (high << 8) | low class ReadWriteMulti(MultiRegister, ReadWriteMixin[int]): __slots__ = () async def read(self, protocol: ABCModbusProtocol) -> int: values = await super().read(protocol) return int.from_bytes(tuple(i16s_to_bytes(values)), "big") async def write(self, protocol: ABCModbusProtocol, value: int) -> None: byte_parts = value.to_bytes(2 * self._length, "big") await super().write(protocol, bytes_to_i16(byte_parts)) class ACThorRegistersMixin(ABCModbusProtocol, abc.ABC): """ Provides direct access to the registers with some additional helper methods for accessing multi-register values. """ __slots__ = () power = ReadWrite(1000) """W 0-3.000 M1, 0-6.000 M3, 0-9.000 AC•THOR 9s """ temp1 = ReadOnly(1001, 10) """°C""" temp2 = ReadOnly(1030, 10) """°C""" temp3 = ReadOnly(1031, 10) """°C""" temp4 = ReadOnly(1032, 10) """°C""" temp5 = ReadOnly(1033, 10) """°C""" temp6 = ReadOnly(1034, 10) """°C""" temp7 = ReadOnly(1035, 10) """°C""" temp8 = ReadOnly(1036, 10) """°C""" # Sensors 2-8 can be read with a single instruction _temp_range_2_8 = MultiRegister(1030, 7, factor=10) ww1_max = ReadWrite(1002, 10) """°C""" ww2_max = ReadWrite(1037, 10) """°C""" ww3_max = ReadWrite(1038, 10) """°C""" ww1_min = ReadWrite(1006, 10) """°C""" ww2_min = ReadWrite(1039, 10) """°C""" ww3_min = ReadWrite(1040, 10) """°C""" _ww_range_2_3 = MultiRegister(1037, 4) status = ReadOnly(1003) """ 0..... Off 1-8... 
device start-up 9... operation >=200 Error states power stage """ power_timeout = ReadWrite(1004) """sec""" boost_mode = ReadWrite(1005) """0: off, 1: on, 3: relay boost on""" boost_time1_start = ReadWrite(1007) """Hour""" boost_time1_stop = ReadWrite(1008) """Hour""" boost_time2_start = ReadWrite(1026) """Hour""" boost_time2_stop = ReadWrite(1027) """Hour""" _boost_time1_range = MultiRegister(1007, 2) _boost_time2_range = MultiRegister(1026, 2) hour = ReadWrite(1009) minute = ReadWrite(1010) second = ReadWrite(1011) _hms_range = MultiRegister(1009, 3) boost_activate = ReadWrite(1012) number = ReadWrite(1013) max_power = ReadWrite(1014) """500..3000W do not use with 9s """ tempchip = ReadOnly(1015, 10) """°C""" control_firmware_version = ReadOnly(1016) control_firmware_subversion = ReadOnly(1028) control_firmware_update_available = ReadOnly(1029) """ 0 : no new afw available, 1 : new afw available (download not started, fw-version in variable Fwup_actual_version) 2 : download started (ini-file download) 3 : download started (afw.bin-file download) 4 : downloading other files 5 : download interrupted 10: download finished, waiting for installation """ ps_firmware_version = ReadOnly(1017) serial_number = ReadOnlyText(1018, 8) rh1_max = ReadWrite(1041, 10) """°C""" rh2_max = ReadWrite(1042, 10) """°C""" rh3_max = ReadWrite(1043, 10) """°C""" rh1_day_min = ReadWrite(1044, 10) """°C""" rh2_day_min = ReadWrite(1045, 10) """°C""" rh3_day_min = ReadWrite(1046, 10) """°C""" rh1_night_min = ReadWrite(1047, 10) """°C""" rh2_night_min = ReadWrite(1048, 10) """°C""" rh3_night_min = ReadWrite(1049, 10) """°C""" _rhs_max_range = MultiRegister(1041, 3) _rhs_day_min_range = MultiRegister(1044, 3) _rhs_night_min_range = MultiRegister(1047, 3) night_flag = ReadOnly(1050) """0: day, 1: night""" utc_correction = ReadWrite(1051) """0..37""" dst_correction = ReadWrite(1052) """0, 1""" _time_correction_range = MultiRegister(1051, 2) legionella_interval = ReadWrite(1053) """Days""" 
legionella_start = ReadWrite(1054) """Hour""" legionella_temp = ReadWrite(1055) """°C""" legionella_mode = ReadWrite(1056) """0: off, 1: on""" _legionella_range = MultiRegister(1053, 4) stratification_flag = ReadOnly(1057) """0: off, 1: on""" relay1_status = ReadOnly(1058) """0: off, 1: on""" load_state = ReadOnly(1059) """0: off, 1: on""" load_nominal_power = ReadOnly(1060) """W""" u_l1 = ReadOnly(1061) """V""" u_l2 = ReadOnly(1067) """V 9s only, ACTHOR replies 0 """ u_l3 = ReadOnly(1072) """V 9s only, ACTHOR replies 0 """ i_l1 = ReadOnly(1062, 10) """A""" i_l2 = ReadOnly(1068, 10) """A 9s only, ACTHOR replies 0 """ i_l3 = ReadOnly(1073, 10) """A 9s only, ACTHOR replies 0 """ _l1_range = MultiRegister(1061, 2) _l2_range = MultiRegister(1067, 2) _l3_range = MultiRegister(1072, 2) u_out = ReadOnly(1063) """V""" freq = ReadOnly(1064) """mHz""" operation_mode = ReadWrite(1065) """1-8 since version a0010004 """ access_level = ReadWrite(1066) """1-3 since version a0010004 """ meter_power = ReadOnly(1069) """integer, negative is feed in""" control_type = ReadWrite(1070) """ 1 = http 2 = Modbus TCP 3 = Fronius Auto 4 = Fronius Manual 5 = SMA 6 = Steca / Kostal Piko MP 7 = Varta Auto 8 = Varta Manual 9 = my-PV Power Meter Auto 10 = my-PV Power Meter Manual 11 = my-PV Power Meter Direkt (not readable, no network connection) 12 = reserved 13 = Multimode slave 14 = RCT Power Manual 15 = Adjustable Modbus TCP """ pmax_abs = ReadOnly(1071) """ incl. 
Slave-Power in case of multi-unit-mode """ p_out1 = ReadOnly(1074) """W 9s only, ACTHOR replies 0 """ p_out2 = ReadOnly(1075) """W 9s only, ACTHOR replies 0 """ p_out3 = ReadOnly(1076) """W 9s only, ACTHOR replies 0 """ _p_out_range = MultiRegister(1074, 3) operation_state = ReadOnly(1077) """ 0 green tick flashes 1 yellow wave is on 2 yellow wave flashes 3 green tick is on 4 red cross is on 5 red cross flashes """ power_big = ReadWriteMulti(1078, 2) """W Only for large systems with several units (multi-mode) and output specifications greater than 65,535 watts. Power below this value is entered in register 1000. """ power_and_relays = ReadWrite(1080) """W 9s only Allows direct access to the AC•THOR 9s power stage and the relays in Modbus TCP mode. bit 15: relay Out-3 bit 14: relay Out-2 bit 13 and 12: 0 ... power stage off 1 ... power stage to Out-1 2 ... power stage to Out-2 3 ... power stage to Out-3 bit 11 – 0: power stage power 0 – 3.000 (watt) """ async def get_temps(self) -> Tuple[float, float, float, float, float, float, float, float]: """Get the temperatures. Reads all eight temperature sensors with only two instructions. Returns: 8-tuple containing the temperatures in celsius. """ first_temp, other_temps = await asyncio.gather(self.temp1, self._temp_range_2_8.read(self)) return (first_temp, *other_temps) async def get_temp(self, sensor: int) -> float: """Read the value of a temperature sensor. Args: sensor: Sensor number in [1..8]. Returns: Temperature of the sensor. """ if not 1 <= sensor <= 8: raise ValueError("sensor must be in range(1, 9)") return await getattr(self, f"temp{sensor}") async def get_time(self) -> datetime.time: hour, minute, second = await self._hms_range.read(self) # TODO build tzinfo tzinfo = datetime.timezone.utc return datetime.time( hour, minute, second, tzinfo=tzinfo, ) async def get_control_firmware_version(self) -> Tuple[int, int]: """Read the full control firmware version. Returns: 2-tuple (major, sub).. 
""" maj, sub = await asyncio.gather(self.control_firmware_version, self.control_firmware_subversion) return maj, sub
1,571
583
211
d5caa9127743b2e76bdcf8dc231561b1a3f77a1b
9,631
py
Python
pgbot/common.py
gresm/PygameCommunityBot
0da081704baaaa6fd6464f7abe43e6ba5043952d
[ "MIT" ]
77
2020-11-16T05:26:49.000Z
2021-03-08T06:27:06.000Z
pgbot/common.py
gresm/PygameCommunityBot
0da081704baaaa6fd6464f7abe43e6ba5043952d
[ "MIT" ]
71
2021-03-19T17:51:30.000Z
2022-02-19T12:42:19.000Z
pgbot/common.py
gresm/PygameCommunityBot
0da081704baaaa6fd6464f7abe43e6ba5043952d
[ "MIT" ]
19
2021-03-19T12:48:17.000Z
2021-12-18T04:41:08.000Z
""" This file is a part of the source code for the PygameCommunityBot. This project has been licensed under the MIT license. Copyright (c) 2020-present PygameCommunityDiscord This file defines some constants and variables used across the whole codebase """ import io import os from typing import Optional, Union import discord import pygame from dotenv import load_dotenv if os.path.isfile(".env"): load_dotenv() # take environment variables from .env # declare type alias for any channel Channel = Union[discord.TextChannel, discord.DMChannel, discord.GroupChannel] # For commonly used variables ints = discord.Intents.default() ints.members = True # needed for on_member_join bot = discord.Client(intents=ints) window = pygame.Surface((1, 1)) # This will later be redefined cmd_logs = {} # pygame community guild, or whichever is the 'primary' guild for the bot guild: Optional[discord.Guild] = None # IO object to redirect output to discord, gets patched later stdout: Optional[io.StringIO] = None # Tuple containing all admin commands, gets monkey-patched later admin_commands = () log_channel: discord.TextChannel arrivals_channel: discord.TextChannel roles_channel: discord.TextChannel guide_channel: discord.TextChannel entries_discussion_channel: discord.TextChannel console_channel: discord.TextChannel db_channel: discord.TextChannel rules_channel: discord.TextChannel entry_channels = {} entry_message_deletion_dict = {} __version__ = "1.5.3" # BONCC quiky stuff BONK = "<:pg_bonk:780423317718302781>" PG_ANGRY_AN = "<a:pg_snake_angry_an:779775305224159232>" TEST_MODE = "TEST_TOKEN" in os.environ TOKEN = os.environ["TEST_TOKEN" if TEST_MODE else "TOKEN"] TEST_USER_ID = int(os.environ["TEST_USER_ID"]) if "TEST_USER_ID" in os.environ else None TEST_USER_IDS = ( set(int(user_id) for user_id in os.environ["TEST_USER_IDS"].split()) if "TEST_USER_IDS" in os.environ else set() ) if TEST_USER_ID is not None: TEST_USER_IDS.add(TEST_USER_ID) PREFIX = "pd!" if TEST_MODE else "pg!" 
CMD_FUNC_PREFIX = "cmd_" BASIC_MAX_FILE_SIZE = 8_000_000 # bytes ZERO_SPACE = "\u200b" # U+200B DOC_EMBED_LIMIT = 3 BROWSE_MESSAGE_LIMIT = 500 # indicates whether the bot is in generic mode or not. Generic mode is useful # when you are testing the bot on other servers. Generic mode limits features of # the bot that requires access to server specific stuff GENERIC = False UNIQUE_POLL_MSG = "You cannot make multiple votes in this poll\n" WC_SCORING = ( ("Legendary Guardian ⚜️💫", 42), ("Elite Guardian ⚜️", 30), ("Guardian ⚜️", 15), ("Apprentice ⚜️", 1), ) class ServerConstants: """ Class of all server constants. If you ever want to make a copy of the bot run on your own server on non-generic mode, replicate this class, but with the constants from your server """ BOT_ID = 772788653326860288 SERVER_ID = 772505616680878080 RULES_CHANNEL_ID = 772509621747187712 ROLES_CHANNEL_ID = 772535163195228200 GUIDE_CHANNEL_ID = 772528306615615500 ARRIVALS_CHANNEL_ID = 774916117881159681 LOG_CHANNEL_ID = 793250875471822930 CONSOLE_CHANNEL_ID = 851123656880816138 ENTRY_CHANNEL_IDS = { "showcase": 772507247540437032, "resource": 810516093273768016, } ENTRIES_DISCUSSION_CHANNEL_ID = 780351772514058291 # eval is a pretty dangerous command, so grant it only for Admins and Senior Mages EVAL_ROLES = {772521884373614603, 772849669591400501} # Admin, Moderator, Senior Mage, Wizards, Lead Forgers ADMIN_ROLES = { 772521884373614603, 772508687256125440, 772849669591400501, 841338117968756766, 839869589343961099, } # Specialties, Helpfulies, Verified pygame contributors, Server Boosters PRIV_ROLES = { 774473681325785098, 778205389942030377, 780440736457031702, 787473199088533504, } DIVIDER_ROLES = {836645525372665887, 836645368744771654, 842754237774692392} # IDs of rules messages, in the order from rule 1 to rule 7 RULES = ( 799339450361577472, 799339479445405746, 799339501511639100, 799339582620827680, 799339603651067974, 799339620810358885, 819537779847200809, ) # NOTE: It is hardcoded in the 
bot to remove some messages in resource-entries, # if you want to remove more, add the ID to the set below MSGS_TO_FILTER = { 817137523905527889, 810942002114986045, 810942043488256060, } # Database channel DB_CHANNEL_ID = 838090567682490458 # remember to maintain the scores here in descending order WC_ROLES = ( (42, 889170053013061683), # Legendary Guardian (30, 889169676398100480), # Elite Guardian (15, 889169351645749311), # Guardian (1, 889168765479178240), # Apprentice ) # Link to pygame snake logo GUILD_ICON = "https://media.discordapp.net/attachments/793272780987826197/836600713672523826/Discord_Server_Animated_Logo_V5_512x512.gif" BOT_WELCOME_MSG = { "greet": ( "Hi", "Hello", "Welcome to **Pygame Community**", "Greetings", "Howdy", "Hi there, ", "Hey there", "*Hiss* Who's that? It's", "*Hiss* Welcome", "Hello there,", "Ooooh! Hello", "Hi there,", "*Hiss* Do I see a new user? *hiss*\n" + "Welcome to our wonderful chatroom", "Ooooh! It's", "Oooh! Look who has joined us, it's", ), "check": ( "Check out our", "Make sure to check out the", "Take a look at our", "See our", "Please see our", "Be sure to read our", "Be sure to check the", "Be sure to check out our", "Read our", "Have a look at our", "To get started here, please read the", ), "grab": ( ", grab", ". Then get some", ", take", ", then grab yourself some shiny", ". Get some fancy", ", get some", ", then get yourself some cool", ", then get yourself some", ", take some", ", then take some", ", then take some", ". Go get some cool roles at", ". Then go take some fancy", ", then grab some shiny", ), "end": ( " and have fun!", ", then have fun with pygame!", ", then have fun with pygame! *hiss*", " and have a nice time!", " and enjoy your stay!", " and have some fun! *hisss*", " and have fun here!", " and have fun with pygame!", " and have a wonderful time!", " and join us!", " and join the fun!", " and have fun with pygame! *hisss*", " and have fun here! 
*hisss*", ), } ILLEGAL_ATTRIBUTES = ( "__subclasses__", "__loader__", "__bases__", "__code__", "__getattribute__", "__setattr__", "__delattr_", "mro", "__class__", "__dict__", ) DEAD_CHAT_TRIGGERS = { "the chat is dead", "the chat is ded", "this chat is dead", "this is a ded chat", "this is a dead chat", "chat dead", "chat ded", "chatded", "chatdead", "dead chat", "ded chat", "dedchat", "this chat ded", "this chat dead", } BOT_MENTION = "the bot" if GENERIC else f"<@!{ServerConstants.BOT_ID}>" BOT_HELP_PROMPT = { "title": "Help", "color": 0xFFFF00, "description": f""" Hey there, do you want to use {BOT_MENTION} ? My command prefix is `{PREFIX}`. If you want me to run your code, use Discord's code block syntax. Learn more about Discord code formatting **[HERE](https://discord.com/channels/772505616680878080/774217896971730974/785510505728311306)**. If you want to know about a specifc command run {PREFIX}help [command], for example {PREFIX}help exec. ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━""", } BYDARIO_QUOTE = """ > Yea, if is dead bring it back to life or let it rest in peace > When you are death ppl dont go to your tomb and say: dead person > I know because I am dead and noone comes to visit me <@!691691416799150152> """ SHAKESPEARE_QUOTES = ( """ To be, or not to be, that is the question — SHAKESPEARE, _Hamlet_, Act 3 Scene 1, lines 56-83; Hamlet """, """ All the world's a stage, And all the men and women merely players — SHAKESPEARE, _As You Like It_, Act 2 Scene 7, lines 139-40; Jacques to Duke Senior and his companions """, """ We are such stuff As dreams are made on; and our little life Is rounded with a sleep — SHAKESPEARE, _The Tempest_, Act 4 Scene 1, lines 156-58; Prospero to Miranda and Ferdinand """, """ Out, out brief candle! Life's but a walking shadow, a poor player, That struts and frets his hour upon the stage. — SHAKESPEARE, _Macbeth_, Act 5 Scene 5, Lines 23-25; Macbeth to Seyton """, """ Be not afraid of greatness. 
Some are born great, some achieve greatness, and some have greatness thrust upon 'em. — SHAKESPEARE, _Twelfth Night_, Act 2 Scene 5, Lines 139-41; Malvolio """, """ When we are born we cry that we are come To this great stage of fools — SHAKESPEARE, _King Lear_, Act 4 Scene 6, lines 178-79; King Lear to Gloucester """, """ The web of our life is of a mingled yarn, good and ill together — SHAKESPEARE, _All's Well That Ends Well_, Act 4 Scene 3, lines 68-69; One lord to another """, """ You cannot, sir, take from me anything that I will not more willingly part withal - except my life, except my life, except my life — SHAKESPEARE, _Hamlet_, Act 2 Scene 2, lines 213-17; Hamlet """, )
28.921922
139
0.663586
""" This file is a part of the source code for the PygameCommunityBot. This project has been licensed under the MIT license. Copyright (c) 2020-present PygameCommunityDiscord This file defines some constants and variables used across the whole codebase """ import io import os from typing import Optional, Union import discord import pygame from dotenv import load_dotenv if os.path.isfile(".env"): load_dotenv() # take environment variables from .env # declare type alias for any channel Channel = Union[discord.TextChannel, discord.DMChannel, discord.GroupChannel] # For commonly used variables ints = discord.Intents.default() ints.members = True # needed for on_member_join bot = discord.Client(intents=ints) window = pygame.Surface((1, 1)) # This will later be redefined cmd_logs = {} # pygame community guild, or whichever is the 'primary' guild for the bot guild: Optional[discord.Guild] = None # IO object to redirect output to discord, gets patched later stdout: Optional[io.StringIO] = None # Tuple containing all admin commands, gets monkey-patched later admin_commands = () log_channel: discord.TextChannel arrivals_channel: discord.TextChannel roles_channel: discord.TextChannel guide_channel: discord.TextChannel entries_discussion_channel: discord.TextChannel console_channel: discord.TextChannel db_channel: discord.TextChannel rules_channel: discord.TextChannel entry_channels = {} entry_message_deletion_dict = {} __version__ = "1.5.3" # BONCC quiky stuff BONK = "<:pg_bonk:780423317718302781>" PG_ANGRY_AN = "<a:pg_snake_angry_an:779775305224159232>" TEST_MODE = "TEST_TOKEN" in os.environ TOKEN = os.environ["TEST_TOKEN" if TEST_MODE else "TOKEN"] TEST_USER_ID = int(os.environ["TEST_USER_ID"]) if "TEST_USER_ID" in os.environ else None TEST_USER_IDS = ( set(int(user_id) for user_id in os.environ["TEST_USER_IDS"].split()) if "TEST_USER_IDS" in os.environ else set() ) if TEST_USER_ID is not None: TEST_USER_IDS.add(TEST_USER_ID) PREFIX = "pd!" if TEST_MODE else "pg!" 
CMD_FUNC_PREFIX = "cmd_" BASIC_MAX_FILE_SIZE = 8_000_000 # bytes ZERO_SPACE = "\u200b" # U+200B DOC_EMBED_LIMIT = 3 BROWSE_MESSAGE_LIMIT = 500 # indicates whether the bot is in generic mode or not. Generic mode is useful # when you are testing the bot on other servers. Generic mode limits features of # the bot that requires access to server specific stuff GENERIC = False UNIQUE_POLL_MSG = "You cannot make multiple votes in this poll\n" WC_SCORING = ( ("Legendary Guardian ⚜️💫", 42), ("Elite Guardian ⚜️", 30), ("Guardian ⚜️", 15), ("Apprentice ⚜️", 1), ) class ServerConstants: """ Class of all server constants. If you ever want to make a copy of the bot run on your own server on non-generic mode, replicate this class, but with the constants from your server """ BOT_ID = 772788653326860288 SERVER_ID = 772505616680878080 RULES_CHANNEL_ID = 772509621747187712 ROLES_CHANNEL_ID = 772535163195228200 GUIDE_CHANNEL_ID = 772528306615615500 ARRIVALS_CHANNEL_ID = 774916117881159681 LOG_CHANNEL_ID = 793250875471822930 CONSOLE_CHANNEL_ID = 851123656880816138 ENTRY_CHANNEL_IDS = { "showcase": 772507247540437032, "resource": 810516093273768016, } ENTRIES_DISCUSSION_CHANNEL_ID = 780351772514058291 # eval is a pretty dangerous command, so grant it only for Admins and Senior Mages EVAL_ROLES = {772521884373614603, 772849669591400501} # Admin, Moderator, Senior Mage, Wizards, Lead Forgers ADMIN_ROLES = { 772521884373614603, 772508687256125440, 772849669591400501, 841338117968756766, 839869589343961099, } # Specialties, Helpfulies, Verified pygame contributors, Server Boosters PRIV_ROLES = { 774473681325785098, 778205389942030377, 780440736457031702, 787473199088533504, } DIVIDER_ROLES = {836645525372665887, 836645368744771654, 842754237774692392} # IDs of rules messages, in the order from rule 1 to rule 7 RULES = ( 799339450361577472, 799339479445405746, 799339501511639100, 799339582620827680, 799339603651067974, 799339620810358885, 819537779847200809, ) # NOTE: It is hardcoded in the 
bot to remove some messages in resource-entries, # if you want to remove more, add the ID to the set below MSGS_TO_FILTER = { 817137523905527889, 810942002114986045, 810942043488256060, } # Database channel DB_CHANNEL_ID = 838090567682490458 # remember to maintain the scores here in descending order WC_ROLES = ( (42, 889170053013061683), # Legendary Guardian (30, 889169676398100480), # Elite Guardian (15, 889169351645749311), # Guardian (1, 889168765479178240), # Apprentice ) # Link to pygame snake logo GUILD_ICON = "https://media.discordapp.net/attachments/793272780987826197/836600713672523826/Discord_Server_Animated_Logo_V5_512x512.gif" BOT_WELCOME_MSG = { "greet": ( "Hi", "Hello", "Welcome to **Pygame Community**", "Greetings", "Howdy", "Hi there, ", "Hey there", "*Hiss* Who's that? It's", "*Hiss* Welcome", "Hello there,", "Ooooh! Hello", "Hi there,", "*Hiss* Do I see a new user? *hiss*\n" + "Welcome to our wonderful chatroom", "Ooooh! It's", "Oooh! Look who has joined us, it's", ), "check": ( "Check out our", "Make sure to check out the", "Take a look at our", "See our", "Please see our", "Be sure to read our", "Be sure to check the", "Be sure to check out our", "Read our", "Have a look at our", "To get started here, please read the", ), "grab": ( ", grab", ". Then get some", ", take", ", then grab yourself some shiny", ". Get some fancy", ", get some", ", then get yourself some cool", ", then get yourself some", ", take some", ", then take some", ", then take some", ". Go get some cool roles at", ". Then go take some fancy", ", then grab some shiny", ), "end": ( " and have fun!", ", then have fun with pygame!", ", then have fun with pygame! *hiss*", " and have a nice time!", " and enjoy your stay!", " and have some fun! *hisss*", " and have fun here!", " and have fun with pygame!", " and have a wonderful time!", " and join us!", " and join the fun!", " and have fun with pygame! *hisss*", " and have fun here! 
*hisss*", ), } ILLEGAL_ATTRIBUTES = ( "__subclasses__", "__loader__", "__bases__", "__code__", "__getattribute__", "__setattr__", "__delattr_", "mro", "__class__", "__dict__", ) DEAD_CHAT_TRIGGERS = { "the chat is dead", "the chat is ded", "this chat is dead", "this is a ded chat", "this is a dead chat", "chat dead", "chat ded", "chatded", "chatdead", "dead chat", "ded chat", "dedchat", "this chat ded", "this chat dead", } BOT_MENTION = "the bot" if GENERIC else f"<@!{ServerConstants.BOT_ID}>" BOT_HELP_PROMPT = { "title": "Help", "color": 0xFFFF00, "description": f""" Hey there, do you want to use {BOT_MENTION} ? My command prefix is `{PREFIX}`. If you want me to run your code, use Discord's code block syntax. Learn more about Discord code formatting **[HERE](https://discord.com/channels/772505616680878080/774217896971730974/785510505728311306)**. If you want to know about a specifc command run {PREFIX}help [command], for example {PREFIX}help exec. ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━""", } BYDARIO_QUOTE = """ > Yea, if is dead bring it back to life or let it rest in peace > When you are death ppl dont go to your tomb and say: dead person > I know because I am dead and noone comes to visit me <@!691691416799150152> """ SHAKESPEARE_QUOTES = ( """ To be, or not to be, that is the question — SHAKESPEARE, _Hamlet_, Act 3 Scene 1, lines 56-83; Hamlet """, """ All the world's a stage, And all the men and women merely players — SHAKESPEARE, _As You Like It_, Act 2 Scene 7, lines 139-40; Jacques to Duke Senior and his companions """, """ We are such stuff As dreams are made on; and our little life Is rounded with a sleep — SHAKESPEARE, _The Tempest_, Act 4 Scene 1, lines 156-58; Prospero to Miranda and Ferdinand """, """ Out, out brief candle! Life's but a walking shadow, a poor player, That struts and frets his hour upon the stage. — SHAKESPEARE, _Macbeth_, Act 5 Scene 5, Lines 23-25; Macbeth to Seyton """, """ Be not afraid of greatness. 
Some are born great, some achieve greatness, and some have greatness thrust upon 'em. — SHAKESPEARE, _Twelfth Night_, Act 2 Scene 5, Lines 139-41; Malvolio """, """ When we are born we cry that we are come To this great stage of fools — SHAKESPEARE, _King Lear_, Act 4 Scene 6, lines 178-79; King Lear to Gloucester """, """ The web of our life is of a mingled yarn, good and ill together — SHAKESPEARE, _All's Well That Ends Well_, Act 4 Scene 3, lines 68-69; One lord to another """, """ You cannot, sir, take from me anything that I will not more willingly part withal - except my life, except my life, except my life — SHAKESPEARE, _Hamlet_, Act 2 Scene 2, lines 213-17; Hamlet """, )
0
0
0
6b35d0e3814027abbd9206a5231a54d20d277179
1,262
py
Python
plantcv/plantcv/hyperspectral/_avg_reflectance.py
Howzit123/plantcv
b4ff6ad765da36353f40827ce3816b33d1d3596a
[ "MIT" ]
2
2021-08-20T14:56:48.000Z
2021-08-24T23:12:56.000Z
plantcv/plantcv/hyperspectral/_avg_reflectance.py
Howzit123/plantcv
b4ff6ad765da36353f40827ce3816b33d1d3596a
[ "MIT" ]
null
null
null
plantcv/plantcv/hyperspectral/_avg_reflectance.py
Howzit123/plantcv
b4ff6ad765da36353f40827ce3816b33d1d3596a
[ "MIT" ]
1
2021-06-15T15:01:46.000Z
2021-06-15T15:01:46.000Z
# Calculate masked average background reflectance import numpy as np def _avg_reflectance(spectral_data, mask): """ Find average reflectance of masked hyperspectral data instance. This is useful for calculating a target signature (n_band x 1 - column array) which is required in various GatorSense hyperspectral tools (https://github.com/GatorSense/hsi_toolkit_py) Inputs: spectral_array = Hyperspectral data instance mask = Target wavelength value Returns: idx = Index :param spectral_data: __main__.Spectral_data :param mask: numpy.ndarray :return spectral_array: __main__.Spectral_data """ # Initialize list of average reflectance values avg_r = [] # For each band in a hyperspectral datacube mask and take the average for i in range(0, len(spectral_data.wavelength_dict)): band = spectral_data.array_data[:, :, [i]] band_reshape = np.transpose(np.transpose(band)[0]) masked_band = band_reshape[np.where(mask > 0)] band_avg = np.average(masked_band) avg_r.append(band_avg) # Convert into array object rather than list avg_r = np.asarray(avg_r) return avg_r
34.108108
111
0.669572
# Calculate masked average background reflectance import numpy as np def _avg_reflectance(spectral_data, mask): """ Find average reflectance of masked hyperspectral data instance. This is useful for calculating a target signature (n_band x 1 - column array) which is required in various GatorSense hyperspectral tools (https://github.com/GatorSense/hsi_toolkit_py) Inputs: spectral_array = Hyperspectral data instance mask = Target wavelength value Returns: idx = Index :param spectral_data: __main__.Spectral_data :param mask: numpy.ndarray :return spectral_array: __main__.Spectral_data """ # Initialize list of average reflectance values avg_r = [] # For each band in a hyperspectral datacube mask and take the average for i in range(0, len(spectral_data.wavelength_dict)): band = spectral_data.array_data[:, :, [i]] band_reshape = np.transpose(np.transpose(band)[0]) masked_band = band_reshape[np.where(mask > 0)] band_avg = np.average(masked_band) avg_r.append(band_avg) # Convert into array object rather than list avg_r = np.asarray(avg_r) return avg_r
0
0
0
dc8c9e5d8239e35faa52d813da6c2b78279456f0
843
py
Python
LeetCode/LeetCode_Python-master/LeetCode_Python-master/Algorithm-Easy/14_Longest_Common_Prefix.py
Sycamore-City-passerby/ML
605cfc70bdda2c99e5f1c16b25812b59c98a72ad
[ "MIT" ]
null
null
null
LeetCode/LeetCode_Python-master/LeetCode_Python-master/Algorithm-Easy/14_Longest_Common_Prefix.py
Sycamore-City-passerby/ML
605cfc70bdda2c99e5f1c16b25812b59c98a72ad
[ "MIT" ]
null
null
null
LeetCode/LeetCode_Python-master/LeetCode_Python-master/Algorithm-Easy/14_Longest_Common_Prefix.py
Sycamore-City-passerby/ML
605cfc70bdda2c99e5f1c16b25812b59c98a72ad
[ "MIT" ]
null
null
null
if __name__ == "__main__": print(Solution().longestCommonPrefix(["hello", "heaven", "heavy"])) """ Time Complexity = O(N*k), k is the length of the common prefix Space Complexity = O(1) Write a function to find the longest common prefix string amongst an array of strings. If there is no common prefix, return an empty string "". Example : Input: ["flower","flow","flight"] Output: "fl" """
24.085714
94
0.52669
class Solution: def longestCommonPrefix(self, strs): """ :type strs: List[str] :rtype: str """ if not strs: return "" for i in range(len(strs[0])): for string in strs[1:]: if i >= len(string) or string[i] != strs[0][i]: return strs[0][:i] return strs[0] if __name__ == "__main__": print(Solution().longestCommonPrefix(["hello", "heaven", "heavy"])) """ Time Complexity = O(N*k), k is the length of the common prefix Space Complexity = O(1) Write a function to find the longest common prefix string amongst an array of strings. If there is no common prefix, return an empty string "". Example : Input: ["flower","flow","flight"] Output: "fl" """
0
353
22
05ab655deb7f84c746142d1073d20e615dff176c
1,191
py
Python
main.py
hyeon0145/xts-aes-256
5f4c25cba7a9b81566635fbf2bd273ac71e1bf64
[ "WTFPL" ]
2
2017-08-21T11:11:49.000Z
2019-02-12T09:28:45.000Z
main.py
hyeon0145/xts-aes-256
5f4c25cba7a9b81566635fbf2bd273ac71e1bf64
[ "WTFPL" ]
null
null
null
main.py
hyeon0145/xts-aes-256
5f4c25cba7a9b81566635fbf2bd273ac71e1bf64
[ "WTFPL" ]
2
2018-09-05T09:33:19.000Z
2019-07-12T19:36:40.000Z
import sys import binascii from xts_aes import XTSAES TEXT_TYPES = { 'encryption': 'plaintext', 'decryption': 'ciphertext', } arguments = sys.argv[1:] mode = 'encryption' inverse_mode = 'decryption' if arguments and (arguments[0] == '-d'): mode = 'decryption' inverse_mode = 'encryption' key = read_hex_string('key') if len(key) != 64: sys.exit('key should be 64-byte') tweak = read_hex_string('tweak') if len(tweak) != 16: sys.exit('tweak should be 16-byte') text = read_hex_string(TEXT_TYPES[mode]) if len(text) < 16: sys.exit('{text_type} should be greater than or equal to 16-byte'.format(text_type=TEXT_TYPES[mode])) xts_aes = XTSAES(key, tweak) encryptor = xts_aes.encrypt if mode == 'encryption' else xts_aes.decrypt ciphertext = encryptor(text) print('{ciphertext_type}: {ciphertext}'.format(ciphertext_type=TEXT_TYPES[inverse_mode], ciphertext=binascii.hexlify(ciphertext).decode()))
27.068182
139
0.702771
import sys import binascii from xts_aes import XTSAES TEXT_TYPES = { 'encryption': 'plaintext', 'decryption': 'ciphertext', } def read_hex_string(name): try: hex_string = input('{name}: '.format(name=name)) hex_string = binascii.unhexlify(hex_string) except binascii.Error: sys.exit('{name} should be hex string'.format(name=name)) return hex_string arguments = sys.argv[1:] mode = 'encryption' inverse_mode = 'decryption' if arguments and (arguments[0] == '-d'): mode = 'decryption' inverse_mode = 'encryption' key = read_hex_string('key') if len(key) != 64: sys.exit('key should be 64-byte') tweak = read_hex_string('tweak') if len(tweak) != 16: sys.exit('tweak should be 16-byte') text = read_hex_string(TEXT_TYPES[mode]) if len(text) < 16: sys.exit('{text_type} should be greater than or equal to 16-byte'.format(text_type=TEXT_TYPES[mode])) xts_aes = XTSAES(key, tweak) encryptor = xts_aes.encrypt if mode == 'encryption' else xts_aes.decrypt ciphertext = encryptor(text) print('{ciphertext_type}: {ciphertext}'.format(ciphertext_type=TEXT_TYPES[inverse_mode], ciphertext=binascii.hexlify(ciphertext).decode()))
239
0
23
37e8351577681ea1ea7a116a7d719adb4de5025d
74
py
Python
bin/botocore/vendored/requests/packages/urllib3/packages/__init__.py
iilness2/bash-lambda-layer-custom
0b054d4ccb0623460354ba1f58059258c095a494
[ "MIT" ]
2,177
2015-01-02T09:56:51.000Z
2022-03-27T01:48:37.000Z
pip/_vendor/requests/packages/urllib3/packages/__init__.py
alex/pip
d51a4b345b31ec4c8defbefe7f12b996c00c67fa
[ "MIT" ]
713
2015-11-06T10:48:58.000Z
2018-11-27T16:32:18.000Z
pip/_vendor/requests/packages/urllib3/packages/__init__.py
alex/pip
d51a4b345b31ec4c8defbefe7f12b996c00c67fa
[ "MIT" ]
742
2015-08-22T23:17:54.000Z
2022-01-18T08:55:33.000Z
from __future__ import absolute_import from . import ssl_match_hostname
14.8
38
0.851351
from __future__ import absolute_import from . import ssl_match_hostname
0
0
0
1cd31b4890f3b6fdc7244e05a71517bd89b89233
48
py
Python
napari_plot/layers/line/__init__.py
lukasz-migas/napari-1d
b0f081a8711ae941b3e4b5c58c3aea56bd0e3277
[ "BSD-3-Clause" ]
13
2021-08-27T23:01:09.000Z
2022-03-22T13:51:35.000Z
napari_plot/layers/line/__init__.py
lukasz-migas/napari-1d
b0f081a8711ae941b3e4b5c58c3aea56bd0e3277
[ "BSD-3-Clause" ]
71
2021-08-28T13:29:17.000Z
2022-03-28T21:22:12.000Z
napari_plot/layers/line/__init__.py
lukasz-migas/napari-1d
b0f081a8711ae941b3e4b5c58c3aea56bd0e3277
[ "BSD-3-Clause" ]
null
null
null
"""Line""" from .line import Line # noqa: F401
16
36
0.625
"""Line""" from .line import Line # noqa: F401
0
0
0
7dfdebaa1e091eb8231a2ef9b239f464cd5c54da
5,737
py
Python
tests/test_doit_cmd.py
m4ta1l/doit
d1a1b7b3abc7641d977d3b78b580d97aea4e27ea
[ "MIT" ]
3
2021-08-07T14:14:51.000Z
2021-08-20T01:16:01.000Z
tests/test_doit_cmd.py
m4ta1l/doit
d1a1b7b3abc7641d977d3b78b580d97aea4e27ea
[ "MIT" ]
5
2020-09-07T19:21:51.000Z
2020-09-07T19:41:44.000Z
tests/test_doit_cmd.py
m4ta1l/doit
d1a1b7b3abc7641d977d3b78b580d97aea4e27ea
[ "MIT" ]
null
null
null
import os from unittest.mock import Mock import pytest from doit.exceptions import InvalidCommand from doit.cmd_run import Run from doit.cmd_list import List from doit import doit_cmd
35.196319
79
0.630818
import os from unittest.mock import Mock import pytest from doit.exceptions import InvalidCommand from doit.cmd_run import Run from doit.cmd_list import List from doit import doit_cmd def cmd_main(args): main = doit_cmd.DoitMain() main.BIN_NAME = 'doit' return main.run(args) class TestRun(object): def test_version(self, capsys): cmd_main(["--version"]) out, err = capsys.readouterr() assert "lib" in out def test_usage(self, capsys): cmd_main(["--help"]) out, err = capsys.readouterr() assert "doit list" in out def test_run_is_default(self, monkeypatch): mock_run = Mock() monkeypatch.setattr(Run, "execute", mock_run) cmd_main([]) assert 1 == mock_run.call_count def test_run_other_subcommand(self, monkeypatch): mock_list = Mock() monkeypatch.setattr(List, "execute", mock_list) cmd_main(["list"]) assert 1 == mock_list.call_count def test_cmdline_vars(self, monkeypatch): mock_run = Mock() monkeypatch.setattr(Run, "execute", mock_run) cmd_main(['x=1', 'y=abc']) assert '1' == doit_cmd.get_var('x') assert 'abc' == doit_cmd.get_var('y') assert None is doit_cmd.get_var('z') def test_cmdline_novars(self, monkeypatch): mock_run = Mock() monkeypatch.setattr(Run, "execute", mock_run) cmd_main(['x=1']) # Simulate the variable below not being initialized by a subprocess on # Windows. See https://github.com/pydoit/doit/issues/164. 
doit_cmd._CMDLINE_VARS = None assert None is doit_cmd.get_var('x') def test_cmdline_vars_not_opts(self, monkeypatch): mock_run = Mock() monkeypatch.setattr(Run, "execute", mock_run) cmd_main(['--z=5']) assert None == doit_cmd.get_var('--z') def test_cmdline_loader_option_before_cmd_name(self, monkeypatch): mock_list = Mock() monkeypatch.setattr(List, "execute", mock_list) cmd_main(['-k', 'list', '--all']) assert mock_list.called params, args = mock_list.call_args[0] assert params['subtasks'] == True assert params['seek_file'] == True assert args == [] def test_cmdline_loader_option_mixed(self, monkeypatch): mock_run = Mock() monkeypatch.setattr(Run, "execute", mock_run) cmd_main(['-c', '-k', 'lala']) assert mock_run.called params, args = mock_run.call_args[0] assert params['continue'] == True assert params['seek_file'] == True assert args == ['lala'] def test_task_loader_has_cmd_list(self, monkeypatch): cmd_names = [] def save_cmd_names(self, params, args): cmd_names.extend(self.loader.cmd_names) monkeypatch.setattr(Run, "execute", save_cmd_names) cmd_main([]) assert 'list' in cmd_names def test_extra_config(self, monkeypatch, depfile_name): outfile_val = [] def monkey_run(self, opt_values, pos_args): outfile_val.append(opt_values['outfile']) monkeypatch.setattr(Run, "execute", monkey_run) extra_config = { 'outfile': 'foo.txt', 'dep_file': depfile_name, } doit_cmd.DoitMain(extra_config={'GLOBAL': extra_config}).run([]) assert outfile_val[0] == 'foo.txt' class TestErrors(object): def test_interrupt(self, monkeypatch): def my_raise(*args): raise KeyboardInterrupt() mock_cmd = Mock(side_effect=my_raise) monkeypatch.setattr(Run, "execute", mock_cmd) pytest.raises(KeyboardInterrupt, cmd_main, []) def test_user_error(self, capsys, monkeypatch): mock_cmd = Mock(side_effect=InvalidCommand) monkeypatch.setattr(Run, "execute", mock_cmd) got = cmd_main([]) assert 3 == got out, err = capsys.readouterr() assert "ERROR" in err def test_internal_error(self, capsys, monkeypatch): 
mock_cmd = Mock(side_effect=Exception) monkeypatch.setattr(Run, "execute", mock_cmd) got = cmd_main([]) assert 3 == got out, err = capsys.readouterr() # traceback from Exception (this case code from mock lib) assert "mock.py" in err class TestConfig(object): def test_no_ini_config_file(self): main = doit_cmd.DoitMain(config_filenames=()) main.run(['--version']) def test_load_plugins_command(self): config_filename = os.path.join(os.path.dirname(__file__), 'sample.cfg') main = doit_cmd.DoitMain(config_filenames=config_filename) assert 1 == len(main.config['COMMAND']) # test loaded plugin command is actually used with plugin name assert 'foo' in main.get_cmds() def test_merge_api_ini_config(self): config_filename = os.path.join(os.path.dirname(__file__), 'sample.cfg') api_config = {'GLOBAL': {'opty':'10', 'optz':'10'}} main = doit_cmd.DoitMain(config_filenames=config_filename, extra_config=api_config) assert 1 == len(main.config['COMMAND']) # test loaded plugin command is actually used with plugin name assert 'foo' in main.get_cmds() # INI has higher preference the api_config assert main.config['GLOBAL'] == {'optx':'6', 'opty':'7', 'optz':'10'} def test_execute_command_plugin(self, capsys): config_filename = os.path.join(os.path.dirname(__file__), 'sample.cfg') main = doit_cmd.DoitMain(config_filenames=config_filename) main.run(['foo']) got = capsys.readouterr()[0] assert got == 'this command does nothing!\n'
4,961
9
575
13c81ad5d59ddeac7225490f08cb685f14f20c5b
1,970
py
Python
trajectory/utils/serialization.py
SeanNobel/trajectory-transformer
b5497e992734b2320add01adbbfd26b3ee16f2d2
[ "MIT" ]
63
2021-11-23T08:00:27.000Z
2022-03-31T04:03:05.000Z
trajectory/utils/serialization.py
SeanNobel/trajectory-transformer
b5497e992734b2320add01adbbfd26b3ee16f2d2
[ "MIT" ]
7
2021-12-08T04:01:13.000Z
2022-03-31T07:42:37.000Z
trajectory/utils/serialization.py
SeanNobel/trajectory-transformer
b5497e992734b2320add01adbbfd26b3ee16f2d2
[ "MIT" ]
12
2021-12-13T10:55:32.000Z
2022-03-24T09:06:22.000Z
import time import sys import os import glob import pickle import json import torch import pdb def mkdir(savepath, prune_fname=False): """ returns `True` iff `savepath` is created """ if prune_fname: savepath = os.path.dirname(savepath) if not os.path.exists(savepath): try: os.makedirs(savepath) except: print(f'[ utils/serialization ] Warning: did not make directory: {savepath}') return False return True else: return False
26.621622
89
0.65736
import time import sys import os import glob import pickle import json import torch import pdb def mkdir(savepath, prune_fname=False): """ returns `True` iff `savepath` is created """ if prune_fname: savepath = os.path.dirname(savepath) if not os.path.exists(savepath): try: os.makedirs(savepath) except: print(f'[ utils/serialization ] Warning: did not make directory: {savepath}') return False return True else: return False def get_latest_epoch(loadpath): states = glob.glob1(loadpath, 'state_*') latest_epoch = -1 for state in states: epoch = int(state.replace('state_', '').replace('.pt', '')) latest_epoch = max(epoch, latest_epoch) return latest_epoch def load_model(*loadpath, epoch=None, device='cuda:0'): loadpath = os.path.join(*loadpath) config_path = os.path.join(loadpath, 'model_config.pkl') if epoch is 'latest': epoch = get_latest_epoch(loadpath) print(f'[ utils/serialization ] Loading model epoch: {epoch}') state_path = os.path.join(loadpath, f'state_{epoch}.pt') config = pickle.load(open(config_path, 'rb')) state = torch.load(state_path) model = config() model.to(device) model.load_state_dict(state, strict=True) print(f'\n[ utils/serialization ] Loaded config from {config_path}\n') print(config) return model, epoch def load_config(*loadpath): loadpath = os.path.join(*loadpath) config = pickle.load(open(loadpath, 'rb')) print(f'[ utils/serialization ] Loaded config from {loadpath}') print(config) return config def load_from_config(*loadpath): config = load_config(*loadpath) return config.make() def load_args(*loadpath): from .setup import Parser loadpath = os.path.join(*loadpath) args_path = os.path.join(loadpath, 'args.json') args = Parser() args.load(args_path) return args
1,323
0
115
1619dc70d15eabc62f53e2b1f4e694140295a184
12,160
py
Python
paper/code/analysis.py
skearnes/color-features
a6af3686c82a5d1d6b68341fe5e5b16e8e4ed356
[ "BSD-3-Clause" ]
5
2016-06-07T04:17:24.000Z
2021-05-27T07:41:57.000Z
paper/code/analysis.py
skearnes/color-features
a6af3686c82a5d1d6b68341fe5e5b16e8e4ed356
[ "BSD-3-Clause" ]
1
2018-04-28T14:40:26.000Z
2018-05-19T21:55:47.000Z
paper/code/analysis.py
skearnes/color-features
a6af3686c82a5d1d6b68341fe5e5b16e8e4ed356
[ "BSD-3-Clause" ]
1
2018-12-06T22:54:43.000Z
2018-12-06T22:54:43.000Z
"""Analyze results. Use the saved model output to calculate AUC and other metrics. """ import collections import cPickle as pickle import gflags as flags import gzip import logging import numpy as np import os import pandas as pd from sklearn import metrics from statsmodels.stats import proportion import sys flags.DEFINE_string('root', None, 'Root directory containing model results.') flags.DEFINE_string('dataset_file', None, 'Filename containing datasets.') flags.DEFINE_string('prefix', None, 'Dataset prefix.') flags.DEFINE_boolean('tversky', False, 'If True, use Tversky features.') flags.DEFINE_integer('num_folds', 5, 'Number of cross-validation folds.') flags.DEFINE_boolean('cycle', False, 'If True, expect multiple query molecules.') flags.DEFINE_string('reload', None, 'Load previously analyzed results.') flags.DEFINE_string('subset', None, 'Subset.') FLAGS = flags.FLAGS logging.getLogger().setLevel(logging.INFO) FEATURES_MAP = { 'rocs': 'TanimotoCombo', 'shape_color': 'ST-CT', 'shape_color_components': 'ST-CCT', 'shape_color_overlaps': 'ST-CAO', 'shape_color_components_overlaps': 'ST-CCT-CAO', 'rocs_tversky': 'TverskyCombo', 'shape_color_tversky': 'STv-CTv', 'shape_color_components_tversky': 'STv-CCTv', 'shape_color_components_tversky_overlaps': 'STv-CCTv-CAO', } MODEL_MAP = { 'logistic': 'LR', 'random_forest': 'RF', 'svm': 'SVM', } def roc_enrichment(fpr, tpr, target_fpr): """Get ROC enrichment.""" assert fpr[0] == 0 assert fpr[-1] == 1 assert np.all(np.diff(fpr) >= 0) return np.true_divide(np.interp(target_fpr, fpr, tpr), target_fpr) def get_cv_metrics(y_true, y_pred): """Get 5-fold mean AUC.""" assert len(y_true) == len(y_pred) fold_metrics = collections.defaultdict(list) for yt, yp in zip(y_true, y_pred): assert len(yt) == len(yp) fold_metrics['auc'].append(metrics.roc_auc_score(yt, yp)) fpr, tpr, _ = metrics.roc_curve(yt, yp) for x in [0.005, 0.01, 0.02, 0.05, 0.1, 0.2]: fold_metrics['e-%g' % x].append(roc_enrichment(fpr, tpr, x)) return fold_metrics def 
add_rows(features, scores, rows, dataset, index=None): """Record per-fold and averaged cross-validation results.""" for fold in range(len(scores['auc'])): row = {'dataset': dataset, 'features': features, 'fold': fold} if index is not None: row['index'] = index for key, values in scores.iteritems(): row[key] = values[fold] rows.append(row) # Averages row = {'dataset': dataset, 'features': features, 'fold': 'all'} if index is not None: row['index'] = index for key, values in scores.iteritems(): row[key] = np.mean(values) rows.append(row) def load_output_and_calculate_metrics(model, subset): """Calculate metrics using saved model output. Args: model: String model type (e.g. logistic). subset: String query subset (e.g. omega1). Returns: DataFrame containing calculated metrics for each model/subset, including per-fold and average values for each reference molecule. """ with open(FLAGS.dataset_file) as f: datasets = [line.strip() for line in f] rows = [] for dataset in datasets: ref_idx = 0 while True: # Cycle through reference molecules. ref_idx_exists = get_ref_rows(model, subset, dataset, ref_idx, rows) if not FLAGS.cycle or not ref_idx_exists: break ref_idx += 1 logging.info('%s\t%d', dataset, ref_idx) return pd.DataFrame(rows) def confidence_interval(delta, metric): """Calculate a two-sided 95% confidence interval for differences.""" # Wilson score interval for sign test. num_successes = np.count_nonzero(delta > 0) num_trials = np.count_nonzero(delta != 0) # Exclude zero differences. 
lower, upper = proportion.proportion_confint( num_successes, num_trials, alpha=0.05, method='wilson') median_delta = delta.median() if metric == 'auc': median = r'%.3f' % median_delta ci = r'(%.2f, %.2f)' % (lower, upper) else: median = r'%.0f' % median_delta ci = r'(%.2f, %.2f)' % (lower, upper) if lower < 0.5 and upper < 0.5: median = r'\bfseries \color{red} ' + median ci = r'\bfseries \color{red} ' + ci elif lower > 0.5 and upper > 0.5: median = r'\bfseries ' + median ci = r'\bfseries ' + ci return median, ci def data_table(data, subsets, models, kind=None, tversky=False): """Get medians and compare everything to ROCS. Args: data: DataFrame containing model performance. subsets: List of query subsets. models: List of models to include in the table. kind: List of metrics to report. Defaults to ['auc']. tversky: Boolean whether to use Tversky features. If False, use Tanimoto features. """ if kind is None: kind = ['auc'] if tversky: rocs_baseline = 'rocs_tversky' features_order = ['shape_color_tversky', 'shape_color_components_tversky', 'shape_color_overlaps', 'shape_color_components_tversky_overlaps'] else: rocs_baseline = 'rocs' features_order = ['shape_color', 'shape_color_components', 'shape_color_overlaps', 'shape_color_components_overlaps'] table = [] # Get ROCS row. row = [r'\cellcolor{white} ROCS', FEATURES_MAP[rocs_baseline]] for subset in subsets: rocs_mask = ((data['features'] == rocs_baseline) & (data['subset'] == subset) & (data['model'] == models[0])) rocs_df = data[rocs_mask] logging.info('Confidence interval N = %d', len(rocs_df)) logging.info('Number of datasets = %d', len(pd.unique(rocs_df['dataset']))) for metric in kind: if metric == 'auc': number = '%.3f' else: number = '%.0f' row.extend([number % rocs_df[metric].median(), '', '']) table.append(' & '.join(row)) # Get model rows. 
for model in models: for features in features_order: if features == features_order[-1]: row = [r'\multirow{-%d}{*}{\cellcolor{white} %s}' % ( len(features_order), MODEL_MAP[model])] else: row = [r'\cellcolor{white}'] row.append(FEATURES_MAP[features]) for subset in subsets: mask = ((data['features'] == features) & (data['subset'] == subset) & (data['model'] == model)) df = data[mask] rocs_mask = ((data['features'] == rocs_baseline) & (data['subset'] == subset) & (data['model'] == model)) rocs_df = data[rocs_mask] for metric in kind: if metric == 'auc': number = '%.3f' else: number = '%.0f' row.append(number % df[metric].median()) if features == rocs_baseline: row.append('') row.append('') else: assert np.array_equal(df['dataset'].values, rocs_df['dataset'].values) if 'index' in df.columns: assert np.array_equal(df['index'].values, rocs_df['index'].values) delta = df.copy() delta[metric] -= rocs_df[metric].values row.extend(confidence_interval(delta[metric], metric)) table.append(' & '.join(row)) print ' \\\\\n'.join(table) if __name__ == '__main__': flags.MarkFlagAsRequired('root') flags.MarkFlagAsRequired('dataset_file') flags.MarkFlagAsRequired('prefix') FLAGS(sys.argv) main()
35.870206
80
0.565789
"""Analyze results. Use the saved model output to calculate AUC and other metrics. """ import collections import cPickle as pickle import gflags as flags import gzip import logging import numpy as np import os import pandas as pd from sklearn import metrics from statsmodels.stats import proportion import sys flags.DEFINE_string('root', None, 'Root directory containing model results.') flags.DEFINE_string('dataset_file', None, 'Filename containing datasets.') flags.DEFINE_string('prefix', None, 'Dataset prefix.') flags.DEFINE_boolean('tversky', False, 'If True, use Tversky features.') flags.DEFINE_integer('num_folds', 5, 'Number of cross-validation folds.') flags.DEFINE_boolean('cycle', False, 'If True, expect multiple query molecules.') flags.DEFINE_string('reload', None, 'Load previously analyzed results.') flags.DEFINE_string('subset', None, 'Subset.') FLAGS = flags.FLAGS logging.getLogger().setLevel(logging.INFO) FEATURES_MAP = { 'rocs': 'TanimotoCombo', 'shape_color': 'ST-CT', 'shape_color_components': 'ST-CCT', 'shape_color_overlaps': 'ST-CAO', 'shape_color_components_overlaps': 'ST-CCT-CAO', 'rocs_tversky': 'TverskyCombo', 'shape_color_tversky': 'STv-CTv', 'shape_color_components_tversky': 'STv-CCTv', 'shape_color_components_tversky_overlaps': 'STv-CCTv-CAO', } MODEL_MAP = { 'logistic': 'LR', 'random_forest': 'RF', 'svm': 'SVM', } def roc_enrichment(fpr, tpr, target_fpr): """Get ROC enrichment.""" assert fpr[0] == 0 assert fpr[-1] == 1 assert np.all(np.diff(fpr) >= 0) return np.true_divide(np.interp(target_fpr, fpr, tpr), target_fpr) def get_cv_metrics(y_true, y_pred): """Get 5-fold mean AUC.""" assert len(y_true) == len(y_pred) fold_metrics = collections.defaultdict(list) for yt, yp in zip(y_true, y_pred): assert len(yt) == len(yp) fold_metrics['auc'].append(metrics.roc_auc_score(yt, yp)) fpr, tpr, _ = metrics.roc_curve(yt, yp) for x in [0.005, 0.01, 0.02, 0.05, 0.1, 0.2]: fold_metrics['e-%g' % x].append(roc_enrichment(fpr, tpr, x)) return fold_metrics def 
add_rows(features, scores, rows, dataset, index=None): """Record per-fold and averaged cross-validation results.""" for fold in range(len(scores['auc'])): row = {'dataset': dataset, 'features': features, 'fold': fold} if index is not None: row['index'] = index for key, values in scores.iteritems(): row[key] = values[fold] rows.append(row) # Averages row = {'dataset': dataset, 'features': features, 'fold': 'all'} if index is not None: row['index'] = index for key, values in scores.iteritems(): row[key] = np.mean(values) rows.append(row) def load_output_and_calculate_metrics(model, subset): """Calculate metrics using saved model output. Args: model: String model type (e.g. logistic). subset: String query subset (e.g. omega1). Returns: DataFrame containing calculated metrics for each model/subset, including per-fold and average values for each reference molecule. """ with open(FLAGS.dataset_file) as f: datasets = [line.strip() for line in f] rows = [] for dataset in datasets: ref_idx = 0 while True: # Cycle through reference molecules. 
ref_idx_exists = get_ref_rows(model, subset, dataset, ref_idx, rows) if not FLAGS.cycle or not ref_idx_exists: break ref_idx += 1 logging.info('%s\t%d', dataset, ref_idx) return pd.DataFrame(rows) def get_ref_rows(model, subset, dataset, ref_idx, rows): logging.debug('ref_idx %d', ref_idx) for features in FEATURES_MAP.keys(): logging.debug('Features: %s', features) fold_y_true = [] fold_y_pred = [] for fold_idx in range(FLAGS.num_folds): filename = get_output_filename(dataset, model, subset, features, fold_idx, ref_idx) if not os.path.exists(filename): return False logging.debug(filename) with gzip.open(filename) as f: df = pickle.load(f) fold_y_true.append(df['y_true'].values) fold_y_pred.append(df['y_pred'].values) scores = get_cv_metrics(fold_y_true, fold_y_pred) add_rows(features, scores, rows, dataset, index=ref_idx) return True def get_output_filename(dataset, model, subset, features, fold_idx, ref_idx): if FLAGS.cycle: filename = os.path.join( '%s-%s' % (FLAGS.root, subset), dataset, 'fold-%d' % fold_idx, '%s-%s-%s-%s-%s-fold-%d-ref-%d-output.pkl.gz' % ( FLAGS.prefix, dataset, model, subset, features, fold_idx, ref_idx)) else: assert ref_idx == 0 filename = os.path.join( '%s-%s' % (FLAGS.root, subset), dataset, 'fold-%d' % fold_idx, '%s-%s-%s-%s-%s-fold-%d-output.pkl.gz' % ( FLAGS.prefix, dataset, model, subset, features, fold_idx)) return filename def load_data(model, subset): data = [] with open(FLAGS.dataset_file) as f: for line in f: dataset = line.strip() filename = os.path.join(FLAGS.root, '%s-%s-%s-%s.pkl.gz' % ( FLAGS.prefix, dataset, model, subset)) assert os.path.exists(filename) logging.info(filename) with gzip.open(filename) as g: df = pickle.load(g) data.append(df) return pd.concat(data) def confidence_interval(delta, metric): """Calculate a two-sided 95% confidence interval for differences.""" # Wilson score interval for sign test. num_successes = np.count_nonzero(delta > 0) num_trials = np.count_nonzero(delta != 0) # Exclude zero differences. 
lower, upper = proportion.proportion_confint( num_successes, num_trials, alpha=0.05, method='wilson') median_delta = delta.median() if metric == 'auc': median = r'%.3f' % median_delta ci = r'(%.2f, %.2f)' % (lower, upper) else: median = r'%.0f' % median_delta ci = r'(%.2f, %.2f)' % (lower, upper) if lower < 0.5 and upper < 0.5: median = r'\bfseries \color{red} ' + median ci = r'\bfseries \color{red} ' + ci elif lower > 0.5 and upper > 0.5: median = r'\bfseries ' + median ci = r'\bfseries ' + ci return median, ci def data_table(data, subsets, models, kind=None, tversky=False): """Get medians and compare everything to ROCS. Args: data: DataFrame containing model performance. subsets: List of query subsets. models: List of models to include in the table. kind: List of metrics to report. Defaults to ['auc']. tversky: Boolean whether to use Tversky features. If False, use Tanimoto features. """ if kind is None: kind = ['auc'] if tversky: rocs_baseline = 'rocs_tversky' features_order = ['shape_color_tversky', 'shape_color_components_tversky', 'shape_color_overlaps', 'shape_color_components_tversky_overlaps'] else: rocs_baseline = 'rocs' features_order = ['shape_color', 'shape_color_components', 'shape_color_overlaps', 'shape_color_components_overlaps'] table = [] # Get ROCS row. row = [r'\cellcolor{white} ROCS', FEATURES_MAP[rocs_baseline]] for subset in subsets: rocs_mask = ((data['features'] == rocs_baseline) & (data['subset'] == subset) & (data['model'] == models[0])) rocs_df = data[rocs_mask] logging.info('Confidence interval N = %d', len(rocs_df)) logging.info('Number of datasets = %d', len(pd.unique(rocs_df['dataset']))) for metric in kind: if metric == 'auc': number = '%.3f' else: number = '%.0f' row.extend([number % rocs_df[metric].median(), '', '']) table.append(' & '.join(row)) # Get model rows. 
for model in models: for features in features_order: if features == features_order[-1]: row = [r'\multirow{-%d}{*}{\cellcolor{white} %s}' % ( len(features_order), MODEL_MAP[model])] else: row = [r'\cellcolor{white}'] row.append(FEATURES_MAP[features]) for subset in subsets: mask = ((data['features'] == features) & (data['subset'] == subset) & (data['model'] == model)) df = data[mask] rocs_mask = ((data['features'] == rocs_baseline) & (data['subset'] == subset) & (data['model'] == model)) rocs_df = data[rocs_mask] for metric in kind: if metric == 'auc': number = '%.3f' else: number = '%.0f' row.append(number % df[metric].median()) if features == rocs_baseline: row.append('') row.append('') else: assert np.array_equal(df['dataset'].values, rocs_df['dataset'].values) if 'index' in df.columns: assert np.array_equal(df['index'].values, rocs_df['index'].values) delta = df.copy() delta[metric] -= rocs_df[metric].values row.extend(confidence_interval(delta[metric], metric)) table.append(' & '.join(row)) print ' \\\\\n'.join(table) def main(): if FLAGS.prefix == 'muv': subsets = ['omega1'] assert FLAGS.cycle elif FLAGS.prefix == 'dude': subsets = ['xtal', 'omega1'] elif FLAGS.prefix == 'chembl': subsets = ['omega1'] assert FLAGS.cycle else: raise ValueError(FLAGS.prefix) if FLAGS.subset is not None: subsets = [FLAGS.subset] # Load data from output or previously processed. models = ['logistic', 'random_forest', 'svm'] if FLAGS.reload is not None: logging.info('Loading processed data from %s', FLAGS.reload) data = pd.read_pickle(FLAGS.reload) else: data = [] for model in models: for subset in subsets: logging.info('%s\t%s', model, subset) df = load_output_and_calculate_metrics(model, subset) df['model'] = model df['subset'] = subset data.append(df) data = pd.concat(data) # Save processed data. 
filename = '%s-processed.pkl.gz' % FLAGS.prefix logging.info('Saving processed data to %s', filename) with gzip.open(filename, 'wb') as f: pickle.dump(data, f, pickle.HIGHEST_PROTOCOL) # Only keep 5-fold mean information. mask = data['fold'] == 'all' data = data[mask] # AUC tables. # Combine subsets into a single table here. logging.info('AUC table') data_table(data, subsets, models, kind=['auc'], tversky=FLAGS.tversky) # Enrichment tables. # One per FPR. for metric in ['e-0.005', 'e-0.01', 'e-0.02', 'e-0.05']: logging.info('Metric: %s', metric) logging.info('Enrichment table') data_table(data, subsets, models, kind=[metric], tversky=FLAGS.tversky) if __name__ == '__main__': flags.MarkFlagAsRequired('root') flags.MarkFlagAsRequired('dataset_file') flags.MarkFlagAsRequired('prefix') FLAGS(sys.argv) main()
3,768
0
92
c49431bc20102646a020c6ae047aba4f99d141c1
992
py
Python
cgi-bin/reset_svg.py
errollw/smartboards
c511d4aff8a907e9a07065f7dee1b07f34d8fecd
[ "MIT" ]
4
2018-06-14T12:19:37.000Z
2022-02-08T18:36:37.000Z
cgi-bin/reset_svg.py
errollw/smartboards
c511d4aff8a907e9a07065f7dee1b07f34d8fecd
[ "MIT" ]
null
null
null
cgi-bin/reset_svg.py
errollw/smartboards
c511d4aff8a907e9a07065f7dee1b07f34d8fecd
[ "MIT" ]
2
2017-02-16T14:21:02.000Z
2020-05-16T08:21:43.000Z
#!/usr/bin/env python import cgi import os from utils import simple_success_response_JSON from shutil import copyfile args = cgi.FieldStorage() u_id = args['u_id'].value ### Check if SVG exists for each user. If it doesn't, create one ### ------------------------------------------------------------ svg_path = os.path.join('..', 'content') # Path to user SVG files os.chdir(svg_path) svg_base = 'svg_base.svg' # Name of file with basic wb SVG properties user_svg_file = u_id + '.svg' copyfile(svg_base, user_svg_file) # Copy base SVG file into the user's SVG ### Update that room's 'last-mod' field in its config file ### ------------------------------------------------------------- config_filename = r_id + '.xml' config_path = os.path.join('..', 'config', 'room', config_filename) tree = ET.parse(config_path) tree.find('last-mod').text = str( int(time.time()) * 1000 ) tree.write(config_path, encoding="utf-8", xml_declaration=True) simple_success_response_JSON();
32
76
0.623992
#!/usr/bin/env python import cgi import os from utils import simple_success_response_JSON from shutil import copyfile args = cgi.FieldStorage() u_id = args['u_id'].value ### Check if SVG exists for each user. If it doesn't, create one ### ------------------------------------------------------------ svg_path = os.path.join('..', 'content') # Path to user SVG files os.chdir(svg_path) svg_base = 'svg_base.svg' # Name of file with basic wb SVG properties user_svg_file = u_id + '.svg' copyfile(svg_base, user_svg_file) # Copy base SVG file into the user's SVG ### Update that room's 'last-mod' field in its config file ### ------------------------------------------------------------- config_filename = r_id + '.xml' config_path = os.path.join('..', 'config', 'room', config_filename) tree = ET.parse(config_path) tree.find('last-mod').text = str( int(time.time()) * 1000 ) tree.write(config_path, encoding="utf-8", xml_declaration=True) simple_success_response_JSON();
0
0
0
3cf445961c7fbec62141706a180b03f1a5a6c68c
1,671
py
Python
bioprocs/scripts/tsv/pTsvColFilter.py
pwwang/biopipen
d53b78aa192fd56a5da457463b099b2aa833b284
[ "MIT" ]
2
2021-09-10T00:17:52.000Z
2021-10-10T09:53:09.000Z
bioprocs/scripts/tsv/pTsvColFilter.py
pwwang/biopipen
d53b78aa192fd56a5da457463b099b2aa833b284
[ "MIT" ]
1
2021-12-02T07:54:09.000Z
2021-12-02T07:54:09.000Z
bioprocs/scripts/tsv/pTsvColFilter.py
pwwang/biopipen
d53b78aa192fd56a5da457463b099b2aa833b284
[ "MIT" ]
2
2021-09-10T00:17:54.000Z
2021-10-10T09:56:40.000Z
from os import path from collections import OrderedDict from diot import Diot from pyppl.utils import alwaysList from bioprocs.utils.tsvio2 import TsvReader, TsvWriter infile = {{i.infile | quote}} colfile = {{i.colfile | quote}} outfile = {{o.outfile | quote}} inopts = {{args.inopts | repr}} cols = {{args.cols | repr}} keep = {{args.keep | repr}} from_file = False if path.isfile(colfile): cols = TsvReader(colfile, cnames = False).dump(0) from_file = True elif colfile: cols = alwaysList(colfile) elif path.isfile(str(cols)): cols = TsvReader(cols, cnames = False).dump(0) from_file = True elif cols: cols = alwaysList(cols) else: raise ValueError('Columns not provided.') if not from_file and not isinstance(cols[0], int) and cols[0].isdigit(): cols = [int(c) for c in cols] reader = TsvReader(infile, **inopts) writer = TsvWriter(outfile, delimit = inopts.get('delimit', "\t")) if reader.cnames and not isinstance(cols[0], int): cols = [reader.cnames.index(c) for c in cols if c in reader.cnames] elif not reader.cnames and not isinstance(cols[0], int): raise ValueError("Input file doesn't have column names") elif min(cols) < -len(reader.cnames) or (reader.cnames and max(cols) >= len(reader.cnames)): raise IndexError("Provided columns beyond input file range.") if reader.cnames: ncol = len(reader.cnames) else: ncol = len(reader.next()) reader.rewind() cols = [ncol + c if c < 0 else c for c in cols] if not keep: cols = [c for c in range(ncol) if c not in cols] if reader.cnames: writer.cnames = [reader.cnames[c] for c in cols] writer.writeHead() for r in reader: rec = [r[c] for c in cols] writer.write(rec) writer.close()
29.315789
92
0.707361
from os import path from collections import OrderedDict from diot import Diot from pyppl.utils import alwaysList from bioprocs.utils.tsvio2 import TsvReader, TsvWriter infile = {{i.infile | quote}} colfile = {{i.colfile | quote}} outfile = {{o.outfile | quote}} inopts = {{args.inopts | repr}} cols = {{args.cols | repr}} keep = {{args.keep | repr}} from_file = False if path.isfile(colfile): cols = TsvReader(colfile, cnames = False).dump(0) from_file = True elif colfile: cols = alwaysList(colfile) elif path.isfile(str(cols)): cols = TsvReader(cols, cnames = False).dump(0) from_file = True elif cols: cols = alwaysList(cols) else: raise ValueError('Columns not provided.') if not from_file and not isinstance(cols[0], int) and cols[0].isdigit(): cols = [int(c) for c in cols] reader = TsvReader(infile, **inopts) writer = TsvWriter(outfile, delimit = inopts.get('delimit', "\t")) if reader.cnames and not isinstance(cols[0], int): cols = [reader.cnames.index(c) for c in cols if c in reader.cnames] elif not reader.cnames and not isinstance(cols[0], int): raise ValueError("Input file doesn't have column names") elif min(cols) < -len(reader.cnames) or (reader.cnames and max(cols) >= len(reader.cnames)): raise IndexError("Provided columns beyond input file range.") if reader.cnames: ncol = len(reader.cnames) else: ncol = len(reader.next()) reader.rewind() cols = [ncol + c if c < 0 else c for c in cols] if not keep: cols = [c for c in range(ncol) if c not in cols] if reader.cnames: writer.cnames = [reader.cnames[c] for c in cols] writer.writeHead() for r in reader: rec = [r[c] for c in cols] writer.write(rec) writer.close()
0
0
0
078d96e72eb252112b59995c7c4436d0430ccb52
7,510
py
Python
quantfig.py
zmoxq/quantfig
0bd9df3a936569c38b4d7cffe47ff7eb4de2544a
[ "MIT" ]
null
null
null
quantfig.py
zmoxq/quantfig
0bd9df3a936569c38b4d7cffe47ff7eb4de2544a
[ "MIT" ]
null
null
null
quantfig.py
zmoxq/quantfig
0bd9df3a936569c38b4d7cffe47ff7eb4de2544a
[ "MIT" ]
null
null
null
from app.signal_calc import SignalCalc import plotly.graph_objects as go from plotly.subplots import make_subplots import math import numpy as np import pandas as pd def line_fig(date, y_axis, y_dict): """ :param date: xaxis, :param y_label: yaxis label e.g. 'y2','y3' :param y_dict: line data :return: list """ y_values = [list(k) for k in y_dict.values()] y_labels = [l for l in y_dict.keys()] y_data = np.array(y_values) fig_res = [] for i in range(len(y_values)): fig = go.Scatter( x=date, y=y_data[i], mode='lines', marker=dict(opacity=0.8), name=y_labels[i], connectgaps=True, yaxis=y_axis, ) fig_res.append(fig) return fig_res
27.509158
102
0.488815
from app.signal_calc import SignalCalc import plotly.graph_objects as go from plotly.subplots import make_subplots import math import numpy as np import pandas as pd def line_fig(date, y_axis, y_dict): """ :param date: xaxis, :param y_label: yaxis label e.g. 'y2','y3' :param y_dict: line data :return: list """ y_values = [list(k) for k in y_dict.values()] y_labels = [l for l in y_dict.keys()] y_data = np.array(y_values) fig_res = [] for i in range(len(y_values)): fig = go.Scatter( x=date, y=y_data[i], mode='lines', marker=dict(opacity=0.8), name=y_labels[i], connectgaps=True, yaxis=y_axis, ) fig_res.append(fig) return fig_res class QuantFig: def __init__(self, df: pd.DataFrame, start_date: str = '', end_date: str = ''): self._increasing_color = '#FF5C5C' self._decreasing_color = '#46A346' df['color_label'] = self._decreasing_color df.loc[df.close < df.open, 'color_label'] = self._decreasing_color df.loc[df.close >= df.open, 'color_label'] = self._increasing_color df = df.sort_values(by='date') # order old -> new self._df = df self._start_date = start_date self._end_date = end_date self._date = df['date'] self._open = df['open'] self._high = df['high'] self._low = df['low'] self._close = df['close'] self._volume = df['volume'] self._color_label = df['color_label'] self._data = [self.add_bars()] self._label = ['Price'] self._ta = SignalCalc(self._df) self._shapes = [] def show(self): fig = make_subplots(shared_xaxes=True, vertical_spacing=0.02) data = self._data layout = go.Layout( autosize=True, height=800, # width=1280, yaxis=dict( domain=[0.41, 1], title=self._label[0], ), xaxis_rangeslider_visible=False, showlegend=False, ) fig = go.Figure(data=data, layout=layout) t = 40 margin = 2 ysize = math.floor(t / (len(self._label) - 1)) for i in range(1, len(self._label)): yax_name = 'yaxis' + str(i + 1) d_upbound = t - margin t -= ysize d_lowbound = t if t >= ysize else 0 fig.update_layout( **{yax_name: dict( domain=[d_lowbound / 100, d_upbound / 100], 
title=self._label[i], )} ) fig.update_xaxes(showline=True, linewidth=0, linecolor='black', mirror=True) fig.update_yaxes(showline=True, linewidth=1, linecolor='black', mirror=True, automargin=True) fig.update_layout( hovermode="x unified", template="gridon", margin=dict( l=10, r=30, b=10, t=10, pad=4 ), paper_bgcolor="#f3f5f0", ) for shape in self._shapes: fig.add_shape(shape) return fig def add_annot(self, annot): buy = None for _, data in annot[['date', 'text']].iterrows(): date, text = data if text == 'Buy' or text == 'Sell': if text == 'Buy': buy = date self._shapes.append(dict( type='line', x0=date, x1=date, y0=0, y1=1, xref='x', yref='paper', line_width=1, line=dict(color="Orange") )) else: if buy is not None: # self._shapes.append(dict( # type="rect", x0=buy, x1=date, y0=0, y1=1, xref='x', yref='paper', # line_width=1, line=dict(width=0), fillcolor="LightSkyBlue", opacity=0.5) # ) buy = None self._shapes.append(dict( type='line', x0=date, x1=date, y0=0, y1=1, xref='x', yref='paper', line_width=1, line=dict(color='RoyalBlue') )) def reset_annot(self): self._shapes.clear() def add_bars(self): bar = go.Candlestick( x=self._date, open=self._open, high=self._high, low=self._low, close=self._close, increasing={'line': {'color': self._increasing_color}}, decreasing={'line': {'color': self._decreasing_color}}, yaxis='y', name="" ) return bar def add_volume(self): len1 = len(self._label) ylabel = 'y' + str(len1 + 1) label = ['Volume'] volume = go.Bar( x=self._date, y=self._volume, name="Volume", marker={'color': self._color_label}, yaxis=ylabel ) self._data = self._data + [volume] self._label = self._label + label return def add_ema(self, tp1=3, tp2=10): y_axis = 'y' y_dict = self._ta.ema(tp1=tp1, tp2=tp2) y_dict.pop('signal0') fig_res = line_fig(self._date, y_axis, y_dict) self._data = self._data + fig_res return def add_dualma(self, tp1=20, tp2=20): y_axis = 'y' y_dict = self._ta.dualma(tp1=tp1, tp2=tp2) y_dict.pop('signal0') fig_res = line_fig(self._date, y_axis, 
y_dict) self._data = self._data + fig_res return def add_macd(self): y_axis = 'y' + str(len(self._label) + 1) label = ['MACD'] y_dict = self._ta.macd() y_dict.pop('signal0') # fig_res = line_fig(self._date, y_axis, y_dict) y_values = [list(k) for k in y_dict.values()] y_labels = [l for l in y_dict.keys()] y_data = np.array(y_values) fig_res = [] for i in range(0, len(y_values)): if i == len(y_values)-1: fig = go.Bar( x=self._date, y=y_data[i], name=y_labels[i], marker={'color': "#bfbfbf"}, yaxis=y_axis, ) else: fig = go.Scatter( x=self._date, y=y_data[i], mode='lines', name=y_labels[i], connectgaps=True, yaxis=y_axis, ) fig_res.append(fig) self._data = self._data + fig_res self._label = self._label + label return def add_kdj(self): y_axis = 'y' + str(len(self._label) + 1) label = ['KDJ'] y_dict = self._ta.kdj() y_dict.pop('signal0') fig_res = line_fig(self._date, y_axis, y_dict) self._data = self._data + fig_res self._label = self._label + label return def add_rsi(self): y_axis = 'y' + str(len(self._label) + 1) label = ['RSI'] y_dict = self._ta.rsi() y_dict.pop('signal0') fig_res = line_fig(self._date, y_axis, y_dict) self._data = self._data + fig_res self._label = self._label + label return def addLines(self): return
6,373
-6
347
0cc0c9f3e41e7549d1812b602079b749456b4e01
2,906
py
Python
examples/bank-marketing/splitnn_dataloader.py
lemonviv/PyVertical
42d4e3e836b03a80929d7f7a08e06bfdbe80896a
[ "Apache-2.0" ]
null
null
null
examples/bank-marketing/splitnn_dataloader.py
lemonviv/PyVertical
42d4e3e836b03a80929d7f7a08e06bfdbe80896a
[ "Apache-2.0" ]
null
null
null
examples/bank-marketing/splitnn_dataloader.py
lemonviv/PyVertical
42d4e3e836b03a80929d7f7a08e06bfdbe80896a
[ "Apache-2.0" ]
null
null
null
class SplitDataLoader: """ This class distributes each sample among different workers. It returns a dictionary with key as data party's id and value as a pointer to the list of data batches at party's location. example: >>> from splitnn_dataloader import SplitDataLoader >>> splitnn_trainloader = SplitDataLoader(data_parties=data_parties, data_loader=trainloader) >>> splitnn_trainloader.data_pointer[1]['active_party'].shape, obj.data_pointer[1]['passive_party'].shape (torch.Size([64, 10]), torch.Size([64, 10])) """ def __init__(self, data_parties, data_loader): """ Args: data_parties: tuple of data parties data_loader: torch.utils.data.DataLoader """ self.data_parties = data_parties self.data_loader = data_loader self.no_of_parties = len(data_parties) self.data_pointer = [] self.labels = [] """ self.data_pointer: list of dictionaries where (key, value) = (id of the data holder, a pointer to the list of batches at that data holder). example: self.data_pointer = [ {"active_party": pointer_to_active_batch1, "passive_party": pointer_to_passive_batch1}, {"active_party": pointer_to_active_batch2, "passive_party": pointer_to_passive_batch2}, ... ] """ # iterate over each batch of dataloader for split sample and send to VirtualWorker for samples, labels in self.data_loader: curr_data_dict = {} # calculate the feature number for each party according to the no. 
of workers feature_num_per_party = samples.shape[-1] // self.no_of_parties self.labels.append(labels) # iterate over each worker for distributing current batch of the self.data_loader for i, party in enumerate(self.data_parties[:-1]): # split the samples and send it to VirtualWorker (which is supposed to be an party or client) sample_part_ptr = samples[:, feature_num_per_party * i:feature_num_per_party * (i + 1)].send( party ) curr_data_dict[party.id] = sample_part_ptr # repeat same for the remaining part of the samples last_party = self.data_parties[-1] last_part_ptr = samples[:, feature_num_per_party * (i + 1):].send(last_party) curr_data_dict[last_party.id] = last_part_ptr self.data_pointer.append(curr_data_dict)
44.030303
123
0.60702
class SplitDataLoader: """ This class distributes each sample among different workers. It returns a dictionary with key as data party's id and value as a pointer to the list of data batches at party's location. example: >>> from splitnn_dataloader import SplitDataLoader >>> splitnn_trainloader = SplitDataLoader(data_parties=data_parties, data_loader=trainloader) >>> splitnn_trainloader.data_pointer[1]['active_party'].shape, obj.data_pointer[1]['passive_party'].shape (torch.Size([64, 10]), torch.Size([64, 10])) """ def __init__(self, data_parties, data_loader): """ Args: data_parties: tuple of data parties data_loader: torch.utils.data.DataLoader """ self.data_parties = data_parties self.data_loader = data_loader self.no_of_parties = len(data_parties) self.data_pointer = [] self.labels = [] """ self.data_pointer: list of dictionaries where (key, value) = (id of the data holder, a pointer to the list of batches at that data holder). example: self.data_pointer = [ {"active_party": pointer_to_active_batch1, "passive_party": pointer_to_passive_batch1}, {"active_party": pointer_to_active_batch2, "passive_party": pointer_to_passive_batch2}, ... ] """ # iterate over each batch of dataloader for split sample and send to VirtualWorker for samples, labels in self.data_loader: curr_data_dict = {} # calculate the feature number for each party according to the no. 
of workers feature_num_per_party = samples.shape[-1] // self.no_of_parties self.labels.append(labels) # iterate over each worker for distributing current batch of the self.data_loader for i, party in enumerate(self.data_parties[:-1]): # split the samples and send it to VirtualWorker (which is supposed to be an party or client) sample_part_ptr = samples[:, feature_num_per_party * i:feature_num_per_party * (i + 1)].send( party ) curr_data_dict[party.id] = sample_part_ptr # repeat same for the remaining part of the samples last_party = self.data_parties[-1] last_part_ptr = samples[:, feature_num_per_party * (i + 1):].send(last_party) curr_data_dict[last_party.id] = last_part_ptr self.data_pointer.append(curr_data_dict) def __iter__(self): for data_ptr, label in zip(self.data_pointer[:-1], self.labels[:-1]): yield data_ptr, label def __len__(self): return len(self.data_loader) - 1
148
0
78
5a882d3d8363079955e3ebfe7587ce3969adff5a
13,368
py
Python
app.py
aarunishsinha/Course-Management-System
80364ffd1c7f6110da9687d9c24f146ab45180f9
[ "MIT" ]
2
2022-02-14T04:31:06.000Z
2022-03-01T18:26:07.000Z
app.py
aarunishsinha/Course_Management_System
80364ffd1c7f6110da9687d9c24f146ab45180f9
[ "MIT" ]
null
null
null
app.py
aarunishsinha/Course_Management_System
80364ffd1c7f6110da9687d9c24f146ab45180f9
[ "MIT" ]
1
2022-02-14T04:31:08.000Z
2022-02-14T04:31:08.000Z
from flask import Flask, render_template, send_from_directory, request, redirect, url_for import os import psycopg2 # import students from students import studentRoutes from Admin import adminRoutes # import nltk # import numpy as np # import pandas as pd # conn = psycopg2.connect('dbname=postgres') conn = psycopg2.connect('dbname=group_13 user=group_13 password=p0XvR8Ch4BAGb host=10.17.50.232 port=5432') cur = conn.cursor() app = Flask(__name__) app.register_blueprint(studentRoutes) app.register_blueprint(adminRoutes) # app['debug'] = True # UPLOAD_FOLDER ='./uploads/' # app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER # CONFIG_FOLDER ='./config/' # app.config['CONFIG_FOLDER'] = CONFIG_FOLDER # PDF_FOLDER ='./pdfs/' # app.config['PDF_FOLDER'] = PDF_FOLDER # app.config['TMP'] = './tmp/' currentStudentLoginId = "" # the student who is currently logged in currentProfLoginId = "" # # the instructor who is currently logged in COID ="" SN = "" schedule = [] @app.route("/") #main webpage rendering @app.route("/instructorScreen", methods = ["POST"]) @app.route("/instructorScreen/AddCourse", methods = ["POST"]) @app.route("/instructorScreen/Requests", methods = ["POST"]) @app.route("/instructorScreen/ProcessRequests", methods = ["POST"]) @app.route("/instructorScreen/Schedule", methods = ["POST"]) @app.route("/instructorScreen/enrollment", methods = ["POST"]) @app.route("/instructorScreen/addGradeDistribution", methods = ["POST"]) @app.route("/instructorScreen/getGradeDistribution", methods = ["POST"]) @app.route("/instructorScreen/room", methods = ["POST"]) @app.route("/instructorScreen/searchCourse", methods = ["POST"]) if __name__ == '__main__': app.run(host='127.0.0.1', port=5013)
33.503759
242
0.596349
from flask import Flask, render_template, send_from_directory, request, redirect, url_for import os import psycopg2 # import students from students import studentRoutes from Admin import adminRoutes # import nltk # import numpy as np # import pandas as pd # conn = psycopg2.connect('dbname=postgres') conn = psycopg2.connect('dbname=group_13 user=group_13 password=p0XvR8Ch4BAGb host=10.17.50.232 port=5432') cur = conn.cursor() app = Flask(__name__) app.register_blueprint(studentRoutes) app.register_blueprint(adminRoutes) # app['debug'] = True # UPLOAD_FOLDER ='./uploads/' # app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER # CONFIG_FOLDER ='./config/' # app.config['CONFIG_FOLDER'] = CONFIG_FOLDER # PDF_FOLDER ='./pdfs/' # app.config['PDF_FOLDER'] = PDF_FOLDER # app.config['TMP'] = './tmp/' currentStudentLoginId = "" # the student who is currently logged in currentProfLoginId = "" # # the instructor who is currently logged in COID ="" SN = "" schedule = [] @app.route("/") #main webpage rendering def main(): return render_template("HomeScreen.html") #the main form @app.route("/instructorScreen", methods = ["POST"]) def inst(): global currentProfLoginId global schedule currentProfLoginId = request.form.get("ProfID") TC = "" try: query1=""" BEGIN; SELECT * from current_term; """ cur.execute(query1) TC=cur.fetchall() cur.execute("COMMIT;") TC=TC[0][0] except Exception as e: print(e) cur.execute("ROLLBACK;") # schedule = EXECUTE DATABASE QUERY HERE try: query=""" BEGIN; SELECT * from get_instructor_schedule(%s,%s); """ % (str(currentProfLoginId),str(TC)) cur.execute(query) schedule=cur.fetchall() cur.execute("COMMIT;") except Exception as e: print (e) cur.execute("ROLLBACK;") # try: # query1=""" # BEGIN; # SELECT * from current_term; # """ # cur.execute(query1) # TC=cur.fetchall() # cur.execute("COMMIT;") # TC=TC[0][0] # except Exception as e: # print(e) # # schedule = EXECUTE DATABASE QUERY HERE # try: # query=""" # BEGIN; # SELECT * from get_instructor_schedule(%s,%s); # """ % 
(str(currentProfLoginId),str(TC)) # cur.execute(query) # schedule=cur.fetchall() # cur.execute("COMMIT;") # except Exception as e: # print (e) return render_template("instructor.html",currentProfLoginId = currentProfLoginId,AddCourseMsg="", requests = [], enrollment = [],addGradeMsg = "", grades = [], room = "", facultyCode = "", schedule = schedule, searchResults=[]) @app.route("/instructorScreen/AddCourse", methods = ["POST"]) def instAC(): # See what is to be done with AddCourseMsg global currentProfLoginId global schedule CID = request.form.get("CID") TC = "" # TC = request.form.get("TC") SN = request.form.get("SN") LM = request.form.get("LM") ST = request.form.get("ST") SC = request.form.get("SC") RoomReq = request.form.get("RoomReq") try: query1=""" BEGIN; SELECT * from current_term; """ cur.execute(query1) TC=cur.fetchall() cur.execute("COMMIT;") TC=TC[0][0] except Exception as e: print(e) cur.execute("ROLLBACK;") # EXECUTE DATABASE QUERY HERE try: query=""" BEGIN; SELECT add_course_offering('%s',%s,%s,%s,%s,'%s',%s,'%s'); COMMIT; """ % (str(CID),str(TC),str(SN),str(LM),str(RoomReq),str(ST),str(currentProfLoginId),str(SC)) cur.execute(query) except Exception as e: print (e) cur.execute("ROLLBACK;") try: query=""" BEGIN; SELECT * from get_instructor_schedule(%s,%s); """ % (str(currentProfLoginId),str(TC)) cur.execute(query) schedule=cur.fetchall() cur.execute("COMMIT;") except Exception as e: print (e) cur.execute("ROLLBACK;") return render_template("instructor.html",currentProfLoginId = currentProfLoginId,AddCourseMsg="", requests = [], enrollment = [],addGradeMsg = "", grades = [], room = "", facultyCode = "", schedule = schedule, searchResults=[]) @app.route("/instructorScreen/Requests", methods = ["POST"]) def instRequests(): # See what is to be done with AddCourseMsg global currentProfLoginId global schedule global COID global SN COID = request.form.get("COID") SN = request.form.get("SN") requests = 
[(0,123),(0,1231),(0,13),(0,144),(0,123),(0,1231),(0,13),(0,144),(0,123),(0,1231),(0,13),(0,144)] # requests = EXECUTE DATABASE QUERY HERE try: query=""" BEGIN; SELECT * from get_pending_requests('%s',%s); """ % (str(COID),str(SN)) cur.execute(query) requests = cur.fetchall() cur.execute("COMMIT;") except Exception as e: print (e) cur.execute("ROLLBACK;") # print(requests) return render_template("instructor.html",currentProfLoginId = currentProfLoginId,AddCourseMsg="", requests = requests, enrollment = [],addGradeMsg = "", grades = [], room = "", facultyCode = "", schedule = schedule, searchResults=[]) @app.route("/instructorScreen/ProcessRequests", methods = ["POST"]) def instProcessRequests(): # See what is to be done with AddCourseMsg global currentProfLoginId global schedule global COID global SN studentID = request.form.get("studentID") requests = [] if request.form.get('Accept') == 'Accept': # a = 2 # dummy line try: query=""" BEGIN; SELECT process_pending_request('%s',%s,%s,%s); COMMIT; """ % (str(COID),str('true'),str(SN),str(studentID)) cur.execute(query) except Exception as e: print (e) cur.execute("ROLLBACK;") else: # a = 2 # dummy line try: query=""" BEGIN; SELECT process_pending_request('%s',%s,%s,%s); COMMIT; """ % (str(COID),str('false'),str(SN),str(studentID)) cur.execute(query) except Exception as e: print (e) cur.execute("ROLLBACK;") # requests = EXECUTE DATABASE QUERY HERE try: query=""" BEGIN; SELECT * from get_pending_requests('%s',%s); """ % (str(COID),str(SN)) cur.execute(query) requests = cur.fetchall() cur.execute("COMMIT;") except Exception as e: print (e) cur.execute("ROLLBACK;") return render_template("instructor.html",currentProfLoginId = currentProfLoginId,AddCourseMsg="", requests = requests, enrollment = [],addGradeMsg = "", grades = [], room = "", facultyCode = "", schedule = schedule, searchResults=[]) @app.route("/instructorScreen/Schedule", methods = ["POST"]) def instSchedule(): # See what is to be done with AddCourseMsg global 
currentProfLoginId global schedule TC="" # TC = request.form.get("TC") schedule = [] try: query1=""" BEGIN; SELECT * from current_term; """ cur.execute(query1) TC=cur.fetchall() cur.execute("COMMIT;") TC=TC[0][0] except Exception as e: print(e) cur.execute("ROLLBACK;") # schedule = EXECUTE DATABASE QUERY HERE try: query=""" BEGIN; SELECT * from get_instructor_schedule(%s,%s); """ % (str(currentProfLoginId),str(TC)) cur.execute(query) schedule=cur.fetchall() cur.execute("COMMIT;") except Exception as e: print (e) cur.execute("ROLLBACK;") # print(schedule); # Will make schedule.html once query is executed and exact form is known return render_template("instructor.html",currentProfLoginId = currentProfLoginId,AddCourseMsg="", requests = [], enrollment = [],addGradeMsg = "", grades = [], room = "", facultyCode = "", schedule = schedule, searchResults=[]) @app.route("/instructorScreen/enrollment", methods = ["POST"]) def instEnrollments(): # See what is to be done with AddCourseMsg global currentProfLoginId global schedule COID = request.form.get("COID") SN = request.form.get("SN") enrollment = ['Aniket','Aarunish','Jai'] # enrollment = EXECUTE DATABASE QUERY HERE try: query=""" BEGIN; SELECT * from get_student_list('%s',%s); """ % (str(COID),str(SN)) cur.execute(query) enrollment=cur.fetchall() cur.execute("COMMIT;") except Exception as e: print (e) cur.execute("ROLLBACK;") return render_template("instructor.html",currentProfLoginId = currentProfLoginId,AddCourseMsg="", requests = [], enrollment = enrollment,addGradeMsg = "", grades = [], room = "", facultyCode = "", schedule = schedule, searchResults=[]) @app.route("/instructorScreen/addGradeDistribution", methods = ["POST"]) def instAddGD(): # See what is to be done with AddCourseMsg global currentProfLoginId global schedule COID = request.form.get("COID") SN = request.form.get("SN") a = request.form.get("a") ab = request.form.get("ab") b = request.form.get("b") bc = request.form.get("bc") c = request.form.get("c") d 
= request.form.get("d") f = request.form.get("f") s = request.form.get("s") u = request.form.get("u") cr = request.form.get("cr") n = request.form.get("n") p = request.form.get("p") i = request.form.get("i") nw = request.form.get("nw") nr = request.form.get("nr") others = request.form.get("others") addGradeMsg = "Grade Distribution Updated" # addGradeMsg = EXECUTE DATABASE QUERY HERE try: query=""" BEGIN; SELECT set_grade_distribution('%s',%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s); COMMIT; """ % (str(COID),str(SN),str(a),str(ab),str(b),str(bc),str(c),str(d),str(f),str(s),str(u),str(cr),str(n),str(p),str(i),str(nw),str(nr),str(others)) cur.execute(query) except Exception as e: print (e) cur.execute("ROLLBACK;") addGradeMsg="Error occured while updating" return render_template("instructor.html",currentProfLoginId = currentProfLoginId,AddCourseMsg="", requests = [], enrollment = [],addGradeMsg = addGradeMsg, grades = [], room = "", facultyCode = "", schedule = schedule, searchResults=[]) @app.route("/instructorScreen/getGradeDistribution", methods = ["POST"]) def instGetGD(): global currentProfLoginId global schedule grades = [1,2,3,4,5,6,7,7,8,8,34,2,6,42,6634,24,523] COID = request.form.get("COID") SN = request.form.get("SN") # grades = EXECUTE DATABASE QUERY HER try: query=""" BEGIN; SELECT * from get_grade_distribution('%s',%s); """ % (str(COID),str(SN)) cur.execute(query) grades = cur.fetchall() cur.execute("COMMIT;") except Exception as e: print (e) cur.execute("ROLLBACK;") return render_template("instructor.html",currentProfLoginId = currentProfLoginId,AddCourseMsg="", requests = [], enrollment = [],addGradeMsg = "", grades = grades, room = "", facultyCode = "", schedule = schedule, searchResults=[]) @app.route("/instructorScreen/room", methods = ["POST"]) def instRoom(): global currentProfLoginId global schedule COID = request.form.get("COID") SN = request.form.get("SN") output = [('LH121','Narula101')] # output = EXECUTE DATABASE QUERY HERE try: 
query=""" BEGIN; SELECT * from get_room_instr('%s',%s); """ % (str(COID),str(SN)) cur.execute(query) output=cur.fetchall() cur.execute("COMMIT;") except Exception as e: print (e) cur.execute("ROLLBACK;") room = output[0][0] facultyCode = output[0][1] return render_template("instructor.html",currentProfLoginId = currentProfLoginId,AddCourseMsg="", requests = [], enrollment = [],addGradeMsg = "", grades = [], room = room, facultyCode = facultyCode, schedule = schedule, searchResults=[]) @app.route("/instructorScreen/searchCourse", methods = ["POST"]) def instsearchCourse(): global currentProfLoginId global schedule CName = request.form.get("CName") searchResults = [(1,"course A"), (2,"Course B"),(1,"course A"), (2,"Course B"),(1,"course A"), (2,"Course B")] # searchResults = EXECUTE DATABASE QUERY HERE try: query=""" BEGIN; SELECT * from search_course_instructor('%s'); """ % (str(CName)) cur.execute(query) searchResults=cur.fetchall() cur.execute("COMMIT;") except Exception as e: print (e) cur.execute("ROLLBACK;") return render_template("instructor.html",currentProfLoginId = currentProfLoginId,AddCourseMsg="", requests = [], enrollment = [],addGradeMsg = "", grades = [], room = "", facultyCode = "", schedule = schedule, searchResults=searchResults) if __name__ == '__main__': app.run(host='127.0.0.1', port=5013)
11,390
0
242
8e54a2ad2d5ce89659043e4f426413cb107524f6
1,447
py
Python
setup.py
vznncv/vznncv-miniterm
a5999744435350304e26c4b4c97f7a999b2e5abd
[ "MIT" ]
1
2022-02-17T20:23:12.000Z
2022-02-17T20:23:12.000Z
setup.py
vznncv/vznncv-miniterm
a5999744435350304e26c4b4c97f7a999b2e5abd
[ "MIT" ]
null
null
null
setup.py
vznncv/vznncv-miniterm
a5999744435350304e26c4b4c97f7a999b2e5abd
[ "MIT" ]
null
null
null
#!/usr/bin/env python # -*- coding: utf-8 -*- import re from setuptools import setup, find_namespace_packages project_name = 'vznncv-miniterm' with open('README.md') as readme_file: readme = readme_file.read() readme = re.sub(r'!\[[^\[\]]*\]\S*', '', readme) _locals = {} with open('src/' + project_name.replace('-', '/') + '/_version.py') as fp: exec(fp.read(), None, _locals) __version__ = _locals['__version__'] with open('requirements_dev.txt') as fp: test_requirements = fp.read() setup( author="Konstantin Kochin", classifiers=[ 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Topic :: Terminals :: Serial', ], description="Line buffered version of pyserial miniterm tool", long_description=readme, long_description_content_type="text/markdown", license='MIT', include_package_data=True, name=project_name, packages=find_namespace_packages(where='src'), package_dir={'': 'src'}, entry_points={ 'console_scripts': [ 'vznncv-miniterm = vznncv.miniterm._cli:main', ] }, install_requires=[ 'pyserial>=3.4,<4', 'pyserial-asyncio>=0.4,<1', 'prompt_toolkit>=3,<4', ], tests_require=test_requirements, version=__version__, python_requires='~=3.6', )
27.826923
74
0.624741
#!/usr/bin/env python # -*- coding: utf-8 -*- import re from setuptools import setup, find_namespace_packages project_name = 'vznncv-miniterm' with open('README.md') as readme_file: readme = readme_file.read() readme = re.sub(r'!\[[^\[\]]*\]\S*', '', readme) _locals = {} with open('src/' + project_name.replace('-', '/') + '/_version.py') as fp: exec(fp.read(), None, _locals) __version__ = _locals['__version__'] with open('requirements_dev.txt') as fp: test_requirements = fp.read() setup( author="Konstantin Kochin", classifiers=[ 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Topic :: Terminals :: Serial', ], description="Line buffered version of pyserial miniterm tool", long_description=readme, long_description_content_type="text/markdown", license='MIT', include_package_data=True, name=project_name, packages=find_namespace_packages(where='src'), package_dir={'': 'src'}, entry_points={ 'console_scripts': [ 'vznncv-miniterm = vznncv.miniterm._cli:main', ] }, install_requires=[ 'pyserial>=3.4,<4', 'pyserial-asyncio>=0.4,<1', 'prompt_toolkit>=3,<4', ], tests_require=test_requirements, version=__version__, python_requires='~=3.6', )
0
0
0
b7625fb779461162066385c07a1a8355c1818475
4,187
py
Python
tinkycare.py
m-clare/tinky-care
a2c7831d9f6908949265d51708d5d27fe89901ff
[ "MIT" ]
5
2021-04-08T23:55:13.000Z
2021-11-23T19:30:47.000Z
tinkycare.py
m-clare/tinky-care
a2c7831d9f6908949265d51708d5d27fe89901ff
[ "MIT" ]
null
null
null
tinkycare.py
m-clare/tinky-care
a2c7831d9f6908949265d51708d5d27fe89901ff
[ "MIT" ]
null
null
null
import os import json from PIL import Image, ImageDraw, ImageFont from inky.inky_uc8159 import Inky from datetime import datetime as dt from bots.orgbot import get_org_image from bots.twitterbot import get_tweet_img from bots.twitterbot import get_recent_care_tweet from bots.pomodorobot import get_pomodoro_time from bots.pomodorobot import get_pomodoro from bots.calendarbot import get_next_event from bots.calendarbot import get_event_img # Inky display information inky_display = Inky() # Global because only one inky to pass around... DESATURATED_PALETTE = ( 0, 0, 0, 255, 255, 255, 0, 255, 0, 0, 0, 255, 255, 0, 0, 255, 255, 0, 255, 140, 0, 255, 255, 255, ) + (0, 0, 0) * 248 SATURATED_PALETTE = ( 57, 48, 57, 255, 255, 255, 58, 91, 70, 61, 59, 94, 156, 72, 75, 208, 190, 71, 177, 106, 73, 255, 255, 255, ) + (0, 0, 0) * 248 MID_PALETTE = tuple(sum(x) // 2 for x in zip(DESATURATED_PALETTE, SATURATED_PALETTE)) DEFAULT_DATA = { "tomato": 0, "cycle": "still working", "start_time": int(dt.utcnow().timestamp()) % 86400, "pomodoro_mode": True, "reset": False, "tweet": "", "event_counter": 0, "event": { "name": 'No events scheduled.', "location": None, "start": None, "end": None, "active": False, }, } # initialize data data = DEFAULT_DATA if __name__ == "__main__": run_tinky_care()
23.522472
85
0.583711
import os import json from PIL import Image, ImageDraw, ImageFont from inky.inky_uc8159 import Inky from datetime import datetime as dt from bots.orgbot import get_org_image from bots.twitterbot import get_tweet_img from bots.twitterbot import get_recent_care_tweet from bots.pomodorobot import get_pomodoro_time from bots.pomodorobot import get_pomodoro from bots.calendarbot import get_next_event from bots.calendarbot import get_event_img # Inky display information inky_display = Inky() # Global because only one inky to pass around... DESATURATED_PALETTE = ( 0, 0, 0, 255, 255, 255, 0, 255, 0, 0, 0, 255, 255, 0, 0, 255, 255, 0, 255, 140, 0, 255, 255, 255, ) + (0, 0, 0) * 248 SATURATED_PALETTE = ( 57, 48, 57, 255, 255, 255, 58, 91, 70, 61, 59, 94, 156, 72, 75, 208, 190, 71, 177, 106, 73, 255, 255, 255, ) + (0, 0, 0) * 248 MID_PALETTE = tuple(sum(x) // 2 for x in zip(DESATURATED_PALETTE, SATURATED_PALETTE)) DEFAULT_DATA = { "tomato": 0, "cycle": "still working", "start_time": int(dt.utcnow().timestamp()) % 86400, "pomodoro_mode": True, "reset": False, "tweet": "", "event_counter": 0, "event": { "name": 'No events scheduled.', "location": None, "start": None, "end": None, "active": False, }, } def save_status(data, PATH): with open(PATH + "/assets/status.json", "w") as fh: json.dump(data, fh) def rgb_to_inky(canvas): pal_img = Image.new("P", (1, 1)) pal_img.putpalette(SATURATED_PALETTE) img = canvas.convert("RGB").quantize(palette=pal_img) inky_display.rotation = 180 inky_display.set_image(img) inky_display.show() def make_canvas(data, tweet_only, PATH): canvas = Image.new( "RGB", (inky_display.WIDTH, inky_display.HEIGHT), (255, 255, 255) ) org = Image.open(PATH + "/assets/org.png") canvas.paste(org, (0, 0)) if tweet_only is True: tweet = get_tweet_img(376, 344, toFile=False) event = get_event_img(376, 104, data["event"], toFile=False) else: pom = get_pomodoro(data["tomato"], data["cycle"]) tweet = get_tweet_img(376, 252, toFile=False) event = get_event_img(376, 104, 
data["event"], toFile=False) canvas.paste(pom, (org.width, tweet.height + event.height)) canvas.paste(tweet, (org.width, event.height)) canvas.paste(event, (org.width, 0)) return canvas def check_display(data, PATH): num_tomato, status_text = get_pomodoro_time(data["start_time"]) tweet = get_recent_care_tweet() # fetch calendar data only every 10 min if data["event_counter"] % 10 == 0: event = get_next_event() else: event = data["event"] pomodoro = data["pomodoro_mode"] reset = data["reset"] if pomodoro is False: # check if twitter or event has changed, otherwise don't update if ( tweet != data["tweet"] or reset is True or event["active"] != data["event"]["active"] or event["name"] != data["event"]["name"] ): data["tweet"] = tweet data["reset"] = False data["event"] = event canvas = make_canvas(data, True, PATH) rgb_to_inky(canvas) return elif ( reset is True or (status_text != data["cycle"]) or (num_tomato != data["tomato"]) or (tweet != data["tweet"]) or (event["active"] != data["event"]["active"]) or (event["name"] != data["event"]["name"]) ): data["tomato"] = num_tomato data["cycle"] = status_text data["tweet"] = tweet data["reset"] = False data["event"] = event canvas = make_canvas(data, False, PATH) rgb_to_inky(canvas) return # initialize data data = DEFAULT_DATA def run_tinky_care(): while True: data["event_counter"] += 1 check_display(data) time.sleep(60) if __name__ == "__main__": run_tinky_care()
2,496
0
115
ceedc8ffab527acf38eb4912b4ec135b78d4964d
6,473
py
Python
loss/loss_functions.py
akio-kobayashi/acoustic_scene
b4d0678474d1b0dde4478a1710aa489e2ffad875
[ "Apache-2.0" ]
null
null
null
loss/loss_functions.py
akio-kobayashi/acoustic_scene
b4d0678474d1b0dde4478a1710aa489e2ffad875
[ "Apache-2.0" ]
null
null
null
loss/loss_functions.py
akio-kobayashi/acoustic_scene
b4d0678474d1b0dde4478a1710aa489e2ffad875
[ "Apache-2.0" ]
null
null
null
import numpy as np import tensorflow as tf import keras.backend as K ''' Cauchy-Schwarz distance between 2 samples The distance measures 'distinguishability' between samples. So, if 2 samples belong to same class, the distance would have a small value. ''' ''' Compute Cauchy-Schwarz distance loss among samples in the batch based on true-label information That is, if two samples belong to the same class, the distance would be small. ''' def renyi_crossentropy_loss(y_true, y_pred): ''' 2nd-order Renyi entropy --- collision entorpy ''' renyi_loss = -tf.log(K.mean(tf.multiply(y_true, y_pred), axis=1)) # (samples, 1) return K.mean(renyi_loss, -1) ''' 1,2,3-order hinge loss ''' ''' Wasserstein loss '''
35.762431
113
0.663525
import numpy as np import tensorflow as tf import keras.backend as K ''' Cauchy-Schwarz distance between 2 samples The distance measures 'distinguishability' between samples. So, if 2 samples belong to same class, the distance would have a small value. ''' def cauchy_schwarz_distance(logits1, logits2, cross=False): # distance based on Renyi cross entropy from logits # distance = H_2(logits1) + H_2(logits2) + H^x_2(logits1,logits2) dist1 = -1.0 * tf.reduce_mean(tf.log(tf.add(tf.square(logits1), tf.square(1.0-logits1)))) dist2 = -1.0 * tf.reduce_mean(tf.log(tf.add(tf.square(logits2), tf.square(1.0-logits2)))) dist3 = tf.reduce_mean(tf.log(tf.add(tf.multiply(logits1, logits2), tf.multiply(1.0-logits1, 1.0-logits2)))) #return tf.add(dist1, dist2), dist3 if cross is False: return tf.add((dist1, dist2), 2.0*dist3) else: return -1.0*dist3 ''' Compute Cauchy-Schwarz distance loss among samples in the batch based on true-label information That is, if two samples belong to the same class, the distance would be small. 
''' def cauchy_schwarz_distance_loss(batch_size): # inputs must be flattenend # maximize between-class loss / minimize inner-class loss def loss(y_true, y_pred): logits=tf.sigmoid(y_pred) logits_list=tf.split(logits, num_or_size_splits=batch_size, axis=0) true_list=tf.split(y_true, num_or_size_splits=batch_size, axis=0) loss_list=[] for n in range(batch_size-1): for k in range(batch_size): if n>=k: continue sgn=2.0*tf.reduce_max(tf.multiply(true_list[n], true_list[k]), keepdims=False, axis=1)-1.0 dist = cauchy_schwarz_distance(logits_list[n], logits_list[k]) loss_list.append(tf.multiply(sgn, dist)) #loss_list.append(tf.multiply(sgn, cross_ent)) #return tf.reduce_mean(tf.concat(loss_list, -1), -1) return tf.reduce_mean(tf.concat(loss_list, -1), -1) # return loss return loss def collision_crossentropy_loss(batch_size): # inputs must be flattenend # maximize between-class loss / minimize inner-class loss def loss(y_true, y_pred): logits=tf.sigmoid(y_pred) logits_list=tf.split(logits, num_or_size_splits=batch_size, axis=0) true_list=tf.split(y_true, num_or_size_splits=batch_size, axis=0) loss_list=[] for n in range(batch_size-1): for k in range(batch_size): if n>k: continue sgn=2.0*tf.reduce_max(tf.multiply(true_list[n], true_list[k]), keepdims=False)-1.0 cross_ent = cauchy_schwarz_distance(logits_list[n], logits_list[k], cross=True) loss_list.append(tf.multiply(sgn, cross_ent)) return tf.reduce_mean(tf.concat(loss_list)) # return loss return loss def renyi_binary_crossentropy_loss(y_true, y_pred): renyi_loss=-1.0*tf.mean(K.log(K.add(tf.multiply(y_true, y_pred), tf.multiply(1.0-y_true, 1.0-y_pred))), axis=1) return K.mean(renyi_loss, -1) def renyi_crossentropy_loss(y_true, y_pred): ''' 2nd-order Renyi entropy --- collision entorpy ''' renyi_loss = -tf.log(K.mean(tf.multiply(y_true, y_pred), axis=1)) # (samples, 1) return K.mean(renyi_loss, -1) def renyi_quad_loss(y_true, y_pred): loss = -K.mean(tf.log(K.sum(tf.square(y_pred), axis=1))) return loss def 
renyi_quad_with_ce_loss(weight): def loss(y_true, y_pred): ce_loss = K.mean(K.categorical_crossentropy(y_true, y_pred), axis=-1) r_loss = renyi_quad_loss(y_true, y_pred) return tf.add((1.0-weight)*ce_loss, weight * r_loss) return loss def renyi_cross_with_ce_loss(weight): def loss(y_true, y_pred): ce_loss = K.mean(K.categorical_crossentropy(y_true, y_pred), axis=-1) r_loss = renyi_crossentropy_loss(y_true, y_pred) return tf.add((1.0-weight)*ce_loss, weight * r_loss) return loss def jensen_loss(y_true, y_pred): return tf.add(K.mean(K.categorical_crossentropy(y_true, y_pred), axis=-1), - K.mean(tf.log(K.sum(tf.multiply(y_true, y_pred), -1))) ) def cauchy_schwarz_binary_loss(y_true, y_pred): ce_loss = K.mean(K.binary_crossentropy(y_true, y_pred), axis=-1) rloss = 0.5*K.mean(tf.log(K.mean(K.add(tf.square(y_pred)+tf.square(1.0-y_pred)), axis=1)), -1) return ce_loss + rloss def cauchy_schwarz_loss(y_true, y_pred): ce_loss = K.mean(K.categorical_crossentropy(y_true, y_pred), axis=-1) rloss = 0.5*K.mean(tf.log(K.sum(tf.square(y_pred), axis=1))) return ce_loss + rloss ''' 1,2,3-order hinge loss ''' def polarity(tensor): return 2.0*tensor-1.0 def hinge_loss(y_true, y_pred): return tf.add(tf.max(0.0, 0.5 - tf.multiply(polarity(y_pred), polarity(y_true))), tf.max(0.0, 0.5 - tf.multiply(polarity(1.0-y_pred), polarity(1.0-y_true)))) def hinge_loss1(y_true, y_pred): loss = hinge_loss(y_true, y_pred) return K.mean(loss, axis=-1) def hinge_loss2(y_true, y_pred): loss = hinge_loss(y_true, y_pred) return K.mean(tf.square(loss), axis=-1) def hinge_loss3(y_true, y_pred): loss = hinge_loss(y_true, y_pred) return K.mean(tf.multiply(tf.square(loss), loss), -1) ''' Wasserstein loss ''' def wasserstein_loss(y_true, y_pred): return K.mean(y_pred) - K.mean(y_true) def wasserstein_gen_loss(y_true, y_pred): return -K.mean(y_pred) def logits_entropy(logits1, logits2): return -1.0*K.sum(tf.add(tf.multiply(logits1, tf.log(logits2+K.epsilon())), tf.multiply(1.0-logits1, 
tf.log(1.0-logits2+K.epsilon()))), axis=1) def kl_divergence_loss(batch_size): def loss(y_true, y_pred): logits=tf.sigmoid(y_pred) logits_list=tf.split(logits, num_or_size_splits=batch_size, axis=0) true_list=tf.split(y_true, num_or_size_splits=batch_size, axis=0) loss_list=[] for n in range(batch_size): ent=tf.reduce_mean(logits_entropy(logits_list[n], logits_list[n])) for k in range(batch_size): if n == k: continue sgn=2.0*tf.reduce_max(tf.multiply(true_list[n], true_list[k]))-1.0 cross_ent=tf.reduce_mean(logits_entropy(logits_list[n],logits_list[k])) loss_list.append(sgn * (tf.add(ent, -1.0*cross_ent))) return tf.reduce_mean(tf.stack(loss_list,axis=-1)) return loss
5,272
0
433
c717da8e40954a97b84dcdb68eb545035fb322a9
1,925
py
Python
wolk/data.py
bverhoeve/wolk
61e63c535cac38a2e6026ae48765d8ede4787404
[ "MIT" ]
1
2020-05-31T17:38:19.000Z
2020-05-31T17:38:19.000Z
wolk/data.py
bverhoeve/wolk
61e63c535cac38a2e6026ae48765d8ede4787404
[ "MIT" ]
7
2020-05-18T13:06:57.000Z
2020-06-01T17:44:52.000Z
wolk/data.py
bverhoeve/wolk
61e63c535cac38a2e6026ae48765d8ede4787404
[ "MIT" ]
null
null
null
import os import json import boto3 import logging from .constants import ( DATA_DIR, S3_BUCKET, AWS_DIR, AWS_EC2_FILE, AWS_RDS_FILE, AZURE_DIR, GCP_DIR ) # Initialisation of data directory if not os.path.exists(DATA_DIR): logging.debug('No data directories yet, creating') aws_path: str = os.path.join(DATA_DIR, AWS_DIR) azure_path: str = os.path.join(DATA_DIR, AZURE_DIR) gcp_path: str = os.path.join(DATA_DIR, GCP_DIR) os.makedirs(aws_path, exist_ok=True) os.makedirs(azure_path, exist_ok=True) os.makedirs(gcp_path, exist_ok=True)
33.189655
82
0.66026
import os import json import boto3 import logging from .constants import ( DATA_DIR, S3_BUCKET, AWS_DIR, AWS_EC2_FILE, AWS_RDS_FILE, AZURE_DIR, GCP_DIR ) # Initialisation of data directory if not os.path.exists(DATA_DIR): logging.debug('No data directories yet, creating') aws_path: str = os.path.join(DATA_DIR, AWS_DIR) azure_path: str = os.path.join(DATA_DIR, AZURE_DIR) gcp_path: str = os.path.join(DATA_DIR, GCP_DIR) os.makedirs(aws_path, exist_ok=True) os.makedirs(azure_path, exist_ok=True) os.makedirs(gcp_path, exist_ok=True) def has_local_data(file_path: str = None) -> bool: return os.path.exists(file_path) and os.path.isfile(file_path) def get_aws_data(data_type: str = None): file_path: str = None object_key: str = None if data_type is None or data_type == 'ec2': file_path: str = os.path.join(DATA_DIR, AWS_DIR, AWS_EC2_FILE) object_key: str = DATA_DIR + '/' + AWS_DIR + '/' + AWS_EC2_FILE elif data_type == 'rds': file_path: str = os.path.join(DATA_DIR, AWS_DIR, AWS_RDS_FILE) object_key: str = DATA_DIR + '/' + AWS_DIR + '/' + AWS_RDS_FILE else: raise ValueError(f'{data_type} is not supported, choose "ec2" or "rds"') logging.debug(f'Loading AWS {data_type} data') if has_local_data(file_path): logging.info('Locally cached data found') with open(file_path, 'r') as fp: return json.load(fp) else: # If no local data is present, download it from S3 and persist it. logging.info('No locally cached data found, downloading from S3') s3 = boto3.client('s3') logging.debug(f'Downloading {object_key} from {S3_BUCKET} to {file_path}') s3.download_file(S3_BUCKET, object_key, file_path) # Once downloaded, load in the cached data return get_aws_data(data_type)
1,276
0
58
5c1091d48c8e0c9de6e93e70cfd744804701c5be
51,872
py
Python
parinfer.py
oakmac/sublime-text-parinfer
42b5e43591c415aaab331d07c820f073780df64a
[ "ISC" ]
58
2015-11-20T09:54:43.000Z
2022-01-11T07:17:56.000Z
parinfer.py
oakmac/sublime-text-parinfer
42b5e43591c415aaab331d07c820f073780df64a
[ "ISC" ]
38
2015-11-21T23:02:00.000Z
2021-03-22T15:13:11.000Z
parinfer.py
oakmac/sublime-text-parinfer
42b5e43591c415aaab331d07c820f073780df64a
[ "ISC" ]
10
2015-12-03T02:42:01.000Z
2020-08-16T15:02:45.000Z
## Parinfer.py - a Parinfer implementation in Python ## v3.12.0 ## https://github.com/oakmac/parinfer.py ## ## More information about Parinfer can be found here: ## http://shaunlebron.github.io/parinfer/ ## ## Copyright (c) 2015, 2020, Chris Oakman and other contributors ## Released under the ISC license ## https://github.com/oakmac/parinfer.py/blob/master/LICENSE.md import re import sys #------------------------------------------------------------------------------- # Constants #------------------------------------------------------------------------------- INDENT_MODE = 'INDENT_MODE' PAREN_MODE = 'PAREN_MODE' BACKSLASH = '\\' BLANK_SPACE = ' ' DOUBLE_SPACE = ' ' DOUBLE_QUOTE = '"' NEWLINE = '\n' TAB = '\t' LINE_ENDING_REGEX = re.compile(r"\r?\n") CLOSE_PARENS = frozenset(['}', ')', ']']) OPEN_PARENS = frozenset(['{', '(', '[']) WHITESPACE = frozenset([NEWLINE, BLANK_SPACE, TAB]) MATCH_PAREN = { '{': '}', '}': '{', '[': ']', ']': '[', '(': ')', ')': '(', } # toggle this to check the asserts during development RUN_ASSERTS = False #------------------------------------------------------------------------------- # Options Structure #------------------------------------------------------------------------------- #------------------------------------------------------------------------------- # Result Structure #------------------------------------------------------------------------------- # This represents the running result. As we scan through each character # of a given text, we mutate this structure to update the state of our # system. 
class Result: """Returns a dictionary of the initial state.""" __slots__ = ( 'mode', 'smart', 'origText', 'origCursorX', 'origCursorLine', 'inputLines', 'inputLineNo', 'inputX', 'lines', 'lineNo', 'ch', 'x', 'indentX', 'parenStack', 'tabStops', 'parenTrail', 'parenTrails', 'returnParens', 'parens', 'cursorX', 'cursorLine', 'prevCursorX', 'prevCursorLine', 'selectionStartLine', 'changes', 'isInCode', 'isEscaping', 'isEscaped', 'isInStr', 'isInComment', 'commentX', 'quoteDanger', 'trackingIndent', 'skipChar', 'success', 'partialResult', 'forceBalance', 'maxIndent', 'indentDelta', 'trackingArgTabStop', 'error', 'errorPosCache', 'comment') def __init__(self, text, options, mode, smart): """Constructs a dictionary of the initial state.""" super(Result, self).__init__() self.mode = mode # [enum] - current processing mode (INDENT_MODE or PAREN_MODE) self.smart = smart # [boolean] - smart mode attempts special user-friendly behavior self.origText = text # [string] - original text self.origCursorX = None # [integer] - original cursorX option self.origCursorLine = None # [integer] - original cursorLine option # [string array] - input lines that we process line-by-line char-by-char self.inputLines = re.split(LINE_ENDING_REGEX, text) self.inputLineNo = -1 # [integer] - the current input line number self.inputX = -1 # [integer] - the current input x position of the current character (ch) self.lines = [] # [string array] - output lines (with corrected parens or indentation) self.lineNo = -1 # [integer] - output line number we are on self.ch = '' # [string] - character we are processing (can be changed to indicate a replacement) self.x = 0 # [integer] - output x position of the current character (ch) self.indentX = None # [integer] - x position of the indentation point if present self.parenStack = [] # We track where we are in the Lisp tree by keeping a stack (array) of open-parens. 
# Stack elements are objects containing keys {ch, x, lineNo, indentDelta} # whose values are the same as those described here in this result structure. self.tabStops = [] # In Indent Mode, it is useful for editors to snap a line's indentation # to certain critical points. Thus, we have a `tabStops` array of objects containing # keys {ch, x, lineNo, argX}, which is just the state of the `parenStack` at the cursor line. self.parenTrail = initialParenTrail() # the range of parens at the end of a line self.parenTrails = [] # [array of {lineNo, startX, endX}] - all non-empty parenTrails to be returned self.returnParens = False # [boolean] - determines if we return `parens` described below self.parens = [] # [array of {lineNo, x, closer, children}] - paren tree if `returnParens` is h self.cursorX = None # [integer] - x position of the cursor self.cursorLine = None # [integer] - line number of the cursor self.prevCursorX = None # [integer] - x position of the previous cursor self.prevCursorLine = None # [integer] - line number of the previous cursor self.selectionStartLine = None # [integer] - line number of the current selection starting point self.changes = None # [object] - mapping change.key to a change object (please see `transformChange` for object structure) self.isInCode = True # [boolean] - indicates if we are currently in "code space" (not string or comment) self.isEscaping = False # [boolean] - indicates if the next character will be escaped (e.g. `\c`). This may be inside string comment or code. self.isEscaped = False # [boolean] - indicates if the current character is escaped (e.g. `\c`). This may be inside string comment or code. 
self.isInStr = False # [boolean] - indicates if we are currently inside a string self.isInComment = False # [boolean] - indicates if we are currently inside a comment self.commentX = None # [integer] - x position of the start of comment on current line (if any) self.quoteDanger = False # [boolean] - indicates if quotes are imbalanced inside of a comment (dangerous) self.trackingIndent = False # [boolean] - are we looking for the indentation point of the current line? self.skipChar = False # [boolean] - should we skip the processing of the current character? self.success = False # [boolean] - was the input properly formatted enough to create a valid result? self.partialResult = False # [boolean] - should we return a partial result when an error occurs? self.forceBalance = False # [boolean] - should indent mode aggressively enforce paren balance? self.maxIndent = sys.maxsize # [integer] - maximum allowed indentation of subsequent lines in Paren Mode self.indentDelta = 0 # [integer] - how far indentation was shifted by Paren Mode # (preserves relative indentation of nested expressions) self.trackingArgTabStop = None # [string] - enum to track how close we are to the first-arg tabStop in a list # For example a tabStop occurs at `bar` below: # # ` (foo bar` # 00011112222000 <-- state after processing char (enums below) # # 0 None => not searching # 1 'space' => searching for next space # 2 'arg' => searching for arg # # (We create the tabStop when the change from 2->0 happens.) 
# self.comment = ';' # [string] default to semicolon as comment character self.error = { # if 'success' is False, return this error to the user 'name': None, # [string] - Parinfer's unique name for this error 'message': None, # [string] - error message to display 'lineNo': None, # [integer] - line number of error 'x': None, # [integer] - start x position of error 'extra': { 'name': None, 'lineNo': None, 'x': None } } self.errorPosCache = {} # [object] - maps error name to a potential error position if isinstance(options, dict): if 'cursorX' in options: self.cursorX = options['cursorX'] self.origCursorX = options['cursorX'] if 'cursorLine' in options: self.cursorLine = options['cursorLine'] self.origCursorLine = options['cursorLine'] if 'prevCursorX' in options: self.prevCursorX = options['prevCursorX'] if 'prevCursorLine' in options: self.prevCursorLine = options['prevCursorLine'] if 'selectionStartLine' in options: self.selectionStartLine = options['selectionStartLine'] if 'changes' in options: self.changes = transformChanges(options['changes']) if 'partialResult' in options: self.partialResult = options['partialResult'] if 'forceBalance' in options: self.forceBalance = options['forceBalance'] if 'returnParens' in options: self.returnParens = options['returnParens'] if 'comment' in options: self.comment = options['comment'] #------------------------------------------------------------------------------- # Possible Errors #------------------------------------------------------------------------------- # `result.error.name` is set to any of these ERROR_QUOTE_DANGER = "quote-danger" ERROR_EOL_BACKSLASH = "eol-backslash" ERROR_UNCLOSED_QUOTE = "unclosed-quote" ERROR_UNCLOSED_PAREN = "unclosed-paren" ERROR_UNMATCHED_CLOSE_PAREN = "unmatched-close-paren" ERROR_UNMATCHED_OPEN_PAREN = "unmatched-open-paren" ERROR_LEADING_CLOSE_PAREN = "leading-close-paren" ERROR_UNHANDLED = "unhandled" errorMessages = {} errorMessages[ERROR_QUOTE_DANGER] = "Quotes must balanced inside 
comment blocks." errorMessages[ERROR_EOL_BACKSLASH] = "Line cannot end in a hanging backslash." errorMessages[ERROR_UNCLOSED_QUOTE] = "String is missing a closing quote." errorMessages[ERROR_UNCLOSED_PAREN] = "Unclosed open-paren." errorMessages[ERROR_UNMATCHED_CLOSE_PAREN] = "Unmatched close-paren." errorMessages[ERROR_UNMATCHED_OPEN_PAREN] = "Unmatched open-paren." errorMessages[ERROR_LEADING_CLOSE_PAREN] = "Line cannot lead with a close-paren." errorMessages[ERROR_UNHANDLED] = "Unhandled error." #------------------------------------------------------------------------------- # String Operations #------------------------------------------------------------------------------- if RUN_ASSERTS: assert replaceWithinString('aaa', 0, 2, '') == 'a' assert replaceWithinString('aaa', 0, 1, 'b') == 'baa' assert replaceWithinString('aaa', 0, 2, 'b') == 'ba' #------------------------------------------------------------------------------- # Line Operations #------------------------------------------------------------------------------- # if the current character has changed, commit its change to the current line. 
# def commitChar(result, origCh): # ch = result.ch # if origCh != ch: # replaceWithinLine(result, result.lineNo, result.x, result.x + len(origCh), ch) # result.indentDelta -= (len(origCh) - len(ch)) # result.x += len(ch) #------------------------------------------------------------------------------- # Misc Utils #------------------------------------------------------------------------------- # if RUN_ASSERTS: # assert clamp(1, 3, 5) == 3 # assert clamp(9, 3, 5) == 5 # assert clamp(1, 3, None) == 3 # assert clamp(5, 3, None) == 5 # assert clamp(1, None, 5) == 1 # assert clamp(9, None, 5) == 5 # assert clamp(1, None, None) == 1 if RUN_ASSERTS: assert peek(['a'], 0) == 'a' assert peek(['a'], 1) is None assert peek(['a', 'b', 'c'], 0) == 'c' assert peek(['a', 'b', 'c'], 1) == 'b' assert peek(['a', 'b', 'c'], 5) is None assert peek([], 0) is None assert peek([], 1) is None #------------------------------------------------------------------------------- # Questions about characters #------------------------------------------------------------------------------- # def isWhitespace(result): # return not result.isEscaped and result.ch in WHITESPACE # can this be the last code character of a list? 
#------------------------------------------------------------------------------- # Advanced operations on characters #------------------------------------------------------------------------------- #------------------------------------------------------------------------------- # Literal character events #------------------------------------------------------------------------------- #------------------------------------------------------------------------------- # Character dispatch #------------------------------------------------------------------------------- CHAR_DISPATCH = { '(': onOpenParen, '{': onOpenParen, '[': onOpenParen, ')': onCloseParen, '}': onCloseParen, ']': onCloseParen, BACKSLASH: onBackslash, TAB: onTab, NEWLINE: onNewline, DOUBLE_QUOTE: onQuote, } #------------------------------------------------------------------------------- # Cursor defs #------------------------------------------------------------------------------- #------------------------------------------------------------------------------- # Paren Trail defs #------------------------------------------------------------------------------- # INDENT MODE: allow the cursor to clamp the paren trail # INDENT MODE: pops the paren trail from the stack # Determine which open-paren (if any) on the parenStack should be considered # the direct parent of the current line (given its indentation point). # This allows Smart Mode to simulate Paren Mode's structure-preserving # behavior by adding its `opener.indentDelta` to the current line's indentation. 
# (care must be taken to prevent redundant indentation correction, detailed below) # INDENT MODE: correct paren trail from indentation # PAREN MODE: remove spaces from the paren trail # PAREN MODE: append a valid close-paren to the end of the paren trail #------------------------------------------------------------------------------- # Indentation defs #------------------------------------------------------------------------------- #------------------------------------------------------------------------------- # High-level processing functions #------------------------------------------------------------------------------- #------------------------------------------------------------------------------- # Public API #------------------------------------------------------------------------------- API = { 'version': '3.12.0', 'indent_mode': indent_mode, 'paren_mode': paren_mode, 'smart_mode': smart_mode }
36.32493
158
0.564486
## Parinfer.py - a Parinfer implementation in Python ## v3.12.0 ## https://github.com/oakmac/parinfer.py ## ## More information about Parinfer can be found here: ## http://shaunlebron.github.io/parinfer/ ## ## Copyright (c) 2015, 2020, Chris Oakman and other contributors ## Released under the ISC license ## https://github.com/oakmac/parinfer.py/blob/master/LICENSE.md import re import sys #------------------------------------------------------------------------------- # Constants #------------------------------------------------------------------------------- INDENT_MODE = 'INDENT_MODE' PAREN_MODE = 'PAREN_MODE' BACKSLASH = '\\' BLANK_SPACE = ' ' DOUBLE_SPACE = ' ' DOUBLE_QUOTE = '"' NEWLINE = '\n' TAB = '\t' LINE_ENDING_REGEX = re.compile(r"\r?\n") CLOSE_PARENS = frozenset(['}', ')', ']']) OPEN_PARENS = frozenset(['{', '(', '[']) WHITESPACE = frozenset([NEWLINE, BLANK_SPACE, TAB]) MATCH_PAREN = { '{': '}', '}': '{', '[': ']', ']': '[', '(': ')', ')': '(', } # toggle this to check the asserts during development RUN_ASSERTS = False #------------------------------------------------------------------------------- # Options Structure #------------------------------------------------------------------------------- def transformChange(change): if not change: return None newLines = re.split(LINE_ENDING_REGEX, change['newText']) oldLines = re.split(LINE_ENDING_REGEX, change['oldText']) # single line case: # (defn foo| []) # ^ newEndX, newEndLineNo # +++ # multi line case: # (defn foo # ++++ # "docstring." 
# ++++++++++++++++ # |[]) # ++^ newEndX, newEndLineNo lastOldLineLen = len(oldLines[-1]) lastNewLineLen = len(newLines[-1]) oldEndX = (change['x'] if len(oldLines) == 1 else 0) + lastOldLineLen newEndX = (change['x'] if len(newLines) == 1 else 0) + lastNewLineLen newEndLineNo = change['lineNo'] + (len(newLines)-1) return { 'x': change['x'], 'lineNo': change['lineNo'], 'oldText': change['oldText'], 'newText': change['newText'], 'oldEndX': oldEndX, 'newEndX': newEndX, 'newEndLineNo': newEndLineNo, 'lookupLineNo': newEndLineNo, 'lookupX': newEndX } def transformChanges(changes): if len(changes) == 0: return None lines = {} for change in changes: change = transformChange(change) # print("change:",change['lookupLineNo']) if change['lookupLineNo'] not in lines: line = lines[change['lookupLineNo']] = {} else: line = lines[change['lookupLineNo']] line[change['lookupX']] = change return lines #------------------------------------------------------------------------------- # Result Structure #------------------------------------------------------------------------------- # This represents the running result. As we scan through each character # of a given text, we mutate this structure to update the state of our # system. 
class Clamped(object):
    """Snapshot of a paren trail's extent before it was clamped.

    Used so Indent Mode can remember what was cut off a paren trail
    (see ParenTrail.clamped).
    """
    __slots__ = ('startX', 'endX', 'openers')

    def __init__(self):
        self.startX = None   # startX before paren trail was clamped
        self.endX = None     # endX before paren trail was clamped
        self.openers = []    # openers that were cut out after paren trail was clamped


class ParenTrail(object):
    """The trail of close-parens at the end of the last parsed line."""
    __slots__ = ('lineNo', 'startX', 'endX', 'openers', 'clamped')

    def __init__(self):
        self.lineNo = None   # [integer] - line number of the last parsed paren trail
        self.startX = None   # [integer] - x position of first paren in this range
        self.endX = None     # [integer] - x position after the last paren in this range
        self.openers = []    # [array of stack elements] - corresponding open-paren for each close-paren in this range
        self.clamped = Clamped()


def initialParenTrail():
    """Return a fresh, empty ParenTrail."""
    return ParenTrail()


class Result:
    """Mutable running state of the Parinfer algorithm.

    One Result is created per processed text; every field is mutated in
    place as the text is scanned character by character.
    """
    __slots__ = (
        'mode', 'smart', 'origText', 'origCursorX', 'origCursorLine',
        'inputLines', 'inputLineNo', 'inputX', 'lines', 'lineNo', 'ch', 'x',
        'indentX', 'parenStack', 'tabStops', 'parenTrail', 'parenTrails',
        'returnParens', 'parens', 'cursorX', 'cursorLine', 'prevCursorX',
        'prevCursorLine', 'selectionStartLine', 'changes', 'isInCode',
        'isEscaping', 'isEscaped', 'isInStr', 'isInComment', 'commentX',
        'quoteDanger', 'trackingIndent', 'skipChar', 'success',
        'partialResult', 'forceBalance', 'maxIndent', 'indentDelta',
        'trackingArgTabStop', 'error', 'errorPosCache', 'comment')

    def __str__(self):
        # Debug dump: every slot on its own tab-indented line.
        return ('Result {' + 'mode: ' + str(self.mode) +
                '\n\t' 'smart: ' + str(self.smart) +
                '\n\t' 'origText: ' + str(self.origText) +
                '\n\t' 'origCursorX: ' + str(self.origCursorX) +
                '\n\t' 'origCursorLine: ' + str(self.origCursorLine) +
                '\n\t' 'inputLines: ' + str(self.inputLines) +
                '\n\t' 'inputLineNo: ' + str(self.inputLineNo) +
                '\n\t' 'inputX: ' + str(self.inputX) +
                '\n\t' 'lines: ' + str(self.lines) +
                '\n\t' 'lineNo: ' + str(self.lineNo) +
                '\n\t' 'ch: ' + str(self.ch) +
                '\n\t' 'x: ' + str(self.x) +
                '\n\t' 'indentX: ' + str(self.indentX) +
                '\n\t' 'parenStack: ' + str(self.parenStack) +
                '\n\t' 'tabStops: ' + str(self.tabStops) +
                '\n\t' 'parenTrail: ' + str(self.parenTrail) +
                '\n\t' 'parenTrails: ' + str(self.parenTrails) +
                '\n\t' 'returnParens: ' + str(self.returnParens) +
                '\n\t' 'parens: ' + str(self.parens) +
                '\n\t' 'cursorX: ' + str(self.cursorX) +
                '\n\t' 'cursorLine: ' + str(self.cursorLine) +
                '\n\t' 'prevCursorX: ' + str(self.prevCursorX) +
                '\n\t' 'prevCursorLine: ' + str(self.prevCursorLine) +
                '\n\t' 'selectionStartLine: ' + str(self.selectionStartLine) +
                '\n\t' 'changes: ' + str(self.changes) +
                '\n\t' 'isInCode: ' + str(self.isInCode) +
                '\n\t' 'isEscaping: ' + str(self.isEscaping) +
                '\n\t' 'isEscaped: ' + str(self.isEscaped) +
                '\n\t' 'isInStr: ' + str(self.isInStr) +
                '\n\t' 'isInComment: ' + str(self.isInComment) +
                '\n\t' 'commentX: ' + str(self.commentX) +
                '\n\t' 'quoteDanger: ' + str(self.quoteDanger) +
                '\n\t' 'trackingIndent: ' + str(self.trackingIndent) +
                '\n\t' 'skipChar: ' + str(self.skipChar) +
                '\n\t' 'success: ' + str(self.success) +
                '\n\t' 'partialResult: ' + str(self.partialResult) +
                '\n\t' 'forceBalance: ' + str(self.forceBalance) +
                '\n\t' 'maxIndent: ' + str(self.maxIndent) +
                '\n\t' 'indentDelta: ' + str(self.indentDelta) +
                '\n\t' 'trackingArgTabStop: ' + str(self.trackingArgTabStop) +
                '\n\t' 'error: ' + str(self.error) +
                '\n\t' 'errorPosCache: ' + str(self.errorPosCache) +
                '\n\t' 'comment: ' + str(self.comment) +
                '\n\t}')

    def __init__(self, text, options, mode, smart):
        """Initialize the running state for processing `text`.

        mode is INDENT_MODE or PAREN_MODE; smart enables smart-mode
        behavior; `options` is an optional dict of caller-supplied
        settings (cursor position, previous cursor, changes, etc.).
        """
        super(Result, self).__init__()
        self.mode = mode                # [enum] - current processing mode (INDENT_MODE or PAREN_MODE)
        self.smart = smart              # [boolean] - smart mode attempts special user-friendly behavior
        self.origText = text            # [string] - original text
        self.origCursorX = None         # [integer] - original cursorX option
        self.origCursorLine = None      # [integer] - original cursorLine option

        # [string array] - input lines that we process line-by-line char-by-char
        self.inputLines = re.split(LINE_ENDING_REGEX, text)
        self.inputLineNo = -1           # [integer] - the current input line number
        self.inputX = -1                # [integer] - the current input x position of the current character (ch)

        self.lines = []                 # [string array] - output lines (with corrected parens or indentation)
        self.lineNo = -1                # [integer] - output line number we are on
        self.ch = ''                    # [string] - character we are processing (can be changed to indicate a replacement)
        self.x = 0                      # [integer] - output x position of the current character (ch)
        self.indentX = None             # [integer] - x position of the indentation point if present

        self.parenStack = []            # We track where we are in the Lisp tree by keeping a stack (array) of open-parens.
                                        # Stack elements are objects containing keys {ch, x, lineNo, indentDelta}
                                        # whose values are the same as those described here in this result structure.

        self.tabStops = []              # In Indent Mode, it is useful for editors to snap a line's indentation
                                        # to certain critical points. Thus, we have a `tabStops` array of objects containing
                                        # keys {ch, x, lineNo, argX}, which is just the state of the `parenStack` at the cursor line.

        self.parenTrail = initialParenTrail()  # the range of parens at the end of a line
        self.parenTrails = []           # [array of {lineNo, startX, endX}] - all non-empty parenTrails to be returned

        self.returnParens = False       # [boolean] - determines if we return `parens` described below
        self.parens = []                # [array of {lineNo, x, closer, children}] - paren tree if `returnParens` is enabled

        self.cursorX = None             # [integer] - x position of the cursor
        self.cursorLine = None          # [integer] - line number of the cursor
        self.prevCursorX = None         # [integer] - x position of the previous cursor
        self.prevCursorLine = None      # [integer] - line number of the previous cursor

        self.selectionStartLine = None  # [integer] - line number of the current selection starting point

        self.changes = None             # [object] - mapping change.key to a change object (please see `transformChange` for object structure)

        self.isInCode = True            # [boolean] - indicates if we are currently in "code space" (not string or comment)
        self.isEscaping = False         # [boolean] - indicates if the next character will be escaped (e.g. `\c`). This may be inside string comment or code.
        self.isEscaped = False          # [boolean] - indicates if the current character is escaped (e.g. `\c`). This may be inside string comment or code.
        self.isInStr = False            # [boolean] - indicates if we are currently inside a string
        self.isInComment = False        # [boolean] - indicates if we are currently inside a comment
        self.commentX = None            # [integer] - x position of the start of comment on current line (if any)

        self.quoteDanger = False        # [boolean] - indicates if quotes are imbalanced inside of a comment (dangerous)
        self.trackingIndent = False     # [boolean] - are we looking for the indentation point of the current line?
        self.skipChar = False           # [boolean] - should we skip the processing of the current character?
        self.success = False            # [boolean] - was the input properly formatted enough to create a valid result?
        self.partialResult = False      # [boolean] - should we return a partial result when an error occurs?
        self.forceBalance = False       # [boolean] - should indent mode aggressively enforce paren balance?

        self.maxIndent = sys.maxsize    # [integer] - maximum allowed indentation of subsequent lines in Paren Mode
        self.indentDelta = 0            # [integer] - how far indentation was shifted by Paren Mode
                                        #             (preserves relative indentation of nested expressions)

        self.trackingArgTabStop = None  # [string] - enum to track how close we are to the first-arg tabStop in a list
        # For example a tabStop occurs at `bar` below:
        #
        #     `   (foo    bar`
        #          00011112222000  <-- state after processing char (enums below)
        #
        #     0   None    => not searching
        #     1   'space' => searching for next space
        #     2   'arg'   => searching for arg
        #
        #     (We create the tabStop when the change from 2->0 happens.)
        #
        self.comment = ';'              # [string] default to semicolon as comment character

        self.error = {                  # if 'success' is False, return this error to the user
            'name': None,               # [string] - Parinfer's unique name for this error
            'message': None,            # [string] - error message to display
            'lineNo': None,             # [integer] - line number of error
            'x': None,                  # [integer] - start x position of error
            'extra': {
                'name': None,
                'lineNo': None,
                'x': None
            }
        }
        self.errorPosCache = {}         # [object] - maps error name to a potential error position

        # Apply caller-supplied options (all optional).
        if isinstance(options, dict):
            if 'cursorX' in options:
                self.cursorX = options['cursorX']
                self.origCursorX = options['cursorX']
            if 'cursorLine' in options:
                self.cursorLine = options['cursorLine']
                self.origCursorLine = options['cursorLine']
            if 'prevCursorX' in options:
                self.prevCursorX = options['prevCursorX']
            if 'prevCursorLine' in options:
                self.prevCursorLine = options['prevCursorLine']
            if 'selectionStartLine' in options:
                self.selectionStartLine = options['selectionStartLine']
            if 'changes' in options:
                self.changes = transformChanges(options['changes'])
            if 'partialResult' in options:
                self.partialResult = options['partialResult']
            if 'forceBalance' in options:
                self.forceBalance = options['forceBalance']
            if 'returnParens' in options:
                self.returnParens = options['returnParens']
            if 'comment' in options:
                self.comment = options['comment']

#-------------------------------------------------------------------------------
# Possible Errors
#-------------------------------------------------------------------------------

# `result.error.name` is set to any of these
ERROR_QUOTE_DANGER = "quote-danger"
ERROR_EOL_BACKSLASH = "eol-backslash"
ERROR_UNCLOSED_QUOTE = "unclosed-quote"
ERROR_UNCLOSED_PAREN = "unclosed-paren"
ERROR_UNMATCHED_CLOSE_PAREN = "unmatched-close-paren"
ERROR_UNMATCHED_OPEN_PAREN = "unmatched-open-paren"
ERROR_LEADING_CLOSE_PAREN = "leading-close-paren"
ERROR_UNHANDLED = "unhandled"

errorMessages = {}
# NOTE(review): "must balanced" below reads like a typo for "must be balanced",
# but these are user-visible runtime strings, so they are kept byte-identical.
errorMessages[ERROR_QUOTE_DANGER] = "Quotes must balanced inside comment blocks."
errorMessages[ERROR_EOL_BACKSLASH] = "Line cannot end in a hanging backslash."
errorMessages[ERROR_UNCLOSED_QUOTE] = "String is missing a closing quote."
errorMessages[ERROR_UNCLOSED_PAREN] = "Unclosed open-paren."
errorMessages[ERROR_UNMATCHED_CLOSE_PAREN] = "Unmatched close-paren."
errorMessages[ERROR_UNMATCHED_OPEN_PAREN] = "Unmatched open-paren."
errorMessages[ERROR_LEADING_CLOSE_PAREN] = "Line cannot lead with a close-paren."
errorMessages[ERROR_UNHANDLED] = "Unhandled error."
def cacheErrorPos(result, errorName):
    """Cache the current (output and input) position under `errorName`.

    Returns the cached position dict so callers can reuse it.
    """
    e = {
        'lineNo': result.lineNo,
        'x': result.x,
        'inputLineNo': result.inputLineNo,
        'inputX': result.inputX
    }
    result.errorPosCache[errorName] = e
    return e


class ParinferError(Exception):
    """Raised/returned with a parinfer error dict (or a control-flow signal
    such as {'releaseCursorHold': True})."""
    pass


def error(result, name):
    """Build (but do not raise) a ParinferError for error `name`.

    The reported position prefers a previously cached position for `name`;
    otherwise it uses the current position — output coordinates when
    result.partialResult is set, input coordinates otherwise.
    """
    cache = result.errorPosCache.get(name, {})

    # BUGFIX: this read `result.LineNo`, which is not a slot on Result
    # (Result defines `lineNo` in __slots__), so any error raised while
    # partialResult was enabled crashed with AttributeError.
    resultLineNo = result.lineNo if result.partialResult else result.inputLineNo
    resultX = result.x if result.partialResult else result.inputX

    keyLineNo = 'lineNo' if result.partialResult else 'inputLineNo'
    keyX = 'x' if result.partialResult else 'inputX'

    e = {
        'parinferError': True,
        'name': name,
        'message': errorMessages[name],
        'lineNo': cache[keyLineNo] if cache else resultLineNo,
        'x': cache[keyX] if cache else resultX
    }
    opener = peek(result.parenStack, 0)

    if name == ERROR_UNMATCHED_CLOSE_PAREN:
        # extra error info for locating the open-paren that it should've matched
        if ERROR_UNMATCHED_OPEN_PAREN in result.errorPosCache:
            cache = result.errorPosCache[ERROR_UNMATCHED_OPEN_PAREN]
        if cache or opener:
            if opener:
                # BUGFIX: was `opener.LineNo` — Opener's slot is `lineNo`.
                openerLineNo = opener.lineNo if result.partialResult else opener.inputLineNo
                openerX = opener.x if result.partialResult else opener.inputX
            e['extra'] = {
                'name': ERROR_UNMATCHED_OPEN_PAREN,
                'lineNo': cache[keyLineNo] if cache else openerLineNo,
                'x': cache[keyX] if cache else openerX
            }
    elif name == ERROR_UNCLOSED_PAREN:
        # BUGFIX: was `opener.LineNo` here as well.
        openerLineNo = opener.lineNo if result.partialResult else opener.inputLineNo
        openerX = opener.x if result.partialResult else opener.inputX
        e['lineNo'] = openerLineNo
        e['x'] = openerX

    return ParinferError(e)

#-------------------------------------------------------------------------------
# String Operations
#-------------------------------------------------------------------------------

def replaceWithinString(orig, start, end, replace):
    """Return `orig` with the [start, end) slice replaced by `replace`."""
    return orig[:start] + replace + orig[end:]

if RUN_ASSERTS:
    assert replaceWithinString('aaa', 0, 2, '') == 'a'
    assert replaceWithinString('aaa', 0, 1, 'b') == 'baa'
    assert replaceWithinString('aaa', 0, 2, 'b') == 'ba'

def getLineEnding(text):
    """Return the line ending to use for output ("\\r\\n" if `text` has any CR)."""
    # NOTE: We assume that if the CR char "\r" is used anywhere,
    # then we should use CRLF line-endings after every line.
    i = text.find("\r")
    if i != -1:
        return "\r\n"
    return "\n"

#-------------------------------------------------------------------------------
# Line Operations
#-------------------------------------------------------------------------------

def isCursorAffected(result, start, end):
    """Should the cursor move when [start, end) of its line is edited?"""
    if result.cursorX == start and result.cursorX == end:
        # zero-width edit exactly at the cursor only drags a cursor at column 0
        return result.cursorX == 0
    return result.cursorX >= end

def shiftCursorOnEdit(result, lineNo, start, end, replace):
    """Shift the cursor by the length delta of an edit on its own line."""
    oldLength = end - start
    newLength = len(replace)
    dx = newLength - oldLength

    if (dx != 0 and
            result.cursorLine == lineNo and
            result.cursorX is not None and
            isCursorAffected(result, start, end)):
        result.cursorX += dx

def replaceWithinLine(result, lineNo, start, end, replace):
    """Replace [start, end) of output line `lineNo`, keeping the cursor in sync."""
    line = result.lines[lineNo]
    newLine = replaceWithinString(line, start, end, replace)
    result.lines[lineNo] = newLine
    shiftCursorOnEdit(result, lineNo, start, end, replace)

def insertWithinLine(result, lineNo, idx, insert):
    """Insert `insert` at column `idx` of output line `lineNo`."""
    replaceWithinLine(result, lineNo, idx, idx, insert)

def initLine(result):
    """Advance to the next output line and reset all line-specific state."""
    result.x = 0
    result.lineNo += 1

    # reset line-specific state
    result.indentX = None
    result.commentX = None
    result.indentDelta = 0

    # cached error positions only apply to the line that recorded them
    result.errorPosCache.pop(ERROR_UNMATCHED_CLOSE_PAREN, None)
    result.errorPosCache.pop(ERROR_UNMATCHED_OPEN_PAREN, None)
    result.errorPosCache.pop(ERROR_LEADING_CLOSE_PAREN, None)

    result.trackingArgTabStop = None
    result.trackingIndent = not result.isInStr

# if the current character has changed, commit its change to the current line.
# def commitChar(result, origCh): # ch = result.ch # if origCh != ch: # replaceWithinLine(result, result.lineNo, result.x, result.x + len(origCh), ch) # result.indentDelta -= (len(origCh) - len(ch)) # result.x += len(ch) #------------------------------------------------------------------------------- # Misc Utils #------------------------------------------------------------------------------- def clamp(val, minN, maxN): return max(minN, min(val, maxN)) # if RUN_ASSERTS: # assert clamp(1, 3, 5) == 3 # assert clamp(9, 3, 5) == 5 # assert clamp(1, 3, None) == 3 # assert clamp(5, 3, None) == 5 # assert clamp(1, None, 5) == 1 # assert clamp(9, None, 5) == 5 # assert clamp(1, None, None) == 1 def peek(arr, idxFromBack): # maxIdx = len(arr) - 1 # if idxFromBack > maxIdx: # return None # return arr[maxIdx - idxFromBack] try: return arr[-1 - idxFromBack] except IndexError: return None if RUN_ASSERTS: assert peek(['a'], 0) == 'a' assert peek(['a'], 1) is None assert peek(['a', 'b', 'c'], 0) == 'c' assert peek(['a', 'b', 'c'], 1) == 'b' assert peek(['a', 'b', 'c'], 5) is None assert peek([], 0) is None assert peek([], 1) is None #------------------------------------------------------------------------------- # Questions about characters #------------------------------------------------------------------------------- def isValidCloseParen(parenStack, ch): if len(parenStack) == 0: return False return peek(parenStack, 0).ch == MATCH_PAREN[ch] # def isWhitespace(result): # return not result.isEscaped and result.ch in WHITESPACE # can this be the last code character of a list? 
def isClosable(result): ch = result.ch closer = ch in CLOSE_PARENS and not result.isEscaped # closer = ch in ('}', ')', ']') and not result.isEscaped # return result.isInCode and not isWhitespace(result) and ch != '' and not closer return result.isInCode and (result.isEscaped or ch not in WHITESPACE) and ch != '' and not closer # return result.isInCode and not ch in (BLANK_SPACE, DOUBLE_SPACE) and ch != '' and not closer #------------------------------------------------------------------------------- # Advanced operations on characters #------------------------------------------------------------------------------- def checkCursorHolding(result): opener = peek(result.parenStack, 0) parent = peek(result.parenStack, 1) holdMinX = parent.x+1 if parent else 0 holdMaxX = opener.x holding = ( result.cursorLine == opener.lineNo and holdMinX <= result.cursorX and result.cursorX <= holdMaxX ) shouldCheckPrev = not result.changes and result.prevCursorLine is not None if shouldCheckPrev: prevHolding = ( result.prevCursorLine == opener.lineNo and holdMinX <= result.prevCursorX and result.prevCursorX <= holdMaxX ) if prevHolding and not holding: raise ParinferError({'releaseCursorHold': True}) return holding def trackArgTabStop(result, state): if state == 'space': # if result.isInCode and isWhitespace(result): if result.isInCode and not result.isEscaped and result.ch in WHITESPACE: result.trackingArgTabStop = 'arg' elif state == 'arg': # if not isWhitespace(result): if result.isEscaped or result.ch not in WHITESPACE: opener = peek(result.parenStack, 0) opener.argX = result.x result.trackingArgTabStop = None #------------------------------------------------------------------------------- # Literal character events #------------------------------------------------------------------------------- class Opener(object): __slots__ = ('self', 'inputLineNo', 'inputX', 'lineNo', 'x', 'ch', 'indentDelta', 'maxChildIndent', 'argX', 'children', 'closer') def __init__(self, inputLineNo, 
inputX, lineNo, x, ch, indentDelta, maxChildIndent): super(Opener, self).__init__() self.inputLineNo = inputLineNo self.inputX = inputX self.lineNo = lineNo self.x = x self.ch = ch self.indentDelta = indentDelta self.maxChildIndent = maxChildIndent self.argX = None self.children = None self.closer = None def __str__(self): return ("{ inputLineNo: " + str(self.inputLineNo) + "\n inputX: " + str(self.inputX) + "\n lineNo: " + str(self.lineNo) + "\n x: " + str(self.x) + "\n ch: " + str(self.ch) + "\n indentDelta: " + str(self.indentDelta) + "\n maxChildIndent: " + str(self.maxChildIndent) + "}") def onOpenParen(result): if result.isInCode: opener = Opener( result.inputLineNo, result.inputX, result.lineNo, result.x, result.ch, result.indentDelta, sys.maxsize, ) if result.returnParens: opener.children = [] opener.closer = { 'lineNo': None, 'x': None, 'ch': '' } parent = peek(result.parenStack, 0) parent = parent.children if parent else result.parens parent.append(opener) result.parenStack.append(opener) result.trackingArgTabStop = 'space' def setCloser(opener, lineNo, x, ch): opener.closer['lineNo'] = lineNo opener.closer['x'] = x opener.closer['ch'] = ch def onMatchedCloseParen(result): opener = peek(result.parenStack, 0) if result.returnParens: setCloser(opener, result.lineNo, result.x, result.ch) result.parenTrail.endX = result.x + 1 result.parenTrail.openers.append(opener) if result.mode == INDENT_MODE and result.smart and checkCursorHolding(result): origStartX = result.parenTrail.startX origEndX = result.parenTrail.endX origOpeners = result.parenTrail.openers resetParenTrail(result, result.lineNo, result.x+1) result.parenTrail.clamped.startX = origStartX result.parenTrail.clamped.endX = origEndX result.parenTrail.clamped.openers = origOpeners result.parenStack.pop() result.trackingArgTabStop = None def onUnmatchedCloseParen(result): if result.mode == PAREN_MODE: trail = result.parenTrail inLeadingParenTrail = trail.lineNo == result.lineNo and trail.startX == 
result.indentX canRemove = result.smart and inLeadingParenTrail if not canRemove: raise error(result, ERROR_UNMATCHED_CLOSE_PAREN) elif result.mode == INDENT_MODE and ( ERROR_UNMATCHED_CLOSE_PAREN not in result.errorPosCache): cacheErrorPos(result, ERROR_UNMATCHED_CLOSE_PAREN) opener = peek(result.parenStack, 0) if opener: e = cacheErrorPos(result, ERROR_UNMATCHED_OPEN_PAREN) e['inputLineNo'] = opener.inputLineNo e['inputX'] = opener.inputX result.ch = '' def onCloseParen(result): if result.isInCode: if isValidCloseParen(result.parenStack, result.ch): onMatchedCloseParen(result) else: onUnmatchedCloseParen(result) def onTab(result): if result.isInCode: result.ch = DOUBLE_SPACE def onComment(result): if result.isInCode: result.isInComment = True result.commentX = result.x result.trackingArgTabStop = None def onNewline(result): result.isInComment = False result.ch = '' def onQuote(result): if result.isInStr: result.isInStr = False elif result.isInComment: result.quoteDanger = not result.quoteDanger if result.quoteDanger: cacheErrorPos(result, ERROR_QUOTE_DANGER) else: result.isInStr = True cacheErrorPos(result, ERROR_UNCLOSED_QUOTE) def onBackslash(result): result.isEscaping = True def afterBackslash(result): result.isEscaping = False result.isEscaped = True if result.ch == NEWLINE: if result.isInCode: raise error(result, ERROR_EOL_BACKSLASH) onNewline(result) #------------------------------------------------------------------------------- # Character dispatch #------------------------------------------------------------------------------- CHAR_DISPATCH = { '(': onOpenParen, '{': onOpenParen, '[': onOpenParen, ')': onCloseParen, '}': onCloseParen, ']': onCloseParen, BACKSLASH: onBackslash, TAB: onTab, NEWLINE: onNewline, DOUBLE_QUOTE: onQuote, } def onChar(result): result.isEscaped = False if result.isEscaping: afterBackslash(result) elif result.ch == result.comment: onComment(result) else: dispatch = CHAR_DISPATCH.get(result.ch, None) if dispatch is not None: 
dispatch(result) # ch = result.ch result.isInCode = not result.isInComment and not result.isInStr # can this be the last code character of a list? # def isClosable(result): # ch = result.ch # closer = ch in CLOSE_PARENS and not result.isEscaped # closer = ch in ('}', ')', ']') and not result.isEscaped # closable = result.isInCode and not (not result.isEscaped and result.ch in WHITESPACE) and ch != '' and not (ch in CLOSE_PARENS and not result.isEscaped) # return result.isInCode and not ch in (BLANK_SPACE, DOUBLE_SPACE) and ch != '' and not closer # if closable: if isClosable(result): resetParenTrail(result, result.lineNo, result.x+len(result.ch)) state = result.trackingArgTabStop if state: trackArgTabStop(result, state) #------------------------------------------------------------------------------- # Cursor defs #------------------------------------------------------------------------------- def isCursorLeftOf(cursorX, cursorLine, x, lineNo): return ( cursorLine == lineNo and x is not None and cursorX is not None and cursorX <= x # inclusive since (cursorX = x) implies (x-1 < cursor < x) ) def isCursorRightOf(cursorX, cursorLine, x, lineNo): return ( cursorLine == lineNo and x is not None and cursorX is not None and cursorX > x ) def isCursorInComment(result, cursorX, cursorLine): return isCursorRightOf(cursorX, cursorLine, result.commentX, result.lineNo) def handleChangeDelta(result): if result.changes and (result.smart or result.mode == PAREN_MODE): if result.inputLineNo in result.changes: line = result.changes[result.inputLineNo] if result.inputX in line: change = line[result.inputX] result.indentDelta += (change['newEndX'] - change['oldEndX']) #------------------------------------------------------------------------------- # Paren Trail defs #------------------------------------------------------------------------------- def resetParenTrail(result, lineNo, x): result.parenTrail.lineNo = lineNo result.parenTrail.startX = x result.parenTrail.endX = x 
result.parenTrail.openers = [] result.parenTrail.clamped.startX = None result.parenTrail.clamped.endX = None result.parenTrail.clamped.openers = [] def isCursorClampingParenTrail(result, cursorX, cursorLine): return ( isCursorRightOf(cursorX, cursorLine, result.parenTrail.startX, result.lineNo) and not isCursorInComment(result, cursorX, cursorLine) ) # INDENT MODE: allow the cursor to clamp the paren trail def clampParenTrailToCursor(result): startX = result.parenTrail.startX endX = result.parenTrail.endX clamping = isCursorClampingParenTrail(result, result.cursorX, result.cursorLine) if clamping: newStartX = max(startX, result.cursorX) newEndX = max(endX, result.cursorX) line = result.lines[result.lineNo] removeCount = 0 for i in range(startX, newStartX): if line[i] in CLOSE_PARENS: removeCount += 1 openers = result.parenTrail.openers result.parenTrail.openers = openers[removeCount:] result.parenTrail.startX = newStartX result.parenTrail.endX = newEndX result.parenTrail.clamped.openers = openers[0:removeCount] result.parenTrail.clamped.startX = startX result.parenTrail.clamped.endX = endX # INDENT MODE: pops the paren trail from the stack def popParenTrail(result): startX = result.parenTrail.startX endX = result.parenTrail.endX if startX == endX: return openers = result.parenTrail.openers while len(openers) != 0: result.parenStack.append(openers.pop()) # Determine which open-paren (if any) on the parenStack should be considered # the direct parent of the current line (given its indentation point). # This allows Smart Mode to simulate Paren Mode's structure-preserving # behavior by adding its `opener.indentDelta` to the current line's indentation. 
# (care must be taken to prevent redundant indentation correction, detailed below) def getParentOpenerIndex(result, indentX): i = 0 # for i in range(len(result.parenStack)): parenStackLen = len(result.parenStack) while i < parenStackLen: # idx = i opener = peek(result.parenStack, i) currOutside = (opener.x < indentX) prevIndentX = indentX - result.indentDelta prevOutside = (opener.x - opener.indentDelta < prevIndentX) isParent = False if prevOutside and currOutside: isParent = True elif not prevOutside and not currOutside: isParent = False elif prevOutside and not currOutside: # POSSIBLE FRAGMENTATION # (foo --\ # +--- FRAGMENT `(foo bar)` => `(foo) bar` # bar) --/ # 1. PREVENT FRAGMENTATION # ```in # (foo # ++ # bar # ``` # ```out # (foo # bar # ``` if result.indentDelta == 0: isParent = True # 2. ALLOW FRAGMENTATION # ```in # (foo # bar # -- # ``` # ```out # (foo) # bar # ``` elif opener.indentDelta == 0: isParent = False else: # TODO: identify legitimate cases where both are nonzero # allow the fragmentation by default isParent = False # TODO: should we throw to exit instead? either of: # 1. give up, just `throw error(...)` # 2. fallback to paren mode to preserve structure elif not prevOutside and currOutside: # POSSIBLE ADOPTION # (foo) --\ # +--- ADOPT `(foo) bar` => `(foo bar)` # bar --/ nextOpener = peek(result.parenStack, i+1) # 1. DISALLOW ADOPTION # ```in # (foo # -- # (bar) # -- # baz) # ``` # ```out # (foo # (bar) # baz) # ``` # OR # ```in # (foo # -- # (bar) # - # baz) # ``` # ```out # (foo # (bar) # baz) # ``` if nextOpener and nextOpener.indentDelta <= opener.indentDelta: # we can only disallow adoption if nextOpener.indentDelta will actually # prevent the indentX from being in the opener's threshold. if indentX + nextOpener.indentDelta > opener.x: isParent = True else: isParent = False # 2. 
ALLOW ADOPTION # ```in # (foo # (bar) # -- # baz) # ``` # ```out # (foo # (bar # baz)) # ``` # OR # ```in # (foo # - # (bar) # -- # baz) # ``` # ```out # (foo # (bar) # baz) # ``` elif nextOpener and nextOpener.indentDelta > opener.indentDelta: isParent = True # 3. ALLOW ADOPTION # ```in # (foo) # -- # bar # ``` # ```out # (foo # bar) # ``` # OR # ```in # (foo) # bar # ++ # ``` # ```out # (foo # bar # ``` # OR # ```in # (foo) # + # bar # ++ # ``` # ```out # (foo # bar) # ``` elif result.indentDelta > opener.indentDelta: isParent = True if isParent: # if new parent # Clear `indentDelta` since it is reserved for previous child lines only. opener.indentDelta = 0 if isParent: # # return i break i += 1 return i # INDENT MODE: correct paren trail from indentation def correctParenTrail(result, indentX): parens = '' index = getParentOpenerIndex(result, indentX) for i in range(index): opener = result.parenStack.pop() result.parenTrail.openers.append(opener) closeCh = MATCH_PAREN[opener.ch] parens += closeCh if result.returnParens: setCloser(opener, result.parenTrail.lineNo, result.parenTrail.startX+i, closeCh) if result.parenTrail.lineNo is not None: replaceWithinLine(result, result.parenTrail.lineNo, result.parenTrail.startX, result.parenTrail.endX, parens) result.parenTrail.endX = result.parenTrail.startX + len(parens) rememberParenTrail(result) # PAREN MODE: remove spaces from the paren trail def cleanParenTrail(result): startX = result.parenTrail.startX endX = result.parenTrail.endX if (startX == endX or result.lineNo != result.parenTrail.lineNo): return line = result.lines[result.lineNo] newTrail = '' spaceCount = 0 for i in range(startX, endX): if line[i] in CLOSE_PARENS: newTrail += line[i] else: spaceCount += 1 if spaceCount > 0: replaceWithinLine(result, result.lineNo, startX, endX, newTrail) result.parenTrail.endX -= spaceCount # PAREN MODE: append a valid close-paren to the end of the paren trail def appendParenTrail(result): opener = result.parenStack.pop() 
closeCh = MATCH_PAREN[opener.ch] if result.returnParens: setCloser(opener, result.parenTrail.lineNo, result.parenTrail.endX, closeCh) setMaxIndent(result, opener) insertWithinLine(result, result.parenTrail.lineNo, result.parenTrail.endX, closeCh) result.parenTrail.endX += 1 result.parenTrail.openers.append(opener) updateRememberedParenTrail(result) def invalidateParenTrail(result): result.parenTrail = initialParenTrail() def checkUnmatchedOutsideParenTrail(result): cache = None if ERROR_UNMATCHED_CLOSE_PAREN in result.errorPosCache: cache = result.errorPosCache[ERROR_UNMATCHED_CLOSE_PAREN] if cache and cache['x'] < result.parenTrail.startX: raise error(result, ERROR_UNMATCHED_CLOSE_PAREN) def setMaxIndent(result, opener): if opener: parent = peek(result.parenStack, 0) if parent: parent.maxChildIndent = opener.x else: result.maxIndent = opener.x def rememberParenTrail(result): trail = result.parenTrail openers = trail.clamped.openers + trail.openers if len(openers) > 0: isClamped = trail.clamped.startX is not None allClamped = len(trail.openers) == 0 shortTrail = { 'lineNo': trail.lineNo, 'startX': trail.clamped.startX if isClamped else trail.startX, 'endX': trail.clamped.endX if allClamped else trail.endX, } result.parenTrails.append(shortTrail) if result.returnParens: for i in range(len(openers)): openers[i].closer['trail'] = shortTrail def updateRememberedParenTrail(result): if result.parenTrails: trail = result.parenTrails[-1] if trail['lineNo'] != result.parenTrail.lineNo: rememberParenTrail(result) else: trail['endX'] = result.parenTrail.endX if result.returnParens: opener = result.parenTrail.openers[-1] opener.closer['trail'] = trail else: rememberParenTrail(result) def finishNewParenTrail(result): if result.isInStr: invalidateParenTrail(result) elif result.mode == INDENT_MODE: clampParenTrailToCursor(result) popParenTrail(result) elif result.mode == PAREN_MODE: setMaxIndent(result, peek(result.parenTrail.openers, 0)) if result.lineNo != result.cursorLine: 
cleanParenTrail(result) rememberParenTrail(result) #------------------------------------------------------------------------------- # Indentation defs #------------------------------------------------------------------------------- def addIndent(result, delta): origIndent = result.x newIndent = origIndent + delta indentStr = BLANK_SPACE*newIndent replaceWithinLine(result, result.lineNo, 0, origIndent, indentStr) result.x = newIndent result.indentX = newIndent result.indentDelta += delta def shouldAddOpenerIndent(result, opener): # Don't add opener.indentDelta if the user already added it. # (happens when multiple lines are indented together) return opener.indentDelta != result.indentDelta def correctIndent(result): origIndent = result.x newIndent = origIndent minIndent = 0 maxIndent = result.maxIndent opener = peek(result.parenStack, 0) if opener: minIndent = opener.x + 1 maxIndent = opener.maxChildIndent if shouldAddOpenerIndent(result, opener): newIndent += opener.indentDelta newIndent = clamp(newIndent, minIndent, maxIndent) if newIndent != origIndent: addIndent(result, newIndent - origIndent) def onIndent(result): result.indentX = result.x result.trackingIndent = False if result.quoteDanger: raise error(result, ERROR_QUOTE_DANGER) if result.mode == INDENT_MODE: correctParenTrail(result, result.x) opener = peek(result.parenStack, 0) if opener and shouldAddOpenerIndent(result, opener): addIndent(result, opener.indentDelta) elif result.mode == PAREN_MODE: correctIndent(result) def checkLeadingCloseParen(result): if (ERROR_LEADING_CLOSE_PAREN in result.errorPosCache and result.parenTrail.lineNo == result.lineNo): raise error(result, ERROR_LEADING_CLOSE_PAREN) def onLeadingCloseParen(result): if result.mode == INDENT_MODE: if not result.forceBalance: if result.smart: raise ParinferError({'leadingCloseParen': True}) if ERROR_LEADING_CLOSE_PAREN not in result.errorPosCache: cacheErrorPos(result, ERROR_LEADING_CLOSE_PAREN) result.skipChar = True if result.mode == 
PAREN_MODE: if not isValidCloseParen(result.parenStack, result.ch): if result.smart: result.skipChar = True else: raise error(result, ERROR_UNMATCHED_CLOSE_PAREN) elif isCursorLeftOf(result.cursorX, result.cursorLine, result.x, result.lineNo): resetParenTrail(result, result.lineNo, result.x) onIndent(result) else: appendParenTrail(result) result.skipChar = True def onCommentLine(result): parenTrailLength = len(result.parenTrail.openers) # restore the openers matching the previous paren trail if result.mode == PAREN_MODE: for j in range(parenTrailLength): result.parenStack.append(peek(result.parenTrail.openers, j)) i = getParentOpenerIndex(result, result.x) opener = peek(result.parenStack, i) if opener: # shift the comment line based on the parent open paren if shouldAddOpenerIndent(result, opener): addIndent(result, opener.indentDelta) # TODO: store some information here if we need to place close-parens after comment lines # repop the openers matching the previous paren trail if result.mode == PAREN_MODE: for j in range(parenTrailLength): result.parenStack.pop() def checkIndent(result): if result.ch in CLOSE_PARENS: onLeadingCloseParen(result) elif result.ch == result.comment: # comments don't count as indentation points onCommentLine(result) result.trackingIndent = False elif result.ch not in WHITESPACE: onIndent(result) def makeTabStop(result, opener): tabStop = { 'ch': opener.ch, 'x': opener.x, 'lineNo': opener.lineNo } if opener.argX is not None: tabStop['argX'] = opener.argX return tabStop def getTabStopLine(result): return result.selectionStartLine if result.selectionStartLine is not None else result.cursorLine def setTabStops(result): if getTabStopLine(result) != result.lineNo: return for i in range(len(result.parenStack)): result.tabStops.append(makeTabStop(result, result.parenStack[i])) if result.mode == PAREN_MODE: for i in range(len(result.parenTrail.openers)-1, -1, -1): result.tabStops.append(makeTabStop(result, result.parenTrail.openers[i])) # remove 
argX if it falls to the right of the next stop for i in range(1, len(result.tabStops)): x = result.tabStops[i]['x'] if 'argX' in result.tabStops[i-1] and result.tabStops[i-1]['argX'] >= x: del result.tabStops[i-1]['argX'] #------------------------------------------------------------------------------- # High-level processing functions #------------------------------------------------------------------------------- def processChar(result, ch): origCh = ch result.ch = ch result.skipChar = False handleChangeDelta(result) if result.trackingIndent: checkIndent(result) if result.skipChar: result.ch = '' else: onChar(result) # commitChar(result, origCh) ch = result.ch if origCh != ch: replaceWithinLine(result, result.lineNo, result.x, result.x + len(origCh), ch) result.indentDelta -= (len(origCh) - len(ch)) result.x += len(ch) def processLine(result, lineNo): initLine(result) result.lines.append(result.inputLines[lineNo]) setTabStops(result) for x in range(len(result.inputLines[lineNo])): result.inputX = x processChar(result, result.inputLines[lineNo][x]) processChar(result, NEWLINE) if not result.forceBalance: checkUnmatchedOutsideParenTrail(result) checkLeadingCloseParen(result) if result.lineNo == result.parenTrail.lineNo: finishNewParenTrail(result) def finalizeResult(result): if result.quoteDanger: raise error(result, ERROR_QUOTE_DANGER) if result.isInStr: raise error(result, ERROR_UNCLOSED_QUOTE) if len(result.parenStack) != 0: if result.mode == PAREN_MODE: raise error(result, ERROR_UNCLOSED_PAREN) if result.mode == INDENT_MODE: initLine(result) onIndent(result) result.success = True def processError(result, e): result.success = False if 'parinferError' in e: del e['parinferError'] result.error = e else: result.error.name = ERROR_UNHANDLED result.error.message = e.stack raise e def processText(text, options, mode, smart=False): result = Result(text, options, mode, smart) try: for i in range(len(result.inputLines)): result.inputLineNo = i processLine(result, i) 
finalizeResult(result) except ParinferError as e: errorDetails = e.args[0] if 'leadingCloseParen' in errorDetails or 'releaseCursorHold' in errorDetails: assert mode != PAREN_MODE return processText(text, options, PAREN_MODE, smart) processError(result, errorDetails) return result #------------------------------------------------------------------------------- # Public API #------------------------------------------------------------------------------- def publicResult(result): lineEnding = getLineEnding(result.origText) if result.success: final = { 'text': lineEnding.join(result.lines), 'cursorX': result.cursorX, 'cursorLine': result.cursorLine, 'success': True, 'tabStops': result.tabStops, 'parenTrails': result.parenTrails } if result.returnParens: final['parens'] = result.parens else: final = { 'text': lineEnding.join(result.lines) if result.partialResult else result.origText, 'cursorX': result.cursorX if result.partialResult else result.origCursorX, 'cursorLine': result.cursorLine if result.partialResult else result.origCursorLine, 'parenTrails': result.parenTrails if result.partialResult else None, 'success': False, 'error': result.error } if result.partialResult and result.returnParens: final['parens'] = result.parens if final['cursorX'] is None: del final['cursorX'] if final['cursorLine'] is None: del final['cursorLine'] if 'tabStops' in final and len(final['tabStops']) == 0: del final['tabStops'] return final def indent_mode(text, options): return publicResult(processText(text, options, INDENT_MODE)) def paren_mode(text, options): return publicResult(processText(text, options, PAREN_MODE)) def smart_mode(text, options): smart = False if isinstance(options, dict): smart = 'selectionStartLine' not in options or options['selectionStartLine'] is None return publicResult(processText(text, options, INDENT_MODE, smart)) API = { 'version': '3.12.0', 'indent_mode': indent_mode, 'paren_mode': paren_mode, 'smart_mode': smart_mode }
33,963
393
1,676
4453f078b7d3b2fd4ad81bf82d4df1d74c857f5d
392
py
Python
testemunhoweb/cadastro/migrations/0010_auto_20181223_2252.py
danielcamilo13/testemunhoWEB
46825e31123058fa6ee21e4e71e9e0bedde32bb4
[ "bzip2-1.0.6" ]
1
2019-12-03T01:37:13.000Z
2019-12-03T01:37:13.000Z
testemunhoweb/cadastro/migrations/0010_auto_20181223_2252.py
danielcamilo13/testemunhoWEB
46825e31123058fa6ee21e4e71e9e0bedde32bb4
[ "bzip2-1.0.6" ]
11
2020-06-06T01:28:35.000Z
2022-03-12T00:16:34.000Z
testemunhoweb/cadastro/migrations/0010_auto_20181223_2252.py
danielcamilo13/testemunhoWEB
46825e31123058fa6ee21e4e71e9e0bedde32bb4
[ "bzip2-1.0.6" ]
null
null
null
# -*- coding: utf-8 -*- # Generated by Django 1.11.17 on 2018-12-24 00:52 from __future__ import unicode_literals from django.db import migrations
19.6
49
0.612245
# -*- coding: utf-8 -*- # Generated by Django 1.11.17 on 2018-12-24 00:52 from __future__ import unicode_literals from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('cadastro', '0009_auto_20181215_0042'), ] operations = [ migrations.RenameModel( old_name='dia', new_name='dias', ), ]
0
220
23
0769c7ed667e125bb1a20f8fc98707bcd2e4d8af
3,435
py
Python
custom_components/nest_protect/select.py
rubicon/ha-nest-protect
4f6ddf12e8384472eff29274c21462b028fe7f85
[ "MIT" ]
78
2022-01-15T23:08:29.000Z
2022-03-31T19:58:17.000Z
custom_components/nest_protect/select.py
rubicon/ha-nest-protect
4f6ddf12e8384472eff29274c21462b028fe7f85
[ "MIT" ]
33
2022-01-15T20:03:18.000Z
2022-03-31T23:38:34.000Z
custom_components/nest_protect/select.py
rubicon/ha-nest-protect
4f6ddf12e8384472eff29274c21462b028fe7f85
[ "MIT" ]
10
2022-01-29T23:14:10.000Z
2022-03-22T12:33:52.000Z
"""Select platform for Nest Protect.""" from __future__ import annotations from dataclasses import dataclass from homeassistant.components.select import SelectEntity, SelectEntityDescription from homeassistant.helpers.entity import EntityCategory from . import HomeAssistantNestProtectData from .const import DOMAIN, LOGGER from .entity import NestDescriptiveEntity, NestProtectDeviceClass @dataclass class NestProtectSelectDescriptionMixin: """Define an entity description mixin for select entities.""" options: list[str] # select_option: Callable[[str, Callable[..., Awaitable[None]]], Awaitable[None]] @dataclass class NestProtectSelectDescription( SelectEntityDescription, NestProtectSelectDescriptionMixin ): """Class to describe an Nest Protect sensor.""" BRIGHTNESS_TO_PRESET: dict[str, str] = {1: "low", 2: "medium", 3: "high"} PRESET_TO_BRIGHTNESS = {v: k for k, v in BRIGHTNESS_TO_PRESET.items()} SENSOR_DESCRIPTIONS: list[SelectEntityDescription] = [ NestProtectSelectDescription( key="night_light_brightness", name="Brightness", icon="mdi:lightbulb-on", options=[*PRESET_TO_BRIGHTNESS], entity_category=EntityCategory.CONFIG, device_class=NestProtectDeviceClass.NIGHT_LIGHT_BRIGHTNESS, ), ] async def async_setup_entry(hass, entry, async_add_devices): """Set up the Nest Protect sensors from a config entry.""" data: HomeAssistantNestProtectData = hass.data[DOMAIN][entry.entry_id] entities: list[NestProtectSelect] = [] SUPPORTED_KEYS = { description.key: description for description in SENSOR_DESCRIPTIONS } for device in data.devices.values(): for key in device.value: if description := SUPPORTED_KEYS.get(key): entities.append( NestProtectSelect(device, description, data.areas, data.client) ) async_add_devices(entities) class NestProtectSelect(NestDescriptiveEntity, SelectEntity): """Representation of a Nest Protect Select.""" entity_description: NestProtectSelectDescription @property def current_option(self) -> str: """Return the selected entity option to represent the entity 
state.""" state = self.bucket.value.get(self.entity_description.key) return BRIGHTNESS_TO_PRESET.get(state) @property def options(self) -> list[str]: """Return a set of selectable options.""" return self.entity_description.options async def async_select_option(self, option: str) -> None: """Change the selected option.""" select = PRESET_TO_BRIGHTNESS.get(option) objects = [ { "object_key": self.bucket.object_key, "op": "MERGE", "value": { self.entity_description.key: select, }, } ] if not self.client.nest_session or self.client.nest_session.is_expired(): if not self.client.auth or self.client.auth.is_expired(): await self.client.get_access_token() await self.client.authenticate(self.client.auth.access_token) result = await self.client.update_objects( self.client.nest_session.access_token, self.client.nest_session.userid, self.client.transport_url, objects, ) LOGGER.debug(result)
30.945946
85
0.676274
"""Select platform for Nest Protect.""" from __future__ import annotations from dataclasses import dataclass from homeassistant.components.select import SelectEntity, SelectEntityDescription from homeassistant.helpers.entity import EntityCategory from . import HomeAssistantNestProtectData from .const import DOMAIN, LOGGER from .entity import NestDescriptiveEntity, NestProtectDeviceClass @dataclass class NestProtectSelectDescriptionMixin: """Define an entity description mixin for select entities.""" options: list[str] # select_option: Callable[[str, Callable[..., Awaitable[None]]], Awaitable[None]] @dataclass class NestProtectSelectDescription( SelectEntityDescription, NestProtectSelectDescriptionMixin ): """Class to describe an Nest Protect sensor.""" BRIGHTNESS_TO_PRESET: dict[str, str] = {1: "low", 2: "medium", 3: "high"} PRESET_TO_BRIGHTNESS = {v: k for k, v in BRIGHTNESS_TO_PRESET.items()} SENSOR_DESCRIPTIONS: list[SelectEntityDescription] = [ NestProtectSelectDescription( key="night_light_brightness", name="Brightness", icon="mdi:lightbulb-on", options=[*PRESET_TO_BRIGHTNESS], entity_category=EntityCategory.CONFIG, device_class=NestProtectDeviceClass.NIGHT_LIGHT_BRIGHTNESS, ), ] async def async_setup_entry(hass, entry, async_add_devices): """Set up the Nest Protect sensors from a config entry.""" data: HomeAssistantNestProtectData = hass.data[DOMAIN][entry.entry_id] entities: list[NestProtectSelect] = [] SUPPORTED_KEYS = { description.key: description for description in SENSOR_DESCRIPTIONS } for device in data.devices.values(): for key in device.value: if description := SUPPORTED_KEYS.get(key): entities.append( NestProtectSelect(device, description, data.areas, data.client) ) async_add_devices(entities) class NestProtectSelect(NestDescriptiveEntity, SelectEntity): """Representation of a Nest Protect Select.""" entity_description: NestProtectSelectDescription @property def current_option(self) -> str: """Return the selected entity option to represent the entity 
state.""" state = self.bucket.value.get(self.entity_description.key) return BRIGHTNESS_TO_PRESET.get(state) @property def options(self) -> list[str]: """Return a set of selectable options.""" return self.entity_description.options async def async_select_option(self, option: str) -> None: """Change the selected option.""" select = PRESET_TO_BRIGHTNESS.get(option) objects = [ { "object_key": self.bucket.object_key, "op": "MERGE", "value": { self.entity_description.key: select, }, } ] if not self.client.nest_session or self.client.nest_session.is_expired(): if not self.client.auth or self.client.auth.is_expired(): await self.client.get_access_token() await self.client.authenticate(self.client.auth.access_token) result = await self.client.update_objects( self.client.nest_session.access_token, self.client.nest_session.userid, self.client.transport_url, objects, ) LOGGER.debug(result)
0
0
0
d95dd702b9c2c9ec191644d22f8b6dfb6585324e
996
py
Python
1Dec/listComprehension.py
universekavish/Python-Training
ccd7dfbc8802662de0e0fc20fe99bb3aae4c6e18
[ "Apache-2.0" ]
null
null
null
1Dec/listComprehension.py
universekavish/Python-Training
ccd7dfbc8802662de0e0fc20fe99bb3aae4c6e18
[ "Apache-2.0" ]
null
null
null
1Dec/listComprehension.py
universekavish/Python-Training
ccd7dfbc8802662de0e0fc20fe99bb3aae4c6e18
[ "Apache-2.0" ]
null
null
null
L = [10, 11, 12, 13, 14, 15, 16, 17, 18] print(L) L1 = [x for x in L if x % 2 == 0] print(L1) ip = '10.20.30.40' octets = ip.split('.') print(octets) octets = [int(octet) for octet in octets] print(octets) import random primesPositions = [p for p in range(100) if isPrime(random.randint(1, 1000))] print(primesPositions) #list of 10 matrices matrices = [Matrix(5, 5, [[random.randint(1, 100) for i in range(5)] for j in range(5)]) for k in range(10)] #print(matrices) for matrix in matrices : print(matrix) print(matrix.get_order())
21.652174
108
0.629518
L = [10, 11, 12, 13, 14, 15, 16, 17, 18] print(L) L1 = [x for x in L if x % 2 == 0] print(L1) ip = '10.20.30.40' octets = ip.split('.') print(octets) octets = [int(octet) for octet in octets] print(octets) def isPrime(n) : if n < 2 : return False i = 2 while i < n : if n % i == 0 : return False i = i + 1 return True import random primesPositions = [p for p in range(100) if isPrime(random.randint(1, 1000))] print(primesPositions) class Matrix : def __init__(self, r, c, data) : self.rows = r self.cols = c self.data = data def __str__(self) : #return 'Matrix({0}, {1}, {2})'.format(self.rows, self.cols, self.data) return '\n'.join([' '.join([str(x) for x in row]) for row in self.data]) def get_order(self) : return self.rows, self.cols #list of 10 matrices matrices = [Matrix(5, 5, [[random.randint(1, 100) for i in range(5)] for j in range(5)]) for k in range(10)] #print(matrices) for matrix in matrices : print(matrix) print(matrix.get_order())
338
-7
121
2f08724de9f8343f814b4344a568f661f600795c
232
py
Python
model/model_builder.py
chansoopark98/Ear-detection
19d402344ffe5101928af6a5c0149fc147b1d847
[ "MIT" ]
null
null
null
model/model_builder.py
chansoopark98/Ear-detection
19d402344ffe5101928af6a5c0149fc147b1d847
[ "MIT" ]
null
null
null
model/model_builder.py
chansoopark98/Ear-detection
19d402344ffe5101928af6a5c0149fc147b1d847
[ "MIT" ]
null
null
null
from tensorflow import keras from model.model import csnet_extra_model
29
61
0.788793
from tensorflow import keras from model.model import csnet_extra_model def model_build(image_size=224): inputs, output = csnet_extra_model(IMAGE_SIZE=image_size) model = keras.Model(inputs, outputs=output) return model
138
0
23
be878f796efbc4d57181dd35b507d33001bf2c79
1,665
py
Python
tests/test_plugins.py
mikegribov/filedjson
ee9f8408edcf8a72b8ed415237789a602ee6b579
[ "MIT" ]
null
null
null
tests/test_plugins.py
mikegribov/filedjson
ee9f8408edcf8a72b8ed415237789a602ee6b579
[ "MIT" ]
null
null
null
tests/test_plugins.py
mikegribov/filedjson
ee9f8408edcf8a72b8ed415237789a602ee6b579
[ "MIT" ]
null
null
null
import os import traceback from ..xjson import XJson
23.125
80
0.717718
import os import traceback from ..xjson import XJson def check_same_structure(name, ext): json = XJson(os.path.join("examples", "single_file_" + name + ".json")) plugin = XJson(os.path.join("examples", "single_file_" + name + "." + ext)) assert json.structure != {} and json.structure == plugin.structure def check_same_structure_by_stack(): stack = traceback.extract_stack() name = stack[-2][2] args = name.split('_') check_same_structure("_".join(args[2:]), args[1]) def test_xjson_arr(): check_same_structure_by_stack() def test_xjson_arr_arr(): check_same_structure_by_stack() def test_xjson_arr_obj(): check_same_structure_by_stack() def test_xjson_obj(): check_same_structure_by_stack() def test_xjson_obj_arr(): check_same_structure_by_stack() def test_xjson_obj_obj(): check_same_structure_by_stack() def test_yaml_arr(): check_same_structure_by_stack() def test_yaml_arr_arr(): check_same_structure_by_stack() def test_yaml_arr_obj(): check_same_structure_by_stack() def test_yaml_obj(): check_same_structure_by_stack() def test_yaml_obj_arr(): check_same_structure_by_stack() def test_yaml_obj_obj(): check_same_structure_by_stack() def test_xml_arr(): check_same_structure_by_stack() def test_xml_arr_arr(): check_same_structure_by_stack() def test_xml_arr_obj(): check_same_structure_by_stack() def test_xml_obj(): check_same_structure_by_stack() def test_xml_obj_arr(): check_same_structure_by_stack() def test_xml_obj_obj(): check_same_structure_by_stack()
1,103
0
499
cc313e270c4f537f465b4dd917ff496c05ef36f3
3,175
py
Python
awwards/migrations/0001_initial.py
Nkurumwa/API-project
452c3c259f46ab60d62f47d740a2eb15db694768
[ "MIT" ]
null
null
null
awwards/migrations/0001_initial.py
Nkurumwa/API-project
452c3c259f46ab60d62f47d740a2eb15db694768
[ "MIT" ]
3
2020-06-06T00:10:58.000Z
2021-06-10T22:29:21.000Z
awwards/migrations/0001_initial.py
Nkurumwa/API-project
452c3c259f46ab60d62f47d740a2eb15db694768
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- # Generated by Django 1.11.26 on 2019-11-25 21:03 from __future__ import unicode_literals from django.conf import settings import django.core.validators from django.db import migrations, models import django.db.models.deletion import django.utils.timezone
46.014493
129
0.607559
# -*- coding: utf-8 -*- # Generated by Django 1.11.26 on 2019-11-25 21:03 from __future__ import unicode_literals from django.conf import settings import django.core.validators from django.db import migrations, models import django.db.models.deletion import django.utils.timezone class Migration(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='Profile', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('profile_pic', models.ImageField(default='default.jpg', upload_to='profile_pics')), ('bio', models.TextField(max_length=100)), ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), migrations.CreateModel( name='Project', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('title', models.CharField(max_length=30)), ('image', models.ImageField(upload_to='project_images/')), ('design', models.IntegerField(default=0)), ('usability', models.IntegerField(default=0)), ('content', models.IntegerField(default=0)), ('description', models.TextField(max_length=250)), ('link', models.URLField(max_length=60)), ('date_posted', models.DateTimeField(default=django.utils.timezone.now)), ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], options={ 'ordering': ['-date_posted'], }, ), migrations.CreateModel( name='Rate', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('design', models.PositiveIntegerField(default=0, validators=[django.core.validators.MaxValueValidator(10)])), ('usability', models.PositiveIntegerField(default=0, validators=[django.core.validators.MaxValueValidator(10)])), ('content', models.PositiveIntegerField(default=0, 
validators=[django.core.validators.MaxValueValidator(10)])), ('project', models.IntegerField(default=0)), ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), migrations.CreateModel( name='Review', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('review', models.TextField(max_length=250)), ('profile_id', models.IntegerField(default=0)), ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), ]
0
2,870
23
15622877fb0c7c997c9035ab265bc41c3504cd1d
1,390
py
Python
python_codes/generators_.py
naikshubham/Miscellaneous-ML-and-Python
a7be5cf5ac95085d3becc554593ee45f93f7850f
[ "BSD-2-Clause" ]
1
2021-06-15T09:57:47.000Z
2021-06-15T09:57:47.000Z
python_codes/generators_.py
naikshubham/Miscellaneous-ML-and-Python
a7be5cf5ac95085d3becc554593ee45f93f7850f
[ "BSD-2-Clause" ]
null
null
null
python_codes/generators_.py
naikshubham/Miscellaneous-ML-and-Python
a7be5cf5ac95085d3becc554593ee45f93f7850f
[ "BSD-2-Clause" ]
null
null
null
print(join(capitalize("this will be uppercase text"))) print(join(leetspeak("This isn't a leetspeak"))) print(join(hypenate("will be hypenated by letters"))) print(join(hypenate("will be hypenated by words".split()))) # next() and send() free = psychologist() print('free->', free) print('next->', next(free)) print("send->", free.send("I feel bad")) print("send->", free.send("Why i shouldnt ?")) print("send ->", free.send("ok then i should find what is good for me"))
26.730769
72
0.570504
def capitalize(values): # print(values) for value in values: # print(value) yield value.upper() def hypenate(values): # print(values) for value in values: yield f"-{value}-" def leetspeak(values): for value in values: if value in {'t', 'T'}: yield '7' elif value in {'e', 'E'}: yield '3' else: yield value def join(values): print(values) return "".join(values) print(join(capitalize("this will be uppercase text"))) print(join(leetspeak("This isn't a leetspeak"))) print(join(hypenate("will be hypenated by letters"))) print(join(hypenate("will be hypenated by words".split()))) # next() and send() def psychologist(): print("Please tell me your problems") while True: answer = (yield) print("answer->", answer) if answer is not None: if answer.endswith('?'): print("Dont ask yourself too much questions") elif 'good' in answer: print("That's good, go on") elif 'bad' in answer: print('Dont be so negative') free = psychologist() print('free->', free) print('next->', next(free)) print("send->", free.send("I feel bad")) print("send->", free.send("Why i shouldnt ?")) print("send ->", free.send("ok then i should find what is good for me"))
788
0
114
0c0b703bd0d03b86650c5ce280e2f3295cc83afa
4,897
py
Python
mri/dispatch/MatplotlibDispatch.py
Mri-monitoring/Mri-python-client
7ff4889c198d22b9d78b7f0ba8711efa71aef16b
[ "Apache-2.0" ]
4
2015-07-16T16:41:50.000Z
2015-08-13T09:53:30.000Z
mri/dispatch/MatplotlibDispatch.py
Mri-monitoring/Mri-python-client
7ff4889c198d22b9d78b7f0ba8711efa71aef16b
[ "Apache-2.0" ]
4
2015-07-16T16:09:37.000Z
2015-08-18T19:38:19.000Z
mri/dispatch/MatplotlibDispatch.py
Mri-monitoring/Mri-python-client
7ff4889c198d22b9d78b7f0ba8711efa71aef16b
[ "Apache-2.0" ]
null
null
null
from __future__ import unicode_literals from __future__ import print_function from __future__ import division from __future__ import absolute_import from builtins import super from future import standard_library standard_library.install_aliases() import logging import os import errno from .BaseDispatch import BaseDispatch try: import matplotlib.pyplot as plt import numpy as np IMPORTED = True except Exception as e: logging.warning('Failed to import numpy or matplotlib. Are you sure they are properly installed?') logging.warning('You can ignore this warning if you do not plan to use Matplotlib') logging.warning(e) IMPORTED = False class MatplotlibDispatch(BaseDispatch): """Display events via Matplotlib backend. This class requires some heavy dependencies, and so trying to run it without Matplotlib and Numpy installed will result in pass-thru behavior Arguments --------- task_params : dict Dictionary of the task json specification, including name and ID number img_folder : string Folder to save output images to """ def train_event(self, event): """Plot a basic training and testing curve via Matplotlib Arguments --------- event : TrainingEvent.TrainingEvent Event to add to Matplotlib plot """ if IMPORTED: super().train_event(event) time = event.attributes[event.time_axis] for item in event.attributes: if item != event.time_axis: val = event.attributes[item] self._data[item].append([time, val]) # Convert to numpy arrays np_data = [] mins = {} maxes = {} for key in self._data: if self._data[key]: data = np.array(self._data[key]) mins[key] = np.min(data, axis=0)[1] maxes[key] = np.max(data, axis=0)[1] np_data.append(data[:, 0]) np_data.append(data[:, 1]) plt.clf() plt.plot(*np_data) self._legend_keys = [] for k in self._data.keys(): text = "{} (".format(k.title()) if k in maxes: text += "Max: {:0.4f} ".format(float(maxes[k])) if k in mins: text += "Min: {:0.4f}".format(float(mins[k])) text += ")" self._legend_keys.append(text) ax = plt.gca() box = ax.get_position() 
ax.set_position([box.x0, box.y0 + box.height * 0.1, box.width, box.height*0.9]) plt.legend(self._legend_keys, bbox_to_anchor=(0.5, -0.05), loc='upper center', ncol=2, borderaxespad=0.) plt.title(self.task_params['title']) plt.grid(True, which='both') plt.draw() else: logging.error('Improper requirements, skipping train event') def train_finish(self): """Save our output figure to PNG format, as defined by the save path `img_folder`""" if IMPORTED: filename = self.task_params['title'].replace(' ', '_') save_path = os.path.join(self._img_folder, filename) logging.info('Finished training! Saving output image to {0}'.format(save_path)) logging.info('\'{}\' Final Extremes: {}'.format(self.task_params['title'], self._legend_keys)) try: fold = os.path.basename(self._img_folder) logging.info("Creating folder {}".format(fold)) os.makedirs(fold) except OSError as e: if e.errno != errno.EEXIST: raise plt.savefig(save_path, bbox_inches='tight', format='png') plt.close() else: logging.error('Improper requirements, skipping train finish')
36.544776
109
0.559526
from __future__ import unicode_literals from __future__ import print_function from __future__ import division from __future__ import absolute_import from builtins import super from future import standard_library standard_library.install_aliases() import logging import os import errno from .BaseDispatch import BaseDispatch try: import matplotlib.pyplot as plt import numpy as np IMPORTED = True except Exception as e: logging.warning('Failed to import numpy or matplotlib. Are you sure they are properly installed?') logging.warning('You can ignore this warning if you do not plan to use Matplotlib') logging.warning(e) IMPORTED = False class MatplotlibDispatch(BaseDispatch): """Display events via Matplotlib backend. This class requires some heavy dependencies, and so trying to run it without Matplotlib and Numpy installed will result in pass-thru behavior Arguments --------- task_params : dict Dictionary of the task json specification, including name and ID number img_folder : string Folder to save output images to """ def __init__(self, task_params, img_folder): super().__init__() # Data will be a dictionary of lists self._data = {} self.task_params = task_params self._img_folder = img_folder self._legend_keys = [] def setup_display(self, time_axis, attributes, show_windows=False): if IMPORTED: super().setup_display(time_axis, attributes) # Setup data for item in self._attributes: if item != self._time_axis: self._data[item] = [] # Setup plotting plt.figure(figsize=(12, 10)) if show_windows: plt.ion() plt.show() else: logging.error('You need Matplotlib and Numpy to run the MatplotlibDispatch, please install them') def train_event(self, event): """Plot a basic training and testing curve via Matplotlib Arguments --------- event : TrainingEvent.TrainingEvent Event to add to Matplotlib plot """ if IMPORTED: super().train_event(event) time = event.attributes[event.time_axis] for item in event.attributes: if item != event.time_axis: val = event.attributes[item] 
self._data[item].append([time, val]) # Convert to numpy arrays np_data = [] mins = {} maxes = {} for key in self._data: if self._data[key]: data = np.array(self._data[key]) mins[key] = np.min(data, axis=0)[1] maxes[key] = np.max(data, axis=0)[1] np_data.append(data[:, 0]) np_data.append(data[:, 1]) plt.clf() plt.plot(*np_data) self._legend_keys = [] for k in self._data.keys(): text = "{} (".format(k.title()) if k in maxes: text += "Max: {:0.4f} ".format(float(maxes[k])) if k in mins: text += "Min: {:0.4f}".format(float(mins[k])) text += ")" self._legend_keys.append(text) ax = plt.gca() box = ax.get_position() ax.set_position([box.x0, box.y0 + box.height * 0.1, box.width, box.height*0.9]) plt.legend(self._legend_keys, bbox_to_anchor=(0.5, -0.05), loc='upper center', ncol=2, borderaxespad=0.) plt.title(self.task_params['title']) plt.grid(True, which='both') plt.draw() else: logging.error('Improper requirements, skipping train event') def train_finish(self): """Save our output figure to PNG format, as defined by the save path `img_folder`""" if IMPORTED: filename = self.task_params['title'].replace(' ', '_') save_path = os.path.join(self._img_folder, filename) logging.info('Finished training! Saving output image to {0}'.format(save_path)) logging.info('\'{}\' Final Extremes: {}'.format(self.task_params['title'], self._legend_keys)) try: fold = os.path.basename(self._img_folder) logging.info("Creating folder {}".format(fold)) os.makedirs(fold) except OSError as e: if e.errno != errno.EEXIST: raise plt.savefig(save_path, bbox_inches='tight', format='png') plt.close() else: logging.error('Improper requirements, skipping train finish')
780
0
53
c8beb2fef2a90dce4e6cacc257c60abd209a38cc
2,186
py
Python
prototype/speed/rpcsize.py
ooici/pyon
122c629290d27f32f2f41dafd5c12469295e8acf
[ "BSD-2-Clause" ]
2
2015-06-09T16:07:09.000Z
2015-07-28T10:06:31.000Z
prototype/speed/rpcsize.py
ooici/pyon
122c629290d27f32f2f41dafd5c12469295e8acf
[ "BSD-2-Clause" ]
3
2020-07-22T15:14:55.000Z
2021-12-13T19:35:06.000Z
prototype/speed/rpcsize.py
ooici/pyon
122c629290d27f32f2f41dafd5c12469295e8acf
[ "BSD-2-Clause" ]
null
null
null
#!/usr/bin/env python from pyon.net.endpoint import RPCClient #from interface.services.idatastore_service import IDatastoreService from interface.services.ihello_service import IHelloService from pyon.net.messaging import make_node import gevent import time import base64 import os import argparse import msgpack parser = argparse.ArgumentParser() parser.add_argument('-d', '--datasize', type=int, help='Maximum size of data in bytes') parser.add_argument('-m', '--msgpack', action='store_true', help='Encode data with msgpack') parser.set_defaults(datasize=1024*1024, parallel=1) opts = parser.parse_args() node,iowat=make_node() #dsclient = RPCClient(node=node, name="datastore", iface=IDatastoreService) hsclient = RPCClient(node=node, name="hello", iface=IHelloService) node.client.add_backpressure_callback(notif) node.client.set_backpressure_multiplier(2) # make data (bytes) DATA_SIZE = opts.datasize # base64 encoding wastes a lot of space, truncate it at the exact data size we requested data = base64.urlsafe_b64encode(os.urandom(DATA_SIZE))[:DATA_SIZE] if opts.msgpack: data = msgpack.dumps(data) counter = 0 st = 0 #_gt = gevent.spawn(tick) results = {} for size in [1024, 2048, 4096, 8192, 16384, 32768, 65536, 131072, 262144]: _gl = gevent.spawn(work, size) try: rs = _gl.get(timeout=10) except gevent.Timeout: print "10s elapsed, cutting it" rs = time.time() - st results[size] = { "elapsed": rs, "count": counter, "ps":counter/rs } print "Size:", size, str(results[size]) import pprint pprint.pprint(results)
25.717647
92
0.68893
#!/usr/bin/env python from pyon.net.endpoint import RPCClient #from interface.services.idatastore_service import IDatastoreService from interface.services.ihello_service import IHelloService from pyon.net.messaging import make_node import gevent import time import base64 import os import argparse import msgpack parser = argparse.ArgumentParser() parser.add_argument('-d', '--datasize', type=int, help='Maximum size of data in bytes') parser.add_argument('-m', '--msgpack', action='store_true', help='Encode data with msgpack') parser.set_defaults(datasize=1024*1024, parallel=1) opts = parser.parse_args() node,iowat=make_node() #dsclient = RPCClient(node=node, name="datastore", iface=IDatastoreService) hsclient = RPCClient(node=node, name="hello", iface=IHelloService) def notif(*args, **kwargs): print "GOT A BACKPRESSURE NOTICE", str(args), str(kwargs) node.client.add_backpressure_callback(notif) node.client.set_backpressure_multiplier(2) # make data (bytes) DATA_SIZE = opts.datasize # base64 encoding wastes a lot of space, truncate it at the exact data size we requested data = base64.urlsafe_b64encode(os.urandom(DATA_SIZE))[:DATA_SIZE] if opts.msgpack: data = msgpack.dumps(data) counter = 0 st = 0 def tick(): global counter, st while True: time.sleep(2) ct = time.time() elapsed_s = ct - st sc = sum(counter) mps = sc / elapsed_s print counter, sc, "requests, per sec:", mps def work(ds): curdata = data[:ds] global counter global st counter = 0 st = time.time() while counter < 1000: hsclient.noop(curdata) #hsclient.hello(str(counter[wid])) counter += 1 et = time.time() return et - st #_gt = gevent.spawn(tick) results = {} for size in [1024, 2048, 4096, 8192, 16384, 32768, 65536, 131072, 262144]: _gl = gevent.spawn(work, size) try: rs = _gl.get(timeout=10) except gevent.Timeout: print "10s elapsed, cutting it" rs = time.time() - st results[size] = { "elapsed": rs, "count": counter, "ps":counter/rs } print "Size:", size, str(results[size]) import pprint 
pprint.pprint(results)
531
0
69
7001a2aafe77e80e908cb64a736dd79539290346
7,957
py
Python
packages/connections/gym/connection.py
mattmcd/aea-examples
1c72faadf8feb9f4b8dda9f17995010c2fb3510e
[ "Apache-2.0" ]
null
null
null
packages/connections/gym/connection.py
mattmcd/aea-examples
1c72faadf8feb9f4b8dda9f17995010c2fb3510e
[ "Apache-2.0" ]
null
null
null
packages/connections/gym/connection.py
mattmcd/aea-examples
1c72faadf8feb9f4b8dda9f17995010c2fb3510e
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- # ------------------------------------------------------------------------------ # # Copyright 2018-2019 Fetch.AI Limited # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # ------------------------------------------------------------------------------ """Gym connector and gym channel.""" import logging import queue import threading from queue import Queue from threading import Thread from typing import Dict, Optional, cast import gym from aea.configurations.base import ConnectionConfig from aea.connections.base import Channel, Connection from aea.helpers.base import locate from aea.mail.base import Envelope from gym_protocol.message import GymMessage from gym_protocol.serialization import GymSerializer logger = logging.getLogger(__name__) """default 'to' field for Gym envelopes.""" DEFAULT_GYM = "gym" class GymChannel(Channel): """A wrapper of the gym environment.""" def __init__(self, public_key: str, gym_env: gym.Env): """Initialize a gym channel.""" self.public_key = public_key self.gym_env = gym_env self._lock = threading.Lock() self._queues = {} # type: Dict[str, Queue] def connect(self) -> Optional[Queue]: """ Connect a public key to the gym. :return: an asynchronous queue, that constitutes the communication channel. """ if self.public_key in self._queues: return None assert len(self._queues.keys()) == 0, "Only one public key can register to a gym." 
q = Queue() # type: Queue self._queues[self.public_key] = q return q def send(self, envelope: Envelope) -> None: """ Process the envelopes to the gym. :return: None """ sender = envelope.sender logger.debug("Processing message from {}: {}".format(sender, envelope)) self._decode_envelope(envelope) def _decode_envelope(self, envelope: Envelope) -> None: """ Decode the envelope. :param envelope: the envelope :return: None """ if envelope.protocol_id == "gym": self.handle_gym_message(envelope) else: raise ValueError('This protocol is not valid for gym.') def handle_gym_message(self, envelope: Envelope) -> None: """ Forward a message to gym. :param envelope: the envelope :return: None """ gym_message = GymSerializer().decode(envelope.message) performative = gym_message.get("performative") if GymMessage.Performative(performative) == GymMessage.Performative.ACT: action = gym_message.get("action") step_id = gym_message.get("step_id") observation, reward, done, info = self.gym_env.step(action) # type: ignore msg = GymMessage(performative=GymMessage.Performative.PERCEPT, observation=observation, reward=reward, done=done, info=info, step_id=step_id) msg_bytes = GymSerializer().encode(msg) envelope = Envelope(to=envelope.sender, sender=DEFAULT_GYM, protocol_id=GymMessage.protocol_id, message=msg_bytes) self._send(envelope) elif GymMessage.Performative(performative) == GymMessage.Performative.RESET: self.gym_env.reset() # type: ignore elif GymMessage.Performative(performative) == GymMessage.Performative.CLOSE: self.gym_env.close() # type: ignore def _send(self, envelope: Envelope) -> None: """Send a message. :param envelope: the envelope :return: None """ destination = envelope.to self._queues[destination].put_nowait(envelope) def disconnect(self) -> None: """ Disconnect. 
:return: None """ with self._lock: self._queues.pop(self.public_key, None) class GymConnection(Connection): """Proxy to the functionality of the gym.""" def __init__(self, public_key: str, gym_env: gym.Env): """ Initialize a connection to a local gym environment. :param public_key: the public key used in the protocols. :param gym: the gym environment. """ super().__init__() self.public_key = public_key self.channel = GymChannel(public_key, gym_env) self._connection = None # type: Optional[Queue] self._stopped = True self.in_thread = None # type: Optional[Thread] self.out_thread = None # type: Optional[Thread] def _fetch(self) -> None: """ Fetch the envelopes from the outqueue and send them. :return: None """ while not self._stopped: try: envelope = self.out_queue.get(block=True, timeout=2.0) self.send(envelope) except queue.Empty: pass def _receive_loop(self) -> None: """ Receive messages. :return: None """ assert self._connection is not None, "Call connect before calling _receive_loop." while not self._stopped: try: data = self._connection.get(timeout=2.0) self.in_queue.put_nowait(data) except queue.Empty: pass @property def is_established(self) -> bool: """Return True if the connection has been established, False otherwise.""" return self._connection is not None def connect(self) -> None: """ Connect to the gym. :return: None """ if self._stopped: self._stopped = False self._connection = self.channel.connect() self.in_thread = Thread(target=self._receive_loop) self.out_thread = Thread(target=self._fetch) self.in_thread.start() self.out_thread.start() def disconnect(self) -> None: """ Disconnect from the gym. :return: None """ assert self.in_thread is not None, "Call connect before disconnect." assert self.out_thread is not None, "Call connect before disconnect." 
if not self._stopped: self._stopped = True self.in_thread.join() self.out_thread.join() self.in_thread = None self.out_thread = None self.channel.disconnect() self.stop() def send(self, envelope: Envelope) -> None: """ Send an envelope. :param envelope: the envelop :return: None """ if not self.is_established: raise ConnectionError("Connection not established yet. Please use 'connect()'.") self.channel.send(envelope) def stop(self) -> None: """ Tear down the connection. :return: None """ self._connection = None @classmethod def from_config(cls, public_key: str, connection_configuration: ConnectionConfig) -> 'Connection': """ Get the Gym connection from the connection configuration. :param public_key: the public key of the agent. :param connection_configuration: the connection configuration object. :return: the connection object """ gym_env_package = cast(str, connection_configuration.config.get('env')) gym_env = locate(gym_env_package) return GymConnection(public_key, gym_env())
32.214575
153
0.607892
# -*- coding: utf-8 -*- # ------------------------------------------------------------------------------ # # Copyright 2018-2019 Fetch.AI Limited # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # ------------------------------------------------------------------------------ """Gym connector and gym channel.""" import logging import queue import threading from queue import Queue from threading import Thread from typing import Dict, Optional, cast import gym from aea.configurations.base import ConnectionConfig from aea.connections.base import Channel, Connection from aea.helpers.base import locate from aea.mail.base import Envelope from gym_protocol.message import GymMessage from gym_protocol.serialization import GymSerializer logger = logging.getLogger(__name__) """default 'to' field for Gym envelopes.""" DEFAULT_GYM = "gym" class GymChannel(Channel): """A wrapper of the gym environment.""" def __init__(self, public_key: str, gym_env: gym.Env): """Initialize a gym channel.""" self.public_key = public_key self.gym_env = gym_env self._lock = threading.Lock() self._queues = {} # type: Dict[str, Queue] def connect(self) -> Optional[Queue]: """ Connect a public key to the gym. :return: an asynchronous queue, that constitutes the communication channel. """ if self.public_key in self._queues: return None assert len(self._queues.keys()) == 0, "Only one public key can register to a gym." 
q = Queue() # type: Queue self._queues[self.public_key] = q return q def send(self, envelope: Envelope) -> None: """ Process the envelopes to the gym. :return: None """ sender = envelope.sender logger.debug("Processing message from {}: {}".format(sender, envelope)) self._decode_envelope(envelope) def _decode_envelope(self, envelope: Envelope) -> None: """ Decode the envelope. :param envelope: the envelope :return: None """ if envelope.protocol_id == "gym": self.handle_gym_message(envelope) else: raise ValueError('This protocol is not valid for gym.') def handle_gym_message(self, envelope: Envelope) -> None: """ Forward a message to gym. :param envelope: the envelope :return: None """ gym_message = GymSerializer().decode(envelope.message) performative = gym_message.get("performative") if GymMessage.Performative(performative) == GymMessage.Performative.ACT: action = gym_message.get("action") step_id = gym_message.get("step_id") observation, reward, done, info = self.gym_env.step(action) # type: ignore msg = GymMessage(performative=GymMessage.Performative.PERCEPT, observation=observation, reward=reward, done=done, info=info, step_id=step_id) msg_bytes = GymSerializer().encode(msg) envelope = Envelope(to=envelope.sender, sender=DEFAULT_GYM, protocol_id=GymMessage.protocol_id, message=msg_bytes) self._send(envelope) elif GymMessage.Performative(performative) == GymMessage.Performative.RESET: self.gym_env.reset() # type: ignore elif GymMessage.Performative(performative) == GymMessage.Performative.CLOSE: self.gym_env.close() # type: ignore def _send(self, envelope: Envelope) -> None: """Send a message. :param envelope: the envelope :return: None """ destination = envelope.to self._queues[destination].put_nowait(envelope) def disconnect(self) -> None: """ Disconnect. 
:return: None """ with self._lock: self._queues.pop(self.public_key, None) class GymConnection(Connection): """Proxy to the functionality of the gym.""" def __init__(self, public_key: str, gym_env: gym.Env): """ Initialize a connection to a local gym environment. :param public_key: the public key used in the protocols. :param gym: the gym environment. """ super().__init__() self.public_key = public_key self.channel = GymChannel(public_key, gym_env) self._connection = None # type: Optional[Queue] self._stopped = True self.in_thread = None # type: Optional[Thread] self.out_thread = None # type: Optional[Thread] def _fetch(self) -> None: """ Fetch the envelopes from the outqueue and send them. :return: None """ while not self._stopped: try: envelope = self.out_queue.get(block=True, timeout=2.0) self.send(envelope) except queue.Empty: pass def _receive_loop(self) -> None: """ Receive messages. :return: None """ assert self._connection is not None, "Call connect before calling _receive_loop." while not self._stopped: try: data = self._connection.get(timeout=2.0) self.in_queue.put_nowait(data) except queue.Empty: pass @property def is_established(self) -> bool: """Return True if the connection has been established, False otherwise.""" return self._connection is not None def connect(self) -> None: """ Connect to the gym. :return: None """ if self._stopped: self._stopped = False self._connection = self.channel.connect() self.in_thread = Thread(target=self._receive_loop) self.out_thread = Thread(target=self._fetch) self.in_thread.start() self.out_thread.start() def disconnect(self) -> None: """ Disconnect from the gym. :return: None """ assert self.in_thread is not None, "Call connect before disconnect." assert self.out_thread is not None, "Call connect before disconnect." 
if not self._stopped: self._stopped = True self.in_thread.join() self.out_thread.join() self.in_thread = None self.out_thread = None self.channel.disconnect() self.stop() def send(self, envelope: Envelope) -> None: """ Send an envelope. :param envelope: the envelop :return: None """ if not self.is_established: raise ConnectionError("Connection not established yet. Please use 'connect()'.") self.channel.send(envelope) def stop(self) -> None: """ Tear down the connection. :return: None """ self._connection = None @classmethod def from_config(cls, public_key: str, connection_configuration: ConnectionConfig) -> 'Connection': """ Get the Gym connection from the connection configuration. :param public_key: the public key of the agent. :param connection_configuration: the connection configuration object. :return: the connection object """ gym_env_package = cast(str, connection_configuration.config.get('env')) gym_env = locate(gym_env_package) return GymConnection(public_key, gym_env())
0
0
0
60b4412d890e1f04c1d7d9dcd011193caca13c37
3,404
py
Python
scripts/find_clusters.py
camillemarie/ncov
6084ab1f42a36cfba6f8198c4f38047737072d06
[ "MIT" ]
1
2022-02-06T20:23:03.000Z
2022-02-06T20:23:03.000Z
scripts/find_clusters.py
camillemarie/ncov
6084ab1f42a36cfba6f8198c4f38047737072d06
[ "MIT" ]
1
2022-03-01T00:02:11.000Z
2022-03-01T00:08:15.000Z
scripts/find_clusters.py
camillemarie/ncov
6084ab1f42a36cfba6f8198c4f38047737072d06
[ "MIT" ]
1
2022-01-11T17:46:29.000Z
2022-01-11T17:46:29.000Z
#!/usr/bin/env python3 import argparse from augur.utils import read_tree, read_node_data, read_metadata from collections import Counter import csv import hashlib MAX_HASH_LENGTH = 7 if __name__ == "__main__": parser = argparse.ArgumentParser( description="Find polytomies in a given tree that all belong to the same metadata group", formatter_class=argparse.ArgumentDefaultsHelpFormatter ) parser.add_argument("--tree", required=True, help="Newick tree") parser.add_argument("--metadata", required=True, help="metadata") parser.add_argument("--mutations", required=True, help="mutations node data JSON") parser.add_argument("--attribute-name", default="cluster_id", help="name of attribute to store in output JSON") parser.add_argument("--group-by", default="division", help="identify polytomies where all tips are in the same group") parser.add_argument("--min-tips", type=int, default=3, help="minimum tips per polytomy to be consider as a cluster") parser.add_argument("--output", required=True, help="tab-delimited file with strain, cluster id, and group value for each strain") args = parser.parse_args() tree = read_tree(args.tree) tree.collapse_all(lambda c: c.branch_length < 1e-5) metadata, columns = read_metadata(args.metadata) muts = read_node_data(args.mutations) attribute_name = args.attribute_name group_by = args.group_by polytomies = [] for node in tree.find_clades(terminal=False): if node == tree.root: continue count_by_group = Counter() polytomy_sequence_id = None for child in node.clades: if child.is_terminal() and child.name: child_muts_data = muts["nodes"].get(child.name, {}) any_muts = (len(child_muts_data.get("muts", [])) > 0) if not any_muts: count_by_group[metadata[child.name][group_by]] += 1 if polytomy_sequence_id is None and "sequence" in child_muts_data: polytomy_sequence_id = hashlib.sha256(child_muts_data["sequence"].encode()).hexdigest()[:MAX_HASH_LENGTH] if any(count >= args.min_tips for count in count_by_group.values()): polytomies.append({"node": 
node, "name": polytomy_sequence_id}) with open(args.output, "w") as oh: writer = csv.DictWriter( oh, fieldnames=( "strain", args.attribute_name, group_by ), delimiter="\t", lineterminator="\n" ) writer.writeheader() clusters = 0 for polytomy_data in polytomies: polytomy = polytomy_data["node"] polytomy_sequence_id = polytomy_data["name"] if polytomy.name: writer.writerow({ "strain": polytomy.name, args.attribute_name: polytomy_sequence_id, group_by: metadata[polytomy.name][group_by] }) for child in polytomy.clades: if child.is_terminal(): writer.writerow({ "strain": child.name, args.attribute_name: polytomy_sequence_id, group_by: metadata[child.name][group_by] }) clusters += 1
39.126437
134
0.61134
#!/usr/bin/env python3 import argparse from augur.utils import read_tree, read_node_data, read_metadata from collections import Counter import csv import hashlib MAX_HASH_LENGTH = 7 if __name__ == "__main__": parser = argparse.ArgumentParser( description="Find polytomies in a given tree that all belong to the same metadata group", formatter_class=argparse.ArgumentDefaultsHelpFormatter ) parser.add_argument("--tree", required=True, help="Newick tree") parser.add_argument("--metadata", required=True, help="metadata") parser.add_argument("--mutations", required=True, help="mutations node data JSON") parser.add_argument("--attribute-name", default="cluster_id", help="name of attribute to store in output JSON") parser.add_argument("--group-by", default="division", help="identify polytomies where all tips are in the same group") parser.add_argument("--min-tips", type=int, default=3, help="minimum tips per polytomy to be consider as a cluster") parser.add_argument("--output", required=True, help="tab-delimited file with strain, cluster id, and group value for each strain") args = parser.parse_args() tree = read_tree(args.tree) tree.collapse_all(lambda c: c.branch_length < 1e-5) metadata, columns = read_metadata(args.metadata) muts = read_node_data(args.mutations) attribute_name = args.attribute_name group_by = args.group_by polytomies = [] for node in tree.find_clades(terminal=False): if node == tree.root: continue count_by_group = Counter() polytomy_sequence_id = None for child in node.clades: if child.is_terminal() and child.name: child_muts_data = muts["nodes"].get(child.name, {}) any_muts = (len(child_muts_data.get("muts", [])) > 0) if not any_muts: count_by_group[metadata[child.name][group_by]] += 1 if polytomy_sequence_id is None and "sequence" in child_muts_data: polytomy_sequence_id = hashlib.sha256(child_muts_data["sequence"].encode()).hexdigest()[:MAX_HASH_LENGTH] if any(count >= args.min_tips for count in count_by_group.values()): polytomies.append({"node": 
node, "name": polytomy_sequence_id}) with open(args.output, "w") as oh: writer = csv.DictWriter( oh, fieldnames=( "strain", args.attribute_name, group_by ), delimiter="\t", lineterminator="\n" ) writer.writeheader() clusters = 0 for polytomy_data in polytomies: polytomy = polytomy_data["node"] polytomy_sequence_id = polytomy_data["name"] if polytomy.name: writer.writerow({ "strain": polytomy.name, args.attribute_name: polytomy_sequence_id, group_by: metadata[polytomy.name][group_by] }) for child in polytomy.clades: if child.is_terminal(): writer.writerow({ "strain": child.name, args.attribute_name: polytomy_sequence_id, group_by: metadata[child.name][group_by] }) clusters += 1
0
0
0
fe7269f61e6da28d4bef240e6beb92eb1df6c19b
1,486
py
Python
newstream/settings/docker.py
diffractive/newstream
cf1a1f230e18d01c63b50ab9d360aa44ac5a486f
[ "MIT" ]
1
2020-05-03T12:33:42.000Z
2020-05-03T12:33:42.000Z
newstream/settings/docker.py
diffractive/newstream
cf1a1f230e18d01c63b50ab9d360aa44ac5a486f
[ "MIT" ]
14
2020-07-06T20:05:57.000Z
2022-03-12T00:39:11.000Z
newstream/settings/docker.py
diffractive/newstream
cf1a1f230e18d01c63b50ab9d360aa44ac5a486f
[ "MIT" ]
null
null
null
from .base import * ALLOWED_HOSTS = [ "*" ] SECRET_KEY = os.environ.get("SECERT_KEY", "dummykey") DATABASE_NAME = os.environ.get("DATABASE_NAME", "newstream") DATABASE_USER = os.environ.get("DATABASE_USER", "newstream") DATABASE_HOST = os.environ.get("DATABASE_HOST", "") DATABASE_PORT = os.environ.get("DATABASE_PORT", "") DATABASE_PASSWORD = os.environ.get("DATABASE_PASSWORD", "") DEBUG = int(os.environ.get("NEWSTREAM_DEBUG", "0")) EMAIL_BACKEND = 'django_smtp_ssl.SSLEmailBackend' EMAIL_HOST = 'mailhog' EMAIL_PORT = '1025' DEFAULT_FROM_EMAIL = 'newstream@diffractive.io' DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql', 'NAME': DATABASE_NAME, 'USER': DATABASE_USER, 'HOST': DATABASE_HOST, 'PORT': DATABASE_PORT, 'PASSWORD': DATABASE_PASSWORD, } } LOGGING = { 'version': 1, 'disable_existing_loggers': False, 'formatters': { 'default': { 'format': '[DJANGO] %(levelname)s %(asctime)s %(module)s ' '%(name)s.%(funcName)s:%(lineno)s: %(message)s' }, }, 'handlers': { 'console': { 'level': 'INFO', 'class': 'logging.StreamHandler', 'formatter': 'default', } }, 'loggers': { '*': { 'handlers': ['console'], 'level': 'INFO', 'propagate': True, } }, } try: from .local import * except ImportError: pass
23.587302
70
0.571332
from .base import * ALLOWED_HOSTS = [ "*" ] SECRET_KEY = os.environ.get("SECERT_KEY", "dummykey") DATABASE_NAME = os.environ.get("DATABASE_NAME", "newstream") DATABASE_USER = os.environ.get("DATABASE_USER", "newstream") DATABASE_HOST = os.environ.get("DATABASE_HOST", "") DATABASE_PORT = os.environ.get("DATABASE_PORT", "") DATABASE_PASSWORD = os.environ.get("DATABASE_PASSWORD", "") DEBUG = int(os.environ.get("NEWSTREAM_DEBUG", "0")) EMAIL_BACKEND = 'django_smtp_ssl.SSLEmailBackend' EMAIL_HOST = 'mailhog' EMAIL_PORT = '1025' DEFAULT_FROM_EMAIL = 'newstream@diffractive.io' DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql', 'NAME': DATABASE_NAME, 'USER': DATABASE_USER, 'HOST': DATABASE_HOST, 'PORT': DATABASE_PORT, 'PASSWORD': DATABASE_PASSWORD, } } LOGGING = { 'version': 1, 'disable_existing_loggers': False, 'formatters': { 'default': { 'format': '[DJANGO] %(levelname)s %(asctime)s %(module)s ' '%(name)s.%(funcName)s:%(lineno)s: %(message)s' }, }, 'handlers': { 'console': { 'level': 'INFO', 'class': 'logging.StreamHandler', 'formatter': 'default', } }, 'loggers': { '*': { 'handlers': ['console'], 'level': 'INFO', 'propagate': True, } }, } try: from .local import * except ImportError: pass
0
0
0
4863ce321508a73286127e64fdef95c188d41618
564
py
Python
app/settings/views.py
dev-johnlopez/assignably-old
99f550e3e970a979234a724097ed8c940f1562c1
[ "MIT" ]
null
null
null
app/settings/views.py
dev-johnlopez/assignably-old
99f550e3e970a979234a724097ed8c940f1562c1
[ "MIT" ]
null
null
null
app/settings/views.py
dev-johnlopez/assignably-old
99f550e3e970a979234a724097ed8c940f1562c1
[ "MIT" ]
null
null
null
from flask import Blueprint, g, render_template, redirect, url_for, g, flash from flask_security import current_user, login_required, current_user from app import db bp = Blueprint('settings', __name__) #@bp.before_app_request #def before_request(): # g.search_form = SearchForm() @bp.route('/index', methods=['GET', 'POST']) @bp.route('/', methods=['GET', 'POST']) @login_required
29.684211
76
0.696809
from flask import Blueprint, g, render_template, redirect, url_for, g, flash from flask_security import current_user, login_required, current_user from app import db bp = Blueprint('settings', __name__) #@bp.before_app_request #def before_request(): # g.search_form = SearchForm() @bp.route('/index', methods=['GET', 'POST']) @bp.route('/', methods=['GET', 'POST']) @login_required def index(): deals = current_user.get_deals() return render_template('settings/notifications.html', title='Notifications', deals=deals)
154
0
22
2d48a057d0d9e5d36b293be7298aca0f68373d78
3,840
py
Python
flask_signon/core/jwt_signon.py
juanmanuel96/flask-signon
c08ce983fdde908aa46936f36a7d43bceaa78ae2
[ "BSD-2-Clause" ]
null
null
null
flask_signon/core/jwt_signon.py
juanmanuel96/flask-signon
c08ce983fdde908aa46936f36a7d43bceaa78ae2
[ "BSD-2-Clause" ]
null
null
null
flask_signon/core/jwt_signon.py
juanmanuel96/flask-signon
c08ce983fdde908aa46936f36a7d43bceaa78ae2
[ "BSD-2-Clause" ]
null
null
null
import datetime from flask import Flask from flask_jwt_extended import JWTManager from flask_signon.utils import import_string
49.87013
87
0.698177
import datetime from flask import Flask from flask_jwt_extended import JWTManager from flask_signon.utils import import_string class JWTAuth(JWTManager): def init_app(self, app): """ Register this extension with the flask app. :param app: The Flask Application object """ # Save this so we can use it later in the extension if not hasattr(app, "extensions"): # pragma: no cover app.extensions = {} app.extensions["flask-jwt-extended"] = self app.jwt_auth = self # Set all the default configurations for this extension self._set_default_configuration_options(app) self._set_error_handler_callbacks(app) self.anonymous_user = import_string(app.config.get('ANONYMOUS_USER')) @staticmethod def _set_default_configuration_options(app: Flask): # Flask-JWT-Extended configurations app.config.setdefault( "JWT_ACCESS_TOKEN_EXPIRES", datetime.timedelta(minutes=15) ) app.config.setdefault("JWT_ACCESS_COOKIE_NAME", "access_token_cookie") app.config.setdefault("JWT_ACCESS_COOKIE_PATH", "/") app.config.setdefault("JWT_ACCESS_CSRF_COOKIE_NAME", "csrf_access_token") app.config.setdefault("JWT_ACCESS_CSRF_COOKIE_PATH", "/") app.config.setdefault("JWT_ACCESS_CSRF_FIELD_NAME", "csrf_token") app.config.setdefault("JWT_ACCESS_CSRF_HEADER_NAME", "X-CSRF-TOKEN") app.config.setdefault("JWT_ALGORITHM", "HS256") app.config.setdefault("JWT_COOKIE_CSRF_PROTECT", True) app.config.setdefault("JWT_COOKIE_DOMAIN", None) app.config.setdefault("JWT_COOKIE_SAMESITE", None) app.config.setdefault("JWT_COOKIE_SECURE", False) app.config.setdefault("JWT_CSRF_CHECK_FORM", False) app.config.setdefault("JWT_CSRF_IN_COOKIES", True) app.config.setdefault("JWT_CSRF_METHODS", ["POST", "PUT", "PATCH", "DELETE"]) app.config.setdefault("JWT_DECODE_ALGORITHMS", None) app.config.setdefault("JWT_DECODE_AUDIENCE", None) app.config.setdefault("JWT_DECODE_ISSUER", None) app.config.setdefault("JWT_DECODE_LEEWAY", 0) app.config.setdefault("JWT_ENCODE_AUDIENCE", None) app.config.setdefault("JWT_ENCODE_ISSUER", None) 
app.config.setdefault("JWT_ERROR_MESSAGE_KEY", "msg") app.config.setdefault("JWT_HEADER_NAME", "Authorization") app.config.setdefault("JWT_HEADER_TYPE", "Bearer") app.config.setdefault("JWT_IDENTITY_CLAIM", "sub") app.config.setdefault("JWT_JSON_KEY", "access_token") app.config.setdefault("JWT_PRIVATE_KEY", None) app.config.setdefault("JWT_PUBLIC_KEY", None) app.config.setdefault("JWT_QUERY_STRING_NAME", "jwt") app.config.setdefault("JWT_QUERY_STRING_VALUE_PREFIX", "") app.config.setdefault("JWT_REFRESH_COOKIE_NAME", "refresh_token_cookie") app.config.setdefault("JWT_REFRESH_COOKIE_PATH", "/") app.config.setdefault("JWT_REFRESH_CSRF_COOKIE_NAME", "csrf_refresh_token") app.config.setdefault("JWT_REFRESH_CSRF_COOKIE_PATH", "/") app.config.setdefault("JWT_REFRESH_CSRF_FIELD_NAME", "csrf_token") app.config.setdefault("JWT_REFRESH_CSRF_HEADER_NAME", "X-CSRF-TOKEN") app.config.setdefault("JWT_REFRESH_JSON_KEY", "refresh_token") app.config.setdefault("JWT_REFRESH_TOKEN_EXPIRES", datetime.timedelta(days=30)) app.config.setdefault("JWT_SECRET_KEY", None) app.config.setdefault("JWT_SESSION_COOKIE", True) app.config.setdefault("JWT_TOKEN_LOCATION", ("headers",)) app.config.setdefault("JWT_ENCODE_NBF", True) # Flask-SignOn configurations app.config.setdefault('ANONYMOUS_USER', 'flask_signon.core.user.AnonymousUser')
2,989
699
23
51e6803e70accccebe00711e6bcbf5d3e9f1ae94
2,272
py
Python
xmlreader/XmlReader.py
ZrifBSofiane/XmlBuilder
e0caaa6ebff9b16604308373a361133d49b28060
[ "MIT" ]
null
null
null
xmlreader/XmlReader.py
ZrifBSofiane/XmlBuilder
e0caaa6ebff9b16604308373a361133d49b28060
[ "MIT" ]
2
2021-01-13T11:36:18.000Z
2021-01-13T11:36:40.000Z
xmlreader/XmlReader.py
ZrifBSofiane/XmlReader
e0caaa6ebff9b16604308373a361133d49b28060
[ "MIT" ]
null
null
null
from xml.etree import ElementTree from datetime import datetime from typing import Any from os import path import re
33.411765
89
0.59419
from xml.etree import ElementTree from datetime import datetime from typing import Any from os import path import re class XmlReader: def __init__(self): self.types = {} self.datetime_format = None def register(self, ctor: Any): temp = ctor() self.types[type(temp).__name__] = (ctor, temp.__dict__.keys()) def set_datetime_format(self, format: str): if format is not None and len(format) > 0: self.datetime_format = format def read(self, file): if not path.exists(file): return Exception("File not found %s" % file) tree = ElementTree.parse(file) root = tree.getroot() return self.parse_xml(root, None) def parse_xml(self, element, current_obj): if not element.getchildren(): tag = re.sub('{.*}', '', element.tag) key = type(current_obj).__name__ self.__affect_value(key, tag, current_obj, element.text) else: if element.tag in self.types: parent = self.types[element.tag][0]() else: return current_obj for elem in element.getchildren(): current_obj = self.parse_xml(elem, parent) key = type(parent).__name__ tag = type(current_obj).__name__ self.__affect_value(key, tag, parent, current_obj) return current_obj def __affect_value(self, key, tag, current_object, value): if key in self.types: if tag in self.types[key][1]: if isinstance(current_object.__dict__[tag], list): current_object.__dict__[tag].append(value) else: converted_value = self.__convert(current_object.__dict__[tag], value) current_object.__dict__[tag] = converted_value def __convert(self, current, new): if current is None or isinstance(current, str): return new if isinstance(current, int): return int(new) if isinstance(current, float): return float(new) if isinstance(current, datetime) and self.datetime_format is not None: return datetime.strptime(new, self.datetime_format)
1,944
-5
211
cc23049acceaadcf9842b0dcc4a63b1ab589941e
1,269
py
Python
day2.py
Zinko17/FP
f3a8f542a710c3acafbc6d6ac6acd275dd34e653
[ "MIT" ]
null
null
null
day2.py
Zinko17/FP
f3a8f542a710c3acafbc6d6ac6acd275dd34e653
[ "MIT" ]
null
null
null
day2.py
Zinko17/FP
f3a8f542a710c3acafbc6d6ac6acd275dd34e653
[ "MIT" ]
null
null
null
# a = int(input()) # b = int(input()) # if a < b: # print('a') # else: # print('b') # password = input('enter password:') # check_password = input('confirm password:') # if password == check_password and len(password) >=8: # print('Succesful') # else: # if len(password)>= 8: # print('password dont match') # else: # print('password too short') # x = int(input()) # y = int(input()) # if x > 0: # if y > 0: # print(1) # else: # print(4) # else: # if y > 0: # print(2) # else: # print(3) # number1 = int(input()) # if number1 > 0: # print(1) # elif number1 == 0: # print(0) # else: # print(-1) # a = int(input()) # b = int(input()) # c = int(input()) # if a >= b and a >= c: # print('a') # elif b >= a and b >= c: # print('b') # else: # print('steps = int # steps = int(input('enter steps:')) # point = int(input('enter screen points:')) # print(steps % point) # x = float(input()) # print(x - int(x)) # string1 = 'nnnnn n word nnnnnnnnn nn' # ban_word = 'word' # if ban_word in string1: # del string1 # else: # print('OK') # s = 500 # distance = int(input()) # if distance <= s: # print('yes') # else: # print('no')
13.645161
54
0.501182
# a = int(input()) # b = int(input()) # if a < b: # print('a') # else: # print('b') # password = input('enter password:') # check_password = input('confirm password:') # if password == check_password and len(password) >=8: # print('Succesful') # else: # if len(password)>= 8: # print('password dont match') # else: # print('password too short') # x = int(input()) # y = int(input()) # if x > 0: # if y > 0: # print(1) # else: # print(4) # else: # if y > 0: # print(2) # else: # print(3) # number1 = int(input()) # if number1 > 0: # print(1) # elif number1 == 0: # print(0) # else: # print(-1) # a = int(input()) # b = int(input()) # c = int(input()) # if a >= b and a >= c: # print('a') # elif b >= a and b >= c: # print('b') # else: # print('steps = int # steps = int(input('enter steps:')) # point = int(input('enter screen points:')) # print(steps % point) # x = float(input()) # print(x - int(x)) # string1 = 'nnnnn n word nnnnnnnnn nn' # ban_word = 'word' # if ban_word in string1: # del string1 # else: # print('OK') # s = 500 # distance = int(input()) # if distance <= s: # print('yes') # else: # print('no')
0
0
0
49024c6ba195d2ad7accd6ec7d53322f090c0610
7,320
py
Python
tests/api/v1/auth/test_login.py
rogerokello/weConnect-api
e1fb136864842781063a60bae0764defb99e47c6
[ "MIT" ]
1
2019-04-18T19:56:31.000Z
2019-04-18T19:56:31.000Z
tests/api/v1/auth/test_login.py
rogerokello/weconnect-practice
e1fb136864842781063a60bae0764defb99e47c6
[ "MIT" ]
6
2018-02-19T14:17:00.000Z
2018-07-08T08:38:02.000Z
tests/api/v1/auth/test_login.py
rogerokello/weConnect-api
e1fb136864842781063a60bae0764defb99e47c6
[ "MIT" ]
1
2018-02-26T13:05:49.000Z
2018-02-26T13:05:49.000Z
import unittest import json from app import create_app, db, session from tests.api.v1 import BaseTestCase class AuthTestCase(BaseTestCase): """Test case for the authentication blueprint.""" def test_user_login(self): """Test registered user can login. (POST request)""" #first register a user self.client().post('/auth/register', data=json.dumps(self.user_data), content_type='application/json' ) #try to login using registration credentials login_res = self.client().post('/auth/login', data=json.dumps(self.user_data), content_type='application/json' ) # get the results in json format result = json.loads(login_res.data.decode()) # Test that the response contains success message self.assertEqual(result['message'], "You logged in successfully.") # Assert that the status code returned is equal to 200 self.assertEqual(login_res.status_code, 200) # Assert that the result has an access token self.assertTrue(result['access_token']) def test_login_when_email_used(self): """Test registered user can login with email address used for username. 
(POST request)""" #first register a user self.client().post('/auth/register', data=json.dumps(self.user_data), content_type='application/json' ) #try to login using registration credentials login_res = self.client().post('/auth/login', data=json.dumps({'username':'rogerokello@gmail.com', 'password':'okello'}), content_type='application/json' ) # get the results in json format result = json.loads(login_res.data.decode()) # Test that the response contains success message self.assertEqual(result['message'], "You logged in successfully.") # Assert that the status code returned is equal to 200 self.assertEqual(login_res.status_code, 200) # Assert that the result has an access token self.assertTrue(result['access_token']) def test_login_empty_json(self): "Test user login rejects when data supplied is not json (POST request)" #make a request to the register endpoint res = self.client().post('/auth/login', data=json.dumps({}) ) # get the results returned in json format result = json.loads(res.data.decode()) # assert that the request contains a success message and # a 201 status code self.assertEqual(result['message'], "Please supply json data") self.assertEqual(res.status_code, 400) def test_login_no_username_key(self): "Test user login rejects when no username key supplied (POST request)" #make a request to the login endpoint res = self.client().post('/auth/login', data=json.dumps({"":""}), content_type='application/json' ) # get the results returned in json format result = json.loads(res.data.decode()) # assert that the request contains a success message and # a 201 status code self.assertEqual(result['message'], "Please supply a 'username'") self.assertEqual(res.status_code, 400) def test_login_no_passwd_key(self): "Test user login rejects when no password key supplied (POST request)" #register a user self._register_user() #make a request to the login endpoint res = self.client().post('/auth/login', data=json.dumps({"username":"roger"}), content_type='application/json' 
) # get the results returned in json format result = json.loads(res.data.decode()) # assert that the request contains a success message and # a 201 status code self.assertEqual(result['message'], "Please supply a 'password'") self.assertEqual(res.status_code, 400) def test_login_invalid_username(self): "Test user login rejects invalid username supplied (POST request)" #register a user self._register_user() #make a request to the register endpoint res = self.client().post('/auth/login', data=json.dumps({"username":"", "password":"" }), content_type='application/json' ) # get the results returned in json format result = json.loads(res.data.decode()) # assert that the request contains a success message and # a 401 status code self.assertEqual(result['message'], "Invalid username or password, Please try again") self.assertEqual(res.status_code, 401) def test_login_non_txt_for_values(self): "Test user login rejects non text supplied for values (POST request)" #register a user self._register_user() #make a request to the register endpoint res = self.client().post('/auth/login', data=json.dumps({"username":12, "password":2334 }), content_type='application/json' ) # get the results returned in json format result = json.loads(res.data.decode()) # assert that the request contains a success message and # a 401 status code self.assertEqual(result['message'], "Invalid values supplied, Please try again with text values") self.assertEqual(res.status_code, 401) def test_login_username_as_no(self): "Test user login rejects username supplied as a number (POST request)" #register a user self._register_user() #make a request to the register endpoint using a number username res = self.client().post('/auth/login', data=json.dumps({"username":123, "password":"" }), content_type='application/json' ) # get the results returned in json format result = json.loads(res.data.decode()) # assert that the request contains a success message and # a 401 status code 
self.assertEqual(result['message'], "Invalid values supplied, Please try again with text values") self.assertEqual(res.status_code, 401)
43.058824
115
0.54194
import unittest import json from app import create_app, db, session from tests.api.v1 import BaseTestCase class AuthTestCase(BaseTestCase): """Test case for the authentication blueprint.""" def test_user_login(self): """Test registered user can login. (POST request)""" #first register a user self.client().post('/auth/register', data=json.dumps(self.user_data), content_type='application/json' ) #try to login using registration credentials login_res = self.client().post('/auth/login', data=json.dumps(self.user_data), content_type='application/json' ) # get the results in json format result = json.loads(login_res.data.decode()) # Test that the response contains success message self.assertEqual(result['message'], "You logged in successfully.") # Assert that the status code returned is equal to 200 self.assertEqual(login_res.status_code, 200) # Assert that the result has an access token self.assertTrue(result['access_token']) def test_login_when_email_used(self): """Test registered user can login with email address used for username. 
(POST request)""" #first register a user self.client().post('/auth/register', data=json.dumps(self.user_data), content_type='application/json' ) #try to login using registration credentials login_res = self.client().post('/auth/login', data=json.dumps({'username':'rogerokello@gmail.com', 'password':'okello'}), content_type='application/json' ) # get the results in json format result = json.loads(login_res.data.decode()) # Test that the response contains success message self.assertEqual(result['message'], "You logged in successfully.") # Assert that the status code returned is equal to 200 self.assertEqual(login_res.status_code, 200) # Assert that the result has an access token self.assertTrue(result['access_token']) def test_login_empty_json(self): "Test user login rejects when data supplied is not json (POST request)" #make a request to the register endpoint res = self.client().post('/auth/login', data=json.dumps({}) ) # get the results returned in json format result = json.loads(res.data.decode()) # assert that the request contains a success message and # a 201 status code self.assertEqual(result['message'], "Please supply json data") self.assertEqual(res.status_code, 400) def test_login_no_username_key(self): "Test user login rejects when no username key supplied (POST request)" #make a request to the login endpoint res = self.client().post('/auth/login', data=json.dumps({"":""}), content_type='application/json' ) # get the results returned in json format result = json.loads(res.data.decode()) # assert that the request contains a success message and # a 201 status code self.assertEqual(result['message'], "Please supply a 'username'") self.assertEqual(res.status_code, 400) def test_login_no_passwd_key(self): "Test user login rejects when no password key supplied (POST request)" #register a user self._register_user() #make a request to the login endpoint res = self.client().post('/auth/login', data=json.dumps({"username":"roger"}), content_type='application/json' 
) # get the results returned in json format result = json.loads(res.data.decode()) # assert that the request contains a success message and # a 201 status code self.assertEqual(result['message'], "Please supply a 'password'") self.assertEqual(res.status_code, 400) def test_login_invalid_username(self): "Test user login rejects invalid username supplied (POST request)" #register a user self._register_user() #make a request to the register endpoint res = self.client().post('/auth/login', data=json.dumps({"username":"", "password":"" }), content_type='application/json' ) # get the results returned in json format result = json.loads(res.data.decode()) # assert that the request contains a success message and # a 401 status code self.assertEqual(result['message'], "Invalid username or password, Please try again") self.assertEqual(res.status_code, 401) def test_login_non_txt_for_values(self): "Test user login rejects non text supplied for values (POST request)" #register a user self._register_user() #make a request to the register endpoint res = self.client().post('/auth/login', data=json.dumps({"username":12, "password":2334 }), content_type='application/json' ) # get the results returned in json format result = json.loads(res.data.decode()) # assert that the request contains a success message and # a 401 status code self.assertEqual(result['message'], "Invalid values supplied, Please try again with text values") self.assertEqual(res.status_code, 401) def test_login_username_as_no(self): "Test user login rejects username supplied as a number (POST request)" #register a user self._register_user() #make a request to the register endpoint using a number username res = self.client().post('/auth/login', data=json.dumps({"username":123, "password":"" }), content_type='application/json' ) # get the results returned in json format result = json.loads(res.data.decode()) # assert that the request contains a success message and # a 401 status code 
self.assertEqual(result['message'], "Invalid values supplied, Please try again with text values") self.assertEqual(res.status_code, 401)
0
0
0
db183d5d9240100317028197380a0bf0c8249960
542
py
Python
pathogen_memo/views/index.py
NajlaBioinfo/pathogen_memo_app
426a0ef545ffc96da8c6fc1bca5eb19caaf1879c
[ "MIT" ]
null
null
null
pathogen_memo/views/index.py
NajlaBioinfo/pathogen_memo_app
426a0ef545ffc96da8c6fc1bca5eb19caaf1879c
[ "MIT" ]
3
2021-09-03T10:33:09.000Z
2022-03-12T00:17:18.000Z
pathogen_memo/views/index.py
NajlaBioinfo/pathogen_memo_app
426a0ef545ffc96da8c6fc1bca5eb19caaf1879c
[ "MIT" ]
null
null
null
from flask import Blueprint, render_template from sys import version from flask import Flask, jsonify from pathogen_memo.controllers import getallquery #Set word_count_site_name indexv = Blueprint('indexv', __name__) @indexv.route('/')
22.583333
73
0.761993
from flask import Blueprint, render_template from sys import version from flask import Flask, jsonify from pathogen_memo.controllers import getallquery #Set word_count_site_name indexv = Blueprint('indexv', __name__) @indexv.route('/') def index(): tablename = 'pathogens' data_index = getallquery.gethemall(tablename) #print (data_index) #return jsonify({"message": "Welcome to my Flask App"}) #return render_template('index.html',message="Welcome to my Flask App") return render_template('index.html', data=data_index)
278
0
22
87a07ddd5aaead0e0d702a3e591e624c393f2c25
1,061
py
Python
bin/fasta_unwrap.py
nyoungb2/seq_tools
8d93ad3fa2340c908e67830a2362fba6d2c47fbb
[ "MIT" ]
null
null
null
bin/fasta_unwrap.py
nyoungb2/seq_tools
8d93ad3fa2340c908e67830a2362fba6d2c47fbb
[ "MIT" ]
null
null
null
bin/fasta_unwrap.py
nyoungb2/seq_tools
8d93ad3fa2340c908e67830a2362fba6d2c47fbb
[ "MIT" ]
1
2018-06-25T19:34:47.000Z
2018-06-25T19:34:47.000Z
#!/usr/bin/env python """ fasta_unwrap: hard-wrapped fastas unwrapped (sequence is just 1 line) Usage: fasta_unwrap <fasta> | [-] fasta_unwrap -h | --help fasta_unwrap --version Options: <fasta> Fasta file name ('-' if from STDIN). -h --help Show this screen. --version Show version. Description: Simply unwrap a hard-wrapped fasta file """ from docopt import docopt if __name__ == '__main__': args = docopt(__doc__, version='0.1') import sys import fileinput import re # fixing broken pipe error from signal import signal, SIGPIPE, SIG_DFL signal(SIGPIPE,SIG_DFL) # IO error if args['<fasta>'] is None: sys.stderr.write('Provide fasta via arg or STDIN') sys.stderr.write(__doc__) sys.exit() if args['<fasta>'] == '-': inf = sys.stdin else: inf = open(args['<fasta>'], 'r') # parsing fasta line_cnt = 0 for line in inf: line_cnt += 1 if line.startswith('>'): if line_cnt > 1: print '' sys.stdout.write(line) else: sys.stdout.write(line.rstrip())
18.946429
69
0.637135
#!/usr/bin/env python """ fasta_unwrap: hard-wrapped fastas unwrapped (sequence is just 1 line) Usage: fasta_unwrap <fasta> | [-] fasta_unwrap -h | --help fasta_unwrap --version Options: <fasta> Fasta file name ('-' if from STDIN). -h --help Show this screen. --version Show version. Description: Simply unwrap a hard-wrapped fasta file """ from docopt import docopt if __name__ == '__main__': args = docopt(__doc__, version='0.1') import sys import fileinput import re # fixing broken pipe error from signal import signal, SIGPIPE, SIG_DFL signal(SIGPIPE,SIG_DFL) # IO error if args['<fasta>'] is None: sys.stderr.write('Provide fasta via arg or STDIN') sys.stderr.write(__doc__) sys.exit() if args['<fasta>'] == '-': inf = sys.stdin else: inf = open(args['<fasta>'], 'r') # parsing fasta line_cnt = 0 for line in inf: line_cnt += 1 if line.startswith('>'): if line_cnt > 1: print '' sys.stdout.write(line) else: sys.stdout.write(line.rstrip())
0
0
0
56ceee4a1208e349f997d03840c35c70dbf57a9b
632
py
Python
dialogue-engine/test/integration/programy/clients/events/tcpsocket/socket_client.py
cotobadesign/cotoba-agent-oss
3833d56e79dcd7529c3e8b3a3a8a782d513d9b12
[ "MIT" ]
104
2020-03-30T09:40:00.000Z
2022-03-06T22:34:25.000Z
dialogue-engine/test/integration/programy/clients/events/tcpsocket/socket_client.py
cotobadesign/cotoba-agent-oss
3833d56e79dcd7529c3e8b3a3a8a782d513d9b12
[ "MIT" ]
25
2020-06-12T01:36:35.000Z
2022-02-19T07:30:44.000Z
dialogue-engine/test/integration/programy/clients/events/tcpsocket/socket_client.py
cotobadesign/cotoba-agent-oss
3833d56e79dcd7529c3e8b3a3a8a782d513d9b12
[ "MIT" ]
10
2020-04-02T23:43:56.000Z
2021-05-14T13:47:01.000Z
# client.py import socket import sys import json host = sys.argv[1] port = int(sys.argv[2]) question = sys.argv[3] userid = sys.argv[4] max_size = 1024 if len(sys.argv) == 6: max_size = sys.argv[5] payload = {"question": question, "userid": userid} json_data = json.dumps(payload) # create a socket object s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) # connection to hostname on the port. s.connect((host, port)) s.send(json_data.encode('utf-8')) # Receive no more than 1024 bytes received = s.recv(max_size) s.close() json_data = json.loads(received.decode('utf-8')) print("Answer: %s" % json_data['answer'])
19.151515
53
0.702532
# client.py import socket import sys import json host = sys.argv[1] port = int(sys.argv[2]) question = sys.argv[3] userid = sys.argv[4] max_size = 1024 if len(sys.argv) == 6: max_size = sys.argv[5] payload = {"question": question, "userid": userid} json_data = json.dumps(payload) # create a socket object s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) # connection to hostname on the port. s.connect((host, port)) s.send(json_data.encode('utf-8')) # Receive no more than 1024 bytes received = s.recv(max_size) s.close() json_data = json.loads(received.decode('utf-8')) print("Answer: %s" % json_data['answer'])
0
0
0
7d02549204810ef8130b0409f64d3c31c9543392
1,351
py
Python
gva/flows/operators/save_to_minio_operator.py
gva-jjoyce/gva_data
cda990d0abb4b175025aaf16e75192bd9cc213af
[ "Apache-2.0" ]
null
null
null
gva/flows/operators/save_to_minio_operator.py
gva-jjoyce/gva_data
cda990d0abb4b175025aaf16e75192bd9cc213af
[ "Apache-2.0" ]
24
2020-12-24T12:21:42.000Z
2021-01-28T14:22:38.000Z
gva/flows/operators/save_to_minio_operator.py
gva-jjoyce/gva_data
cda990d0abb4b175025aaf16e75192bd9cc213af
[ "Apache-2.0" ]
null
null
null
from .internals.base_operator import BaseOperator from ...data.writers import MinIoWriter, Writer # type:ignore from ...data.validator import Schema # type:ignore import datetime
29.369565
78
0.515914
from .internals.base_operator import BaseOperator from ...data.writers import MinIoWriter, Writer # type:ignore from ...data.validator import Schema # type:ignore import datetime class SaveToMinIoOperator(BaseOperator): def __init__( self, *, end_point: str, to_path: str, access_key: str, secret_key: str, schema: Schema = None, compress: bool = True, date: datetime.date = None, secure: bool = True, **kwargs): super().__init__() self.writer = Writer( inner_writer=MinIoWriter, to_path=to_path, schema=schema, compress=compress, date_exchange=date, end_point=end_point, access_key=access_key, secret_key=secret_key, secure=secure, **kwargs) def execute(self, data: dict = {}, context: dict = {}): self.writer.append(data) return data, context def finalize(self): self.writer.finalize() def __del__(self): try: self.writer.finalize() except Exception: # nosec - if this fails, it should be ignored here pass
1,004
19
141
3a03c336149f763ec2aca84dddb3492afa302a34
3,462
py
Python
assets/psutil/get_sys_info.py
OrceBR/HercAdminTool
4db13c88094493ccb75d9e74342c49e5c27d95d2
[ "MIT" ]
2
2021-04-08T05:06:20.000Z
2021-07-22T00:42:50.000Z
assets/psutil/get_sys_info.py
OrceBR/HercAdminTool
4db13c88094493ccb75d9e74342c49e5c27d95d2
[ "MIT" ]
null
null
null
assets/psutil/get_sys_info.py
OrceBR/HercAdminTool
4db13c88094493ccb75d9e74342c49e5c27d95d2
[ "MIT" ]
null
null
null
#!/usr/bin/env python # This script ships with HAT, or HercAdminTool. # Please see our project here: https://github.com/jguy1987/HercAdminTool # Author: Jguy - John Mish # The MIT license covers this file. Please see applicable license file here: https://github.com/jguy1987/HercAdminTool/blob/master/license.txt # You are free to include this file wholesale or parts of it in your project, just as long as you leave the above text alone. # Purpose of file: # This file is intended to be ran on a remote server. # It will gather metrics about the running server using the psutils python module and output them via an XML file with lxml. # Example output: #<metrics> # <basic> # <name>machinename</name> # <os>Linux-3.10.0-327.el7.x86_64-x86_64-with-centos-7.2.1511-Core</os> # <boottime>1365519115.0</boottime> # </basic> # <cpu> # <loadavg>0.5, 0.9, 1.3</loadavg> # <proccount>121</proccount> # </cpu> # <mem> # <virtual> # <total>10367352832</total> # <used>8186245120</used> # <avail>2181107712</avail> # <pct>37.6</pct> # </virtual> # <swap> # <total>2097147904</total> # <used>296128512</used> # <avail>1801019392</avail> # <pct>14.1</pct> # </swap> # </mem> # <disk> # <total>21378641920</total> # <used>4809781248</used> # <free>15482871808</free> # </disk> #</metrics> # Import things import psutil, os, time, sys, platform, socket from lxml import etree # get all of the metrics of the running OS. CPU load, MEM usage, disk space. # load averages... 
cpuload = os.getloadavg() cpuavg = str(cpuload[0])+", "+str(cpuload[1])+", "+str(cpuload[2]) # memory needs a little more work virt_mem = psutil.virtual_memory() swap_mem = psutil.swap_memory() # generate XML using lxml # <metrics> root = etree.Element('metrics') doc = etree.ElementTree(root) # <basic> child1 = etree.SubElement(root, 'basic') # <name> etree.SubElement(child1, "name").text = socket.gethostname() # <os> etree.SubElement(child1, "os").text = platform.platform() # <boottime> etree.SubElement(child1, "boottime").text = str(psutil.boot_time()) # <cpu> child2 = etree.SubElement(root, "cpu") # <percent> etree.SubElement(child2, "loadavg").text = str(cpuavg) # <proccount> etree.SubElement(child2, "proccount").text = str(len(psutil.pids())) # <mem> child3 = etree.SubElement(root, "mem") # <virtual> child3_1 = etree.SubElement(child3, "virtual") # <total> etree.SubElement(child3_1, "total").text = str(virt_mem.total) # <used> etree.SubElement(child3_1, "used").text = str(virt_mem.used) # <avail> etree.SubElement(child3_1, "avail").text = str(virt_mem.available) # <pct> etree.SubElement(child3_1, "pct").text = str(virt_mem.percent) # <swap> child3_2 = etree.SubElement(child3, "swap") # <total> etree.SubElement(child3_2, "total").text = str(swap_mem.total) # <used> etree.SubElement(child3_2, "used").text = str(swap_mem.used) # <avail> etree.SubElement(child3_2, "avail").text = str(swap_mem.free) # <pct> etree.SubElement(child3_2, "pct").text = str(swap_mem.percent) # <disk> child4 = etree.SubElement(root, "disk") disk = os.statvfs(__file__) free = disk.f_bavail * disk.f_frsize total = disk.f_blocks * disk.f_frsize used = (disk.f_blocks - disk.f_bfree) * disk.f_frsize # <total> etree.SubElement(child4, "total").text = str(total) # <used> etree.SubElement(child4, "used").text = str(used) # <free> etree.SubElement(child4, "free").text = str(free) outFile = open('serverstat.xml', 'w') doc.write(outFile)
29.589744
142
0.697574
#!/usr/bin/env python # This script ships with HAT, or HercAdminTool. # Please see our project here: https://github.com/jguy1987/HercAdminTool # Author: Jguy - John Mish # The MIT license covers this file. Please see applicable license file here: https://github.com/jguy1987/HercAdminTool/blob/master/license.txt # You are free to include this file wholesale or parts of it in your project, just as long as you leave the above text alone. # Purpose of file: # This file is intended to be ran on a remote server. # It will gather metrics about the running server using the psutils python module and output them via an XML file with lxml. # Example output: #<metrics> # <basic> # <name>machinename</name> # <os>Linux-3.10.0-327.el7.x86_64-x86_64-with-centos-7.2.1511-Core</os> # <boottime>1365519115.0</boottime> # </basic> # <cpu> # <loadavg>0.5, 0.9, 1.3</loadavg> # <proccount>121</proccount> # </cpu> # <mem> # <virtual> # <total>10367352832</total> # <used>8186245120</used> # <avail>2181107712</avail> # <pct>37.6</pct> # </virtual> # <swap> # <total>2097147904</total> # <used>296128512</used> # <avail>1801019392</avail> # <pct>14.1</pct> # </swap> # </mem> # <disk> # <total>21378641920</total> # <used>4809781248</used> # <free>15482871808</free> # </disk> #</metrics> # Import things import psutil, os, time, sys, platform, socket from lxml import etree # get all of the metrics of the running OS. CPU load, MEM usage, disk space. # load averages... 
cpuload = os.getloadavg() cpuavg = str(cpuload[0])+", "+str(cpuload[1])+", "+str(cpuload[2]) # memory needs a little more work virt_mem = psutil.virtual_memory() swap_mem = psutil.swap_memory() # generate XML using lxml # <metrics> root = etree.Element('metrics') doc = etree.ElementTree(root) # <basic> child1 = etree.SubElement(root, 'basic') # <name> etree.SubElement(child1, "name").text = socket.gethostname() # <os> etree.SubElement(child1, "os").text = platform.platform() # <boottime> etree.SubElement(child1, "boottime").text = str(psutil.boot_time()) # <cpu> child2 = etree.SubElement(root, "cpu") # <percent> etree.SubElement(child2, "loadavg").text = str(cpuavg) # <proccount> etree.SubElement(child2, "proccount").text = str(len(psutil.pids())) # <mem> child3 = etree.SubElement(root, "mem") # <virtual> child3_1 = etree.SubElement(child3, "virtual") # <total> etree.SubElement(child3_1, "total").text = str(virt_mem.total) # <used> etree.SubElement(child3_1, "used").text = str(virt_mem.used) # <avail> etree.SubElement(child3_1, "avail").text = str(virt_mem.available) # <pct> etree.SubElement(child3_1, "pct").text = str(virt_mem.percent) # <swap> child3_2 = etree.SubElement(child3, "swap") # <total> etree.SubElement(child3_2, "total").text = str(swap_mem.total) # <used> etree.SubElement(child3_2, "used").text = str(swap_mem.used) # <avail> etree.SubElement(child3_2, "avail").text = str(swap_mem.free) # <pct> etree.SubElement(child3_2, "pct").text = str(swap_mem.percent) # <disk> child4 = etree.SubElement(root, "disk") disk = os.statvfs(__file__) free = disk.f_bavail * disk.f_frsize total = disk.f_blocks * disk.f_frsize used = (disk.f_blocks - disk.f_bfree) * disk.f_frsize # <total> etree.SubElement(child4, "total").text = str(total) # <used> etree.SubElement(child4, "used").text = str(used) # <free> etree.SubElement(child4, "free").text = str(free) outFile = open('serverstat.xml', 'w') doc.write(outFile)
0
0
0
8af60fffa531510aaf431b7e15d750205843a7fd
1,163
py
Python
companies/urls.py
Ins-V/wc_crm
5d75907bb48e892328712ed0b2cf96b9083239aa
[ "MIT" ]
null
null
null
companies/urls.py
Ins-V/wc_crm
5d75907bb48e892328712ed0b2cf96b9083239aa
[ "MIT" ]
null
null
null
companies/urls.py
Ins-V/wc_crm
5d75907bb48e892328712ed0b2cf96b9083239aa
[ "MIT" ]
null
null
null
from django.urls import path from companies import views app_name = 'company' urlpatterns = [ path('contacts/', views.CompanyContactsView.as_view(), name='contacts'), path('phone/list/', views.CompanyPhoneListView.as_view(), name='phone_list'), path('phone/add/', views.CompanyPhoneCreateView.as_view(), name='phone_add'), path('phone/edit/<int:pk>/', views.CompanyPhoneEditView.as_view(), name='phone_edit'), path('phone/delete/<int:pk>/', views.CompanyPhoneDeleteView.as_view(), name='phone_delete'), path('email/list/', views.CompanyEmailListView.as_view(), name='email_list'), path('email/add/', views.CompanyEmailCreateView.as_view(), name='email_add'), path('email/edit/<int:pk>/', views.CompanyEmailEditView.as_view(), name='email_edit'), path('email/delete/<int:pk>/', views.CompanyEmailDeleteView.as_view(), name='email_delete'), path('list/', views.CompanyListView.as_view(), name='list'), path('create/', views.CompanyCreateView.as_view(), name='create'), path('edit/<int:pk>/', views.CompanyEditView.as_view(), name='edit'), path('<int:pk>/', views.CompanyDetailView.as_view(), name='detail'), ]
52.863636
96
0.705933
from django.urls import path from companies import views app_name = 'company' urlpatterns = [ path('contacts/', views.CompanyContactsView.as_view(), name='contacts'), path('phone/list/', views.CompanyPhoneListView.as_view(), name='phone_list'), path('phone/add/', views.CompanyPhoneCreateView.as_view(), name='phone_add'), path('phone/edit/<int:pk>/', views.CompanyPhoneEditView.as_view(), name='phone_edit'), path('phone/delete/<int:pk>/', views.CompanyPhoneDeleteView.as_view(), name='phone_delete'), path('email/list/', views.CompanyEmailListView.as_view(), name='email_list'), path('email/add/', views.CompanyEmailCreateView.as_view(), name='email_add'), path('email/edit/<int:pk>/', views.CompanyEmailEditView.as_view(), name='email_edit'), path('email/delete/<int:pk>/', views.CompanyEmailDeleteView.as_view(), name='email_delete'), path('list/', views.CompanyListView.as_view(), name='list'), path('create/', views.CompanyCreateView.as_view(), name='create'), path('edit/<int:pk>/', views.CompanyEditView.as_view(), name='edit'), path('<int:pk>/', views.CompanyDetailView.as_view(), name='detail'), ]
0
0
0
6aec30f308acb1094dc090358d4839af6529207e
768
py
Python
aaulan/models/crew_member.py
AAULAN/aaulan2.0
f0400914fe9e126a2dbf94cbce78cc34f976f3b8
[ "MIT" ]
null
null
null
aaulan/models/crew_member.py
AAULAN/aaulan2.0
f0400914fe9e126a2dbf94cbce78cc34f976f3b8
[ "MIT" ]
null
null
null
aaulan/models/crew_member.py
AAULAN/aaulan2.0
f0400914fe9e126a2dbf94cbce78cc34f976f3b8
[ "MIT" ]
null
null
null
from django.conf import settings from django.db import models from django.contrib import admin admin.site.register(CrewMember, CrewMemberAdmin)
24.774194
72
0.692708
from django.conf import settings from django.db import models from django.contrib import admin class CrewMember(models.Model): class Meta: verbose_name = 'Crew member' #photo = models.ImageField() title = models.CharField(max_length=100) user = models.ForeignKey( settings.AUTH_USER_MODEL, on_delete=models.CASCADE, ) def __str__(self): return "{} {}".format(self.user.first_name, self.user.last_name) class CrewMemberAdmin(admin.ModelAdmin): list_display = ('get_user_name', 'title') ordering = ['user'] def get_user_name(self, obj): return obj.user.first_name + ' ' + obj.user.last_name get_user_name.short_description = 'Name' admin.site.register(CrewMember, CrewMemberAdmin)
140
434
46
836d12ee60397703c65310ff1b8d20a62d53b1f0
4,626
py
Python
order.py
AmmeySaini/Realme-AutoBuy
c5ca4ce9990fd397b192f3360ecb264a64ab08f6
[ "MIT" ]
32
2020-06-23T08:20:56.000Z
2022-02-13T05:42:48.000Z
order.py
AmmeySaini/Realme-AutoBuy
c5ca4ce9990fd397b192f3360ecb264a64ab08f6
[ "MIT" ]
9
2020-07-22T06:16:43.000Z
2021-04-08T09:03:41.000Z
order.py
AmmeySaini/Realme-AutoBuy
c5ca4ce9990fd397b192f3360ecb264a64ab08f6
[ "MIT" ]
18
2020-06-23T08:12:15.000Z
2021-10-09T00:13:15.000Z
import requests import urllib3 import re import time from pathlib import Path from __constants.constants import head1 urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) ######### AUTHOR - @AmmeySaini ######### ######### Github Repo - https://github.com/AmmeySaini/Realme-AutoBuy ######### ######### I'm not responisble for any damage or anything bad happens to you using this script ######### ######### Use it on your own RISK ######### ######### This is only for educational purpose ######### if __name__ == '__main__': main()
50.835165
149
0.426719
import requests import urllib3 import re import time from pathlib import Path from __constants.constants import head1 urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) ######### AUTHOR - @AmmeySaini ######### ######### Github Repo - https://github.com/AmmeySaini/Realme-AutoBuy ######### ######### I'm not responisble for any damage or anything bad happens to you using this script ######### ######### Use it on your own RISK ######### ######### This is only for educational purpose ######### def main(): total_orders = 0 orders_file = Path('orders.txt') if orders_file.exists(): with open(orders_file, 'r') as file: file_data = file.read() cookie_list = file_data.split('\n') for cooki in cookie_list: if cooki != '': cookie_file = Path('./my_cookies/' + cooki) if cookie_file.exists(): print('Performing on ' + cooki) fp = open(cookie_file, 'r') all_cooks = fp.read().split('||') sessionId = all_cooks[0] opkey = all_cooks[1] newopkey = all_cooks[2] cookie1 = dict(sessionId=sessionId, opkey=opkey, newopkey=newopkey) else: print('cookie file doesn\'t exists') else: print('\nCompleted!!', 'Total Successful Orders - ' + str(total_orders)) exit() url1 = 'https://api.realme.com/in/order/query' r = requests.get(url1, headers=head1, cookies=cookie1, verify=False) js = r.json() try: if len(js['data']) > 0: try: if js['data'][0]['orderStatus'] == 11 and js['data'][0]['countDown'] > 1: orderNo = js['data'][0]['orderNo'] number = js['data'][0]['phone'] phoneAreacode = js['data'][0]['phoneAreacode'] url2 = 'https://api.realme.com/in/order/cod/verifyCode/sms' data2 = '{"phoneNumber":"' + str(phoneAreacode) + str(number) + '","orderNo":"' + str(orderNo) + '"}' r2 = requests.post(url2, headers=head1, data=data2, cookies=cookie1, verify=False) js2 = r2.json() if js2['msg'] == 'success': verified = False while verified != True: otp = int(input('Enter OTP sent to number ' + str(number + ': '))) url3 = 'https://api.realme.com/in/payment/custom/make-payment' data3 = 
'{"verifyCode":"' + str(otp) + '","orderNo":"' + str(orderNo) + '","payMethod":"COD","payChannel":"COD"}' r3 = requests.post(url3, headers=head1, data=data3, cookies=cookie1, verify=False) js3 = r3.json() if js3['code'] == 302: print('Order Successful', '\nSleeping for 1 Min, don\'t disturb me') total_orders += 1 verified = True time.sleep(60) elif js3['code'] == 20133: print('Error - ' + js3['msg']) verified = False else: print('Error - ' + js3['msg'], '\nSleeping for 1 Min, don\'t disturb me') time.sleep(60) break else: print('Error - ' + js2['msg'], '\nSleeping for 1 Min, don\'t disturb me') time.sleep(60) elif js['data'][0]['orderStatus'] == 40: print('Order is already processed') else: print('Error - Timeout Expired, Status is ' + str(js['data'][0]['orderStatus'])) # break except Exception as e: print(e) except: print('Error - Order does not exists') else: print('Error - Orders File doesn\'t exists') if __name__ == '__main__': main()
4,052
0
23
0103fcf54df56370cd3ee1ea3fff662c86dbc2ac
1,597
py
Python
output_parser.py
crowd-planning-poker/ML-SEE-replication-pack
8db5ad0e6425d2d24243442a472028137164fdf5
[ "MIT" ]
null
null
null
output_parser.py
crowd-planning-poker/ML-SEE-replication-pack
8db5ad0e6425d2d24243442a472028137164fdf5
[ "MIT" ]
null
null
null
output_parser.py
crowd-planning-poker/ML-SEE-replication-pack
8db5ad0e6425d2d24243442a472028137164fdf5
[ "MIT" ]
null
null
null
import numpy as np import sys import regex as re if __name__ == "__main__": file_name = sys.argv[1] parse_output_performance(file_name)
35.488889
110
0.514715
import numpy as np import sys import regex as re def parse_output_performance(file): f = open(file, "r") content = f.read() print("Dataset\tProject\tMethod\tFold\tF1\tAUC_ROC") f1, auc_roc = {'RF-BERT':[],'BERT-BERT':[],'RF-BoW':[]},{'RF-BERT':[],'BERT-BERT':[],'RF-BoW':[]} for match in re.findall(r'.*\n.*\n.*\n.*Matrix', content): if "Average Metrics" in match: continue if "BERT" in match: method_s = "RF-BERT" elif "BoW" in match: method_s = "RF-BoW" else: method_s = "BERT-BERT" fold = re.search(r'fold\:(\d+)\]', match).group(1) if int(fold)<= len (f1[method_s]): for k, i in f1.items(): print("%s\t%s\t%s\t%s\t%s\t%s" % ( dataset, project, k, len(i), round(np.mean(i),4), round(np.mean(auc_roc[k]),4))) f1, auc_roc = {'RF-BERT':[],'BERT-BERT':[],'RF-BoW':[]},{'RF-BERT':[],'BERT-BERT':[],'RF-BoW':[]} dataset = re.search(r'dataset: (.*?)\.', match).group(1) project = re.search(r'Project: (.*?)\n', match).group(1) f1[method_s].append(float(re.search(r'F1 Score: (.*?),', match).group(1))) auc_roc[method_s].append(float(re.search(r'ROC AUC: (.*?),', match).group(1))) for k, i in f1.items(): print("%s\t%s\t%s\t%s\t%s\t%s" % ( dataset, project, k, len(i), round(np.mean(i), 4), round(np.mean(auc_roc[k]), 4))) if __name__ == "__main__": file_name = sys.argv[1] parse_output_performance(file_name)
1,418
0
25
bce529e350c6acfe1c38a83d15faaccd0ed9f22c
4,649
py
Python
demo.py
b4824583/cmr
7e964bd5fae6d12ad347d5256daa634548ca0a5a
[ "MIT" ]
null
null
null
demo.py
b4824583/cmr
7e964bd5fae6d12ad347d5256daa634548ca0a5a
[ "MIT" ]
null
null
null
demo.py
b4824583/cmr
7e964bd5fae6d12ad347d5256daa634548ca0a5a
[ "MIT" ]
null
null
null
""" Demo of CMR. Note that CMR assumes that the object has been detected, so please use a picture of a bird that is centered and well cropped. Sample usage: python demo.py --name bird_net --num_train_epoch 500 --img_path misc/demo_data/img1.jpg """ from __future__ import absolute_import from __future__ import division from __future__ import print_function from absl import flags, app import numpy as np import skimage.io as io import torch from nnutils import test_utils from nnutils import predictor as pred_util from utils import image as img_util #----------------------------edited by parker call to graph from pycallgraph import PyCallGraph from pycallgraph.output import GraphvizOutput # # with PyCallGraph(output=GraphvizOutput()): # code_to_profile() #------------------------- flags.DEFINE_string('img_path', 'data/im1963.jpg', 'Image to run') flags.DEFINE_integer('img_size', 256, 'image size the network was trained on.') opts = flags.FLAGS if __name__ == '__main__': opts.batch_size = 1 opts.name="bird_net" opts.num_train_epoch=500 opts.img_path="misc/demo_data/img2.jpg" app.run(main)
27.672619
125
0.620779
""" Demo of CMR. Note that CMR assumes that the object has been detected, so please use a picture of a bird that is centered and well cropped. Sample usage: python demo.py --name bird_net --num_train_epoch 500 --img_path misc/demo_data/img1.jpg """ from __future__ import absolute_import from __future__ import division from __future__ import print_function from absl import flags, app import numpy as np import skimage.io as io import torch from nnutils import test_utils from nnutils import predictor as pred_util from utils import image as img_util #----------------------------edited by parker call to graph from pycallgraph import PyCallGraph from pycallgraph.output import GraphvizOutput # # with PyCallGraph(output=GraphvizOutput()): # code_to_profile() #------------------------- flags.DEFINE_string('img_path', 'data/im1963.jpg', 'Image to run') flags.DEFINE_integer('img_size', 256, 'image size the network was trained on.') opts = flags.FLAGS def preprocess_image(img_path, img_size=256): img = io.imread(img_path) / 255. # Scale the max image size to be img_size #-這邊將圖片的大小scale到257 scale_factor = float(img_size) / np.max(img.shape[:2]) img, _ = img_util.resize_img(img, scale_factor)#256x256x3 # Crop img_size x img_size from the center #---------------其實看不太懂它為什麼要切割,因為它切割的大小是257x257,而它縮放的大小是256x256 #--------------他是不是在耍人阿!?? 
#--------------切割是由中心點往外切出一個bounding box center = np.round(np.array(img.shape[:2]) / 2).astype(int) #p # print("center1:"+str(center)) # img center in (x, y) center = center[::-1] #p # print("center2:"+str(center)) bbox = np.hstack([center - img_size / 2., center + img_size / 2.]) #p # print("bbox:"+str(bbox)) img = img_util.crop(img, bbox, bgval=1.)#257x257x3 # Transpose the image to 3xHxW img = np.transpose(img, (2, 0, 1))#3x257x257 return img def visualize(img, outputs, renderer): vert = outputs['verts'][0] cam = outputs['cam_pred'][0] texture = outputs['texture'][0] # -----------------------這邊會輸出已經預測好的bird mesh # -----------------------------------------這邊會Vis Render call() shape_pred = renderer(vert, cam) # -----------------------這邊會輸出已經預測好的bird mesh # -----------------------------------------這邊會Vis Render call() img_pred = renderer(vert, cam, texture=texture) # Different viewpoints. vp1 = renderer.diff_vp( vert, cam, angle=30, axis=[0, 1, 0], texture=texture, extra_elev=True) vp2 = renderer.diff_vp( vert, cam, angle=60, axis=[0, 1, 0], texture=texture, extra_elev=True) vp3 = renderer.diff_vp( vert, cam, angle=60, axis=[1, 0, 0], texture=texture) # f=open("texture.txt","w") # f.write(repr(texture.shape)+"\n") # f.write(repr(texture)) # f.close() img = np.transpose(img, (1, 2, 0)) import matplotlib.pyplot as plt # plt.ion() # plt.figure(1) # plt.clf() # plt.imshow(texture) # plt.show() # plt.savefig("texture.png") plt.ion() plt.figure(1) plt.clf() plt.subplot(231) plt.imshow(img) plt.title('input') plt.axis('off') plt.subplot(232) plt.imshow(shape_pred) plt.title('pred mesh') plt.axis('off') plt.subplot(233) plt.imshow(img_pred) plt.title('pred mesh w/texture') plt.axis('off') plt.subplot(234) plt.imshow(vp1) plt.title('different viewpoints') plt.axis('off') plt.subplot(235) plt.imshow(vp2) plt.axis('off') plt.subplot(236) plt.imshow(vp3) plt.axis('off') plt.draw() plt.show() print('saving file to demo.png') plt.savefig('demo.png') def main(_): #----edited by 
parker call to graph # graphviz = GraphvizOutput() # graphviz.output_file = 'call_to_graph.png' img = preprocess_image(opts.img_path, img_size=opts.img_size) print("opts:",opts.gpu_id) # with PyCallGraph(output=graphviz): #img的維度是3x257x257 # 創建一個pytorch tensor 的batch 維度是["img"][1][3][257][257]而且值為 1.0 batch = {'img': torch.Tensor(np.expand_dims(img, 0))} predictor = pred_util.MeshPredictor(opts) #-----------------得到預測好的vertice outputs = predictor.predict(batch) #-----------------------------draw predited mesh #---------------------------------- # This is resolution renderer = predictor.vis_rend renderer.set_light_dir([0, 1, -1], 0.4) # output["verts"]是已經預測好的vertce visualize(img, outputs, predictor.vis_rend) if __name__ == '__main__': opts.batch_size = 1 opts.name="bird_net" opts.num_train_epoch=500 opts.img_path="misc/demo_data/img2.jpg" app.run(main)
3,684
0
69
4ae71ff7c9b21807335b8daf912d973c1bd8ecba
4,389
py
Python
deepsim/deepsim/__init__.py
aws-deepracer/deepsim
cad2639f525c2f94ec5c03d8b855cc65b0b8ee55
[ "Apache-2.0" ]
1
2022-03-25T07:20:49.000Z
2022-03-25T07:20:49.000Z
deepsim/deepsim/__init__.py
aws-deepracer/deepsim
cad2639f525c2f94ec5c03d8b855cc65b0b8ee55
[ "Apache-2.0" ]
null
null
null
deepsim/deepsim/__init__.py
aws-deepracer/deepsim
cad2639f525c2f94ec5c03d8b855cc65b0b8ee55
[ "Apache-2.0" ]
null
null
null
################################################################################# # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # # # # Licensed under the Apache License, Version 2.0 (the "License"). # # You may not use this file except in compliance with the License. # # You may obtain a copy of the License at # # # # http://www.apache.org/licenses/LICENSE-2.0 # # # # Unless required by applicable law or agreed to in writing, software # # distributed under the License is distributed on an "AS IS" BASIS, # # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # # See the License for the specific language governing permissions and # # limitations under the License. # ################################################################################# from .behaviours.behaviour_manager import BehaviourManager from .behaviours.deepsim_behaviour import DeepSimBehaviour from .behaviours.transform import Transform from .cameras.abs_camera import AbstractCamera from .cameras.constants import CameraSettings from .colliders.abs_collider import AbstractCollider, Abstract2DCollider, Abstract3DCollider, ColliderType from .colliders.box2d_collider import Box2DCollider from .colliders.geometry2d_collider import Geometry2DCollider from .colliders.circle2d_collider import Circle2DCollider from .colliders.sphere_collider import SphereCollider from .constants import Tag from .deepsim import DeepSim from .domain_randomizations.abs_randomizer import AbstractRandomizer from .domain_randomizations.constants import ( ModelRandomizerType, RangeType, RANGE_MIN, RANGE_MAX, ColorAttr, Attenuation ) from .domain_randomizations.randomizer_manager import RandomizerManager from .domain_randomizations.randomizers.model_visual_randomizer import ModelVisualRandomizer from .domain_randomizations.randomizers.light_randomizer import LightRandomizer from .exception import DeepSimError from .exception import DeepSimCallbackError from .exception import 
DeepSimException from .gazebo.constants import ( GazeboWorld, GazeboServiceName, GeometryType ) from .core.color import Color from .core.euler import Euler from .core.model_state import ModelState from .core.link_state import LinkState from .core.material import Material from .core.math import lerp, lerp_angle_rad, project_to_2d, dot, cross, magnitude, sqr_magnitude, unit, distance from .core.plane import Plane from .core.point import Point from .core.pose import Pose from .core.quaternion import Quaternion from .core.ray import Ray from .core.twist import Twist from .core.vector3 import Vector3 from .ros.service_proxy_wrapper import ServiceProxyWrapper from .ros.ros_util import ROSUtil from .sim_trackers.tracker import TrackerInterface from .sim_trackers.tracker_manager import TrackerManager from .sim_trackers.constants import TrackerPriority from .sim_trackers.trackers.get_model_state_tracker import GetModelStateTracker from .sim_trackers.trackers.get_link_state_tracker import GetLinkStateTracker from .sim_trackers.trackers.set_model_state_tracker import SetModelStateTracker from .sim_trackers.trackers.set_link_state_tracker import SetLinkStateTracker from .sim_trackers.trackers.set_visual_material_tracker import SetVisualMaterialTracker from .sim_trackers.trackers.set_visual_transparency_tracker import SetVisualTransparencyTracker from .sim_trackers.trackers.set_visual_visible_tracker import SetVisualVisibleTracker from .spawners.gazebo_xml_loader import GazeboXmlLoader from .spawners.gazebo_model_spawner import GazeboModelSpawner from .spawners.abs_model_spawner import AbstractModelSpawner from .spawners.dummy_spawner import DummySpawner from .visual_effects.abs_effect import AbstractEffect, EffectObserverInterface from .visual_effects.effect_manager import EffectManager from .visual_effects.effects.blink_effect import BlinkEffect from .visual_effects.effects.invisible_effect import InvisibleEffect
48.766667
112
0.732741
################################################################################# # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # # # # Licensed under the Apache License, Version 2.0 (the "License"). # # You may not use this file except in compliance with the License. # # You may obtain a copy of the License at # # # # http://www.apache.org/licenses/LICENSE-2.0 # # # # Unless required by applicable law or agreed to in writing, software # # distributed under the License is distributed on an "AS IS" BASIS, # # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # # See the License for the specific language governing permissions and # # limitations under the License. # ################################################################################# from .behaviours.behaviour_manager import BehaviourManager from .behaviours.deepsim_behaviour import DeepSimBehaviour from .behaviours.transform import Transform from .cameras.abs_camera import AbstractCamera from .cameras.constants import CameraSettings from .colliders.abs_collider import AbstractCollider, Abstract2DCollider, Abstract3DCollider, ColliderType from .colliders.box2d_collider import Box2DCollider from .colliders.geometry2d_collider import Geometry2DCollider from .colliders.circle2d_collider import Circle2DCollider from .colliders.sphere_collider import SphereCollider from .constants import Tag from .deepsim import DeepSim from .domain_randomizations.abs_randomizer import AbstractRandomizer from .domain_randomizations.constants import ( ModelRandomizerType, RangeType, RANGE_MIN, RANGE_MAX, ColorAttr, Attenuation ) from .domain_randomizations.randomizer_manager import RandomizerManager from .domain_randomizations.randomizers.model_visual_randomizer import ModelVisualRandomizer from .domain_randomizations.randomizers.light_randomizer import LightRandomizer from .exception import DeepSimError from .exception import DeepSimCallbackError from .exception import 
DeepSimException from .gazebo.constants import ( GazeboWorld, GazeboServiceName, GeometryType ) from .core.color import Color from .core.euler import Euler from .core.model_state import ModelState from .core.link_state import LinkState from .core.material import Material from .core.math import lerp, lerp_angle_rad, project_to_2d, dot, cross, magnitude, sqr_magnitude, unit, distance from .core.plane import Plane from .core.point import Point from .core.pose import Pose from .core.quaternion import Quaternion from .core.ray import Ray from .core.twist import Twist from .core.vector3 import Vector3 from .ros.service_proxy_wrapper import ServiceProxyWrapper from .ros.ros_util import ROSUtil from .sim_trackers.tracker import TrackerInterface from .sim_trackers.tracker_manager import TrackerManager from .sim_trackers.constants import TrackerPriority from .sim_trackers.trackers.get_model_state_tracker import GetModelStateTracker from .sim_trackers.trackers.get_link_state_tracker import GetLinkStateTracker from .sim_trackers.trackers.set_model_state_tracker import SetModelStateTracker from .sim_trackers.trackers.set_link_state_tracker import SetLinkStateTracker from .sim_trackers.trackers.set_visual_material_tracker import SetVisualMaterialTracker from .sim_trackers.trackers.set_visual_transparency_tracker import SetVisualTransparencyTracker from .sim_trackers.trackers.set_visual_visible_tracker import SetVisualVisibleTracker from .spawners.gazebo_xml_loader import GazeboXmlLoader from .spawners.gazebo_model_spawner import GazeboModelSpawner from .spawners.abs_model_spawner import AbstractModelSpawner from .spawners.dummy_spawner import DummySpawner from .visual_effects.abs_effect import AbstractEffect, EffectObserverInterface from .visual_effects.effect_manager import EffectManager from .visual_effects.effects.blink_effect import BlinkEffect from .visual_effects.effects.invisible_effect import InvisibleEffect
0
0
0
64f2b559694d21656b8f455bb0caf9f3b4b2e2c5
4,010
py
Python
hexrd/config/tests/test_root.py
glemaitre/hexrd
b68b1ba72e0f480d29bdaae2adbd6c6e2380cc7c
[ "BSD-3-Clause" ]
27
2020-02-18T12:15:08.000Z
2022-03-24T17:53:46.000Z
hexrd/config/tests/test_root.py
glemaitre/hexrd
b68b1ba72e0f480d29bdaae2adbd6c6e2380cc7c
[ "BSD-3-Clause" ]
259
2020-02-02T22:18:29.000Z
2022-03-30T19:59:58.000Z
hexrd/config/tests/test_root.py
glemaitre/hexrd
b68b1ba72e0f480d29bdaae2adbd6c6e2380cc7c
[ "BSD-3-Clause" ]
11
2020-02-18T12:14:44.000Z
2022-03-04T16:19:11.000Z
import multiprocessing as mp import os import tempfile from unittest import skipIf from .common import TestConfig, test_data from hexrd import config reference_data = \ """ analysis_name: analysis #working_dir: # not set to test defaulting to cwd --- analysis_name: analysis_2 working_dir: %(existing_path)s multiprocessing: -1 --- #analysis_name: # not set to test inheritance working_dir: %(nonexistent_path)s multiprocessing: all --- multiprocessing: half --- multiprocessing: 2 --- multiprocessing: 1000 --- multiprocessing: -1000 --- multiprocessing: foo """ % test_data
33.140496
78
0.66783
import multiprocessing as mp import os import tempfile from unittest import skipIf from .common import TestConfig, test_data from hexrd import config reference_data = \ """ analysis_name: analysis #working_dir: # not set to test defaulting to cwd --- analysis_name: analysis_2 working_dir: %(existing_path)s multiprocessing: -1 --- #analysis_name: # not set to test inheritance working_dir: %(nonexistent_path)s multiprocessing: all --- multiprocessing: half --- multiprocessing: 2 --- multiprocessing: 1000 --- multiprocessing: -1000 --- multiprocessing: foo """ % test_data class TestRootConfig(TestConfig): @classmethod def get_reference_data(cls): return reference_data def test_analysis_dir(self): self.assertEqual( self.cfgs[0].analysis_dir, os.path.join(os.getcwd(), 'analysis') ) def test_analysis_name(self): self.assertEqual(self.cfgs[0].analysis_name, 'analysis') self.assertEqual(self.cfgs[1].analysis_name, 'analysis_2') self.cfgs[3].analysis_name = 'analysis_3' self.assertEqual(self.cfgs[3].analysis_name, 'analysis_3') def test_section_inheritance(self): self.assertEqual(self.cfgs[0].analysis_name, 'analysis') self.assertEqual(self.cfgs[1].analysis_name, 'analysis_2') # 2 should inherit from 0, not 1: self.assertEqual(self.cfgs[2].analysis_name, 'analysis') def test_working_dir(self): self.assertEqual(self.cfgs[0].working_dir, os.getcwd()) self.assertEqual(self.cfgs[1].working_dir, test_data['existing_path']) self.assertRaises(IOError, getattr, self.cfgs[2], 'working_dir') self.cfgs[7].working_dir = test_data['existing_path'] self.assertEqual(self.cfgs[7].working_dir, test_data['existing_path']) self.assertRaises( IOError, setattr, self.cfgs[7], 'working_dir', test_data['nonexistent_path'] ) @skipIf(mp.cpu_count() < 2, 'test requires at least two cores') def test_multiprocessing(self): ncpus = mp.cpu_count() self.assertEqual(self.cfgs[0].multiprocessing, ncpus - 1) self.assertEqual(self.cfgs[1].multiprocessing, ncpus - 1) 
self.assertEqual(self.cfgs[2].multiprocessing, ncpus) self.assertEqual(self.cfgs[3].multiprocessing, ncpus/2) self.assertEqual(self.cfgs[4].multiprocessing, 2) self.assertEqual(self.cfgs[5].multiprocessing, ncpus) self.assertEqual(self.cfgs[6].multiprocessing, 1) self.assertEqual(self.cfgs[7].multiprocessing, ncpus-1) self.cfgs[7].multiprocessing = 1 self.assertEqual(self.cfgs[7].multiprocessing, 1) self.cfgs[7].multiprocessing = 'all' self.assertEqual(self.cfgs[7].multiprocessing, ncpus) self.cfgs[7].multiprocessing = 2 self.assertEqual(self.cfgs[7].multiprocessing, 2) self.assertRaises( RuntimeError, setattr, self.cfgs[7], 'multiprocessing', 'foo' ) self.assertRaises( RuntimeError, setattr, self.cfgs[7], 'multiprocessing', -2 ) class TestSingleConfig(TestConfig): @classmethod def get_reference_data(cls): return "analysis_name: foo" def test_analysis_name(self): self.assertEqual(self.cfgs[0].analysis_name, 'foo') def test_dirty(self): self.assertEqual(self.cfgs[0].dirty, False) self.cfgs[0].analysis_name = 'bar' self.assertEqual(self.cfgs[0].analysis_name, 'bar') self.assertEqual(self.cfgs[0].dirty, True) def test_dump(self): self.assertEqual(self.cfgs[0].dirty, False) self.cfgs[0].analysis_name = 'baz' self.assertEqual(self.cfgs[0].dirty, True) with tempfile.NamedTemporaryFile(delete=False) as f: pass self.cfgs[0].dump(f.name) self.assertEqual(self.cfgs[0].dirty, False) cfg = config.open(f.name)[0] self.assertEqual(self.cfgs[0].analysis_name, 'baz')
2,984
398
46
c9ceab957cb78af08dee479a7e3534e68d15a3a3
122
py
Python
StackedAutoEncoder/train.py
vsc-hvdc/srtp_GIS
1ade28e92f450812990328d2e4b4921497646f6c
[ "Apache-2.0" ]
null
null
null
StackedAutoEncoder/train.py
vsc-hvdc/srtp_GIS
1ade28e92f450812990328d2e4b4921497646f6c
[ "Apache-2.0" ]
null
null
null
StackedAutoEncoder/train.py
vsc-hvdc/srtp_GIS
1ade28e92f450812990328d2e4b4921497646f6c
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- """ this file is for training the SAE, and apply for the model @author Zhou Hang """ import model
17.428571
58
0.663934
# -*- coding: utf-8 -*- """ this file is for training the SAE, and apply for the model @author Zhou Hang """ import model
0
0
0
b83849f586edcddec3a0d1931e899385d48dcf29
2,179
py
Python
deeplearning-image-gpu/python-lib/tensorboard_handle.py
gbetegon88/dataiku-contrib
4683a9e08a3706a163810bf104bf6464025e235e
[ "Apache-2.0" ]
93
2015-10-28T13:15:45.000Z
2022-03-07T01:16:24.000Z
deeplearning-image-gpu/python-lib/tensorboard_handle.py
gbetegon88/dataiku-contrib
4683a9e08a3706a163810bf104bf6464025e235e
[ "Apache-2.0" ]
130
2015-11-13T17:41:41.000Z
2022-03-30T16:37:44.000Z
deeplearning-image-gpu/python-lib/tensorboard_handle.py
gbetegon88/dataiku-contrib
4683a9e08a3706a163810bf104bf6464025e235e
[ "Apache-2.0" ]
88
2015-10-29T09:36:23.000Z
2021-12-13T20:14:08.000Z
import dataiku import constants from dataikuapi.utils import DataikuException import os from threading import Thread from werkzeug.serving import make_server from tensorflow import logging from tensorboard.backend import application import tensorboard.default as tb_default
32.044118
129
0.671868
import dataiku import constants from dataikuapi.utils import DataikuException import os from threading import Thread from werkzeug.serving import make_server from tensorflow import logging from tensorboard.backend import application import tensorboard.default as tb_default class TensorboardThread(Thread): def __init__(self, folder_name, host="127.0.0.1", verbosity=logging.WARN): Thread.__init__(self) self.project_key = os.environ["DKU_CURRENT_PROJECT_KEY"] self.folder_name = folder_name self.client = dataiku.api_client() logging.set_verbosity(verbosity) # Getting app logs_path = self.__get_logs_path() app = self.__get_tb_app(logs_path) # Setting server self.srv = make_server(host, 0, app) def get_port(self): return self.srv.server_port def __get_logs_path(self): # Retrieve model managed-folder path folder_found = False project = self.client.get_project(self.project_key) for folder in project.list_managed_folders(): if self.folder_name == folder['name']: folder_path = dataiku.Folder(folder['id'], project_key=self.project_key).get_path() folder_found = True break if not folder_found: raise DataikuException("The folder '{}' (in project '{}' cannot be found".format(self.folder_name, self.project_key)) log_path = os.path.join(folder_path, constants.TENSORBOARD_LOGS) return log_path def __get_tb_app(self, tensorboard_logs): return application.standard_tensorboard_wsgi( logdir=tensorboard_logs, assets_zip_provider=tb_default.get_assets_zip_provider(), purge_orphaned_data=True, reload_interval=5, plugins=tb_default.get_plugins()) def run(self): print("Launching tensorboard :") print("Your tensorboard dashboard will be accessible on http://<SERVER ADDRESS>:{}".format(self.get_port())) self.srv.serve_forever() def stop(self): print("Stopping tensorboard process") self.srv.shutdown()
1,704
11
185
8c344f9429de43332d17b67e902d0404f0d63aa0
2,934
py
Python
src/arago/hiro/backend/seven/app_admin.py
166MMX/hiro-python-library
fb29e3247a8fe1b0f7dc4e68141cf7340a8dd0a5
[ "MIT" ]
null
null
null
src/arago/hiro/backend/seven/app_admin.py
166MMX/hiro-python-library
fb29e3247a8fe1b0f7dc4e68141cf7340a8dd0a5
[ "MIT" ]
null
null
null
src/arago/hiro/backend/seven/app_admin.py
166MMX/hiro-python-library
fb29e3247a8fe1b0f7dc4e68141cf7340a8dd0a5
[ "MIT" ]
null
null
null
from typing import TYPE_CHECKING, Final from urllib.parse import quote from arago.hiro.model.graph.vertex import Vertex from arago.hiro.utils.cast_b import to_vertex if TYPE_CHECKING: from arago.hiro.client.rest_base_client import HiroRestBaseClient
40.191781
102
0.623722
from typing import TYPE_CHECKING, Final from urllib.parse import quote from arago.hiro.model.graph.vertex import Vertex from arago.hiro.utils.cast_b import to_vertex if TYPE_CHECKING: from arago.hiro.client.rest_base_client import HiroRestBaseClient class Hiro7AppAdminModel: _base_client: Final['HiroRestBaseClient'] def __init__(self, client: 'HiroRestBaseClient') -> None: super().__init__() # TODO Bug https://itautopilot.zendesk.com/agent/tickets/7933 # path = client.root.model.meta.version()['app-admin'].endpoint # if path.endswith('/'): # path = path[:-1] path = '/api/app-admin/1.2' fork = client.fork(path) self._base_client = fork def deactivate(self, app_id: str) -> dict: # DELETE /$id # https://docs.hiro.arago.co/hiro/6.2.0/user/hiro-graph-api/app-rest-api.html#_id_delete # https://docs.hiro.arago.co/hiro/6.2.0/user/hiro-graph-api/graph-applications.html#deactivate # TODO result strange json vs Vertex # {'ogit/Auth/Application/status': 'inactive'} uri = '/%s' % quote(app_id, '') with self._base_client.request( 'DELETE', uri, headers={'Accept': 'application/json'} ) as response: res_data = response.json() # vertex = to_vertex(res_data, self.__base_client) # return vertex[OgitAttribute.OGIT_AUTH_APPLICATION_STATUS] == 'inactive' return res_data class Hiro7GraphAppAdminModel(Hiro7AppAdminModel): def __init__(self, client: 'HiroRestBaseClient') -> None: super().__init__(client) def create(self, name: str, description: str) -> Vertex: # returns ogit/Auth/Application Vertex # POST /$type # https://docs.hiro.arago.co/hiro/6.2.0/user/hiro-graph-api/app-rest-api.html#_type_post # https://docs.hiro.arago.co/hiro/6.2.0/user/hiro-graph-api/graph-applications.html#create uri = '/graph' req_data = { 'ogit/name': name, 'ogit/description': description, } with self._base_client.request( 'POST', uri, headers={'Accept': 'application/json'}, json=req_data ) as response: res_data = response.json() vertex = to_vertex(res_data, self._base_client) return vertex def activate(self, 
app_id: str) -> dict: # PATCH /$id # https://docs.hiro.arago.co/hiro/6.2.0/user/hiro-graph-api/app-rest-api.html#_id_patch # https://docs.hiro.arago.co/hiro/6.2.0/user/hiro-graph-api/graph-applications.html#activate uri = '/%s' % quote(app_id, '') req_data = {} with self._base_client.request( 'PATCH', uri, headers={'Accept': 'application/json'}, json=req_data ) as response: res_data = response.json() # TODO define model return res_data
2,417
133
126
69681b1cbcf6d74049745d43ead44794f77d94da
2,617
py
Python
google-cloud-sdk/lib/surface/sql/reschedule_maintenance.py
bopopescu/Social-Lite
ee05d6a7431c36ff582c8d6b58bb20a8c5f550bf
[ "Apache-2.0" ]
2
2019-11-10T09:17:07.000Z
2019-12-18T13:44:08.000Z
google-cloud-sdk/lib/surface/sql/reschedule_maintenance.py
bopopescu/Social-Lite
ee05d6a7431c36ff582c8d6b58bb20a8c5f550bf
[ "Apache-2.0" ]
4
2020-07-21T12:51:46.000Z
2022-01-22T10:29:25.000Z
google-cloud-sdk/lib/surface/sql/reschedule_maintenance.py
bopopescu/Social-Lite
ee05d6a7431c36ff582c8d6b58bb20a8c5f550bf
[ "Apache-2.0" ]
1
2020-07-25T01:40:19.000Z
2020-07-25T01:40:19.000Z
# -*- coding: utf-8 -*- # # Copyright 2019 Google LLC. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Reschedule a Cloud SQL instance's maintenance.""" from __future__ import absolute_import from __future__ import division from __future__ import unicode_literals import textwrap from googlecloudsdk.api_lib.sql import api_util from googlecloudsdk.calliope import base from googlecloudsdk.command_lib.sql import flags from googlecloudsdk.command_lib.sql import reschedule_maintenance_util @base.ReleaseTracks(base.ReleaseTrack.GA, base.ReleaseTrack.BETA, base.ReleaseTrack.ALPHA) class RescheduleMaintenance(base.Command): """Reschedule a Cloud SQL instance's maintenance.""" detailed_help = { 'DESCRIPTION': textwrap.dedent("""\ {command} reschedules a Cloud SQL instance's maintenance. """), 'EXAMPLES': textwrap.dedent("""\ To run maintenance on instance `my-instance` immediately, run: $ {command} my-instance --reschedule-type=IMMEDIATE To reschedule maintenance on instance `my-instance` to the next available window, run: $ {command} my-instance --reschedule-type=NEXT_AVAILABLE_WINDOW To reschedule maintenance on instance `my-instance` to 2019-11-07 at 4:00 am UTC, run: $ {command} my-instance --reschedule-type=SPECIFIC_TIME --schedule-time=2019-11-07T04:00Z """), } @staticmethod def Args(parser): """Args is called by calliope to gather arguments for this command. 
Args: parser: An argparse parser that you can use to add arguments that go on the command line after this command. Positional arguments are allowed. """ flags.AddInstanceArgument(parser) flags.AddRescheduleType(parser) flags.AddScheduleTime(parser) def Run(self, args): """Runs the command to reschedule maintenance for a Cloud SQL instance.""" client = api_util.SqlClient(api_util.API_VERSION_DEFAULT) return reschedule_maintenance_util.RunRescheduleMaintenanceCommand( args, client)
35.849315
101
0.721055
# -*- coding: utf-8 -*- # # Copyright 2019 Google LLC. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Reschedule a Cloud SQL instance's maintenance.""" from __future__ import absolute_import from __future__ import division from __future__ import unicode_literals import textwrap from googlecloudsdk.api_lib.sql import api_util from googlecloudsdk.calliope import base from googlecloudsdk.command_lib.sql import flags from googlecloudsdk.command_lib.sql import reschedule_maintenance_util @base.ReleaseTracks(base.ReleaseTrack.GA, base.ReleaseTrack.BETA, base.ReleaseTrack.ALPHA) class RescheduleMaintenance(base.Command): """Reschedule a Cloud SQL instance's maintenance.""" detailed_help = { 'DESCRIPTION': textwrap.dedent("""\ {command} reschedules a Cloud SQL instance's maintenance. """), 'EXAMPLES': textwrap.dedent("""\ To run maintenance on instance `my-instance` immediately, run: $ {command} my-instance --reschedule-type=IMMEDIATE To reschedule maintenance on instance `my-instance` to the next available window, run: $ {command} my-instance --reschedule-type=NEXT_AVAILABLE_WINDOW To reschedule maintenance on instance `my-instance` to 2019-11-07 at 4:00 am UTC, run: $ {command} my-instance --reschedule-type=SPECIFIC_TIME --schedule-time=2019-11-07T04:00Z """), } @staticmethod def Args(parser): """Args is called by calliope to gather arguments for this command. 
Args: parser: An argparse parser that you can use to add arguments that go on the command line after this command. Positional arguments are allowed. """ flags.AddInstanceArgument(parser) flags.AddRescheduleType(parser) flags.AddScheduleTime(parser) def Run(self, args): """Runs the command to reschedule maintenance for a Cloud SQL instance.""" client = api_util.SqlClient(api_util.API_VERSION_DEFAULT) return reschedule_maintenance_util.RunRescheduleMaintenanceCommand( args, client)
0
0
0
07a4e65171b1be0b8deff477425b769066282366
895
py
Python
2017/day16.py
di-wu/aoc-python
cb08358ad3654e9401c1bd73ef32b0f79f372215
[ "Apache-2.0" ]
null
null
null
2017/day16.py
di-wu/aoc-python
cb08358ad3654e9401c1bd73ef32b0f79f372215
[ "Apache-2.0" ]
null
null
null
2017/day16.py
di-wu/aoc-python
cb08358ad3654e9401c1bd73ef32b0f79f372215
[ "Apache-2.0" ]
null
null
null
input = open('inputs/day16.input').read().strip().split(',') positions = list("abcdefghijklmnop") print("Part one: %s" % move(1, positions[:])) print("Part two: %s" % move(1000000000, positions[:]))
28.870968
60
0.426816
input = open('inputs/day16.input').read().strip().split(',') positions = list("abcdefghijklmnop") def move(iterations, pos): sequence = [] for i in range(iterations): s = ''.join(pos) if s in sequence: return sequence[iterations % i] sequence.append(s) for i in input: if i[0] == 's': x = int(i[1:]) pos = pos[-x:] + pos[:-x] elif i[0] == 'x': x = i[1:].split('/') a, b = int(x[0]), int(x[1]) pos[a], pos[b] = pos[b], pos[a] elif i[0] == 'p': a, b = i[1:].split('/') A = pos.index(a) B = pos.index(b) pos[A], pos[B] = pos[B], pos[A] return ''.join(pos) print("Part one: %s" % move(1, positions[:])) print("Part two: %s" % move(1000000000, positions[:]))
671
0
23
c3bedd08edfecc9f7400cb88cb75e8a254fd2c64
1,423
py
Python
media/files/linearreg.py
cobyoo/PlayPay
6a80e104062965d809d290c65db82f391a53b844
[ "MIT" ]
2
2021-01-18T14:31:26.000Z
2021-01-18T15:52:54.000Z
media/files/linearreg_q0cLGi8.py
cobyoo/PlayPay
6a80e104062965d809d290c65db82f391a53b844
[ "MIT" ]
null
null
null
media/files/linearreg_q0cLGi8.py
cobyoo/PlayPay
6a80e104062965d809d290c65db82f391a53b844
[ "MIT" ]
null
null
null
import statsmodels.formula.api as smf import matplotlib.pyplot as plt import pandas as pd from sklearn.linear_model import LinearRegression from sklearn import model_selection from sklearn import metrics from mpl_toolkits.mplot3d import Axes3D data = pd.read_excel('train_data.xlsx') data2 = pd.read_excel('test_data.xlsx') dfX = pd.DataFrame(data, columns=["temp", "humid"]) dfy = pd.DataFrame(data, columns=["Gcal"]) df = pd.concat([dfX, dfy], axis=1) testx = pd.DataFrame(data2, columns=["temp", "humid"]) testy = pd.DataFrame(data2, columns=["Gcal"]) fig = plt.figure(figsize=(10,5)) ax = fig.add_subplot(111, projection='3d') cols = ["temp", "humid", "Gcal"] ax.scatter(df["temp"],df["humid"], df["Gcal"]) model = smf.ols(formula = 'Gcal ~ temp + humid', data = data) result = model.fit() #print(result.summary()) #-----변수 별 관계성 수치표현 #plt.show() # plotting function #x_train, x_test, y_train, y_test = model_selection.train_test_split(dfX, dfy, test_size=0.3) model = LinearRegression() model.fit(dfX, dfy) #print(model.coef_, model.intercept_) #y_predict = boston_model.predict(x_train) #score = metrics.r2_score(y_train, y_predict) #print(score) #1.0 y_predict = model.predict(testx) score = metrics.r2_score(testy, y_predict) print(score) #1.0 temp = input() humid = input() result = int(model.intercept_[0]) + int(model.coef_[0][0])*float(temp) + int(model.coef_[0][1])*float(humid) print(result)
28.46
108
0.719606
import statsmodels.formula.api as smf import matplotlib.pyplot as plt import pandas as pd from sklearn.linear_model import LinearRegression from sklearn import model_selection from sklearn import metrics from mpl_toolkits.mplot3d import Axes3D data = pd.read_excel('train_data.xlsx') data2 = pd.read_excel('test_data.xlsx') dfX = pd.DataFrame(data, columns=["temp", "humid"]) dfy = pd.DataFrame(data, columns=["Gcal"]) df = pd.concat([dfX, dfy], axis=1) testx = pd.DataFrame(data2, columns=["temp", "humid"]) testy = pd.DataFrame(data2, columns=["Gcal"]) fig = plt.figure(figsize=(10,5)) ax = fig.add_subplot(111, projection='3d') cols = ["temp", "humid", "Gcal"] ax.scatter(df["temp"],df["humid"], df["Gcal"]) model = smf.ols(formula = 'Gcal ~ temp + humid', data = data) result = model.fit() #print(result.summary()) #-----변수 별 관계성 수치표현 #plt.show() # plotting function #x_train, x_test, y_train, y_test = model_selection.train_test_split(dfX, dfy, test_size=0.3) model = LinearRegression() model.fit(dfX, dfy) #print(model.coef_, model.intercept_) #y_predict = boston_model.predict(x_train) #score = metrics.r2_score(y_train, y_predict) #print(score) #1.0 y_predict = model.predict(testx) score = metrics.r2_score(testy, y_predict) print(score) #1.0 temp = input() humid = input() result = int(model.intercept_[0]) + int(model.coef_[0][0])*float(temp) + int(model.coef_[0][1])*float(humid) print(result)
0
0
0
b0a1f2f042b581c99a0d2f8cfa9f921d37bf758b
2,597
py
Python
utils/misc.py
bbbbbbzhou/DuDoRNet
5e47030e373aec066f6cc7957e913e984407b1b2
[ "MIT" ]
35
2020-03-27T02:12:21.000Z
2022-03-14T17:19:22.000Z
utils/misc.py
chisyliu/DuDoRNet
5e47030e373aec066f6cc7957e913e984407b1b2
[ "MIT" ]
4
2020-06-14T12:50:14.000Z
2021-12-04T06:19:08.000Z
utils/misc.py
chisyliu/DuDoRNet
5e47030e373aec066f6cc7957e913e984407b1b2
[ "MIT" ]
11
2020-11-17T12:56:13.000Z
2022-03-19T17:45:29.000Z
__all__ = ['read_dir', 'get_last_checkpoint', 'compute_metrics', 'get_aapm_minmax', 'convert_coefficient2hu', 'convert_hu2coefficient'] import os import os.path as path import scipy.io as sio import numpy as np from tqdm import tqdm from skimage.measure import compare_ssim, compare_psnr
30.916667
83
0.630343
__all__ = ['read_dir', 'get_last_checkpoint', 'compute_metrics', 'get_aapm_minmax', 'convert_coefficient2hu', 'convert_hu2coefficient'] import os import os.path as path import scipy.io as sio import numpy as np from tqdm import tqdm from skimage.measure import compare_ssim, compare_psnr def read_dir(dir_path, predicate=None, name_only=False): if predicate in {'dir', 'file'}: predicate = { 'dir': lambda x: path.isdir(path.join(dir_path, x)), 'file':lambda x: path.isfile(path.join(dir_path, x)) }[predicate] return [f if name_only else path.join(dir_path, f) for f in os.listdir(dir_path) if (True if predicate is None else predicate(f))] def get_last_checkpoint(checkpoint_dir, predicate=None, pattern=None): if predicate is None: predicate = lambda x: x.endswith('pth') or x.endswith('pt') checkpoints = read_dir(checkpoint_dir, predicate) if len(checkpoints) == 0: return None checkpoints = sorted(checkpoints, key=lambda x: path.getmtime(x)) checkpoint = checkpoints[-1] if pattern is None: pattern = lambda x: int(path.basename(x).split('_')[-1].split('.')[0]) return checkpoint, pattern(checkpoint) def compute_metrics(lq_image, hq_image, metrics=None): psnr = compare_psnr(lq_image, hq_image, hq_image.max()) ssim = compare_ssim(lq_image, hq_image, data_range=hq_image.max()) if metrics is None: return {'psnr': [psnr], 'ssim': [ssim]} else: metrics['psnr'].append(psnr) metrics['ssim'].append(ssim) return metrics def convert_coefficient2hu(image): image = (image - 0.0192) / 0.0192 * 1000 return image def convert_hu2coefficient(image): image = image * 0.0192 / 1000 + 0.0192 return image def get_aapm_minmax(data_dir, splits=('test', 'train', 'val'), tags=('dense_view', 'sparse_view')): data_files = [] for s in splits: split_dir = path.join(data_dir, s) for d in os.listdir(split_dir): study_dir = path.join(split_dir, d) for f in os.listdir(study_dir): data_file = path.join(study_dir, f) if f.endswith('.mat'): data_files.append(data_file) val_max = -float('inf') val_min = 
float('inf') for f in tqdm(data_files): data = sio.loadmat(f) data = np.array([data[t] for t in tags]) if data.max() > val_max: val_max = data.max() if data.min() < val_min: val_min = data.min() return val_min, val_max
2,161
0
138
538e6766d10f927845522486ad65550a2e7d0ba7
7,236
py
Python
srunner/challenge/envs/sensor_interface.py
HumanCompatibleAI/scenario_runner
6ad946762ffc91d9cf30455045d7cc2f1de57ea1
[ "MIT" ]
2
2019-07-18T07:02:01.000Z
2022-03-03T02:47:46.000Z
srunner/challenge/envs/sensor_interface.py
HumanCompatibleAI/scenario_runner
6ad946762ffc91d9cf30455045d7cc2f1de57ea1
[ "MIT" ]
null
null
null
srunner/challenge/envs/sensor_interface.py
HumanCompatibleAI/scenario_runner
6ad946762ffc91d9cf30455045d7cc2f1de57ea1
[ "MIT" ]
1
2020-11-27T02:01:49.000Z
2020-11-27T02:01:49.000Z
import copy import logging import numpy as np import os import time from threading import Thread import carla class Speedometer(object): """ Speed pseudo sensor that gets the current speed of the vehicle. This sensor is not placed at the CARLA environment. It is only an asynchronous interface to the forward speed. """ def _get_forward_speed(self): """ Convert the vehicle transform directly to forward speed """ velocity = self._vehicle.get_velocity() transform = self._vehicle.get_transform() vel_np = np.array([velocity.x, velocity.y, velocity.z]) pitch = np.deg2rad(transform.rotation.pitch) yaw = np.deg2rad(transform.rotation.yaw) orientation = np.array([np.cos(pitch) * np.cos(yaw), np.cos(pitch) * np.sin(yaw), np.sin(pitch)]) speed = np.dot(vel_np, orientation) return speed @threaded
35.126214
105
0.615395
import copy import logging import numpy as np import os import time from threading import Thread import carla def threaded(fn): def wrapper(*args, **kwargs): thread = Thread(target=fn, args=args, kwargs=kwargs) thread.setDaemon(True) thread.start() return thread return wrapper class HDMapMeasurement(object): def __init__(self, data, frame_number): self.data = data self.frame_number = frame_number class HDMapReader(object): def __init__(self, vehicle, reading_frequency=1.0): self._vehicle = vehicle self._reading_frequency = reading_frequency self._CARLA_ROOT = os.getenv('CARLA_ROOT', "./") self._callback = None self._frame_number = 0 self._run_ps = True self.run() def __call__(self): map_name = os.path.basename(self._vehicle.get_world().get_map().name) transform = self._vehicle.get_transform() return {'map_file': "{}/HDMaps/{}.ply".format(self._CARLA_ROOT, map_name), 'transform': {'x': transform.location.x, 'y': transform.location.y, 'z': transform.location.z, 'yaw': transform.rotation.yaw, 'pitch': transform.rotation.pitch, 'roll': transform.rotation.roll} } @threaded def run(self): latest_read = time.time() while self._run_ps: if self._callback is not None: capture = time.time() if capture - latest_read > (1 / self._reading_frequency): self._callback(HDMapMeasurement(self.__call__(), self._frame_number)) self._frame_number += 1 latest_read = time.time() else: time.sleep(0.001) def listen(self, callback): # Tell that this function receives what the producer does. self._callback = callback def destroy(self): self._run_ps = False class SpeedMeasurement(object): def __init__(self, data, frame_number): self.data = data self.frame_number = frame_number class Speedometer(object): """ Speed pseudo sensor that gets the current speed of the vehicle. This sensor is not placed at the CARLA environment. It is only an asynchronous interface to the forward speed. 
""" def __init__(self, vehicle, reading_frequency): # The vehicle where the class reads the speed self._vehicle = vehicle # How often do you look at your speedometer in hz self._reading_frequency = reading_frequency self._callback = None # Counts the frames self._frame_number = 0 self._run_ps = True self.produce_speed() def _get_forward_speed(self): """ Convert the vehicle transform directly to forward speed """ velocity = self._vehicle.get_velocity() transform = self._vehicle.get_transform() vel_np = np.array([velocity.x, velocity.y, velocity.z]) pitch = np.deg2rad(transform.rotation.pitch) yaw = np.deg2rad(transform.rotation.yaw) orientation = np.array([np.cos(pitch) * np.cos(yaw), np.cos(pitch) * np.sin(yaw), np.sin(pitch)]) speed = np.dot(vel_np, orientation) return speed @threaded def produce_speed(self): latest_speed_read = time.time() while self._run_ps: if self._callback is not None: capture = time.time() if capture - latest_speed_read > (1 / self._reading_frequency): self._callback(SpeedMeasurement(self._get_forward_speed(), self._frame_number)) self._frame_number += 1 latest_speed_read = time.time() else: time.sleep(0.001) def listen(self, callback): # Tell that this function receives what the producer does. 
self._callback = callback def destroy(self): self._run_ps = False class CallBack(object): def __init__(self, tag, sensor, data_provider): self._tag = tag self._data_provider = data_provider self._data_provider.register_sensor(tag, sensor) def __call__(self, data): if isinstance(data, carla.Image): self._parse_image_cb(data, self._tag) elif isinstance(data, carla.LidarMeasurement): self._parse_lidar_cb(data, self._tag) elif isinstance(data, carla.GnssEvent): self._parse_gnss_cb(data, self._tag) elif isinstance(data, SpeedMeasurement): self._parse_speedometer(data, self._tag) elif isinstance(data, HDMapMeasurement): self._parse_hdmap(data, self._tag) else: logging.error('No callback method for this sensor.') def _parse_image_cb(self, image, tag): array = np.frombuffer(image.raw_data, dtype=np.dtype("uint8")) array = copy.deepcopy(array) array = np.reshape(array, (image.height, image.width, 4)) array = array[:, :, :3] array = array[:, :, ::-1] self._data_provider.update_sensor(tag, array, image.frame_number) def _parse_lidar_cb(self, lidar_data, tag): points = np.frombuffer(lidar_data.raw_data, dtype=np.dtype('f4')) points = copy.deepcopy(points) points = np.reshape(points, (int(points.shape[0] / 3), 3)) self._data_provider.update_sensor(tag, points, lidar_data.frame_number) def _parse_gnss_cb(self, gnss_data, tag): array = np.array([gnss_data.latitude, gnss_data.longitude, gnss_data.altitude], dtype=np.float32) self._data_provider.update_sensor(tag, array, gnss_data.frame_number) def _parse_speedometer(self, speed, tag): self._data_provider.update_sensor(tag, speed.data, speed.frame_number) def _parse_hdmap(self, hd_package, tag): self._data_provider.update_sensor(tag, hd_package.data, hd_package.frame_number) class SensorInterface(object): def __init__(self): self._sensors_objects = {} self._data_buffers = {} self._timestamps = {} def register_sensor(self, tag, sensor): if tag in self._sensors_objects: raise ValueError("Duplicated sensor tag [{}]".format(tag)) 
self._sensors_objects[tag] = sensor self._data_buffers[tag] = None self._timestamps[tag] = -1 def update_sensor(self, tag, data, timestamp): if tag not in self._sensors_objects: raise ValueError("The sensor with tag [{}] has not been created!".format(tag)) self._data_buffers[tag] = data self._timestamps[tag] = timestamp def all_sensors_ready(self): for key in self._sensors_objects.keys(): if self._data_buffers[key] is None: return False return True def get_data(self): data_dict = {} for key in self._sensors_objects.keys(): data_dict[key] = (self._timestamps[key], copy.deepcopy(self._data_buffers[key])) return data_dict
5,528
184
619
c4b292dfdc7b1ee1ab1dec65306d4c3d376b6785
2,879
py
Python
api/staticdata/control_list_entries/helpers.py
django-doctor/lite-api
1ba278ba22ebcbb977dd7c31dd3701151cd036bf
[ "MIT" ]
null
null
null
api/staticdata/control_list_entries/helpers.py
django-doctor/lite-api
1ba278ba22ebcbb977dd7c31dd3701151cd036bf
[ "MIT" ]
null
null
null
api/staticdata/control_list_entries/helpers.py
django-doctor/lite-api
1ba278ba22ebcbb977dd7c31dd3701151cd036bf
[ "MIT" ]
null
null
null
from api.core.exceptions import NotFoundError from api.staticdata.control_list_entries.models import ControlListEntry def get_clc_parent_nodes(rating): """ A control list entry can be a group entry or a child of a child entry. Given a rating, this function provides the list of all parent nodes in the chain. eg., ML1 -> ML1a, ML1b, ML1c, ML1d ML1b -> ML1b1, ML1b2 Given ML1b1, it returns [ML1, ML1b] """ parent_nodes = [] try: node = ControlListEntry.objects.get(rating=rating) except ControlListEntry.DoesNotExist: node = None if node and node.parent: parent_nodes.append(node.parent.rating) next_parent = get_clc_parent_nodes(node.parent.rating) parent_nodes.extend(next_parent) return parent_nodes def get_clc_child_nodes(group_rating): """ A control list entry can have children at multiple nodes. Given a group rating, this function provides the list of all child nodes in the chain. eg., ML1 -> ML1a, ML1b, ML1c, ML1d ML1b -> ML1b1, ML1b2 Given ML1, it returns [ML1, ML1a, ML1b, ML1b1, ML1b2, ML1c, ML1d] """ child_nodes = [] try: node = ControlListEntry.objects.get(rating=group_rating) except ControlListEntry.DoesNotExist: node = None if node: if node.children.exists(): child_nodes.append(node.rating) for child in node.children.all(): next_children = get_clc_child_nodes(child.rating) child_nodes.extend(next_children) else: child_nodes.append(group_rating) return child_nodes
35.54321
115
0.686697
from api.core.exceptions import NotFoundError from api.staticdata.control_list_entries.models import ControlListEntry def get_control_list_entry(rating): try: return ControlListEntry.objects.get(rating=rating) except ControlListEntry.DoesNotExist: raise NotFoundError({"control_list_entry": f"'{rating}' - Control list entry not found"}) def convert_control_list_entries_to_tree(queryset=None): # custom queryset exists for testing purposes since it contains a number of random control codes otherwise data = queryset if queryset else ControlListEntry.objects.all().values() # Link children inside their parent object data_dict = {control_code["id"]: control_code for control_code in data} for control_code in data: # if a control code has a parent, we wish to add it to the parent's "children" if control_code["parent_id"]: parent = data_dict[control_code["parent_id"]] if "children" not in parent: parent["children"] = [] parent["children"].append(control_code) # Get a list of items which are are the ultimate parent in their tree. Ignoring any child objects in data_dict ultimate_parents_of_tree_list = [data_dict[data["id"]] for data in data_dict.values() if not data["parent_id"]] return ultimate_parents_of_tree_list def get_clc_parent_nodes(rating): """ A control list entry can be a group entry or a child of a child entry. Given a rating, this function provides the list of all parent nodes in the chain. eg., ML1 -> ML1a, ML1b, ML1c, ML1d ML1b -> ML1b1, ML1b2 Given ML1b1, it returns [ML1, ML1b] """ parent_nodes = [] try: node = ControlListEntry.objects.get(rating=rating) except ControlListEntry.DoesNotExist: node = None if node and node.parent: parent_nodes.append(node.parent.rating) next_parent = get_clc_parent_nodes(node.parent.rating) parent_nodes.extend(next_parent) return parent_nodes def get_clc_child_nodes(group_rating): """ A control list entry can have children at multiple nodes. Given a group rating, this function provides the list of all child nodes in the chain. 
eg., ML1 -> ML1a, ML1b, ML1c, ML1d ML1b -> ML1b1, ML1b2 Given ML1, it returns [ML1, ML1a, ML1b, ML1b1, ML1b2, ML1c, ML1d] """ child_nodes = [] try: node = ControlListEntry.objects.get(rating=group_rating) except ControlListEntry.DoesNotExist: node = None if node: if node.children.exists(): child_nodes.append(node.rating) for child in node.children.all(): next_children = get_clc_child_nodes(child.rating) child_nodes.extend(next_children) else: child_nodes.append(group_rating) return child_nodes
1,189
0
46
1e57494008d83982fbef0bcb09c70e1c20e983a5
2,934
py
Python
packtivity/kubernetes/kubedirectjobbackend.py
yadage/packtivity
e7020f549bb0933afc1f63c399ee0926d113c23c
[ "MIT" ]
1
2021-08-23T03:45:20.000Z
2021-08-23T03:45:20.000Z
packtivity/kubernetes/kubedirectjobbackend.py
lukasheinrich/packtivity
e7020f549bb0933afc1f63c399ee0926d113c23c
[ "MIT" ]
12
2019-07-23T09:13:53.000Z
2022-02-03T05:47:36.000Z
packtivity/kubernetes/kubedirectjobbackend.py
lukasheinrich/packtivity
e7020f549bb0933afc1f63c399ee0926d113c23c
[ "MIT" ]
2
2020-03-24T10:45:28.000Z
2020-05-30T05:49:34.000Z
import os import logging from .kubesubmitmixin import SubmitToKubeMixin from .kubespecmixin import KubeSpecMixin log = logging.getLogger(__name__)
34.517647
83
0.522154
import os import logging from .kubesubmitmixin import SubmitToKubeMixin from .kubespecmixin import KubeSpecMixin log = logging.getLogger(__name__) class KubernetesDirectJobBackend(SubmitToKubeMixin, KubeSpecMixin): def __init__(self, **kwargs): SubmitToKubeMixin.__init__(self, **kwargs) KubeSpecMixin.__init__(self, **kwargs) self.state_type = kwargs.get("state_type", "claim") self.collapse_state = kwargs.get("collapse_state", False) self.mount_propagation = kwargs.get("mount_propagation", "None") self.claim_name = kwargs.get("claim_name", "yadagedata") self.base = ( os.path.realpath( kwargs.get( "path_base", os.environ.get("YADAGE_K8S_PATHBASE", os.getcwd()) ) ) + "/" ) def state_mounts_and_vols(self, jobspec): container_mounts_state, volumes_state = [], [] if self.collapse_state: ctrmnt = { "name": "state", "mountPath": self.base.rstrip("/"), "mountPropagation": self.mount_propagation, } container_mounts_state.append(ctrmnt) else: for i, ro in enumerate(jobspec["state"]["readonly"]): subpath = ro["path"].replace(self.base, "") ctrmnt = { "name": "state", "mountPath": ro["path"], "subPath": subpath, "mountPropagation": self.mount_propagation, } container_mounts_state.append(ctrmnt) for i, rw in enumerate(jobspec["state"]["readwrite"]): subpath = rw["path"].replace(self.base, "") ctrmnt = { "name": "state", "mountPath": rw["path"], "subPath": subpath, "mountPropagation": self.mount_propagation, } container_mounts_state.append(ctrmnt) if self.state_type == "claim": volumes_state.append( { "name": "state", "persistentVolumeClaim": { "claimName": self.claim_name, "readOnly": False, }, } ) elif self.state_type == "hostPath": volumes_state.append( {"name": "state", "hostPath": {"path": self.claim_name}} ) else: raise RuntimeError("unknown state type %s", self.state_type) return container_mounts_state, volumes_state def proxy_data(self, job_uuid, kube_resources): jobname = "{}-job-{}".format(self.resource_prefix, job_uuid) return {"job_id": jobname, "resources": 
kube_resources} def config(self, job_uuid, jobspec): return [], [], []
2,609
46
130
54c4df4909919029afc5df010166e50c34ded11a
1,612
py
Python
spotty/providers/aws/config/instance_config.py
vexcel-data/spotty
f6d56129a088908a5b8dcf303867af85e3bdf325
[ "MIT" ]
null
null
null
spotty/providers/aws/config/instance_config.py
vexcel-data/spotty
f6d56129a088908a5b8dcf303867af85e3bdf325
[ "MIT" ]
null
null
null
spotty/providers/aws/config/instance_config.py
vexcel-data/spotty
f6d56129a088908a5b8dcf303867af85e3bdf325
[ "MIT" ]
null
null
null
from typing import Optional from spotty.config.abstract_instance_config import AbstractInstanceConfig from spotty.providers.aws.config.validation import validate_instance_parameters VOLUME_TYPE_EBS = 'ebs' VOLUME_TYPE_EFS = 'efs' DEFAULT_AMI_NAME = 'SpottyAMI'
24.8
87
0.675558
from typing import Optional from spotty.config.abstract_instance_config import AbstractInstanceConfig from spotty.providers.aws.config.validation import validate_instance_parameters VOLUME_TYPE_EBS = 'ebs' VOLUME_TYPE_EFS = 'efs' DEFAULT_AMI_NAME = 'SpottyAMI' class InstanceConfig(AbstractInstanceConfig): def __init__(self, config: dict): super().__init__(config) self._params = validate_instance_parameters(self._params) @property def region(self) -> str: return self._params['region'] @property def availability_zone(self) -> str: return self._params['availabilityZone'] @property def subnet_id(self) -> str: return self._params['subnetId'] @property def instance_type(self) -> str: return self._params['instanceType'] @property def on_demand(self) -> bool: return self._params['onDemandInstance'] @property def ami_name(self) -> str: return self._params['amiName'] if self._params['amiName'] else DEFAULT_AMI_NAME @property def has_ami_name(self) -> bool: return bool(self._params['amiName']) @property def ami_id(self) -> str: return self._params['amiId'] @property def root_volume_size(self) -> int: return self._params['rootVolumeSize'] @property def max_price(self) -> float: return self._params['maxPrice'] @property def managed_policy_arns(self) -> list: return self._params['managedPolicyArns'] @property def bucket(self) -> Optional[str]: return self._params['bucket']
782
543
23
8bcf5fb87a306555240cbdbc68c6a26cf55b9ed8
14,113
py
Python
django/sierra/api/models.py
unt-libraries/catalog-api
9189a6cc64305a6ead4b95ca0b56d7b7ae6c87d5
[ "BSD-3-Clause" ]
19
2016-03-16T22:08:28.000Z
2020-08-28T15:54:33.000Z
django/sierra/api/models.py
unt-libraries/catalog-api
9189a6cc64305a6ead4b95ca0b56d7b7ae6c87d5
[ "BSD-3-Clause" ]
39
2016-02-26T14:35:12.000Z
2021-03-19T22:04:20.000Z
django/sierra/api/models.py
unt-libraries/catalog-api
9189a6cc64305a6ead4b95ca0b56d7b7ae6c87d5
[ "BSD-3-Clause" ]
9
2016-02-29T22:11:56.000Z
2020-06-15T06:15:44.000Z
""" Contains models for api app. """ import hashlib import importlib import csv import re import ujson from django.conf import settings from django.core.exceptions import ObjectDoesNotExist from django.db import models, transaction, IntegrityError from django.contrib.auth.models import User from django.contrib.auth import authenticate def get_permission_defaults_from_apps(): """ Return all valid permissions and their default values as a dict. Gathers permissions from apps that are configured in settings.API_PERMISSIONS. Apps that want to contribute permissions to the APIUser model must have a `permissions` module that supplies a DEFAULTS dict. """ permission_defaults = {} for app_name in settings.API_PERMISSIONS: perm = importlib.import_module('{}.permissions'.format(app_name)) permission_defaults.update(perm.DEFAULTS) return permission_defaults def remove_null_kwargs(**kwargs): """ Return a kwargs dict having items with a None value removed. """ return {k: v for k, v in kwargs.items() if v is not None} class APIUser(models.Model): """ Provides fields/features for secrets and permissions. """ user = models.OneToOneField(User) secret = models.CharField(max_length=128) permissions = models.TextField(default='{}') permission_defaults = get_permission_defaults_from_apps() objects = APIUserManager() def __init__(self, *args, **kwargs): """ When an APIUser object is initialized, the `secret` and `permissions` fields may be set via a `secret_text` and `permissions_dict` kwarg, respectively. `secret_text` is the user-readable text of the secret, which is encoded to produce the `secret` field value. `permissions_dict` is a Python dict w/permissions to override the defaults. 
""" pdict = ujson.decode(kwargs.pop('permissions', '{}')) or {} pdict.update(kwargs.pop('permissions_dict', {}) or {}) secret_text = kwargs.pop('secret_text', None) super(APIUser, self).__init__(*args, **kwargs) self.update_permissions(pdict) if not self.secret and secret_text is not None: self.secret = self.encode_secret(secret_text) def save(self, *args, **kwargs): """ An APIUser MUST have a `secret` and a `user` relation, and the user must have a non-blank username, before the APIUser obj is saved. Otherwise, an APIUserException is raised. """ msg = '' if not self.secret: msg = 'APIUser obj cannot be saved without a `secret`.' else: try: self.user except User.DoesNotExist: msg = 'APIUser obj cannot be saved without a related user.' else: if not self.user.username: msg = ('APIUser obj cannot be save if the related user ' 'has a blank `username`.') if msg: raise APIUserException(msg) super(APIUser, self).save(*args, **kwargs) @transaction.atomic def update_and_save(self, secret_text=None, permissions_dict=None, password=None, email=None, first_name=None, last_name=None): """ Update AND SAVE an existing APIUser with any or all new values. For any of `email`, `password`, `first_name`, and `last_name`, the related User object is updated with the appropriate value(s). """ kwargs = remove_null_kwargs(password=password, email=email, first_name=first_name, last_name=last_name) for field, value in kwargs.items(): if field == 'password': self.user.set_password(value) else: setattr(self.user, field, value) if secret_text is not None: self.secret = self.encode_secret(secret_text) if permissions_dict: self.update_permissions(permissions_dict) self.user.save() self.save() return self def update_permissions(self, permissions_dict): """ Update certain permissions' values via a `permissions_dict`. The passed `permissions_dict` is a dictionary with key-value pairs that set particular permissions (keys) to specific bool values. 
Names for the permissions that are set MUST exist in cls.permission_defaults. An APIUserException is raised if an unexpected permission is encountered. Returns a dictionary of all user permissions after the update. """ permissions = type(self).permission_defaults.copy() permissions.update(ujson.decode(self.permissions)) for pname, pvalue in permissions_dict.items(): if not isinstance(pvalue, bool): msg = ('Permission values must be set to a boolean True or ' 'False. "{}" is not valid.').format(pvalue) raise APIUserException(msg) if pname in permissions: permissions[pname] = pvalue else: msg = 'Permission `{}` is not valid.'.format(pname) raise APIUserException(msg) self.permissions = ujson.encode(permissions) return permissions def set_permissions_to_value(self, permissions, value): """ Set certain permissions to the given `value` for this APIUser. `permissions` is a list of permissions to set to the supplied boolean value. Returns a dictionary of all user permissions after the update. """ return self.update_permissions({pname: value for pname in permissions}) def set_all_permissions_to_value(self, value): """ Set ALL permissions for this APIUser to the given value. Returns a dictionary of all user permissions after the update. """ permissions = self.permission_defaults.keys() return self.set_permissions_to_value(permissions, value) @staticmethod def encode_secret(secret, hash_type='sha256'): """ Make and return a `secret` string for this APIUser. `hash_type` should be a string representing the hashing algorithm to use: md5, sha1, sha224, sha256, sha384, sha512. """ valid_hashes = ['md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512'] if hash_type not in valid_hashes: raise APIUserException('Provided hash_type argument must be one ' 'of: {}'.format(', '.join(valid_hashes))) hasher = getattr(hashlib, hash_type)(secret) return hasher.hexdigest()
39.867232
79
0.61128
""" Contains models for api app. """ import hashlib import importlib import csv import re import ujson from django.conf import settings from django.core.exceptions import ObjectDoesNotExist from django.db import models, transaction, IntegrityError from django.contrib.auth.models import User from django.contrib.auth import authenticate class APIUserException(Exception): pass class UserExists(APIUserException): pass def get_permission_defaults_from_apps(): """ Return all valid permissions and their default values as a dict. Gathers permissions from apps that are configured in settings.API_PERMISSIONS. Apps that want to contribute permissions to the APIUser model must have a `permissions` module that supplies a DEFAULTS dict. """ permission_defaults = {} for app_name in settings.API_PERMISSIONS: perm = importlib.import_module('{}.permissions'.format(app_name)) permission_defaults.update(perm.DEFAULTS) return permission_defaults def remove_null_kwargs(**kwargs): """ Return a kwargs dict having items with a None value removed. """ return {k: v for k, v in kwargs.items() if v is not None} class APIUserManager(models.Manager): @staticmethod def _set_new_user_password(user, pw): un = user.username if pw is None: msg = ('APIUser for {} not created: Django user not found and not ' 'created. (You need to provide a password!)'.format(un)) raise APIUserException(msg) user.set_password(pw) user.save() @staticmethod def _existing_password_is_okay(user, pw): un = user.username return (pw is None) or (bool(authenticate(username=un, password=pw))) @staticmethod def _apiuser_already_exists(user): try: api_user = user.apiuser except ObjectDoesNotExist: return False return True @transaction.atomic def create_user(self, username, secret_text, permissions_dict=None, password=None, email=None, first_name=None, last_name=None): """ Create, save, and return a new APIUser object. 
If no Django user with the given username exists, it is created along with the APIUser, using the provided password, email, first_name, and last_name. (In this case at least a password must be provided. The other fields are optional.) If a Django user with the given username already exists but has no related APIUser, the APIUser is created and related to the existing user. In this case, if a password, email, first_name, or last_name are provided, then those are also matched when fetching the user. Any parameters you provide that are different than the ones in the database cause an error. If a Django user with the given username AND an associated APIUser already exist, a `UserExists` error is raised. """ kwargs = remove_null_kwargs(email=email, first_name=first_name, last_name=last_name) user_model = self.model._meta.get_field('user').related_model try: user, created = user_model.objects.get_or_create(username=username, **kwargs) if created: self._set_new_user_password(user, password) else: if not self._existing_password_is_okay(user, password): raise IntegrityError(1062, 'Attempted to create' 'duplicate user') elif self._apiuser_already_exists(user): msg = ('Could not create APIUser for Django user {}. ' 'APIUser already exists.'.format(username)) raise UserExists(msg) except IntegrityError as (ie_num, detail): if ie_num == 1062: detail = ('Existing Django user found, but it may not be ' 'the correct one. Its details do not match the ones ' 'supplied.') msg = ('Could not create APIUser for Django user {}. {}' ''.format(username, detail)) raise APIUserException(msg) api_user = self.model(user=user, secret_text=secret_text, permissions_dict=permissions_dict) api_user.save() return api_user def batch_import_users(self, user_records): """ Create and/or update a list of APIUsers in one batch operation. APIUsers are created (if they do not already exist) or updated (if they already exist) based on the provided `user_records` arg. 
Returns a tuple: (created, updated, errors), where: `errors` is a list of (exception, record) tuples, `created` is a list of APIUser objs that were created, and `updated` is a list of APIUser objs that were updated. The `user_records` arg should be a list of dictionaries, where each dict contains a `username` plus optional `secret_text`, `permissions_dict`, `password`, `email`, `first_name`, and `last_name` elements. A username is of course required, and a secret and password are required if the APIUser is being created; otherwise, elements that are either set to None or not included at all are not set or changed. E.g., if you wanted to update secrets for a list of existing APIUsers, you could provide ONLY the `username` and `secret_text` for each. A dict with one or more elements NOT in the ones listed above (and not a valid permission key) is an error: the element may be a misnamed field or an extra field. """ created, updated, errors = [], [], [] kwarg_names = ('permissions_dict', 'email', 'first_name', 'last_name', 'password') valid_fields = set(('secret_text', 'username') + kwarg_names) for i, record in enumerate(user_records): secret_text = record.get('secret_text', None) kwargs = {k: record.get(k, None) for k in kwarg_names} username = record.get('username', None) unknown_fields = tuple(set(record.keys()) - valid_fields) try: if unknown_fields: msg = 'Unknown fields in record: {}'.format(unknown_fields) raise(APIUserException(msg)) try: au = self.get(user__username=username) except ObjectDoesNotExist: au = self.create_user(username, secret_text, **kwargs) created.append(au) else: au.update_and_save(secret_text, **kwargs) updated.append(au) except APIUserException as e: errors.append((i+1, record, e)) return (created, updated, errors) def table_to_batch(self, table): """ Convert a list of data rows to an APIUser import batch. 
Pass an iterable object containing rows of tabular APIUser data fields and get a list of dictionaries (a "batch") suitable for passing into the `batch_import_users` method. The first row of the iterable object should contain column names. Columns should be named so that they can be converted directly to dict keys; permissions should use the permission key as the column name and a string ('true' or 'false') as the value. """ def _str_to_bool(string): return False if re.match(r'([Ff]|0+$|$)', string) else True permission_columns = self.model.permission_defaults.keys() rows = (r for r in table) colnames = rows.next() user_records = [] for row in rows: rec = {col: row[i] for i, col in enumerate(colnames)} perm_dict = {c: _str_to_bool(rec.pop(c)) for c in permission_columns if c in rec} if perm_dict: rec['permissions_dict'] = perm_dict user_records.append(rec) return user_records class APIUser(models.Model): """ Provides fields/features for secrets and permissions. """ user = models.OneToOneField(User) secret = models.CharField(max_length=128) permissions = models.TextField(default='{}') permission_defaults = get_permission_defaults_from_apps() objects = APIUserManager() def __init__(self, *args, **kwargs): """ When an APIUser object is initialized, the `secret` and `permissions` fields may be set via a `secret_text` and `permissions_dict` kwarg, respectively. `secret_text` is the user-readable text of the secret, which is encoded to produce the `secret` field value. `permissions_dict` is a Python dict w/permissions to override the defaults. 
""" pdict = ujson.decode(kwargs.pop('permissions', '{}')) or {} pdict.update(kwargs.pop('permissions_dict', {}) or {}) secret_text = kwargs.pop('secret_text', None) super(APIUser, self).__init__(*args, **kwargs) self.update_permissions(pdict) if not self.secret and secret_text is not None: self.secret = self.encode_secret(secret_text) def save(self, *args, **kwargs): """ An APIUser MUST have a `secret` and a `user` relation, and the user must have a non-blank username, before the APIUser obj is saved. Otherwise, an APIUserException is raised. """ msg = '' if not self.secret: msg = 'APIUser obj cannot be saved without a `secret`.' else: try: self.user except User.DoesNotExist: msg = 'APIUser obj cannot be saved without a related user.' else: if not self.user.username: msg = ('APIUser obj cannot be save if the related user ' 'has a blank `username`.') if msg: raise APIUserException(msg) super(APIUser, self).save(*args, **kwargs) @transaction.atomic def update_and_save(self, secret_text=None, permissions_dict=None, password=None, email=None, first_name=None, last_name=None): """ Update AND SAVE an existing APIUser with any or all new values. For any of `email`, `password`, `first_name`, and `last_name`, the related User object is updated with the appropriate value(s). """ kwargs = remove_null_kwargs(password=password, email=email, first_name=first_name, last_name=last_name) for field, value in kwargs.items(): if field == 'password': self.user.set_password(value) else: setattr(self.user, field, value) if secret_text is not None: self.secret = self.encode_secret(secret_text) if permissions_dict: self.update_permissions(permissions_dict) self.user.save() self.save() return self def update_permissions(self, permissions_dict): """ Update certain permissions' values via a `permissions_dict`. The passed `permissions_dict` is a dictionary with key-value pairs that set particular permissions (keys) to specific bool values. 
Names for the permissions that are set MUST exist in cls.permission_defaults. An APIUserException is raised if an unexpected permission is encountered. Returns a dictionary of all user permissions after the update. """ permissions = type(self).permission_defaults.copy() permissions.update(ujson.decode(self.permissions)) for pname, pvalue in permissions_dict.items(): if not isinstance(pvalue, bool): msg = ('Permission values must be set to a boolean True or ' 'False. "{}" is not valid.').format(pvalue) raise APIUserException(msg) if pname in permissions: permissions[pname] = pvalue else: msg = 'Permission `{}` is not valid.'.format(pname) raise APIUserException(msg) self.permissions = ujson.encode(permissions) return permissions def set_permissions_to_value(self, permissions, value): """ Set certain permissions to the given `value` for this APIUser. `permissions` is a list of permissions to set to the supplied boolean value. Returns a dictionary of all user permissions after the update. """ return self.update_permissions({pname: value for pname in permissions}) def set_all_permissions_to_value(self, value): """ Set ALL permissions for this APIUser to the given value. Returns a dictionary of all user permissions after the update. """ permissions = self.permission_defaults.keys() return self.set_permissions_to_value(permissions, value) @staticmethod def encode_secret(secret, hash_type='sha256'): """ Make and return a `secret` string for this APIUser. `hash_type` should be a string representing the hashing algorithm to use: md5, sha1, sha224, sha256, sha384, sha512. """ valid_hashes = ['md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512'] if hash_type not in valid_hashes: raise APIUserException('Provided hash_type argument must be one ' 'of: {}'.format(', '.join(valid_hashes))) hasher = getattr(hashlib, hash_type)(secret) return hasher.hexdigest() class Meta: app_label = 'api'
655
6,552
96
088bf1e3b31b8ef5be18cdddd2324a6bd339deae
297
py
Python
src/validators.py
catzilla-007/Gengo-SSE-Coding-Test
a4eb0a49e09ed6d243847242e9fa0097908e650a
[ "MIT" ]
null
null
null
src/validators.py
catzilla-007/Gengo-SSE-Coding-Test
a4eb0a49e09ed6d243847242e9fa0097908e650a
[ "MIT" ]
null
null
null
src/validators.py
catzilla-007/Gengo-SSE-Coding-Test
a4eb0a49e09ed6d243847242e9fa0097908e650a
[ "MIT" ]
null
null
null
from .errors import InvalidInputError
33
67
0.73064
from .errors import InvalidInputError


def validate_palindrome_character(palindrome: str) -> None:
    """Validate that *palindrome* is a non-empty string.

    Raises:
        InvalidInputError: if the value is not a ``str`` or is empty.
    """
    if not isinstance(palindrome, str):
        raise InvalidInputError('Input should be a string')
    if not palindrome:
        raise InvalidInputError('Characters should be one or more')
235
0
23
19a62c19b001126691f6e415ed9dc44c7a8d1b31
957
py
Python
n_gram.py
Qin-Folks/vgae_pytorch
392dc3a8c2a5d47cbcf6c5571b5878dc4eae56b1
[ "MIT" ]
1
2021-07-01T17:34:20.000Z
2021-07-01T17:34:20.000Z
n_gram.py
Qin-Folks/vgae_pytorch
392dc3a8c2a5d47cbcf6c5571b5878dc4eae56b1
[ "MIT" ]
null
null
null
n_gram.py
Qin-Folks/vgae_pytorch
392dc3a8c2a5d47cbcf6c5571b5878dc4eae56b1
[ "MIT" ]
null
null
null
import torch
28.147059
74
0.601881
import torch


def get_gram_graph_embedding(node_attrs, adj, is_soft):
    """Embed a graph as the per-node sums of 1- to 3-step attribute-weighted walks.

    node_attrs: (num_nodes, feat) node attribute matrix.
    adj: adjacency matrix, at least num_nodes x num_nodes; soft (continuous)
         values when ``is_soft`` is True, otherwise used as-is.
    Returns a (num_nodes, 3) matrix: column k holds the row-sum after k
    adjacency/attribute multiplication steps.
    """
    n = node_attrs.shape[0]
    assert n <= adj.shape[0]

    if is_soft:
        # Sharpen the soft adjacency, then binarise it with a
        # straight-through trick: hard 0/1 values on the forward pass,
        # gradients flowing through the soft values on the backward pass.
        scaled = adj / 0.5
        hard = (scaled > 0.5).int()
        adj = ((hard - scaled).detach() + scaled)[:n, :n]
    adj = adj[:n, :n]

    walk = node_attrs
    step_sums = [torch.sum(walk, dim=1)]
    for _ in range(2):
        # One more hop, re-weighted elementwise by the node attributes.
        walk = torch.mm(adj, walk) * node_attrs
        step_sums.append(torch.sum(walk, dim=1))
    # One row per node, one column per walk length (1..3).
    return torch.stack(step_sums, dim=1)
921
0
23
f9d31a78e22b20491ae1cf00aa085332a44b4b14
1,050
py
Python
src/scraping/articlescraper/spiders/fd-sitemap.py
NZR/PublicDiscourseMiner-COVID
e0246852db65eb9fd5058dfb0040e7d02c503931
[ "MIT" ]
null
null
null
src/scraping/articlescraper/spiders/fd-sitemap.py
NZR/PublicDiscourseMiner-COVID
e0246852db65eb9fd5058dfb0040e7d02c503931
[ "MIT" ]
null
null
null
src/scraping/articlescraper/spiders/fd-sitemap.py
NZR/PublicDiscourseMiner-COVID
e0246852db65eb9fd5058dfb0040e7d02c503931
[ "MIT" ]
1
2021-08-03T12:04:33.000Z
2021-08-03T12:04:33.000Z
from time import sleep import scrapy from scrapy import Request # usage: `scrapy crawl fd-sitemap-css -o ./sitemapURLs/fdlinks.json`
40.384615
160
0.621905
from time import sleep

import scrapy
from scrapy import Request


# usage: `scrapy crawl fd-sitemap-css -o ./sitemapURLs/fdlinks.json`
class FdSitemapSpider(scrapy.Spider):
    """Collect article URLs from fd.nl search-result pages for a tag query."""

    name = "fd-sitemap-css"

    # 47 search-result pages for this date range / tag query.
    start_urls = [
        'https://fd.nl/search?customPeriod.start=2020-04-01&customPeriod.end=2020-12-13&period=custom-period&tags=Coronavirus&page=' + str(page)
        for page in range(1, 48)
    ]

    def start_requests(self):
        for url in self.start_urls:
            # Add your FD cookie here
            yield Request(url, cookies={'FDSSO': 'coockie_value', "gdpr-dau": "true"})

    def parse(self, response):
        sleep(0.5)  # throttle between result pages
        for card in response.css(".fd-horizontal-card-3"):
            yield {
                'url': card.css("a::attr(href)").extract_first()
            }
        # NOTE(review): ".fd-horizontal-card-3 long-read" is a *descendant*
        # selector; if long-read cards carry both classes on one element,
        # ".fd-horizontal-card-3.long-read" may have been intended — confirm.
        for card in response.css(".fd-horizontal-card-3 long-read"):
            yield {
                'url': card.css("a::attr(href)").extract_first()
            }
547
348
22
92dbcfabdb0f0af2b55ea72ec3c7023368b9e376
1,772
py
Python
src/waldur_rancher/migrations/0019_settings_non_null.py
geant-multicloud/MCMS-mastermind
81333180f5e56a0bc88d7dad448505448e01f24e
[ "MIT" ]
26
2017-10-18T13:49:58.000Z
2021-09-19T04:44:09.000Z
src/waldur_rancher/migrations/0019_settings_non_null.py
geant-multicloud/MCMS-mastermind
81333180f5e56a0bc88d7dad448505448e01f24e
[ "MIT" ]
14
2018-12-10T14:14:51.000Z
2021-06-07T10:33:39.000Z
src/waldur_rancher/migrations/0019_settings_non_null.py
geant-multicloud/MCMS-mastermind
81333180f5e56a0bc88d7dad448505448e01f24e
[ "MIT" ]
32
2017-09-24T03:10:45.000Z
2021-10-16T16:41:09.000Z
# Generated by Django 2.2.10 on 2020-03-19 15:29 import django.db.models.deletion from django.db import migrations, models
29.533333
60
0.517494
# Generated by Django 2.2.10 on 2020-03-19 15:29
import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):
    """Re-declare the ``settings`` FK on every waldur_rancher model."""

    dependencies = [
        ('waldur_rancher', '0018_template_icon'),
    ]

    # All five models receive an identical FK to structure.ServiceSettings
    # (CASCADE delete, no reverse accessor) — build the ops in one pass.
    operations = [
        migrations.AlterField(
            model_name=rancher_model,
            name='settings',
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE,
                related_name='+',
                to='structure.ServiceSettings',
            ),
        )
        for rancher_model in (
            'catalog',
            'cluster',
            'namespace',
            'project',
            'template',
        )
    ]
0
1,624
23
20d5a6a14d18d0266795a549eef19fc38dfca6eb
355
py
Python
catops/parser.py
BBOXX/CatOps
c8d8fcc8d5d8d809f75f5663265dd213260484a7
[ "MIT" ]
2
2018-07-12T15:11:59.000Z
2018-07-14T19:10:58.000Z
catops/parser.py
BBOXX/CatOps
c8d8fcc8d5d8d809f75f5663265dd213260484a7
[ "MIT" ]
3
2018-08-10T15:54:02.000Z
2018-08-31T09:45:43.000Z
catops/parser.py
BBOXX/CatOps
c8d8fcc8d5d8d809f75f5663265dd213260484a7
[ "MIT" ]
null
null
null
"""Custom parser which raises an error instead of exiting.""" import argparse class ArgumentParserError(Exception): """Error raised by ArgumentParser""" pass class CatParser(argparse.ArgumentParser): """Overrides error method to throw an error instead of exiting"""
25.357143
69
0.732394
"""Custom parser which raises an error instead of exiting.""" import argparse class ArgumentParserError(Exception): """Error raised by ArgumentParser""" pass class CatParser(argparse.ArgumentParser): """Overrides error method to throw an error instead of exiting""" def error(self, message): raise ArgumentParserError(message)
47
0
26
6ff8fd27d66086bf4c8e1ed8affbd6442d82cf21
59
py
Python
md_click/__init__.py
shalgrim/md-click
fdf4348c83143e157047a0d53a63d79db51ac0cb
[ "BSD-3-Clause" ]
3
2021-03-15T18:50:16.000Z
2022-01-17T11:43:31.000Z
md_click/__init__.py
shalgrim/md-click
fdf4348c83143e157047a0d53a63d79db51ac0cb
[ "BSD-3-Clause" ]
1
2022-03-31T07:52:18.000Z
2022-03-31T07:52:18.000Z
md_click/__init__.py
shalgrim/md-click
fdf4348c83143e157047a0d53a63d79db51ac0cb
[ "BSD-3-Clause" ]
1
2021-06-21T21:45:28.000Z
2021-06-21T21:45:28.000Z
from .main import cli __version__ = 1.0 __all__ = ['cli']
11.8
21
0.677966
from .main import cli

# Package version. Kept as a string: packaging tooling (PEP 396,
# importlib.metadata, setuptools) expects str, and a float cannot
# express versions such as "1.0.1".
__version__ = "1.0"

# Public API of the package.
__all__ = ['cli']
0
0
0
1c8ebe3b0e41f7a4b51bbb4e0dac81d2f5bca3f1
648
py
Python
leetcode/475.py
windniw/just-for-fun
54e5c2be145f3848811bfd127f6a89545e921570
[ "Apache-2.0" ]
1
2019-08-28T23:15:25.000Z
2019-08-28T23:15:25.000Z
leetcode/475.py
windniw/just-for-fun
54e5c2be145f3848811bfd127f6a89545e921570
[ "Apache-2.0" ]
null
null
null
leetcode/475.py
windniw/just-for-fun
54e5c2be145f3848811bfd127f6a89545e921570
[ "Apache-2.0" ]
null
null
null
""" link: https://leetcode-cn.com/problems/heaters problem: 给定数轴上若干A类型的点,再给B类型的点,问以所有B为圆心画圆,最小半径是多少可以覆盖所有的A solution: 二分。所有A点由其最近的B负责覆盖,结果为此集合中的最大值。 """
27
71
0.544753
""" link: https://leetcode-cn.com/problems/heaters problem: 给定数轴上若干A类型的点,再给B类型的点,问以所有B为圆心画圆,最小半径是多少可以覆盖所有的A solution: 二分。所有A点由其最近的B负责覆盖,结果为此集合中的最大值。 """ class Solution: def findRadius(self, houses: List[int], heaters: List[int]) -> int: houses.sort() heaters.sort() res = 0 for house in houses: p = bisect.bisect_left(heaters, house) a, b = float("inf"), float("inf") if p != len(heaters): a = max(heaters[p] - house, res) if p != 0: b = max(house - heaters[p - 1], res) res = max(res, min(a, b)) return res
449
-6
48