hexsha
stringlengths 40
40
| size
int64 5
2.06M
| ext
stringclasses 10
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 3
248
| max_stars_repo_name
stringlengths 5
125
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
248
| max_issues_repo_name
stringlengths 5
125
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
67k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
248
| max_forks_repo_name
stringlengths 5
125
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 5
2.06M
| avg_line_length
float64 1
1.02M
| max_line_length
int64 3
1.03M
| alphanum_fraction
float64 0
1
| count_classes
int64 0
1.6M
| score_classes
float64 0
1
| count_generators
int64 0
651k
| score_generators
float64 0
1
| count_decorators
int64 0
990k
| score_decorators
float64 0
1
| count_async_functions
int64 0
235k
| score_async_functions
float64 0
1
| count_documentation
int64 0
1.04M
| score_documentation
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0b729c6af7d440093ab2706bff962e7602e418a9
| 1,327
|
py
|
Python
|
integration/phore/tests/shardsynctest.py
|
phoreproject/synapse
|
77d10ca2eb7828ca9f7c8e29b72a73cf2c07f954
|
[
"MIT"
] | 9
|
2018-09-30T18:56:26.000Z
|
2019-10-30T23:09:07.000Z
|
integration/phore/tests/shardsynctest.py
|
phoreproject/synapse
|
77d10ca2eb7828ca9f7c8e29b72a73cf2c07f954
|
[
"MIT"
] | 102
|
2018-11-09T16:17:59.000Z
|
2020-11-04T19:06:01.000Z
|
integration/phore/tests/shardsynctest.py
|
phoreproject/graphene
|
77d10ca2eb7828ca9f7c8e29b72a73cf2c07f954
|
[
"MIT"
] | 5
|
2018-11-05T14:29:24.000Z
|
2020-06-08T19:26:05.000Z
|
import logging
from phore.framework import tester, validatornode, shardnode
from phore.pb import common_pb2
class ShardSyncTest(tester.Tester):
    """Integration test: a second shard node connected to an already-running
    shard node must sync and reach slot 8."""

    def __init__(self):
        # NOTE(review): logging.info(logging.INFO) just logs the integer 20 —
        # this was presumably meant to be logging.basicConfig(level=logging.INFO);
        # confirm before changing.
        logging.info(logging.INFO)
        super().__init__()

    def _do_run(self):
        # One beacon node, started and reachable over RPC.
        beacon_nodes = [self.create_beacon_node() for _ in range(1)]
        beacon_nodes[0].start()
        beacon_nodes[0].wait_for_rpc()
        # Two shard nodes configured from the same beacon, both tracking shard '1'.
        shard_node_configs = [shardnode.ShardConfig.from_beacon(beacon_nodes[0]) for _ in range(2)]
        shard_nodes = []
        for c in shard_node_configs:
            c.initial_shards = ['1']
            shard_nodes.append(self.create_shard_node(c))
        shard_nodes[0].start()
        shard_nodes[0].wait_for_rpc()
        shard_nodes[1].start()
        shard_nodes[1].wait_for_rpc()
        # A validator covering committee range 0-255 drives block production.
        validator_node = self.create_validator_node(
            validatornode.ValidatorConfig.from_beacon_and_shard(beacon_nodes[0], shard_nodes[0], "0-255")
        )
        validator_node.start()
        validator_node.wait_for_rpc()
        # Let node 0 reach slot 4, then connect node 1 to it and require that
        # node 1 catches up to slot 8 via sync.
        shard_nodes[0].wait_for_slot(4, 1)
        shard_node_0_addr = shard_nodes[0].get_listening_addresses().Addresses[0]
        shard_nodes[1].connect(common_pb2.ConnectMessage(Address=shard_node_0_addr))
        shard_nodes[1].wait_for_slot(8, 1)
# Run the test immediately at module execution (no __main__ guard in original).
ex = ShardSyncTest()
ex.run()
| 26.54
| 105
| 0.667671
| 1,182
| 0.890731
| 0
| 0
| 0
| 0
| 0
| 0
| 10
| 0.007536
|
0b72b6b59c7098297806590340d0f99c8c866547
| 426
|
py
|
Python
|
chartconvert/mpp.py
|
e-sailing/avnav
|
b3e8df4d6fa122b05309eee09197c716e29b64ec
|
[
"MIT"
] | null | null | null |
chartconvert/mpp.py
|
e-sailing/avnav
|
b3e8df4d6fa122b05309eee09197c716e29b64ec
|
[
"MIT"
] | null | null | null |
chartconvert/mpp.py
|
e-sailing/avnav
|
b3e8df4d6fa122b05309eee09197c716e29b64ec
|
[
"MIT"
] | null | null | null |
#! /usr/bin/env python
#
# vim: ts=2 sw=2 et
#
"""Print meters-per-pixel (mpp) and map scale for web-mercator zoom
levels 0..30, for a display resolution given in dpi (default 100,
overridable via the first command-line argument).

Fix: converted Python-2-only ``print`` statements to the print()
function so the script runs on Python 3; output is byte-identical.
"""
import sys

inchpm = 39.3700  # display pixels per meter at 1 dpi (inches per meter)
dpi = 100
if len(sys.argv) > 1:
    dpi = int(sys.argv[1])

# Size of one display pixel in meters.
displaympp = 1 / (float(dpi) * inchpm)
print("display mpp=%f" % (displaympp,))

# mpp at zoom 0: full web-mercator extent (2 * 20037508.34... m) over one 256px tile.
mpp = 20037508.342789244 * 2 / 256
print("Level : mpp \t\t: scale")
for i in range(0, 31):
    scale = mpp / displaympp
    print("level(%02d):%07.4f:\t\t1:%5.2f" % (i, mpp, scale))
    mpp = mpp / 2  # each zoom level halves meters-per-pixel
| 16.384615
| 54
| 0.638498
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 157
| 0.368545
|
0b7345be2719bd8b7fdccdbf2d4ec3d3cec346b7
| 314
|
py
|
Python
|
Lower_Upper_Counter/Lower_Upper_Counter.py
|
GracjanBuczek/Python
|
655801ae58ed7ef21f7da2f69f649c556b20aaee
|
[
"MIT"
] | null | null | null |
Lower_Upper_Counter/Lower_Upper_Counter.py
|
GracjanBuczek/Python
|
655801ae58ed7ef21f7da2f69f649c556b20aaee
|
[
"MIT"
] | null | null | null |
Lower_Upper_Counter/Lower_Upper_Counter.py
|
GracjanBuczek/Python
|
655801ae58ed7ef21f7da2f69f649c556b20aaee
|
[
"MIT"
] | null | null | null |
# Read a sentence from stdin and report how many uppercase and lowercase
# letters it contains. (Removed the dead `else: pass` branch — characters
# that are neither upper nor lower case are simply ignored.)
x = input("Enter sentence: ")
count = {"Uppercase": 0, "Lowercase": 0}
for i in x:
    if i.isupper():
        count["Uppercase"] += 1
    elif i.islower():
        count["Lowercase"] += 1
    # digits, whitespace and punctuation fall through uncounted
print ("There is:", count["Uppercase"], "uppercases.")
print ("There is:", count["Lowercase"], "lowercases.")
| 26.166667
| 54
| 0.582803
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 132
| 0.420382
|
0b740ea892a08bb96379c733e82f7e4324d439a4
| 684
|
py
|
Python
|
examples/driving_in_traffic/scenarios/loop/scenario.py
|
zbzhu99/SMARTS
|
652aa23e71bd4e2732e2742140cfcd0ec082a7da
|
[
"MIT"
] | 2
|
2021-12-13T12:41:54.000Z
|
2021-12-16T03:10:24.000Z
|
examples/driving_in_traffic/scenarios/loop/scenario.py
|
zbzhu99/SMARTS
|
652aa23e71bd4e2732e2742140cfcd0ec082a7da
|
[
"MIT"
] | null | null | null |
examples/driving_in_traffic/scenarios/loop/scenario.py
|
zbzhu99/SMARTS
|
652aa23e71bd4e2732e2742140cfcd0ec082a7da
|
[
"MIT"
] | null | null | null |
from pathlib import Path
from smarts.sstudio import gen_scenario
from smarts.sstudio import types as t

# Traffic definition: one random-route flow per vehicle type at 3600
# vehicles/hour each, all actors named "car".
# NOTE(review): every vehicle type appears twice, yielding two identical
# flows per type — presumably intentional to double traffic density, but
# confirm with the scenario author.
traffic = t.Traffic(
    flows=[
        t.Flow(
            route=t.RandomRoute(),
            rate=60 * 60,
            actors={t.TrafficActor(name="car", vehicle_type=vehicle_type): 1},
        )
        for vehicle_type in [
            "passenger",
            "bus",
            "coach",
            "truck",
            "trailer",
            "passenger",
            "bus",
            "coach",
            "truck",
            "trailer",
        ]
    ]
)

# Generate the scenario artifacts into this file's directory at import time.
gen_scenario(
    t.Scenario(
        traffic={"basic": traffic},
    ),
    output_dir=Path(__file__).parent,
)
| 20.117647
| 78
| 0.483918
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 90
| 0.131579
|
0b74a2d6dbfc76ec355ef8ff8e62599cfa40e389
| 13,196
|
py
|
Python
|
asyncorm/models/models.py
|
kejkz/asyncorm
|
6342e2d5fbaa22fb368aead772ac4f255df7562a
|
[
"Apache-2.0"
] | 1
|
2017-02-27T05:37:39.000Z
|
2017-02-27T05:37:39.000Z
|
asyncorm/models/models.py
|
kejkz/asyncorm
|
6342e2d5fbaa22fb368aead772ac4f255df7562a
|
[
"Apache-2.0"
] | null | null | null |
asyncorm/models/models.py
|
kejkz/asyncorm
|
6342e2d5fbaa22fb368aead772ac4f255df7562a
|
[
"Apache-2.0"
] | null | null | null |
import inspect
import os
# Fix: `Callable` must come from collections.abc — importing it from
# `collections` was deprecated in Python 3.3 and removed in Python 3.10.
from collections.abc import Callable

from asyncorm.application.configure import get_model
from asyncorm.exceptions import AsyncOrmFieldError, AsyncOrmModelDoesNotExist, AsyncOrmModelError
from asyncorm.manager import ModelManager
from asyncorm.models.fields import AutoField, Field, ForeignKey, ManyToManyField
from asyncorm.serializers import ModelSerializer, SerializerMethod

__all__ = ["Model", "ModelSerializer", "SerializerMethod"]
class empty:
    """Sentinel type used to distinguish 'no default declared' from a real
    default value (see BaseModel.data)."""
class ModelMeta(type):
    """Metaclass that wires up the manager, Meta options, declared fields and
    primary key on every model class at class-creation time."""

    def __new__(cls, clsname, bases, clsdict):
        base_class = super().__new__(cls, clsname, bases, clsdict)

        # Each model class gets its own ModelManager subclass instance as `objects`.
        base_class.objects = type("{}Manager".format(base_class.__name__), (ModelManager,), {"model": base_class})(
            base_class
        )

        # Meta manage
        defined_meta = clsdict.pop("Meta", None)

        base_class.ordering = None
        base_class.unique_together = []
        base_class.table_name = ""
        base_class.DoesNotExist = AsyncOrmModelDoesNotExist
        base_class.meta_items = ("ordering", "unique_together", "table_name")

        # Copy supported options from the inner Meta class, when declared.
        if defined_meta:
            if hasattr(defined_meta, "ordering"):
                base_class.ordering = getattr(defined_meta, "ordering")
            if hasattr(defined_meta, "unique_together"):
                base_class.unique_together = getattr(defined_meta, "unique_together")
            if hasattr(defined_meta, "table_name"):
                base_class.table_name = getattr(defined_meta, "table_name")

        base_class.fields = base_class.get_fields()

        # Auto-create an `id` AutoField when no primary key is declared.
        primary_keys = [f for f in base_class.fields.values() if isinstance(f, AutoField)]
        if not primary_keys:
            base_class.id = AutoField()
            base_class.fields["id"] = base_class.id
            base_class.db_pk = "id"
            base_class.orm_pk = "id"
        elif len(primary_keys) == 1:
            base_class.db_pk = primary_keys[0].db_column
            base_class.orm_pk = primary_keys[0].orm_field_name
        # NOTE(review): more than one AutoField leaves db_pk/orm_pk unset —
        # confirm whether that case should raise instead.

        # Fields with choices get a `<field>_display` placeholder that
        # BaseModel.__init__ later replaces with a real resolver.
        for f in base_class.fields.values():
            if hasattr(f, "choices"):
                if f.choices:
                    setattr(base_class, "{}_display".format(f.orm_field_name), "choices_placeholder")

        return base_class
class BaseModel(object, metaclass=ModelMeta):
    """Shared model behaviour: field bookkeeping, data dicts for persistence,
    FK/m2m accessor installation and migration-query generation."""

    table_name = ""
    objects = None  # per-class ModelManager, installed by ModelMeta
    deleted = False  # flipped to True by Model.delete()
    field_requirements = []

    def __init__(self, **kwargs):
        """Build an instance from field keyword args, applying field defaults
        and materialising `<field>_display` resolvers for choice fields."""
        self.dir_name = os.path.dirname(inspect.getmodule(self).__file__)
        self.app_name = self.dir_name.split(os.path.sep)[-1]
        self.table_name = ""

        self.objects.model = self.__class__
        manager = getattr(self, "objects")
        manager.model = self.__class__

        # resolve method for posible display methods
        for k, v in self.__class__.__dict__.items():
            if v == "choices_placeholder":
                field_name = k.split("_display")[0]
                field = getattr(self.__class__, field_name)

                # field/field_name bound as defaults so each closure keeps its own pair
                def new_func(field=field, field_name=field_name):
                    value = getattr(self, field_name)
                    for a, b in field.choices.items():
                        if a == value:
                            return b

                setattr(self, k, new_func)

        self.validate_kwargs(kwargs)

        for field_name in self.fields.keys():
            f_cls = getattr(self.__class__, field_name)
            if field_name in kwargs:
                setattr(self, field_name, kwargs[field_name])
            elif hasattr(f_cls, "default"):
                d_value = f_cls.default
                # callable defaults are evaluated lazily, per instance
                setattr(self, field_name, d_value() if isinstance(d_value, Callable) else d_value)

    @classmethod
    def cls_tablename(cls):
        """Return the declared table name, falling back to the class name."""
        return cls.table_name or cls.__name__

    @classmethod
    def set_reverse_foreignkey(cls, model_name, field_name):
        """Install a `<model>_set` accessor returning the reverse-FK queryset."""

        def fk_set(self):
            model = get_model(model_name)
            return model.objects.filter(**{field_name: getattr(self, self.orm_pk)})

        setattr(cls, "{}_set".format(model_name.lower()), fk_set)

    @classmethod
    def set_many2many(cls, field, table_name, my_column, other_column, direct=False):
        """Install an accessor that queries the m2m through-table."""
        other_model = get_model(other_column)
        queryset = ModelManager(other_model, field=field)
        queryset.set_orm(cls.objects.orm)

        def m2m_set(self):
            queryset.query = [
                {
                    "action": "_db__select_m2m",
                    "select": "*",
                    "m2m_tablename": table_name,
                    "other_tablename": other_column,
                    "otherdb_pk": other_model.db_pk,
                    "id_data": "{}={}".format(my_column, getattr(self, self.orm_pk)),
                }
            ]
            return queryset

        # direct side is named after the field; reverse side gets `<model>_set`
        method_name = direct and field.field_name or "{}_set".format(other_column.lower())
        setattr(cls, method_name, m2m_set)

    @classmethod
    def set_orm(cls, orm):
        """Attach the orm/connection to this model's manager."""
        cls.objects.set_orm(orm)

    @property
    def data(self):
        """db_column -> value dict of plain (non-pk, non-m2m) assigned fields.

        Before the first save, values still equal to the field default are
        dropped so the INSERT applies the default itself.
        """
        d = {}
        created = bool(self.orm_pk)

        for orm, db in self.__class__.attr_names.items():
            class__orm = getattr(self.__class__, orm)
            self__orm = getattr(self, orm)
            if self__orm is class__orm:
                # attribute never assigned on the instance — skip the descriptor
                continue
            has_pk = self.orm_pk == orm
            many2many = isinstance(class__orm, ManyToManyField)

            if not has_pk and not many2many:
                d[db] = self__orm

                is_default = self__orm == getattr(class__orm, "default", empty)

                # if value equal to default we set him with insert,
                # else we should always represent him
                if not created and is_default:
                    d.pop(db)
        return d

    @property
    def m2m_data(self):
        """db_column -> value dict restricted to ManyToMany fields."""
        d = {}

        for orm, db in self.__class__.attr_names.items():
            class__orm = getattr(self.__class__, orm)
            if isinstance(class__orm, ManyToManyField):
                self__orm = getattr(self, orm)
                d[db] = self__orm

                default = self__orm == class__orm.default
                # saved instances omit values still equal to the default
                if bool(self.orm_pk) and default:
                    d.pop(db)
        return d

    @classmethod
    def orm_attr_names(cls):
        """Return the inverse mapping: db_column -> orm attribute name."""
        return {v: k for k, v in cls.attr_names.items()}

    @classmethod
    def get_fields(cls):
        """Collect Field descriptors declared on the class, wiring orm/db
        names and table names, and populate cls.attr_names (orm -> db column)."""
        fields = {}
        cls.attr_names = {}

        for f_n, field in cls.__dict__.items():
            if isinstance(field, Field):
                field.orm_field_name = f_n

                if not field.db_column:
                    field.set_field_name(f_n)
                if not field.table_name:
                    field.table_name = cls.cls_tablename()

                if isinstance(field, ManyToManyField):
                    field.own_model = cls.cls_tablename()
                    field.table_name = "{my_model}_{foreign_key}".format(
                        my_model=cls.cls_tablename(), foreign_key=field.foreign_key
                    )

                # NOTE(review): `isinstance(field.__class__, AutoField)` tests a
                # class object and is always False, so this condition is always
                # True — was `not isinstance(field, AutoField)` intended?
                if not isinstance(field.__class__, AutoField):
                    cls.attr_names.update({f_n: field.db_column})

                if hasattr(field, "field_requirement"):
                    if field.field_requirement not in cls.field_requirements:
                        cls.field_requirements.append(field.field_requirement)

                fields[f_n] = field

        # NOTE(review): set(dict) is the key set, so this length check can never
        # fire — comparing against set(cls.attr_names.values()) was likely meant.
        if len(cls.attr_names) != len(set(cls.attr_names)):
            raise AsyncOrmModelError("Models should have unique attribute names and field_name if explicitly edited!")
        return fields

    @classmethod
    def get_db_columns(cls):
        """Return db column names for every non-m2m field on the class."""
        db_columns = []

        for f_n, field in cls.__dict__.items():
            is_many2many = isinstance(field, ManyToManyField)
            is_field = isinstance(field, Field)

            if is_field and not is_many2many:
                db_columns.append(field.db_column and field.db_column or f_n)

        return db_columns

    def validate_kwargs(self, kwargs):
        """validate the kwargs on object instantiation only"""
        attr_errors = [k for k in kwargs.keys() if k not in self.fields.keys()]
        if attr_errors:
            err_string = '"{}" is not an attribute for {}'
            error_list = [err_string.format(k, self.__class__.__name__) for k in attr_errors]
            raise AsyncOrmModelError(error_list)

        for k, v in kwargs.items():
            att_field = getattr(self.__class__, k)
            att_field.validate(v)

            # primary keys may never be supplied by the caller
            if att_field.__class__ is AutoField and v:
                raise AsyncOrmFieldError("Models can not be generated with forced id")

    def migration_queries(self):
        """Return SQL builder steps: create table, FK fields, m2m columns and
        the unique-together constraint, in that order."""
        migration_queries = [self.objects.create_table_builder()]

        for f in self.fields.values():
            if isinstance(f, ForeignKey):
                migration_queries.append(self.objects.add_fk_field_builder(f))

        for f in self.fields.values():
            if isinstance(f, ManyToManyField):
                migration_queries.append(self.objects.add_m2m_columns_builder(f))

        migration_queries.append(self.objects.unique_together_builder())
        return migration_queries

    @classmethod
    def current_state(cls):
        """Snapshot the model's fields and meta options (used by migrations)."""
        from copy import deepcopy

        fields = deepcopy(cls.get_fields())
        meta = {}

        for f_n, field in fields.items():
            fields[f_n] = field.current_state()
        for m in cls.meta_items:
            meta[m] = getattr(cls, m)
        return {"fields": fields, "meta": meta}

    @classmethod
    def status_difference(cls, old_state):
        """Diff `old_state` against the current model state.

        NOTE(review): the computed `news`, `deleted` and `updated` structures
        are never returned, so this method always returns None — almost
        certainly missing a final `return news, deleted, updated`.
        """
        current_state = cls.current_state()
        news = {"fields": {}, "meta": {}}
        deleted = {"fields": [], "meta": []}
        updated = {"fields": {}, "meta": {}}

        if old_state != current_state:
            for subzone in ("fields", "meta"):
                if old_state[subzone] != current_state[subzone]:
                    # entries present before: changed -> updated, gone -> deleted
                    for f_n, f_v in old_state[subzone].items():
                        if current_state[subzone].get(f_n, False):
                            if current_state[subzone][f_n] != f_v:
                                updated[subzone][f_n] = current_state[subzone].get(f_n)
                        else:
                            deleted[subzone].append(f_n)
                    # entries only present now -> news
                    for f_n, f_v in current_state[subzone].items():
                        if not old_state[subzone].get(f_n, False):
                            news[subzone][f_n] = current_state[subzone].get(f_n)
class Model(BaseModel):
    """Concrete model base: row hydration (construct) plus async CRUD."""

    def construct(self, data, deleted=False, subitems=None):
        # populates the model with the data
        internal_objects = {}
        for k, v in data.items():
            # '€$$€' separates a related-field prefix from the joined column name
            k_splitted = k.split("€$$€")
            if len(k_splitted) == 1:
                # check if its named different in the database than the orm
                if k not in self.__class__.attr_names.keys():
                    for orm, db in self.__class__.attr_names.items():
                        if k == db:
                            k = orm
                            break
                # get the recomposed value
                field_class = getattr(self.__class__, k, None)
                if field_class is None:
                    # column has no matching field on this model — ignore it
                    continue
                v = field_class.recompose(v)

                # NOTE(review): field_class is an instance, never the class
                # itself, so this branch can't be taken (and it's a no-op).
                if field_class in [ForeignKey, ManyToManyField]:
                    pass
                setattr(self, k, v)
            else:
                # itself or empty dict
                internal_objects[k_splitted[0]] = internal_objects.get(k_splitted[0], {})

                # update the new value
                internal_objects[k_splitted[0]].update({k_splitted[1]: v})

        if internal_objects:
            # hydrate related models from the prefixed column groups
            for attr_name, data in internal_objects.items():
                if hasattr(self, attr_name):
                    if getattr(self, attr_name):
                        field = getattr(self.__class__, attr_name)
                        model = get_model(field.foreign_key)
                        setattr(self, attr_name, model().construct(data))
                else:
                    # fall back to the join metadata to locate the orm field
                    for join in subitems[0]["fields"]:
                        if join["right_table"] == attr_name:
                            field = getattr(self.__class__, join["orm_fieldname"])
                            model = get_model(field.foreign_key)
                            setattr(self, join["orm_fieldname"], model().construct(data))
                            break

        self.deleted = deleted
        return self

    async def save(self, **kwargs):
        # external save method; refuses to persist an already-deleted instance
        if self.deleted:
            raise AsyncOrmModelError(
                "That {model_name} has already been deleted!".format(model_name=self.__class__.__name__)
            )
        await self.objects.save(self)

    async def delete(self):
        # object delete method; marks the instance so save() rejects it afterwards
        self.deleted = True
        return await self.objects.delete(self)

    def __str__(self):
        return "< {} object >".format(self.__class__.__name__)

    def __repr__(self):
        return self.__str__()
| 35.761518
| 118
| 0.572674
| 12,728
| 0.964242
| 0
| 0
| 5,609
| 0.424924
| 411
| 0.031136
| 1,213
| 0.091894
|
0b7531882bc3693d78e18104d816fb7003ff5f35
| 74,222
|
py
|
Python
|
pyseq/main.py
|
nygctech/PySeq2500
|
6969f178a5f5837ce2f41887d59624bf4cc39433
|
[
"MIT"
] | 9
|
2019-09-25T16:41:42.000Z
|
2021-11-15T08:49:48.000Z
|
pyseq/main.py
|
nygctech/PySeq2500
|
6969f178a5f5837ce2f41887d59624bf4cc39433
|
[
"MIT"
] | 8
|
2020-07-18T09:50:33.000Z
|
2022-03-12T01:01:21.000Z
|
pyseq/main.py
|
nygctech/PySeq2500
|
6969f178a5f5837ce2f41887d59624bf4cc39433
|
[
"MIT"
] | 5
|
2020-08-02T09:51:12.000Z
|
2022-01-04T15:54:32.000Z
|
"""
TODO:
"""
import time
import logging
import os
from os.path import join
import sys
import configparser
import threading
import argparse
from . import methods
from . import args
from . import focus
# Global int to track # of errors during start up
def error(*parts):
    """Log an error message and bump the global startup error counter.

    If the first positional argument is a logging.Logger, the message is
    emitted through it at level 21; otherwise it is printed to stdout.
    All remaining arguments are stringified and space-joined after 'ERROR::'.

    Fixes: `if i is 0` replaced with `i == 0` (identity comparison with an
    int literal is a CPython implementation detail and a SyntaxWarning on
    3.8+); the parameter pack renamed from `args`, which shadowed the
    `from . import args` module import.

    **Returns:**
    - int: the updated global error count (`n_errors`, defined at module scope).
    """
    global n_errors

    i = 0
    if isinstance(parts[0], logging.Logger):
        logger = parts[0]
        i = 1

    msg = 'ERROR::'
    for a in parts[i:]:
        msg = msg + str(a) + ' '
    if i == 0:
        print(msg)
    else:
        logger.log(21, msg)

    n_errors += 1

    return n_errors
##########################################################
## Flowcell Class ########################################
##########################################################
class Flowcell():
    """HiSeq 2500 System :: Flowcell

    **Attributes:**
    - position (str): Flowcell is at either position A (left slot )
      or B (right slot).
    - recipe_path (path): Path to the recipe.
    - recipe (file): File handle for the recipe.
    - first_line (int): Line number for the recipe to start from on the
      initial cycle.
    - cycle (int): The current cycle.
    - total_cycles (int): Total number of the cycles for the experiment.
    - history ([[int,],[str,],[str,]]): Timeline of flowcells events, the
      1st column is the timestamp, the 2nd column is the event, and the
      3rd column is an event specific detail.
    - sections (dict): Dictionary of section names keys and coordinate
      positions of the sections on the flowcell values.
    - stage (dict): Dictionary of section names keys and stage positioning
      and imaging details of the sections on the flowcell values.
    - thread (int): Thread id of the current event on the flowcell.
    - signal_event (str): Event that signals the other flowcell to continue
    - wait_thread (threading.Event()): Blocks other flowcell until current
      flowcell reaches signal event.
    - waits_for (str): Flowcell A waits for flowcell B and vice versa.
    - pump_speed (dict): Dictionary of pump scenario keys and pump speed
      values.
    - volume (dict): Keys are events/situations and values are volumes
      in uL to use at the event/situation.
    - filters (dict): Dictionary of filter set at each cycle, c: em, ex1, ex2.
    - IMAG_counter (None/int): Counter for multiple images per cycle.
    - events_since_IMAG (list): Record events since last IMAG step.
    - temp_timer: Timer to check temperature of flowcell.
    - temperature (float): Set temperature of flowcell in °C.
    - temp_interval (float): Interval in seconds to check flowcell temperature.
    - z_planes (int): Override number of z planes to image in recipe.
    - pre_recipe_path (path): Recipe to run before actually starting experiment
    - pre_recipe (file): File handle for the pre recipe.

    NOTE(review): several methods read module-level globals (`hs`, `do_rinse`)
    that are assigned elsewhere in this module — the class is not usable
    standalone.
    """

    def __init__(self, position):
        """Constructor for flowcells

        **Parameters:**
        - position (str): Flowcell is at either position A (left slot) or
          B (right slot).
        """
        self.recipe_path = None
        self.recipe = None
        self.first_line = None
        self.cycle = 0  # Current cycle
        self.total_cycles = 0  # Total number of cycles for experiment
        self.history = [[],[],[]]  # summary of events in flowcell history
        self.sections = {}  # coordinates of flowcell of sections to image
        self.stage = {}  # stage positioning info for each section
        self.thread = None  # threading to do parallel actions on flowcells
        self.signal_event = None  # defines event that signals the next flowcell to continue
        self.wait_thread = threading.Event()  # blocks next flowcell until current flowcell reaches signal event
        self.waits_for = None  # position of the flowcell that signals current flowcell to continue
        self.pump_speed = {}
        self.volume = {'main':None,'side':None,'sample':None,'flush':None}  # Flush volume
        self.filters = {}  # Dictionary of filter set at each cycle, c: em, ex1, ex2
        self.IMAG_counter = None  # Counter for multiple images per cycle
        self.events_since_IMAG = []  # List events since last IMAG step
        self.temp_timer = None  # Timer to check temperature of flowcell
        self.temperature = None  # Set temperature of flowcell
        self.temp_interval = None  # Interval in minutes to check flowcell temperature
        self.z_planes = None  # Override number of z planes to image in recipe.
        self.pre_recipe_path = None  # Recipe to run before actually starting experiment

        # Interactively re-prompt until a valid slot letter is given.
        while position not in ['A', 'B']:
            print('Flowcell must be at position A or B')
            position = input('Enter A or B for ' + str(position) + ' : ')

        self.position = position

    def addEvent(self, event, command):
        """Record history of events on flow cell.

        **Parameters:**
        - instrument (str): Type of event can be valv, pump, hold, wait, or
          imag.
        - command (str): Details specific to each event such as hold time,
          buffer, event to wait for, z planes to image, or pump volume.

        **Returns:**
        - int: A time stamp of the last event.
        """
        self.history[0].append(time.time())  # time stamp
        self.history[1].append(event)  # event (valv, pump, hold, wait, imag)
        self.history[2].append(command)  # details such hold time, buffer, event to wait for
        self.events_since_IMAG.append(event)
        # NOTE(review): `event is 'PORT'` relies on string interning — an
        # identity test that should be `event == 'PORT'`.
        if event is 'PORT':
            self.events_since_IMAG.append(command)
        if event in ['IMAG', 'STOP']:
            self.events_since_IMAG.append(event)

        return self.history[0][-1]  # return time stamp of last event

    def restart_recipe(self):
        """Restarts the recipe and returns the number of completed cycles."""
        # Restart recipe
        if self.recipe is not None:
            self.recipe.close()
        self.recipe = open(self.recipe_path)
        # Reset image counter (if mulitple images per cycle)
        if self.IMAG_counter is not None:
            self.IMAG_counter = 0

        msg = 'PySeq::'+self.position+'::'
        if self.cycle == self.total_cycles:
            # Increase cycle counter
            self.cycle += 1
            # Flowcell completed all cycles
            hs.message(msg+'Completed '+ str(self.total_cycles) + ' cycles')
            # NOTE(review): `fc` is not defined in this scope — this looks like
            # it should be `self.position`; it works only if a global `fc`
            # happens to exist.
            hs.T.fc_off(fc.position)
            self.temperature = None
            do_rinse(self)
            if self.temp_timer is not None:
                self.temp_timer.cancel()
                self.temp_timer = None
            self.thread = threading.Thread(target = time.sleep, args = (10,))
        elif self.cycle < self.total_cycles:
            # Increase cycle counter
            self.cycle += 1
            # Start new cycle
            restart_message = msg+'Starting cycle '+str(self.cycle)
            self.thread = threading.Thread(target = hs.message,
                                           args = (restart_message,))
        else:
            self.thread = threading.Thread(target = time.sleep, args = (10,))
        # NOTE(review): Thread.start() returns None, so thread_id is always None.
        thread_id = self.thread.start()

        return self.cycle

    def pre_recipe(self):
        """Initializes pre recipe before starting experiment.

        NOTE(review): reads `self.prerecipe_path` (assigned in
        setup_flowcells), not the `self.pre_recipe_path` initialised in
        __init__ — confirm which attribute is canonical.
        """
        prerecipe_message = 'PySeq::'+self.position+'::'+'Starting pre recipe'
        self.recipe = open(self.prerecipe_path)
        self.thread = threading.Thread(target = hs.message,
                                       args = (prerecipe_message,))
        thread_id = self.thread.start()

        return thread_id

    def endHOLD(self):
        """Ends hold for incubations in buffer, returns False."""
        msg = 'PySeq::'+self.position+'::cycle'+str(self.cycle)+'::Hold stopped'
        hs.message(msg)

        return False
##########################################################
## Setup Flowcells #######################################
##########################################################
def setup_flowcells(first_line, IMAG_counter):
    """Read configuration file and create flowcells.

    **Parameters:**
    - first_line (int): Line number for the recipe to start from on the
      initial cycle.
    - IMAG_counter (int/bool): Images-per-cycle indicator from recipe parsing;
      > 1 enables per-cycle image counting on each flowcell.

    **Returns:**
    - dict: Dictionary of flowcell position keys with flowcell object values.
    """
    err_msg = 'ConfigFile::sections::'
    # `config` is a module-level global populated elsewhere in this module.
    experiment = config['experiment']
    method = experiment['method']
    method = config[method]

    flowcells = {}
    for sect_name in config['sections']:
        f_sect_name = sect_name.replace('_','') #remove underscores
        position = config['sections'][sect_name]
        AorB, coord = position.split(':')
        # Create flowcell if it doesn't exist
        if AorB not in flowcells.keys():
            fc = Flowcell(AorB)
            fc.recipe_path = experiment['recipe path']
            fc.first_line = first_line
            # pump volumes (uL) and flow rates, with method-file fallbacks
            fc.volume['main'] = int(method.get('main prime volume', fallback=500))
            fc.volume['side'] = int(method.get('side prime volume', fallback=350))
            fc.volume['sample'] = int(method.get('sample prime volume', fallback=250))
            fc.volume['flush'] = int(method.get('flush volume', fallback=1000))
            fs = int(method.get('flush flowrate',fallback=700))
            fc.pump_speed['flush'] = fs
            ps = int(method.get('prime flowrate',fallback=100))
            fc.pump_speed['prime'] = ps
            rs = int(method.get('reagent flowrate', fallback=40))
            fc.pump_speed['reagent'] = rs
            fc.total_cycles = int(config.get('experiment','cycles'))
            fc.temp_interval = float(method.get('temperature interval', fallback=5))*60  # minutes -> seconds
            z_planes = int(method.get('z planes', fallback=0))
            if z_planes > 0:
                fc.z_planes = z_planes
            if IMAG_counter > 1:
                fc.IMAG_counter = 0
            fc.prerecipe_path = method.get('pre recipe', fallback = None)
            flowcells[AorB] = fc

        # Add section to flowcell
        if sect_name in flowcells[AorB].sections:
            error(err_msg, sect_name, 'duplicated on flowcell', AorB)
        else:
            coord = coord.split(',')
            flowcells[AorB].sections[f_sect_name] = []  # List to store coordinates of section on flowcell
            flowcells[AorB].stage[f_sect_name] = {}  # Dictionary to store stage position of section on flowcell
            # sanity-check coordinate ordering (x0 >= x1, y0 >= y1 expected)
            if float(coord[0]) < float(coord[2]):
                error(err_msg,'Invalid x coordinates for', sect_name)
            if float(coord[1]) < float(coord[3]):
                error(err_msg, 'Invalid y coordinates for', sect_name)
            for i in range(4):
                try:
                    flowcells[AorB].sections[f_sect_name].append(float(coord[i]))
                except:
                    # NOTE(review): bare except — a non-numeric coordinate also
                    # lands here and is reported as a missing position.
                    error(err_msg,' No position for', sect_name)

    # if runnning mulitiple flowcells...
    # Define first flowcell
    # Define prior flowcell signals to next flowcell
    if len(flowcells) > 1:
        flowcell_list = [*flowcells]
        for fc in flowcells.keys():
            # each flowcell waits on the previous one (circular via index -1)
            flowcells[fc].waits_for = flowcell_list[
                flowcell_list.index(fc)-1]
        if experiment['first flowcell'] not in flowcells:
            error('ConfigFile::First flowcell does not exist')
        if isinstance(IMAG_counter, int):
            error('Recipe::Need WAIT before IMAG with 2 flowcells.')

    return flowcells
##########################################################
## Parse lines from recipe ###############################
##########################################################
def parse_line(line):
    """Parse a recipe line into an (event, command) pair.

    Anything after the comment character '#' is discarded. A valid line has
    exactly one ':' separating the event (truncated to its first four
    characters) from the command; otherwise (None, None) is returned.
    """
    comment_character = '#'
    #delimiter = '\t'
    code = line.split(comment_character)[0]  # drop trailing comment
    pieces = code.split(':')
    if len(pieces) != 2:
        # blank, comment-only, or malformed line
        return None, None
    event = pieces[0].strip()[0:4]  # events keyed by first 4 characters
    command = pieces[1].strip()
    return event, command
##########################################################
## Setup Logging #########################################
##########################################################
def setup_logger():
    """Create a logger and return the handle.

    Side effects: creates the experiment save and log directories if missing,
    attaches a console handler (custom level 21) and a file handler (INFO),
    and writes a copy of the global `config` next to the log file.
    """
    # Get experiment info from config file
    experiment = config['experiment']
    experiment_name = experiment['experiment name']

    # Make directory to save data
    save_path = join(experiment['save path'],experiment_name)
    if not os.path.exists(save_path):
        os.mkdir(save_path)

    # Make directory to save logs
    log_path = join(save_path, experiment['log path'])
    if not os.path.exists(log_path):
        os.mkdir(log_path)

    # Create a custom logger
    logger = logging.getLogger(__name__)
    logger.setLevel(10)  # 10 == logging.DEBUG

    # Create console handler
    c_handler = logging.StreamHandler()
    c_handler.setLevel(21)  # custom level just above INFO, used by error()

    # Create file handler
    f_log_name = join(log_path,experiment_name + '.log')
    f_handler = logging.FileHandler(f_log_name)
    f_handler.setLevel(logging.INFO)

    # Create formatters and add it to handlers
    c_format = logging.Formatter('%(asctime)s - %(message)s', datefmt = '%Y-%m-%d %H:%M')
    f_format = logging.Formatter('%(asctime)s - %(message)s')
    c_handler.setFormatter(c_format)
    f_handler.setFormatter(f_format)

    # Add handlers to the logger
    logger.addHandler(c_handler)
    logger.addHandler(f_handler)

    # Save copy of config with log
    config_path = join(log_path,'config.cfg')
    with open(config_path, 'w') as configfile:
        config.write(configfile)

    return logger
def configure_instrument(IMAG_counter, port_dict):
"""Configure and check HiSeq settings."""
global n_errors
model, name = methods.get_machine_info(args_['virtual'])
if model is not None:
config['experiment']['machine'] = model+'::'+name
experiment = config['experiment']
method = experiment['method']
method = config[method]
try:
total_cycles = int(experiment.get('cycles'))
except:
error('ConfigFile:: Cycles not specified')
# Creat HiSeq Object
if model == 'HiSeq2500':
if args_['virtual']:
from . import virtualHiSeq
hs = virtualHiSeq.HiSeq(name, logger)
hs.speed_up = int(method.get('speed up', fallback = 5000))
else:
import pyseq
com_ports = pyseq.get_com_ports()
hs = pyseq.HiSeq(name, logger)
else:
sys.exit()
# Check side ports
try:
side_ports = method.get('side ports', fallback = '9,21,22,23,24')
side_ports = side_ports.split(',')
side_ports = list(map(int, side_ports))
except:
error('ConfigFile:: Side ports not valid')
# Check sample port
try:
sample_port = int(method.get('sample port', fallback = 20))
except:
error('ConfigFile:: Sample port not valid')
# Check barrels per lane make sense:
n_barrels = int(method.get('barrels per lane', fallback = 1)) # Get method specific pump barrels per lane, fallback to 1
if n_barrels not in [1,2,4,8]:
error('ConfigFile:: Barrels per lane must be 1, 2, 4 or 8')
# Check inlet ports, note switch inlet ports in initialize_hs
inlet_ports = int(method.get('inlet ports', fallback = 2))
if inlet_ports not in [2,8]:
error('MethodFile:: inlet ports must be 2 or 8.')
variable_ports = method.get('variable reagents', fallback = None)
hs.z.image_step = int(method.get('z position', fallback = 21500))
hs.overlap = abs(int(method.get('overlap', fallback = 0)))
hs.overlap_dir = method.get('overlap direction', fallback = 'left').lower()
if hs.overlap_dir not in ['left', 'right']:
error('MethodFile:: overlap direction must be left or right')
for fc in flowcells.values():
AorB = fc.position
hs.v24[AorB].side_ports = side_ports
hs.v24[AorB].sample_port = sample_port
hs.v24[AorB].port_dict = port_dict # Assign ports on HiSeq
if variable_ports is not None:
v_ports = variable_ports.split(',')
for v in v_ports: # Assign variable ports
hs.v24[AorB].variable_ports.append(v.strip())
hs.p[AorB].update_limits(n_barrels) # Assign barrels per lane to pump
for section in fc.sections: # Convert coordinate sections on flowcell to stage info
pos = hs.position(AorB, fc.sections[section])
fc.stage[section] = pos
fc.stage[section]['z_pos'] = [hs.z.image_step]*3
## TODO: Changing laser color unecessary for now, revist if upgrading HiSeq
# Configure laser color & filters
# colors = [method.get('laser color 1', fallback = 'green'),
# method.get('laser color 2', fallback = 'red')]
# for i, color in enumerate(default_colors):
# if color is not colors[i]:
# laser = hs.lasers.pop(color) # Remove default laser color
# hs.lasers[colors[i]] = laser # Add new laser
# hs.lasers[colors[i]].color = colors[i] # Update laser color
# hs.optics.colors[i] = colors[i] # Update laser line color
# Check laser power
for color in hs.lasers.keys():
lp = int(method.get(color+' laser power', fallback = 10))
if hs.lasers[color].min_power <= lp <= hs.lasers[color].max_power:
hs.lasers[color].set_point = lp
else:
error('MethodFile:: Invalid '+color+' laser power')
#Check filters for laser at each cycle are valid
hs.optics.cycle_dict = check_filters(hs.optics.cycle_dict, hs.optics.ex_dict)
focus_filters = [method.get('green focus filter', fallback = 2.0),
method.get('red focus filter', fallback = 2.4)]
for i, f in enumerate(focus_filters):
try:
f = float(f)
except:
pass
if f not in hs.optics.ex_dict[hs.optics.colors[i]]:
error('ConfigFile:: Focus filter not valid.')
else:
hs.optics.focus_filters[i] = f
# Check Autofocus Settings
hs.AF = method.get('autofocus', fallback = 'partial once')
if hs.AF.lower() in ['','none']: hs.AF = None
if hs.AF not in ['partial', 'partial once', 'full', 'full once', 'manual', None]:
# Skip autofocus and set objective position in config file
try:
if hs.obj.min_z <= int(hs.AF) <= hs.obj.max_z:
hs.AF = int(hs.AF)
except:
error('ConfigFile:: Auto focus method not valid.')
#Enable/Disable z stage
hs.z.active = method.getboolean('enable z stage', fallback = True)
# Get focus Tolerance
hs.focus_tol = float(method.get('focus tolerance', fallback = 0))
# Get focus range
range = float(method.get('focus range', fallback = 90))
spacing = float(method.get('focus spacing', fallback = 4.1))
hs.obj.update_focus_limits(range=range, spacing=spacing) # estimate, get actual value in hs.obj_stack()
hs.stack_split = float(method.get('stack split', fallback = 2/3))
hs.bundle_height = int(method.get('bundle height', fallback = 128))
# Assign output directory
save_path = experiment['save path']
experiment_name = experiment['experiment name']
save_path = join(experiment['save path'], experiment['experiment name'])
if not os.path.exists(save_path):
try:
os.mkdir(save_path)
except:
error('ConfigFile:: Save path not valid.')
# Assign image directory
image_path = join(save_path, experiment['image path'])
if not os.path.exists(image_path):
os.mkdir(image_path)
with open(join(image_path,'machine_name.txt'),'w') as file:
file.write(hs.name)
hs.image_path = image_path
# Assign log directory
log_path = join(save_path, experiment['log path'])
if not os.path.exists(log_path):
os.mkdir(log_path)
hs.log_path = log_path
return hs
def confirm_settings(recipe_z_planes=None):
    """Have user confirm the HiSeq settings before experiment.

    Prints experiment, flowcell, valve, pump, cycle, imaging, and prior
    autofocus summaries (as tables when ``tabulate`` is installed) and asks
    the user to confirm each; exits via ``sys.exit()`` on any rejection.

    **Parameters:**
    - recipe_z_planes (list): Z plane counts parsed from the recipe, shown
      when the method does not override the number of z planes.
    """
    # BUGFIX: the default used to be a shared mutable list (recipe_z_planes=[]).
    if recipe_z_planes is None:
        recipe_z_planes = []
    experiment = config['experiment']
    method = experiment['method']
    method = config[method]
    total_cycles = int(experiment['cycles'])
    # Print settings to screen; tables only if tabulate is available
    try:
        import tabulate
        print_table = True
    except:
        print_table = False
    if n_errors > 0:
        print()
        if not userYN('Continue checking experiment before exiting'):
            sys.exit()
    # Experiment summary
    print()
    print('-'*80)
    print()
    print(experiment['experiment name'], 'summary')
    print()
    print('method:', experiment['method'])
    print('recipe:', method['recipe'])
    print('cycles:', experiment['cycles'])
    pre_recipe = method.get('pre recipe', fallback = None)
    if pre_recipe is not None:
        print('pre recipe:', pre_recipe)
    first_port = method.get('first port', fallback = None)
    if first_port is not None:
        print('first_port:', first_port)
    print('save path:', experiment['save path'])
    print('enable z stage:', hs.z.active)
    print('machine:', experiment['machine'])
    print()
    if not userYN('Confirm experiment'):
        sys.exit()
    print()
    # Flowcell summary: flowcell position -> section names
    table = {}
    for fc in flowcells:
        table[fc] = flowcells[fc].sections.keys()
    print('-'*80)
    print()
    print('Flowcells:')
    print()
    if print_table:
        print(tabulate.tabulate(table, headers = 'keys', tablefmt = 'presto'))
    else:
        print(table)
    print()
    if not userYN('Confirm flowcells'):
        sys.exit()
    print()
    # Valve summary: fixed (non-variable) reagent ports only
    table = []
    ports = []
    for port in port_dict:
        if not isinstance(port_dict[port], dict):
            ports.append(int(port_dict[port]))
            table.append([port_dict[port], port])
    print('-'*80)
    print()
    print('Valve:')
    print()
    if print_table:
        print(tabulate.tabulate(table, headers=['port', 'reagent'], tablefmt = 'presto'))
    else:
        print(table)
    print()
    if not userYN('Confirm valve assignment'):
        sys.exit()
    print()
    # Pump summary (settings are shared, so use the first flowcell)
    AorB = [*flowcells.keys()][0]
    fc = flowcells[AorB]
    print('-'*80)
    print()
    print('Pump Settings:')
    print()
    inlet_ports = int(method.get('inlet ports', fallback = 2))
    print('Reagents pumped through row with ', inlet_ports, 'inlet ports')
    print(hs.p[AorB].n_barrels, 'syringe pump barrels per lane')
    print('Flush volume:',fc.volume['flush'], 'μL')
    # Only show prime volumes for port groups that are actually in use
    if any([True for port in ports if port in [*range(1,9),*range(10,20)]]):
        print('Main prime volume:', fc.volume['main'], 'μL')
    if any([True for port in ports if port in [9,21,22,23,24]]):
        print('Side prime volume:', fc.volume['side'], 'μL')
    if 20 in ports:
        print('Sample prime volume:', fc.volume['sample'], 'μL')
    print('Flush flowrate:',fc.pump_speed['flush'], 'μL/min')
    print('Prime flowrate:',fc.pump_speed['prime'], 'μL/min')
    print('Reagent flowrate:',fc.pump_speed['reagent'], 'μL/min')
    print('Max volume:', hs.p[AorB].max_volume, 'μL')
    print('Min flow:', hs.p[AorB].min_flow, 'μL/min')
    print()
    if not userYN('Confirm pump settings'):
        sys.exit()
    # Cycle summary: variable reagents and filters per cycle
    variable_ports = hs.v24[AorB].variable_ports
    start_cycle = 1
    if method.get('pre recipe', fallback = None) is not None:
        start_cycle = 0          # pre recipe counts as cycle 0
    table = []
    for cycle in range(start_cycle,total_cycles+1):
        row = []
        row.append(cycle)
        if len(variable_ports) > 0:
            for vp in variable_ports:
                if cycle > 0:
                    row.append(port_dict[vp][cycle])
                else:
                    row.append(None)   # no variable reagent in pre recipe
        if IMAG_counter > 0:
            colors = [*hs.optics.cycle_dict.keys()]
            for color in colors:
                row.append(hs.optics.cycle_dict[color][cycle])
        else:
            colors = []
        table.append(row)
    print('-'*80)
    print()
    print('Cycles:')
    print()
    if len(variable_ports) + len(colors) > 0:
        headers = ['cycle', *variable_ports, *colors]
        if print_table:
            print(tabulate.tabulate(table, headers, tablefmt='presto'))
        else:
            print(headers)
            print(table)
        print()
        stop_experiment = not userYN('Confirm cycles')
    else:
        # Nothing varies cycle to cycle, just confirm the count
        if total_cycles == 1:
            stop_experiment = not userYN('Confirm only 1 cycle')
        else:
            stop_experiment = not userYN('Confirm all', total_cycles, 'cycles are the same')
    if stop_experiment:
        sys.exit()
    print()
    # Imaging summary (only when the recipe actually images)
    if IMAG_counter > 0:
        print('-'*80)
        print()
        print('Imaging settings:')
        print()
        laser_power = [hs.lasers['green'].set_point,
                       hs.lasers['red'].set_point]
        print('green laser power:', laser_power[0], 'mW')
        print('red laser power:',laser_power[1], 'mW')
        print('autofocus:', hs.AF)
        if hs.AF is not None:
            print('focus spacing', hs.obj.focus_spacing,'um')
            print('focus range', hs.obj.focus_range, '%')
        if hs.focus_tol > 0 and hs.AF != 'manual':
            print('focus tolerance:', hs.focus_tol, 'um')
        elif hs.AF != 'manual':
            print('focus tolerance: None')
            print('WARNING::Out of focus image risk increased')
        for i, filter in enumerate(hs.optics.focus_filters):
            # Estimate power reaching the flowcell through the focus filter:
            # 'home' blocks the beam, 'open' passes it, numeric filters are OD
            if filter == 'home':
                focus_laser_power = 0
            elif filter == 'open':
                focus_laser_power = laser_power[i]
            else:
                focus_laser_power = laser_power[i]*10**(-float(filter))
            # NOTE(review): colors[i+1] assumes the first cycle_dict key is the
            # emission filter followed by green/red -- confirm key order
            print(colors[i+1], 'focus laser power ~', focus_laser_power, 'mW')
        print('z position when imaging:', hs.z.image_step)
        if hs.overlap > 0:
            print('pixel overlap:', hs.overlap)
            print('overlap direction:', hs.overlap_dir)
        z_planes = int(method.get('z planes', fallback = 0))
        if z_planes > 0:
            print('z planes:', z_planes)       # method overrides recipe
        else:
            print('z planes:', *recipe_z_planes)
        if z_planes > 1 or any(recipe_z_planes):
            print('stack split:', hs.stack_split)
        if not userYN('Confirm imaging settings'):
            sys.exit()
    # Check if previous focus positions have been found, and confirm to use
    if os.path.exists(join(hs.log_path, 'focus_config.cfg')):
        focus_config = configparser.ConfigParser()
        focus_config.read(join(hs.log_path, 'focus_config.cfg'))
        cycles = 0
        sections = []
        # Collect sections with saved positions and the max cycle count seen
        for section in config.options('sections'):
            if focus_config.has_section(section):
                sections.append(section)
                n_focus_cycles = len(focus_config.options(section))
                if n_focus_cycles > cycles:
                    cycles = n_focus_cycles
        table = []
        for section in sections:
            row = []
            row.append(section)
            for c in range(1,cycles+1):
                if focus_config.has_option(section, str(c)):
                    row.append(focus_config[section][str(c)])
                else:
                    row.append(None)
            table.append(row)
        if len(sections) > 0 and cycles > 0:
            print('-'*80)
            print()
            print('Previous Autofocus Objective Positions:')
            print()
            headers = ['section', *['cycle'+str(c) for c in range(1,cycles+1)]]
            if print_table:
                print(tabulate.tabulate(table, headers, tablefmt='presto'))
            else:
                print(headers)
                print(table)
            print()
            if not userYN('Confirm using previous autofocus positions'):
                sys.exit()
            print()
##########################################################
## Setup HiSeq ###########################################
##########################################################
def initialize_hs(IMAG_counter):
    """Initialize the HiSeq and return the handle.

    Homes the stages, moves the inlet, sets laser powers, and parks the
    stage. Only runs when no configuration errors were counted; asks the
    user before touching hardware and exits if they decline.

    **Parameters:**
    - IMAG_counter (int): Number of imaging steps in the recipe; lasers are
      only verified to be on when the recipe images.

    **Returns:**
    - HiSeq: The initialized HiSeq handle.
    """
    global n_errors          # read-only here; error() mutates it
    experiment = config['experiment']
    method = experiment['method']
    method = config[method]
    # BUGFIX: was `n_errors is 0` -- identity comparison with an int literal
    # is implementation-dependent and raises SyntaxWarning on Python >= 3.8
    if n_errors == 0:
        if not userYN('Initialize HiSeq'):
            sys.exit()
        hs.initializeCams(logger)
        x_homed = hs.initializeInstruments()
        if not x_homed:
            error('HiSeq:: X-Stage did not home correctly')
        # HiSeq Settings
        inlet_ports = int(method.get('inlet ports', fallback = 2))
        hs.move_inlet(inlet_ports)               # Move to 2 or 8 port inlet
        # Set laser power and, if imaging, verify the lasers turned on
        for color in hs.lasers.keys():
            laser_power = int(method.get(color+' laser power', fallback = 10))
            hs.lasers[color].set_power(laser_power)
            if IMAG_counter > 0:
                if not hs.lasers[color].on:
                    error('HiSeq:: Lasers did not turn on.')
        hs.f.LED('A', 'off')
        hs.f.LED('B', 'off')
        LED('all', 'startup')
        hs.move_stage_out()
    return hs
##########################################################
## Check Instructions ####################################
##########################################################
def check_instructions():
    """Check the instructions for errors.

    Validates every line of the recipe (and pre recipe, if any) against the
    configured reagents and known instrument commands, calling ``error()``
    for each problem found.

    **Returns:**
    - first_line (int): Line number for the recipe to start from on the
      initial cycle.
    - IMAG_counter (int or float): The number of imaging steps; a float
      flags that a WAIT precedes each IMAG (needed for 2 flowcells).
    - z_planes (list): z plane counts (as strings) from each IMAG step.
    """
    method = config.get('experiment', 'method')
    method = config[method]
    first_port = method.get('first port', fallback = None) # Get first reagent to use in recipe
    # Backdoor to input line number for first step in recipe
    try:
        first_port = int(first_port)
        first_line = first_port
        first_port = None
    except (TypeError, ValueError):
        first_line = 0
    # Build the list of valid reagent names (fixed + variable)
    variable_ports = method.get('variable reagents', fallback = None)
    ports = []
    for port in config['reagents'].items():
        ports.append(port[1])
    if variable_ports is not None:
        variable_ports = variable_ports.split(',')
        for port in variable_ports:
            ports.append(port.strip())
    # BUGFIX: copy the list -- the old code aliased `valid_wait = ports`, so
    # appending the WAIT keywords also made IMAG/STOP/TEMP pass as reagents
    # in the PORT check below
    valid_wait = list(ports)
    valid_wait.append('IMAG')
    valid_wait.append('STOP')
    valid_wait.append('TEMP')
    recipes = {}
    recipes['Recipe'] = config['experiment']['recipe path']
    pre_recipe = method.get('pre recipe',fallback= None)
    if pre_recipe is not None:
        recipes['Pre Recipe'] = pre_recipe
    # Sorted order puts 'Pre Recipe' before 'Recipe', so the returned
    # counters reflect the main recipe
    for recipe in sorted([*recipes.keys()]):
        f = recipes[recipe]
        try:
            f = open(recipes[recipe])
        except Exception:
            error(recipe,'::Unable to open', recipes[recipe])
        #Remove blank lines
        f_ = [line for line in f if line.strip()]
        f.close()
        IMAG_counter = 0.0
        wait_counter = 0
        z_planes = []
        for line_num, line in enumerate(f_):
            instrument, command = parse_line(line)
            if instrument == 'PORT':
                # Make sure ports in instruction files exist in port dictionary in config file
                if command not in ports:
                    error(recipe,'::', command, 'on line', line_num,
                          'is not listed as a reagent')
                #Find line to start at for first cycle
                # BUGFIX: was `recipe is 'Recipe'` (string identity comparison)
                if first_line == 0 and first_port is not None and recipe == 'Recipe':
                    if command.find(first_port) != -1:
                        first_line = line_num
            # Make sure pump volume is a number
            elif instrument == 'PUMP':
                if not command.isdigit():
                    error(recipe,'::Invalid volume on line', line_num)
            # Make sure wait command is valid
            elif instrument == 'WAIT':
                wait_counter += 1
                if command not in valid_wait:
                    error(recipe,'::Invalid wait command on line', line_num)
            # Make sure z planes is a number
            elif instrument == 'IMAG':
                IMAG_counter = int(IMAG_counter + 1)
                # Flag (float type) that a WAIT is used before IMAG for 2 flowcells
                if wait_counter >= IMAG_counter:
                    IMAG_counter = float(IMAG_counter)
                if not command.isdigit():
                    error(recipe,'::Invalid number of z planes on line', line_num)
                else:
                    z_planes.append(command)
            # Make sure hold time (minutes) is a number
            elif instrument == 'HOLD':
                if not command.isdigit():
                    if command != 'STOP':
                        error(recipe,'::Invalid time on line', line_num)
                    else:
                        print(recipe,'::WARNING::HiSeq will stop until user input at line',
                              line_num)
            elif instrument == 'TEMP':
                if not command.isdigit():
                    error(recipe,'::Invalid temperature on line', line_num)
            # Make sure the instrument name is valid
            else:
                error(recipe,'::Bad instrument name on line',line_num)
                print(line)
    return first_line, IMAG_counter, z_planes
##########################################################
## Check Ports ###########################################
##########################################################
def check_ports():
    """Check for port errors and return a port dictionary.

    **Returns:**
    - port_dict (dict): Reagent name -> port number for fixed reagents, and
      variable reagent name -> {cycle: reagent name}.
    """
    method = config.get('experiment', 'method')
    method = config[method]
    total_cycles = int(config.get('experiment', 'cycles'))
    # Configuration inputs: valve map, variable reagent names, per-cycle reagents
    valve = config['reagents']
    cycle_variables = method.get('variable reagents', fallback = None )
    cycle_reagents = config['cycles'].items()
    port_dict = {}
    # Reagent names on the valve must be unique
    if len(valve.values()) != len(set(valve.values())):
        error('ConfigFile: Reagent names are not unique')
    #TODO: PRINT DUPLICATES
    if len(valve) > 0:
        # Invert the valve map: reagent name -> integer port number
        for number, name in valve.items():
            try:
                port_dict[name] = int(number)
            except:
                error('ConfigFile:List reagents as n (int) = name (str) ')
        if cycle_variables is not None:
            # Reserve an empty {cycle: reagent} dict per variable reagent
            variable_names = [v.replace(' ','') for v in cycle_variables.split(',')]
            for variable in variable_names:
                if variable in port_dict:
                    error('ConfigFile::Variable', variable, 'can not be a reagent')
                else:
                    port_dict[variable] = {}
            # Fill each variable reagent with its cycle: reagent assignments
            for key, reagent in cycle_reagents:
                variable, cyc_number = key.split(' ')
                if reagent in valve.values():
                    if variable in port_dict:
                        port_dict[variable][int(cyc_number)] = reagent
                    else:
                        error('ConfigFile::', variable, 'not listed as variable reagent')
                else:
                    error('ConfigFiles::Cycle reagent:', reagent, 'does not exist on valve')
            # Every variable reagent needs one assignment per cycle
            for variable in variable_names:
                if len(port_dict[variable]) != total_cycles:
                    error('ConfigFile::Number of', variable, 'reagents does not match experiment cycles')
    else:
        print('WARNING::No ports are specified')
    return port_dict
def check_filters(cycle_dict, ex_dict):
    """Check filter section of config file.

    Fills *cycle_dict* with the filter to use for each laser line on each
    cycle, validating against the excitation filters in *ex_dict*, then
    back-fills missing cycles with the method's default filters.

    **Errors:**
    - Invalid Filter: System exits when a listed filter does not match
      configured filters on the HiSeq.
    - Duplicate Cycle: System exists when a filter for a laser is listed for
      the same cycle more than once.
    - Invalid laser: System exits when a listed laser color does not match
      configured laser colors on the HiSeq.
    """
    colors = [*cycle_dict.keys()]
    # Check laser, cycle, and filter are valid
    cycle_filters = config['filters'].items()
    for item in cycle_filters:
        # Get laser cycle = filter
        filt = item[1]
        # filters are floats (OD), except for home and open,
        # and emission (True/False)
        if filt.lower() in ['true', 'yes', '1', 't', 'y']:
            filt = True
        elif filt.lower() in ['false', 'no', '0', 'f', 'n']:
            filt = False
        elif filt not in ['home','open']:
            filt = float(filt)
        laser, cycle = item[0].split()
        cycle = int(cycle)
        # Check if laser is valid, can use partial match ie, g or G for green
        if laser in colors:
            laser = [laser]
        else:
            # BUGFIX: prefix match against the whole color name; the old code
            # compared against only the first character, so abbreviations
            # longer than one letter (e.g. 'gr') never matched
            laser = [c for c in colors if c.startswith(laser.lower())]
        if len(laser) > 0:
            laser = laser[0]
        if laser in ex_dict.keys():
            if filt in ex_dict[laser]:
                if cycle not in cycle_dict[laser]:
                    cycle_dict[laser][cycle] = filt
                else:
                    error('ConfigFile::Duplicated cycle for', laser, 'laser')
            elif laser == 'em':
                if isinstance(filt, bool):
                    if cycle not in cycle_dict[laser]:
                        cycle_dict[laser][cycle] = filt
                    else:
                        error('ConfigFile::Duplicated emission filter cycle')
                else:
                    error('ConfigFile::Invalid filter for', laser, 'laser')
        else:
            error('ConfigFile:Invalid laser')
    # Add default/home to cycles with out filters specified
    method = config.get('experiment', 'method')
    method = config[method]
    start_cycle = 1
    if method.get('pre recipe', fallback = None):
        start_cycle = 0
    last_cycle = int(config.get('experiment','cycles'))+1
    # Get/check default filters
    default_filters = {}
    # NOTE(review): only red/green/em have fallbacks; an unexpected laser
    # color key would raise KeyError here
    fallbacks = {'red':'home', 'green':'home', 'em':'True'}
    for laser in colors:
        filt = method.get('default '+laser+' filter', fallback = fallbacks[laser])
        try:
            filt = float(filt)
        except (TypeError, ValueError):
            pass
        if laser in ex_dict.keys():
            if filt in ex_dict[laser].keys():
                default_filters[laser] = filt
        elif laser == 'em':
            if filt in ['True', 'False']:
                # BUGFIX: store a bool so defaults match the per-cycle em
                # values parsed above (the string 'False' is truthy)
                default_filters[laser] = (filt == 'True')
    # Assign default filters to missing cycles
    # NOTE(review): an invalid default filter leaves the laser out of
    # default_filters and raises KeyError here, same as the original
    for cycle in range(start_cycle,last_cycle):
        for laser in colors:
            if cycle not in cycle_dict[laser]:
                cycle_dict[laser][cycle] = default_filters[laser]
    return cycle_dict
def LED(AorB, indicate):
    """Control front LEDs to communicate what the HiSeq is doing.

    **Parameters:**
    - AorB (str): Flowcell position (A or B), or all.
    - indicate (str): Current action of the HiSeq or state of the flowcell.

    =========== =========== =============================
    LED MODE     indicator  HiSeq Action / Flowcell State
    =========== =========== ===================================================
    off          off        The flowcell is not in use.
    yellow       error      There is an error with the flowcell.
    green        startup    The HiSeq is starting up or shutting down
    pulse green  user       The HiSeq requires user input
    blue         sleep      The flowcell is holding or waiting.
    pulse blue   awake      HiSeq valve, pump, or temperature action on the flowcell.
    sweep blue   imaging    HiSeq is imaging the flowcell.
    =========== =========== ========================================
    """
    # Map indicator name -> LED mode understood by the FPGA
    led_modes = {'startup': 'green',
                 'user':    'pulse green',
                 'error':   'yellow',
                 'sleep':   'blue',
                 'awake':   'pulse blue',
                 'imaging': 'sweep blue',
                 'off':     'off'}
    # Resolve which flowcell positions to update
    if AorB == 'all':
        positions = [*flowcells.keys()]
    elif AorB in flowcells.keys():
        positions = [AorB]
    else:
        positions = []
    mode = led_modes.get(indicate)
    if mode is not None:           # unknown indicators are ignored
        for position in positions:
            hs.f.LED(position, mode)
    return True
def userYN(*args):
    """Ask a user a Yes/No question and return True if Yes, False if No.

    All positional arguments are joined with spaces to form the question;
    the prompt repeats until the user answers Y or N (case-insensitive).
    """
    question = ''.join(str(a) + ' ' for a in args)
    while True:
        reply = input(question + '? Y/N = ').upper().strip()
        if reply == 'Y':
            return True
        if reply == 'N':
            return False
def do_flush():
    """Flush all, some, or none of lines.

    Interactively asks which reagent lines to flush (names or port numbers),
    confirms, then for each selected port moves both flowcells' valves and
    pumps the flush volume, waiting for the threaded moves/pumps to finish.
    """
    # Valve/port settings are shared, so read them from the first flowcell
    AorB_ = [*flowcells.keys()][0]
    port_dict = hs.v24[AorB_].port_dict
    # Select lines to flush
    LED('all', 'user')
    confirm = False
    while not confirm:
        flush_ports = input("Flush all, some, or none of the lines? ")
        if flush_ports.strip().lower() == 'all':
            flush_all = True
            # Flush every named port except variable reagents (those are
            # expanded to their per-cycle ports during the flush loop below)
            flush_ports = [*port_dict.keys()]
            for vp in hs.v24[AorB_].variable_ports:
                if vp in flush_ports:
                    flush_ports.remove(vp)
            confirm = userYN('Confirm flush all lines')
        elif flush_ports.strip().lower() in ['none', 'N', 'n', '']:
            flush_ports = []
            confirm = userYN('Confirm skip flushing lines')
        else:
            # Parse a comma separated list of reagent names or port numbers
            good =[]
            bad = []
            for fp in flush_ports.split(','):
                fp = fp.strip()
                if fp in port_dict.keys():
                    good.append(fp)
                else:
                    try:
                        fp = int(fp)
                        if fp in range(1,hs.v24[AorB_].n_ports+1):
                            good.append(fp)
                        else:
                            bad.append(fp)
                    except:
                        bad.append(fp)
            if len(bad) > 0:
                print('Valid ports:', *good)
                print('Invalid ports:', *bad)
                confirm = not userYN('Re-enter lines to flush')
            else:
                confirm = userYN('Confirm only flushing',*good)
            if confirm:
                flush_ports = good
    if len(flush_ports) > 0:
        # Make sure the hardware is staged before pumping
        while not userYN('Temporary flowcell(s) locked on to stage'): pass
        while not userYN('All valve input lines in water'): pass
        while not userYN('Ready to flush'): pass
        LED('all', 'startup')
        # Flush ports
        speed = flowcells[AorB_].pump_speed['flush']
        volume = flowcells[AorB_].volume['flush']
        for port in flush_ports:
            if port in hs.v24[AorB_].variable_ports:
                # NOTE(review): appends to flush_ports while iterating it --
                # works in CPython (appended items are visited later) but is
                # fragile; expands a variable reagent to its actual ports
                flush_ports.append(*hs.v24[AorB_].port_dict[port].values())
            else:
                hs.message('Flushing ' + str(port))
                # Move both flowcells' valves in parallel
                for fc in flowcells.values():
                    AorB = fc.position
                    fc.thread = threading.Thread(target=hs.v24[AorB].move,
                                                 args=(port,))
                    fc.thread.start()
                # Busy-wait until every valve move thread has finished
                alive = True
                while alive:
                    alive_ = []
                    for fc in flowcells.values():
                        alive_.append(fc.thread.is_alive())
                    alive = any(alive_)
                # Pump the flush volume through both flowcells in parallel
                for fc in flowcells.values():
                    AorB = fc.position
                    fc.thread = threading.Thread(target=hs.p[AorB].pump,
                                                 args=(volume, speed,))
                    fc.thread.start()
                # Busy-wait until every pump thread has finished
                alive = True
                while alive:
                    alive_ = []
                    for fc in flowcells.values():
                        alive_.append(fc.thread.is_alive())
                    alive = any(alive_)
##########################################################
## Flush Lines ###########################################
##########################################################
def do_prime(flush_YorN):
    """Prime lines with all reagents in config if prompted.

    Asks the user whether to prime; if yes, pumps the appropriate prime
    volume through every fixed reagent port on both flowcells, optionally
    rinses, then walks the user through swapping in the experiment
    flowcell(s).

    **Parameters:**
    - flush_YorN (bool): Whether the lines were flushed beforehand (skips
      re-confirming the temporary flowcells are mounted).
    """
    LED('all', 'user')
    ## Prime lines
    confirm = False
    while not confirm:
        prime_YorN = userYN("Prime lines")
        if prime_YorN:
            confirm = userYN("Confirm prime lines")
        else:
            confirm = userYN("Confirm skip priming lines")
    # LED('all', 'startup')
    # hs.z.move([0,0,0])
    # hs.move_stage_out()
    #LED('all', 'user')
    if prime_YorN:
        if flush_YorN:
            while not userYN('Temporary flowcell(s) locked on to stage'): pass
        while not userYN('Valve input lines in reagents'): pass
        while not userYN('Ready to prime lines'): pass
        #Flush all lines
        LED('all', 'startup')
        # NOTE(review): this while True runs exactly once (break below);
        # presumably kept so `port`/`port_num` stay defined for the rinse step
        while True:
            AorB_ = [*flowcells.keys()][0]
            port_dict = hs.v24[AorB_].port_dict
            speed = flowcells[AorB_].pump_speed['prime']
            for port in port_dict.keys():
                # Only fixed reagents (int ports); variable reagents are dicts
                if isinstance(port_dict[port], int):
                    hs.message('Priming ' + str(port))
                    # Move both flowcells' valves in parallel
                    for fc in flowcells.values():
                        port_num = port_dict[port]
                        AorB = fc.position
                        fc.thread = threading.Thread(target=hs.v24[AorB].move,
                                                     args=(port,))
                        fc.thread.start()
                    # Busy-wait for the valve moves to finish
                    alive = True
                    while alive:
                        alive_ = []
                        for fc in flowcells.values():
                            alive_.append(fc.thread.is_alive())
                        alive = any(alive_)
                    # Pump the prime volume; side/sample ports use their own volumes
                    for fc in flowcells.values():
                        if port_num in hs.v24[AorB].side_ports:
                            volume = fc.volume['side']
                        elif port_num == hs.v24[AorB].sample_port:
                            volume = fc.volume['sample']
                        else:
                            volume = fc.volume['main']
                        AorB = fc.position
                        fc.thread = threading.Thread(target=hs.p[AorB].pump,
                                                     args=(volume, speed,))
                        fc.thread.start()
                    # Busy-wait for the pumps to finish
                    alive = True
                    while alive:
                        alive_ = []
                        for fc in flowcells.values():
                            alive_.append(fc.thread.is_alive())
                        alive = any(alive_)
            break
        # Rinse flowcells
        method = config.get('experiment', 'method')          # Read method specific info
        method = config[method]
        rinse_port = method.get('rinse', fallback = None)
        rinse = rinse_port in hs.v24[AorB].port_dict
        if rinse_port == port:      # Option to skip rinse if last reagent pump was rinse reagent
            rinse = False
        # Get rinse reagents
        if not rinse:
            LED('all', 'user')
            print('Last reagent pumped was', port)
            if userYN('Rinse flowcell'):
                # Keep asking until a valid rinse reagent is given
                while not rinse:
                    if rinse_port not in hs.v24[AorB].port_dict:
                        rinse_port = input('Specify rinse reagent: ')
                        rinse = rinse_port in hs.v24[AorB].port_dict
                    if not rinse:
                        print('ERROR::Invalid rinse reagent')
                        print('Choose from:', *list(hs.v24[AorB].port_dict.keys()))
        if rinse:
            # Simultaneously Rinse Flowcells
            for fc in flowcells.values():
                fc.thread = threading.Thread(target=do_rinse,
                                             args=(fc,rinse_port,))
                fc.thread.start()
            alive = True
            # Wait for rinsing to complete
            while alive:
                alive_ = []
                for fc in flowcells.values():
                    alive_.append(fc.thread.is_alive())
                alive = any(alive_)
    # Swap in the experiment flowcell(s) regardless of whether lines were primed
    LED('all', 'user')
    while not userYN('Temporary flowcell(s) removed'): pass
    while not userYN('Experiment flowcell(s) locked on to stage'): pass
    if not prime_YorN:
        while not userYN('Valve input lines in reagents'): pass
    while not userYN('Door closed'): pass
##########################################################
def do_nothing():
    """Placeholder thread target that performs no action."""
    return None
##########################################################
## iterate over lines, send to pump, and print response ##
##########################################################
def do_recipe(fc):
    """Do the next event in the recipe.

    Reads the next non-empty recipe line for *fc*, prepares (but only
    starts at the end) a thread for the corresponding action (valve move,
    pump, hold timer, wait, image, or temperature), updates the flowcell
    LED, and signals the partner flowcell if it was waiting on this event.
    Restarts the recipe when the end of the file is reached.

    **Parameters:**
    - fc (flowcell): The current flowcell.
    """
    AorB = fc.position
    fc.thread = None
    # Skip to first line of recipe on initial cycle
    if fc.cycle == 1 and fc.first_line is not None:
        for i in range(fc.first_line):
            line = fc.recipe.readline()
        fc.first_line = None
    #get instrument and command; skip lines parse_line returns None for
    instrument = None
    while instrument is None:
        line = fc.recipe.readline()
        if line:
            instrument, command = parse_line(line)
        else:
            break        # end of recipe file
    if line:
        # Move reagent valve
        if instrument == 'PORT':
            #Move to cycle specific reagent if it is variable a reagent
            if fc.cycle <= fc.total_cycles:
                if command in hs.v24[AorB].variable_ports:
                    command = hs.v24[AorB].port_dict[command][fc.cycle]
            log_message = 'Move to ' + command
            fc.thread = threading.Thread(target = hs.v24[AorB].move,
                                         args = (command,))
            if fc.cycle <= fc.total_cycles:
                LED(AorB, 'awake')
        # Pump reagent into flowcell
        elif instrument == 'PUMP':
            volume = int(command)
            speed = fc.pump_speed['reagent']
            log_message = 'Pumping ' + str(volume) + ' uL'
            fc.thread = threading.Thread(target = hs.p[AorB].pump,
                                         args = (volume, speed,))
            if fc.cycle <= fc.total_cycles:
                LED(AorB, 'awake')
        # Incubate flowcell in reagent for set time
        elif instrument == 'HOLD':
            if command.isdigit():
                holdTime = float(command)*60        # minutes -> seconds
                log_message = 'Flowcell holding for ' + str(command) + ' min.'
                if hs.virtual:
                    # Virtual HiSeq holds run faster by hs.speed_up
                    fc.thread = threading.Timer(holdTime/hs.speed_up, fc.endHOLD)
                    #fc.thread = threading.Timer(holdTime, fc.endHOLD)
                else:
                    fc.thread = threading.Timer(holdTime, fc.endHOLD)
            elif command == 'STOP':
                # Block until the user presses enter
                hs.message('PySeq::Paused')
                LED(AorB, 'user')
                input("Press enter to continue...")
                log_message = ('Continuing...')
                fc.thread = threading.Thread(target = do_nothing)
            if fc.cycle <= fc.total_cycles:
                LED(AorB, 'sleep')
        # Wait for other flowcell to finish event before continuing with current flowcell
        elif instrument == 'WAIT':
            if command == 'TEMP':
                # Wait for this flowcell to reach its set temperature
                fc.thread = threading.Thread(target = hs.T.wait_fc_T,
                                             args=(AorB, fc.temperature,))
                log_message = ('Waiting to reach '+str(fc.temperature)+'°C')
            elif fc.waits_for is not None:
                if command in flowcells[fc.waits_for].events_since_IMAG:
                    # Event already happened on the partner flowcell
                    log_message = command + ' has occurred, skipping WAIT'
                    fc.thread = threading.Thread(target = do_nothing)
                else:
                    log_message = 'Waiting for ' + command
                    fc.thread = threading.Thread(target = WAIT,
                                                 args = (AorB, command,))
            else:
                # Single flowcell experiment: nothing to wait on
                log_message = 'Skip waiting for ' + command
                fc.thread = threading.Thread(target = do_nothing)
            if fc.cycle <= fc.total_cycles:
                LED(AorB, 'sleep')
        # Image the flowcell
        elif instrument == 'IMAG':
            # Only one flowcell may use the cameras at a time
            if hs.scan_flag and fc.cycle <= fc.total_cycles:
                hs.message('PySeq::'+AorB+'::Waiting for camera')
                while hs.scan_flag:
                    pass
            #hs.scan_flag = True
            fc.events_since_IMAG = []
            log_message = 'Imaging flowcell'
            fc.thread = threading.Thread(target = IMAG,
                                         args = (fc,int(command),))
            if fc.cycle <= fc.total_cycles:
                LED(AorB, 'imaging')
        elif instrument == 'TEMP':
            log_message = 'Setting temperature to ' + command + ' °C'
            command = float(command)
            fc.thread = threading.Thread(target = hs.T.set_fc_T,
                                         args = (AorB,command,))
            fc.temperature = command
        # Block all further processes until user input
        # elif instrument == 'STOP':
        #     hs.message('PySeq::Paused')
        #     LED(AorB, 'user')
        #     input("Press enter to continue...")
        #     hs.message('PySeq::Continuing...')
        #Signal to other flowcell that current flowcell reached signal event
        if fc.signal_event == instrument or fc.signal_event == command:
            fc.wait_thread.set()
            fc.signal_event = None
        # Start new action on current flowcell
        if fc.thread is not None and fc.cycle <= fc.total_cycles:
            fc.addEvent(instrument, command)
            hs.message('PySeq::'+AorB+'::cycle'+str(fc.cycle)+'::'+log_message)
            thread_id = fc.thread.start()      # NOTE(review): start() returns None
        elif fc.thread is not None and fc.cycle > fc.total_cycles:
            # Past the last cycle: idle instead of executing recipe actions
            # NOTE(review): this thread is assigned but not started here --
            # presumably started by the main loop; confirm against caller
            fc.thread = threading.Thread(target = time.sleep, args = (10,))
    else:
        # End of recipe
        fc.restart_recipe()
##########################################################
## Image flowcell ########################################
##########################################################
def IMAG(fc, n_Zplanes):
    """Image the flowcell at a number of z planes.

    For each section on the flowcell, the stage is first positioned
    to the center of the section to find the optimal focus. Then if no
    optical settings are listed, the optimal filter sets are found.
    Next, the stage is repositioned to scan the entire section and
    image the specified number of z planes.

    **Parameters:**
    - fc: The flowcell to image.
    - n_Zplanes: The number of z planes to image.

    **Returns:**
    - float: Time in seconds to image the flowcell.
    """
    hs.scan_flag = True          # claim the cameras
    AorB = fc.position
    cycle = str(fc.cycle)
    start = time.time()
    # Manual focus ALL sections across flowcells (only once)
    if hs.AF == 'manual':
        focus.manual_focus(hs, flowcells)
        hs.AF = 'partial once'
    #Image sections on flowcell
    for section in fc.sections:
        pos = fc.stage[section]
        hs.y.move(pos['y_initial'])
        hs.x.move(pos['x_initial'])
        hs.z.move(pos['z_pos'])
        hs.obj.move(hs.obj.focus_rough)
        # Autofocus
        msg = 'PySeq::' + AorB + '::cycle' + cycle+ '::' + str(section) + '::'
        if hs.AF and not isinstance(hs.AF, int):
            # Reuse a previously saved objective position when available
            obj_pos = focus.get_obj_pos(hs, section, cycle)
            if obj_pos is None:
                # Move to focus filters
                for i, color in enumerate(hs.optics.colors):
                    hs.optics.move_ex(color,hs.optics.focus_filters[i])
                hs.message(msg + 'Start Autofocus')
                try:
                    if hs.autofocus(pos):       # Moves to optimal objective position
                        hs.message(msg + 'Autofocus complete')
                        pos['obj_pos'] = hs.obj.position
                    else:                       # Moves to rough focus objective position
                        hs.message(msg + 'Autofocus failed')
                        pos['obj_pos'] = None
                except Exception:
                    # BUGFIX: was a bare except, which also swallowed
                    # KeyboardInterrupt/SystemExit
                    hs.message(msg + 'Autofocus failed')
                    print(sys.exc_info()[0])
                    pos['obj_pos'] = None
            else:
                hs.obj.move(obj_pos)
                pos['obj_pos'] = hs.obj.position
            focus.write_obj_pos(hs, section, cycle)
        #Override recipe number of z planes
        if fc.z_planes is not None: n_Zplanes = fc.z_planes
        # Calculate objective positions to image
        if n_Zplanes > 1 and not isinstance(hs.AF, int):
            # (Default) 2/3 of planes below opt_ob_pos and 1/3 of planes above
            obj_start = int(hs.obj.position - hs.nyquist_obj*n_Zplanes*hs.stack_split)
        elif isinstance(hs.AF, int):
            obj_start = hs.AF          # fixed objective position from config
        else:
            obj_start = hs.obj.position
        # Image name: position _s section _r cycle [_ imaging-step counter]
        image_name = AorB
        image_name += '_s' + str(section)
        image_name += '_r' + cycle
        if fc.IMAG_counter is not None:
            image_name += '_' + str(fc.IMAG_counter)
        # Scan section on flowcell
        hs.y.move(pos['y_initial'])
        hs.x.move(pos['x_initial'])
        hs.obj.move(obj_start)
        n_tiles = pos['n_tiles']
        n_frames = pos['n_frames']
        # Set filters for this cycle
        for color in hs.optics.cycle_dict.keys():
            filt = hs.optics.cycle_dict[color][fc.cycle]
            # BUGFIX: was `color is 'em'` -- string identity comparison
            if color == 'em':
                hs.optics.move_em_in(filt)
            else:
                hs.optics.move_ex(color, filt)
        hs.message(msg + 'Start Imaging')
        try:
            scan_time = hs.scan(n_tiles, n_Zplanes, n_frames, image_name)
            scan_time = str(int(scan_time/60))
            hs.message(msg + 'Imaging completed in', scan_time, 'minutes')
        except Exception:
            error('Imaging failed.')
        # Reset filters: emission filter in, excitation filters home
        for color in hs.optics.cycle_dict.keys():
            if color == 'em':
                hs.optics.move_em_in(True)
            else:
                hs.optics.move_ex(color, 'home')
    if fc.IMAG_counter is not None:
        fc.IMAG_counter += 1
    hs.scan_flag = False          # release the cameras
    # BUGFIX: the docstring promised a duration but nothing was returned
    return time.time() - start
def WAIT(AorB, event):
    """Hold the flowcell *AorB* until the specfied event in the other flowell.

    **Parameters:**
    AorB (str): Flowcell position, A or B, to be held.
    event: Event in the other flowcell that releases the held flowcell.

    **Returns:**
    int: Time in seconds the current flowcell was held.
    """
    held = flowcells[AorB]
    other = flowcells[held.waits_for]
    # Capture the cycle label before blocking; it may advance while we wait.
    cycle = str(held.cycle)
    t0 = time.time()
    # Tell the other flowcell which event should release us, then block
    # until that event fires.
    other.signal_event = event
    other.wait_thread.wait()
    hs.message('PySeq::'+AorB+'::cycle'+cycle+'::Flowcell ready to continue')
    # Reset the wait event so the next WAIT call blocks again.
    other.wait_thread.clear()
    return time.time() - t0
def do_rinse(fc, port=None):
    """Rinse flowcell with reagent specified in config file.

    **Parameters:**
    - fc (flowcell): The flowcell to rinse.
    - port (str): Optional reagent port name; defaults to the method's
      'rinse' setting from the config file.

    Leaves ``fc.thread`` set to an *unstarted* pump thread (or a no-op
    thread when no rinse port is configured) for the caller to start.
    """
    method = config.get('experiment', 'method')         # Read method specific info
    method = config[method]
    if port is None:
        port = method.get('rinse', fallback=None)
    AorB = fc.position
    rinse = port in hs.v24[AorB].port_dict

    if rinse:
        LED(fc.position, 'awake')
        # Move valve
        hs.message('PySeq::'+AorB+'::Rinsing flowcell with', port)
        fc.thread = threading.Thread(target=hs.v24[AorB].move, args=(port,))
        fc.thread.start()
        # Pump volume depends on which port the reagent is plumbed into
        port_num = hs.v24[AorB].port_dict[port]
        if port_num in hs.v24[AorB].side_ports:
            volume = fc.volume['side']
        elif port_num == hs.v24[AorB].sample_port:
            volume = fc.volume['sample']
        else:
            volume = fc.volume['main']
        speed = fc.pump_speed['reagent']
        # Bug fix: join() the valve thread instead of the original busy
        # `while fc.thread.is_alive(): pass` loop, which spun a CPU core
        # at 100% for the whole valve move.
        fc.thread.join()
        fc.thread = threading.Thread(target=hs.p[AorB].pump,
                                     args=(volume, speed,))
    else:
        fc.thread = threading.Thread(target=do_nothing)
##########################################################
## Shut down system ######################################
##########################################################
def do_shutdown():
    """Shutdown the HiSeq and flush all reagent lines if prompted.

    Waits for every flowcell's worker thread to finish (repeatedly setting
    the wait event to unblock held recipes), parks the stage, flushes lines,
    writes per-flowcell history logs, and powers down the y-stage motor.
    """
    # Unblock and drain any still-running recipe threads before touching
    # the hardware; the wait event is re-set every 10 s until they exit.
    for fc in flowcells.values():
        while fc.thread.is_alive():
            fc.wait_thread.set()
            time.sleep(10)
    LED('all', 'startup')
    hs.message('PySeq::Shutting down...')
    # Lower the z-stage and move the stage out so flowcells can be removed.
    hs.z.move([0, 0, 0])
    hs.move_stage_out()
    do_flush()
    ##Flush all lines##
    # LED('all', 'user')
    #
    # # flush_YorN = userYN("Flush lines")
    # if flush_YorN:
    #     hs.message('Lock temporary flowcell on stage')
    #     hs.message('Place all valve input lines in PBS/water')
    #     input('Press enter to continue...')
    #
    #     LED('all', 'startup')
    #     for fc in flowcells.keys():
    #         volume = flowcells[fc].volume['main']
    #         speed = flowcells[fc].pump_speed['flush']
    #         for port in hs.v24[fc].port_dict.keys():
    #             if isinstance(port_dict[port], int):
    #                 hs.v24[fc].move(port)
    #                 hs.p[fc].pump(volume, speed)
    #         ##Return pump to top and NO port##
    #         hs.p[fc].command('OA0R')
    #         hs.p[fc].command('IR')
    # else:
    #     LED('all', 'user')
    hs.message('Retrieve experiment flowcells')
    input('Press any key to finish shutting down')
    # Dump each flowcell's (time, event, detail) history triples to a log.
    for fc in flowcells.values():
        AorB = fc.position
        fc_log_path = join(hs.log_path, 'Flowcell'+AorB+'.log')
        with open(fc_log_path, 'w') as fc_file:
            for i in range(len(fc.history[0])):
                fc_file.write(str(fc.history[0][i])+' '+
                              str(fc.history[1][i])+' '+
                              str(fc.history[2][i])+'\n')
    # Turn off y stage motor
    hs.y.move(0)
    hs.y.command('OFF')
    LED('all', 'off')
##########################################################
## Free Flowcells ########################################
##########################################################
def free_fc():
    """Release the first flowcell if flowcells are waiting on each other.

    **Returns:**
    - str: Position (A or B) of the flowcell that was released.
    """
    # Get which flowcell is to be first
    experiment = config['experiment']
    # Bug fix: the cycle count lives under the 'cycles' key; the original
    # read 'first flowcell' here, so int('A') raised ValueError.
    cycles = int(experiment.get('cycles', fallback=0))
    first_fc = experiment.get('first flowcell', fallback='A')

    if len(flowcells) == 1:
        fc = flowcells[[*flowcells][0]]
        try:
            fc.wait_thread.set()
        except:
            pass
        fc.signal_event = None
    else:
        # Positions of flowcells that have no remaining cycles to run.
        flowcells_ = [fc.position for fc in flowcells.values()
                      if fc.total_cycles <= cycles]
        if len(flowcells_) == 1:
            # Bug fix: flowcells_ holds position strings, so look the
            # flowcell object up in the dict instead of using the string.
            fc = flowcells[flowcells_[0]]
        else:
            fc = flowcells[first_fc]
        flowcells[fc.waits_for].wait_thread.set()
        flowcells[fc.waits_for].signal_event = None

    hs.message('PySeq::Flowcells are waiting on each other starting flowcell',
               fc.position)

    return fc.position
def get_config(args):
    """Return the experiment config appended with the method config.

    **Parameters:**
    - args (dict): Dictionary with the config path, the experiment name and
      the output path to store images and logs.

    **Returns:**
    - config: The experiment config appended with the method config.
    """
    # Create config parser
    config = configparser.ConfigParser()

    # Defaults that can be overided
    config.read_dict({'experiment' : {'log path': 'logs',
                                      'image path': 'images'}
                      })

    # Open config file
    if os.path.isfile(args['config']):
        config.read(args['config'])
    else:
        error('ConfigFile::Does not exist')
        sys.exit()
    # Set output path
    config['experiment']['save path'] = args['output']
    # Set experiment name
    config['experiment']['experiment name'] = args['name']

    # Remember whether the user supplied their own reagent/valve mapping so
    # the method defaults do not silently override it further below.
    USERVALVE = False
    if config.has_section('reagents'):
        valve = config['reagents'].items()
        if len(valve) > 0:
            USERVALVE = True

    # Get method specific configuration
    method = config['experiment']['method']
    if method in methods.get_methods():
        # Known built-in method: load its bundled config and recipe.
        config_path, recipe_path = methods.return_method(method)
        config.read(config_path)
    elif os.path.isfile(method):
        # Method given as a path to a custom config file.
        config.read(method)
        recipe_path = None
    elif config.has_section(method):
        # Method defined inline as a section of the experiment config.
        recipe_path = None
    else:
        error('ConfigFile::Error reading method configuration')
        sys.exit()

    # Check method keys
    if not methods.check_settings(config[method]):
        go = userYN('Proceed with experiment')
        if not go:
            sys.exit()

    # Get recipe
    recipe_name = config[method]['recipe']
    if recipe_path is not None:
        pass
    elif os.path.isfile(recipe_name):
        recipe_path = recipe_name
    else:
        error('ConfigFile::Error reading recipe')
    # Bug fix: configparser only stores strings, so assigning None here
    # raised TypeError and masked the recipe error reported just above.
    if recipe_path is not None:
        config['experiment']['recipe path'] = recipe_path

    # Don't override user defined valve
    user_config = configparser.ConfigParser()
    user_config.read(args['config'])
    if USERVALVE:
        config.read_dict({'reagents':dict(user_config['reagents'])})
    if user_config.has_section(method):
        config.read_dict({method:dict(user_config[method])})

    return config
def check_fc_temp(fc):
    """Check temperature of flowcell.

    Polls the flowcell temperature only once the interval timer has
    expired; warns if the reading drifts more than 5 C from the setpoint.
    Returns the measured temperature, or None when no reading was taken.
    """
    if fc.temperature is None:
        return
    if fc.temp_timer is None:
        # Arm a countdown; the sensor is only read once it expires.
        fc.temp_timer = threading.Timer(fc.temp_interval, do_nothing)
        fc.temp_timer.start()
    if fc.temp_timer.is_alive():
        # Still inside the polling interval -- nothing to do yet.
        return
    #print('checking temp')
    T = hs.T.get_fc_T(fc.position)
    hs.message(False, 'PySeq::'+fc.position+'::Temperature::',T,'°C')
    fc.temp_timer = None
    if abs(fc.temperature - T) > 5:
        warn = 'PySeq::'+fc.position+'::WARNING::Set Temperature '
        warn += str(fc.temperature) + ' C'
        hs.message(warn)
        warn = 'PySeq::'+fc.position+'::WARNING::Actual Temperature '
        warn += str(T) + ' C'
        hs.message(warn)
    return T
###################################
## Run System #####################
###################################
args_ = args.get_arguments()                            # Get config path, experiment name, & output path

if __name__ == 'pyseq.main':
    n_errors = 0
    config = get_config(args_)                          # Get config file
    logger = setup_logger()                             # Create logfiles
    port_dict = check_ports()                           # Check ports in configuration file
    first_line, IMAG_counter, z_planes = check_instructions()   # Checks instruction file is correct and makes sense
    flowcells = setup_flowcells(first_line, IMAG_counter)       # Create flowcells
    hs = configure_instrument(IMAG_counter, port_dict)
    confirm_settings(z_planes)
    hs = initialize_hs(IMAG_counter)                    # Initialize HiSeq, takes a few minutes

    # Bug fix: compare integers with ==, not `is`. Identity comparison on
    # ints relies on CPython small-int interning and raises SyntaxWarning
    # on Python 3.8+.
    if n_errors == 0:
        flush_YorN = do_flush()                         # Ask to flush out lines
        do_prime(flush_YorN)                            # Ask to prime lines
        if not userYN('Start experiment'):
            sys.exit()

        # Do prerecipe or Initialize Flowcells
        for fc in flowcells.values():
            if fc.prerecipe_path:
                fc.pre_recipe()
            else:
                fc.restart_recipe()

        cycles_complete = False
        while not cycles_complete:
            stuck = 0
            complete = 0

            for fc in flowcells.values():
                if not fc.thread.is_alive():            # flowcell not busy, do next step in recipe
                    do_recipe(fc)

                if fc.signal_event:                     # check if flowcells are waiting on each other
                    stuck += 1

                if fc.cycle > fc.total_cycles:          # check if all cycles are complete on flowcell
                    complete += 1

                check_fc_temp(fc)

            if stuck == len(flowcells):                 # Start the first flowcell if they are waiting on each other
                free_fc()

            if complete == len(flowcells):              # Exit while loop
                cycles_complete = True

            if hs.current_view is not None:             # Show latest images in napari, WILL BLOCK
                hs.current_view.show()
                hs.current_view = None

        do_shutdown()                                   # Shutdown HiSeq
    else:
        error('Total number of errors =', n_errors)
def main():
    """Console-script entry point stub.

    All real work runs at import time under the ``__name__ == 'pyseq.main'``
    guard above; this stub only satisfies the packaging entry-point hook.
    """
    pass
| 38.437079
| 159
| 0.52832
| 8,205
| 0.110527
| 0
| 0
| 0
| 0
| 0
| 0
| 25,511
| 0.343652
|
0b77f76b149075d4d3817aa9211f7115e499a12a
| 273
|
py
|
Python
|
tests/parser/rewriting.projection.4.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/rewriting.projection.4.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/rewriting.projection.4.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
input = """
f(X,1) :- a(X,Y),
g(A,X),g(B,X),
not f(1,X).
a(X,Y) :- g(X,0),g(Y,0).
g(x1,0).
g(x2,0).
"""
output = """
f(X,1) :- a(X,Y),
g(A,X),g(B,X),
not f(1,X).
a(X,Y) :- g(X,0),g(Y,0).
g(x1,0).
g(x2,0).
"""
| 11.869565
| 25
| 0.296703
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 254
| 0.930403
|
0b78677adaa1ddcbacf884f29508f3b4ea829e33
| 5,100
|
py
|
Python
|
focus/receiver.py
|
frederikhermans/focus
|
6228ba5fc8b41c74f2e22d5c2de20040b206d70a
|
[
"BSD-3-Clause"
] | 6
|
2016-04-18T09:40:16.000Z
|
2021-01-05T22:03:54.000Z
|
focus/receiver.py
|
horizon00/focus
|
6228ba5fc8b41c74f2e22d5c2de20040b206d70a
|
[
"BSD-3-Clause"
] | 1
|
2017-12-10T14:13:50.000Z
|
2017-12-10T14:13:50.000Z
|
focus/receiver.py
|
horizon00/focus
|
6228ba5fc8b41c74f2e22d5c2de20040b206d70a
|
[
"BSD-3-Clause"
] | 5
|
2018-01-04T14:59:50.000Z
|
2018-10-20T14:40:21.000Z
|
# Copyright (c) 2016, Frederik Hermans, Liam McNamara
#
# This file is part of FOCUS and is licensed under the 3-clause BSD license.
# The full license can be found in the file COPYING.
import cPickle as pickle
import sys
import click
import imageframer
import numpy as np
import rscode
import focus
def _grayscale(frame):
if len(frame.shape) == 2:
return frame # Is already grayscale
elif len(frame.shape) == 3:
return frame[:, :, 1] # Return green channel
else:
raise ValueError('Unexpected data format {}.'.format(frame.shape))
class Receiver(object):
    """Decodes FOCUS frames captured by a camera back into data fragments.

    Pipeline per frame: locate the frame markers, extract and deskew the
    coded region, strip the cyclic prefix, take the 2-D spectrum, unload
    QPSK symbols from the configured subchannels, and Reed-Solomon-decode
    each subchannel into a fragment.
    """
    def __init__(self, nsubchannels, nelements_per_subchannel=(64+16)*4,
                 parity=16, shape=(512, 512), border=0.15, cyclic_prefix=8,
                 use_hints=True, calibration_profile=None):
        # Reed-Solomon codec with `parity` parity symbols per fragment.
        self.rs = rscode.RSCode(parity)
        self.qpsk = focus.modulation.QPSK()
        # Spectrum indices of each subchannel's symbol positions.
        self.idxs = focus.spectrum.subchannel_idxs(nsubchannels,
                                                   nelements_per_subchannel,
                                                   shape)
        # Transmitted frames are padded by the cyclic prefix on every side.
        self.shape_with_cp = tuple(np.array(shape) + 2*cyclic_prefix)
        self.framer = imageframer.Framer(self.shape_with_cp, border,
                                         calibration_profile=calibration_profile)
        self.cyclic_prefix = cyclic_prefix

        # Crop indices
        self.spectrum_bbox = focus.spectrum.get_bbox(self.idxs)
        cropped_idxs = tuple(focus.spectrum.crop(i, *self.spectrum_bbox)
                             for i in self.idxs)
        self.idxs = np.array(cropped_idxs)

        # Location hints carry corner estimates between consecutive frames;
        # None disables hinting entirely.
        if use_hints:
            self.hints = list()
        else:
            self.hints = None

    def decode(self, frame, debug=False, copy_frame=True):
        """Decode one captured *frame*.

        Returns a dict with key 'fragments' (one entry per subchannel;
        None where Reed-Solomon recovery failed) and, when *debug* is
        True, the intermediate symbols, corners and a status string.
        """
        # Locate
        try:
            corners = self.framer.locate(frame, hints=self.hints)
        except ValueError as ve:
            # Frame markers not found: return an empty result rather
            # than propagating the error.
            # sys.stderr.write('WARNING: {}\n'.format(ve))
            result = {'fragments': []}
            if debug:
                result['status'] = 'notfound'
                result['locator-message'] = str(ve)
            return result
        if copy_frame:
            frame = frame.copy()
        code = self.framer.extract(_grayscale(frame), self.shape_with_cp,
                                   corners, hints=self.hints)
        code = focus.phy.strip_cyclic_prefix(code, self.cyclic_prefix)

        # Compute, crop and unload spectrum
        spectrum = focus.phy.rx(code)
        # -> complex64 makes angle() faster.
        spectrum = spectrum.astype(np.complex64)
        spectrum = focus.spectrum.crop(spectrum, *self.spectrum_bbox)
        # Unload symbols from the spectrum
        symbols = focus.spectrum.unload(spectrum, self.idxs)
        # Modulate all symbols with one call to demodulate()
        coded_fragments = self.qpsk.demodulate(np.array(symbols).T).T
        # Make array contiguous, so we can pass it to rs.decode()
        coded_fragments = np.ascontiguousarray(coded_fragments)

        # Recover and unmask all fragments
        fragments = list()
        for channel_idx, coded_frag in enumerate(coded_fragments):
            nerrors, fragment = self.rs.decode(coded_frag)
            if nerrors < 0:
                # Recovery failed
                fragment = None
            else:
                focus.link.mask_fragments(fragment, channel_idx)
            fragments.append(fragment)

        result = {'fragments': fragments}
        if debug:
            result.update({'coded_fragments': coded_fragments,
                           'symbols': symbols,
                           'corners': corners,
                           'status': 'found'})
        return result

    def decode_many(self, frames, debug=False):
        """Decode an iterable of frames; returns a tuple of result dicts."""
        return tuple(self.decode(frame, debug=debug) for frame in frames)
def benchmark(frames='frames.pickle'):
    """Profile Receiver.decode_many over *frames* and print cProfile stats.

    *frames* may be a path to a pickled frame list or an already-loaded
    sequence of frames.

    NOTE(review): uses Python 2's `basestring` (and the module imports
    `cPickle`), so this file targets Python 2 -- confirm before porting.
    """
    import cProfile as profile
    import pstats

    if isinstance(frames, basestring):
        frames = focus.util.load_frames(frames)
    pr = profile.Profile()
    recv = Receiver(16)
    pr.enable()
    recv.decode_many(frames)
    pr.disable()
    # sort_stats(2): order by cumulative time.
    stats = pstats.Stats(pr).sort_stats(2)
    stats.print_stats()
@click.command('receiver')
@click.option('--nsubchannels', type=int, default=16)
@click.option('--calibration-profile', type=str, default=None)
@click.option('--shape', type=str, default='512x512')
@click.option('--cyclic-prefix', type=int, default=8)
@click.option('--verbosity', type=int, default=0)
def main(nsubchannels, calibration_profile, shape, cyclic_prefix, verbosity):
    """Stream-decode pickled frame batches from stdin to stdout.

    Reads pickled frame lists from stdin until EOF and writes the pickled
    decode results to stdout, flushing after each batch so a downstream
    process can consume them incrementally.
    """
    shape = focus.util.parse_resolution(shape)
    recv = Receiver(nsubchannels, calibration_profile=calibration_profile,
                    shape=shape, cyclic_prefix=cyclic_prefix)
    while True:
        try:
            frames = pickle.load(sys.stdin)
        except EOFError:
            # Upstream closed the pipe; we are done.
            break
        fragments = recv.decode_many(frames, debug=verbosity > 0)
        pickle.dump(fragments, sys.stdout, protocol=pickle.HIGHEST_PROTOCOL)
        sys.stdout.flush()

if __name__ == '__main__':
    main()
| 36.428571
| 81
| 0.610196
| 3,263
| 0.639804
| 0
| 0
| 849
| 0.166471
| 0
| 0
| 827
| 0.162157
|
0b7aa19dc4e53889b36908ba53b351bf9cbef5d2
| 6,444
|
py
|
Python
|
calc/bond.py
|
RaphaelOneRepublic/financial-calculator
|
2451b35a4cb52a6c254ae9fdae462dfebdc51e65
|
[
"MIT"
] | 2
|
2020-12-10T13:00:43.000Z
|
2020-12-19T16:59:48.000Z
|
calc/bond.py
|
RaphaelOneRepublic/financial-calculator
|
2451b35a4cb52a6c254ae9fdae462dfebdc51e65
|
[
"MIT"
] | null | null | null |
calc/bond.py
|
RaphaelOneRepublic/financial-calculator
|
2451b35a4cb52a6c254ae9fdae462dfebdc51e65
|
[
"MIT"
] | null | null | null |
import logging
from typing import Sequence
import numpy as np
from calc.optimize import root
class Bond(object):
    """
    Represents a coupon paying bond.
    Upon creation, the time to maturity, coupon periods per year, coupon rate must be provided.
    If yield to maturity is provided, bond value would be ignored.
    If bond value (but not yield to maturity) is provided, it is used to
    compute the implied yield to maturity via Newton's method.
    Face value is assumed to be 100 if not provided.

    Derived quantities (price, dB/dy, duration, convexity) are cached and
    refreshed whenever a defining attribute is set through its property.
    """

    def __init__(self, T: float, R: float, m: int = 2, y: float = None, F: float = 100, B: float = None):
        """
        construct a coupon paying bond
        :param T: time to maturity in years
        :param m: coupon payments per year
        :param R: quoted annual coupon rate
        :param y: (implied) yield to maturity
        :param (optional) F: face value
        :param B: traded bond price
        """
        self._T = T
        self._m = m
        self._R = R
        self._F = F
        if y is not None:
            self._y = y
            self.__refresh_value_cache__()
        elif B is not None:
            # Setting B solves for the implied yield (see B.setter).
            self.B = B
        else:
            raise ValueError("one of yield to maturity or bond price must be provided")

    def __refresh_value_cache__(self):
        """
        recompute cached bond properties.
        :return:
        """
        self.__refresh_primary_cache__()
        # Second derivative of price w.r.t. yield: sum of t^2-weighted
        # discounted cash flows.
        self._d2Bdy2 = np.sum(self._ts * self._ts * self._dcs)
        self._duration = -self._dBdy / self._B
        self._convexity = self._d2Bdy2 / self._B

    def __refresh_primary_cache__(self):
        """
        recompute frequently accessed bond properties except for duration, convexity and second order derivative.
        :return:
        """
        # Coupon dates counted backwards from maturity, then reversed to
        # ascending order.
        self._ts = np.arange(self._T, 0, -1 / self._m)[::-1]
        # Per-period coupon payments; final payment includes the face value.
        self._cs = [self._R * self._F / 100 / self._m for _ in range(len(self._ts))]
        self._cs[-1] += self._F
        # Continuously-compounded discounted cash flows.
        self._dcs = np.exp(-self._y * self._ts) * self._cs
        self._B = np.sum(self._dcs)
        self._dBdy = float(np.sum(-self._ts * self._dcs))

    @property
    def T(self):
        """
        time to maturity
        :return:
        """
        return self._T

    @T.setter
    def T(self, value):
        self._T = value
        self.__refresh_value_cache__()

    @property
    def m(self):
        """
        coupon payments per year
        :return:
        """
        return self._m

    @m.setter
    def m(self, value):
        self._m = value
        self.__refresh_value_cache__()

    @property
    def R(self):
        """
        coupon rate
        :return:
        """
        return self._R

    @R.setter
    def R(self, value):
        self._R = value
        self.__refresh_value_cache__()

    @property
    def y(self):
        """
        yield to maturity
        :return:
        """
        return self._y

    @property
    def ytm(self):
        """
        yield to maturity (alias of y)
        :return:
        """
        return self._y

    @property
    def current(self):
        """
        the current yield of the bond
        = annual interest payment / bond price
        :return:
        """
        return self._R / 100 * self._F / self._B

    @property
    def bankeq(self):
        """
        the bank equivalent yield of the bond (zero-coupon only)
        = (par - value) / par * 360 / days to maturity
        :return:
        """
        assert self._R == 0
        return (self._F - self._B) / self._F * 360 / (self._T * 365)

    @property
    def cdeq(self):
        """
        the money market equivalent yield of the bond (zero-coupon only)
        = (par - value) / value * 360 / days to maturity
        :return:
        """
        assert self._R == 0
        return (self._F - self._B) / self._B * 360 / (self._T * 365)

    @y.setter
    def y(self, value):
        self._y = value
        self.__refresh_value_cache__()

    @property
    def B(self):
        """
        bond value
        :return:
        """
        return self._B

    @B.setter
    def B(self, value):
        # Solve f(y) = price(y) - value = 0 for the implied yield using
        # Newton's method; f's evaluation also refreshes the primary cache.
        def f(x):
            self._y = x
            self.__refresh_primary_cache__()
            return self._B - value

        def df(x: float):
            return self._dBdy

        try:
            # compute implied yield to maturity with initial guess = 0.1
            self.y = root(f, 0.1, df, epsilon=10e-9, delta=10e-9)
        except RuntimeError:
            logging.error("invalid bond value")

    @property
    def F(self):
        """
        face value
        :return:
        """
        return self._F

    @F.setter
    def F(self, value):
        self._F = value
        self.__refresh_value_cache__()

    @property
    def duration(self):
        """
        modified duration of the bond
        :return:
        """
        return self._duration

    @property
    def convexity(self):
        """
        :convexity of the bond
        :return:
        """
        return self._convexity
def find_curve(bond, known: np.array, epsilon: float = 10e-10):
    """Extend a partially-known zero-rate curve so it reprices *bond*.

    Rates beyond the known points are filled by a linear segment from the
    last known rate to an unknown endpoint x, and x is solved with Newton's
    method so the discounted cash flows match the bond's price.

    NOTE(review): `epsilon` is accepted but never used in the body -- TODO
    confirm whether it was meant to be passed to root().
    NOTE(review): f() discounts with known[1:] but the returned curve keeps
    known[:] in full; presumably known[0] is the overnight anchor that has
    no coupon date -- verify against bootstrap()'s usage.
    """
    # Coupon dates (ascending) and per-period cash flows; the final payment
    # includes the face value.
    t = np.arange(bond.T, 0, - 1. / bond.m)[::-1]
    c = np.array([bond.R / bond.m] * len(t))
    c[-1] += bond.F

    def f(x: float) -> float:
        # Pricing error when the unknown tail of the curve ends at rate x.
        r = np.linspace(x, known[-1], len(t) + 1 - len(known), endpoint=False)[::-1]
        rr = np.concatenate([known[1:], r])
        return float(np.sum(c * np.exp(-rr * t))) - bond.B

    def df(x: float) -> float:
        # Derivative of the pricing error w.r.t. the endpoint rate x.
        r = np.linspace(x, known[-1], len(t) + 1 - len(known), endpoint=False)[::-1]
        cc = c[len(known) - 1:]
        tt = t[len(known) - 1:]
        return float(np.sum(-cc * tt * np.exp(-r * tt) * np.arange(1, len(tt) + 1) / len(tt)))

    x = root(f, 0.05, df=df)
    r = np.linspace(x, known[-1], len(t) + 1 - len(known), endpoint=False)[::-1]
    rr = np.concatenate([known[:], r])
    return rr
def bootstrap(bonds: Sequence[Bond], overnight: float, epsilon: float = 10e-10):
    """
    Bootstrap a zero rate curve from the given bonds and bond values.
    Note that the bonds must have equal coupon payment periods (equal <m>s).
    Zero rates at times for which we do not have a bond are calculated
    by a linear line connecting the two nearest rates at times for which we do have a bond.
    :param overnight: the overnight rate anchoring the short end of the curve
    :param epsilon: tolerance forwarded for curve fitting
    :param bonds: bonds to bootstrap from, in any maturity order
    :return: the bootstrapped zero-rate curve as a numpy array
    """
    # Seed the curve with the overnight rate, then extend it one bond at a
    # time in order of increasing maturity.
    curve = [overnight]
    for b in sorted(bonds, key=lambda bond: bond.T):
        curve = find_curve(b, curve)
    return curve
| 24.689655
| 113
| 0.542675
| 4,874
| 0.756363
| 0
| 0
| 2,768
| 0.429547
| 0
| 0
| 2,361
| 0.366387
|
0b7ab6dccc22b64a51e866ea9c844d792babb7c7
| 9,074
|
py
|
Python
|
jasy/build/Script.py
|
sebastian-software/jasy
|
9740ed33f0836ab2dd3e00ab4fae4049f9908072
|
[
"MIT"
] | 2
|
2015-05-27T19:30:49.000Z
|
2015-12-10T16:55:14.000Z
|
jasy/build/Script.py
|
sebastian-software/jasy
|
9740ed33f0836ab2dd3e00ab4fae4049f9908072
|
[
"MIT"
] | 2
|
2015-03-16T09:15:58.000Z
|
2015-04-07T19:05:47.000Z
|
jasy/build/Script.py
|
sebastian-software/jasy
|
9740ed33f0836ab2dd3e00ab4fae4049f9908072
|
[
"MIT"
] | 2
|
2017-07-18T20:08:05.000Z
|
2021-01-04T10:46:14.000Z
|
#
# Jasy - Web Tooling Framework
# Copyright 2010-2012 Zynga Inc.
# Copyright 2013-2014 Sebastian Werner
#
import os
import jasy
import jasy.core.Console as Console
from jasy.item.Script import ScriptError
from jasy.item.Script import ScriptItem
import jasy.script.Resolver as ScriptResolver
from jasy.script.Resolver import Resolver
import jasy.script.output.Optimization as ScriptOptimization
import jasy.script.output.Formatting as ScriptFormatting
class ScriptBuilder:
    """Builds compressed/loader JavaScript output files for a Jasy profile.

    Resolves script dependencies, injects generated boot/asset/translation
    code, and writes kernel, loader, or fully-compressed script files into
    the profile's JS output folder.
    """

    # --------------------------------------------------------------------------------------------
    # ESSENTIALS
    # --------------------------------------------------------------------------------------------

    def __init__(self, profile):
        self.__profile = profile
        self.__session = profile.getSession()
        self.__assetManager = profile.getAssetManager()
        self.__fileManager = profile.getFileManager()
        self.__outputPath = os.path.join(profile.getDestinationPath(), profile.getJsOutputFolder())
        # Scripts already emitted into the kernel; later outputs filter
        # these out so code is never shipped twice.
        self.__kernelScripts = []

        self.__scriptOptimization = ScriptOptimization.Optimization()
        self.__scriptFormatting = ScriptFormatting.Formatting()

        compressionLevel = profile.getCompressionLevel()
        formattingLevel = profile.getFormattingLevel()

        # Any formatting at all turns on per-file divider comments.
        self.__addDividers = formattingLevel > 0

        if compressionLevel > 0:
            self.__scriptOptimization.enable("variables")
            self.__scriptOptimization.enable("declarations")

        if compressionLevel > 1:
            self.__scriptOptimization.enable("blocks")
            self.__scriptOptimization.enable("privates")

        if formattingLevel > 1:
            self.__scriptFormatting.enable("semicolon")
            self.__scriptFormatting.enable("comma")

    def __sortScriptItems(self, items, bootCode=None, filterBy=None, inlineTranslations=False):
        """Resolve, augment and dependency-sort *items*.

        Optionally appends generated boot code, asset data (when
        jasy.Asset is used), and translation data (unless
        *inlineTranslations*), then removes any item listed in *filterBy*.
        Returns the sorted ScriptItem list.
        """
        profile = self.__profile
        session = self.__session

        # 1. Add given set of items
        resolver = Resolver(profile)
        for item in items:
            resolver.add(item)

        # 2. Add optional boot code
        if bootCode:
            bootScriptItem = session.getVirtualItem("jasy.generated.BootCode", ScriptItem, "(function(){%s})();" % bootCode, ".js")
            resolver.add(bootScriptItem)

        # 3. Check for asset usage
        includedScripts = resolver.getIncluded()
        usesAssets = False
        for item in includedScripts:
            if item.getId() == "jasy.Asset":
                usesAssets = True
                break

        # 4. Add asset data if needed
        if usesAssets:
            assetData = self.__assetManager.exportToJson(includedScripts)
            if assetData:
                assetScriptItem = session.getVirtualItem("jasy.generated.AssetData", ScriptItem, "jasy.Asset.addData(%s);" % assetData, ".js")
                resolver.add(assetScriptItem, prepend=True)

        # 5. Add translation data
        if not inlineTranslations:
            translationBundle = session.getTranslationBundle(profile.getCurrentLocale())
            if translationBundle:
                translationData = translationBundle.export(includedScripts)
                if translationData:
                    translationScriptItem = session.getVirtualItem("jasy.generated.TranslationData", ScriptItem, "jasy.Translate.addData(%s);" % translationData, ".js")
                    resolver.add(translationScriptItem, prepend=True)

        # 6. Sorting items
        sortedScripts = resolver.getSorted()

        # 7. Apply filter
        if filterBy:
            filteredScripts = []
            for item in sortedScripts:
                if item not in filterBy:
                    filteredScripts.append(item)
            sortedScripts = filteredScripts

        return sortedScripts

    def __compressScripts(self, items):
        """Compress each item and join the results into one code string.

        Inserts a "FILE ID" divider comment per item when formatting is
        enabled. Raises jasy.UserError on compression failure.
        """
        try:
            profile = self.__profile
            session = self.__session

            result = []
            for item in items:
                compressed = item.getCompressed(profile)

                if self.__addDividers:
                    result.append("// FILE ID: %s\n%s\n\n" % (item.getId(), compressed))
                else:
                    result.append(compressed)

        except ScriptError as error:
            raise jasy.UserError("Error during script compression! %s" % error)

        return "".join(result)

    def __generateScriptLoader(self, items):
        """Return JS code that asynchronously loads *items* at runtime.

        core.io.Queue and core.io.Script must be available before loading;
        if the kernel does not contain them they are compressed inline and
        prepended to the generated loader call.
        """
        # For loading items we require core.ui.Queue and core.io.Script
        # being available. If they are not part of the kernel, we have to
        # prepend them as compressed code into the resulting output.

        hasLoader = False
        hasQueue = False

        for item in self.__kernelScripts:
            itemId = item.getId()
            if itemId == "core.io.Queue":
                hasQueue = True
            elif itemId == "core.io.Script":
                hasLoader = True

        code = ""

        if not hasQueue or not hasLoader:
            compress = []
            if not hasQueue:
                compress.append("core.io.Queue")
            if not hasLoader:
                compress.append("core.io.Script")

            compressedList = self.__sortScriptItems(compress, filterBy=self.__kernelScripts)
            code += self.__compressScripts(compressedList)

        main = self.__session.getMain()
        files = []

        for item in items:
            # Ignore already compressed items
            if item.getId() in ("core.io.Script", "core.io.Queue"):
                continue

            path = item.getPath()

            # Support for multi path items
            # (typically in projects with custom layout/structure e.g. 3rd party)
            if isinstance(path, list):
                for singleFileName in path:
                    files.append(main.toRelativeUrl(singleFileName))
            else:
                files.append(main.toRelativeUrl(path))

        if self.__addDividers:
            loaderList = '"%s"' % '",\n"'.join(files)
        else:
            loaderList = '"%s"' % '","'.join(files)

        code += 'core.io.Queue.load([%s], null, null, true);' % loaderList
        return code

    # --------------------------------------------------------------------------------------------
    # PUBLIC API
    # --------------------------------------------------------------------------------------------

    def getWorkingPath(self):
        """Return the application root folder used to resolve script URLs."""
        # Locations inside scripts are always relative to the application root folder
        # aka the folder where HTML files are loaded from
        return self.__profile.getDestinationPath()

    def buildKernel(self, fileId):
        """Build kernel.js booting the class *fileId*; no-op when falsy."""
        if not fileId:
            return

        self.__profile.setWorkingPath(self.getWorkingPath())
        self.storeKernelScript("kernel.js", bootCode="%s.boot();" % fileId)

    def buildPart(self, partId, fileId):
        """Build the script output for application part *partId*.

        Emits a loader script in source mode, otherwise a fully
        compressed script; no-op when *fileId* is falsy.
        """
        if not fileId:
            return

        Console.info("Generating script (%s)...", fileId)
        Console.indent()

        self.__profile.setWorkingPath(self.getWorkingPath())
        ScriptItems = ScriptResolver.Resolver(self.__profile).add(fileId).getSorted()

        if self.__profile.getUseSource():
            self.storeLoaderScript(ScriptItems, "%s-{{id}}.js" % partId, "new %s;" % fileId)
        else:
            self.storeCompressedScript(ScriptItems, "%s-{{id}}.js" % partId, "new %s;" % fileId)

        Console.outdent()

    def storeKernelScript(self, fileName, bootCode=""):
        """Compress the profile's setup scripts into *fileName*.

        The sorted result is remembered in __kernelScripts so later
        outputs can exclude kernel code.
        """
        Console.info("Generating kernel script...")
        Console.indent()

        # Export all profile data for the kernel
        items = self.__profile.getSetupScripts().values()

        # Transfer all hard-wired fields into a permutation
        self.__profile.setStaticPermutation()

        # Sort and compress
        sortedScripts = self.__sortScriptItems(items, bootCode, inlineTranslations=True)
        compressedCode = self.__compressScripts(sortedScripts)
        self.__fileManager.writeFile(os.path.join(self.__outputPath, fileName), compressedCode)
        self.__kernelScripts = sortedScripts

        Console.outdent()

    def storeLoaderScript(self, items, fileName, bootCode=""):
        """Write a runtime loader script for *items* (minus kernel code)."""
        Console.info("Generating loader script...")
        Console.indent()

        sortedScripts = self.__sortScriptItems(items, bootCode, filterBy=self.__kernelScripts)
        loaderCode = self.__generateScriptLoader(sortedScripts)
        self.__fileManager.writeFile(os.path.join(self.__outputPath, fileName), loaderCode)

        Console.outdent()

    def storeCompressedScript(self, items, fileName, bootCode=""):
        """Write a fully compressed script for *items* (minus kernel code)."""
        Console.info("Generating compressed script...")
        Console.indent()

        sortedScripts = self.__sortScriptItems(items, bootCode, filterBy=self.__kernelScripts, inlineTranslations=True)
        compressedCode = self.__compressScripts(sortedScripts)
        self.__fileManager.writeFile(os.path.join(self.__outputPath, fileName), compressedCode)

        Console.outdent()
| 33.116788
| 168
| 0.601278
| 8,614
| 0.949306
| 0
| 0
| 0
| 0
| 0
| 0
| 1,897
| 0.209059
|
0b7b1e425f8017f791073b532d42d48a2786d924
| 171
|
py
|
Python
|
13.py
|
kwoshvick/project-euler
|
d27370b0f22b51ad9ccb15afa912983d8fd8be5c
|
[
"MIT"
] | null | null | null |
13.py
|
kwoshvick/project-euler
|
d27370b0f22b51ad9ccb15afa912983d8fd8be5c
|
[
"MIT"
] | null | null | null |
13.py
|
kwoshvick/project-euler
|
d27370b0f22b51ad9ccb15afa912983d8fd8be5c
|
[
"MIT"
] | null | null | null |
# Project Euler problem 13: sum the large numbers listed one-per-line in
# the file "13" and print the total and its first ten digits.
# Fixes: close the input file via a context manager (the original handle
# leaked) and stop shadowing the builtins `file`/`sum`.
with open("13") as number_file:
    total = 0
    for line in number_file:
        total += int(line)

print(total)
print(str(total)[:10])
| 10.6875
| 28
| 0.596491
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 28
| 0.163743
|
0b7d1c464ba5f7b2f25f469546bc0725ef4ae2f0
| 25,033
|
py
|
Python
|
snlds/model_cavi_snlds.py
|
egonrian/google-research
|
8177adbe9ca0d7e5a9463b54581fe6dd27be0974
|
[
"Apache-2.0"
] | 3
|
2021-01-18T04:46:49.000Z
|
2021-03-05T09:21:40.000Z
|
snlds/model_cavi_snlds.py
|
Alfaxad/google-research
|
2c0043ecd507e75e2df9973a3015daf9253e1467
|
[
"Apache-2.0"
] | 25
|
2020-07-25T08:53:09.000Z
|
2022-03-12T00:43:02.000Z
|
snlds/model_cavi_snlds.py
|
Alfaxad/google-research
|
2c0043ecd507e75e2df9973a3015daf9253e1467
|
[
"Apache-2.0"
] | 4
|
2021-02-08T10:25:45.000Z
|
2021-04-17T14:46:26.000Z
|
# coding=utf-8
# Copyright 2020 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Collapsed Amortized Variational Inference for SNLDS.
This is a reasonable baseline model for switching non-linear dynamical system
with the following architecture:
1. an inference network, with Bidirectional-RNN for input embedding, and a
forward RNN to get the posterior distribution of `q(z[1:T] | x[1:T])`.
2. a continuous state transition network, `p(z[t] | z[t-1], s[t])`.
3. a discrete state transition network that conditioned on the input,
`p(s[t] | s[t-1], x[t-1])`.
4. an emission network conditioned on the continuous hidden dynamics,
`p(x[t] | z[t])`.
It also contains a function, `create_model()`, to help to create the SNLDS
model discribed in ``Collapsed Amortized Variational Inference for Switching
Nonlinear Dynamical Systems``. 2019. https://arxiv.org/abs/1910.09588.
All the networks are configurable through function arguments `network_*`.
"""
import collections
import tensorflow as tf
import tensorflow_probability as tfp
from snlds import model_base
from snlds import utils
namedtuple = collections.namedtuple
layers = tf.keras.layers
tfd = tfp.distributions
tfpl = tfp.layers
RANDOM_SEED = 131
def construct_initial_state_distribution(
    latent_dim,
    num_categ,
    use_trainable_cov=False,
    use_triangular_cov=False,
    raw_sigma_bias=0.0,
    sigma_min=1e-5,
    sigma_scale=0.05,
    dtype=tf.float32,
    name="z0"):
  """Construct the initial state distribution, `p(z[0])`.

  Args:
    latent_dim: an `int` scalar for dimension of continuous hidden states, `z`.
    num_categ: an `int` scalar for number of discrete states, `s`.
    use_trainable_cov: a `bool` scalar indicating whether the scale of `p(z[0])`
      is trainable. Default to False.
    use_triangular_cov: a `bool` scalar indicating whether to use triangular
      covariance matrices and `tfp.distributions.MultivariateNormalTriL` for
      distribution. Otherwise, a diagonal covariance matrices and
      `tfp.distributions.MultivariateNormalDiag` will be used.
    raw_sigma_bias: a `float` scalar to be added to the raw sigma, which is
      standard deviation of the distribution. Default to `0.`.
    sigma_min: a `float` scalar for minimal level of sigma to prevent
      underflow. Default to `1e-5`.
    sigma_scale: a `float` scalar for scaling the sigma. Default to `0.05`.
      The above three arguments are used as
      `sigma_scale * max(softmax(raw_sigma + raw_sigma_bias), sigma_min))`.
    dtype: data type for variables within the scope. Default to `tf.float32`.
    name: a `str` to construct names of variables.

  Returns:
    return_dist: a `tfp.distributions` instance for the initial state
      distribution, `p(z[0])`.
  """
  glorot_initializer = tf.keras.initializers.GlorotUniform()
  # One mean vector per discrete state `s`.
  z0_mean = tf.Variable(
      initial_value=glorot_initializer(shape=[num_categ, latent_dim],
                                       dtype=dtype),
      name="{}_mean".format(name))

  def _transform_scale(raw_scale):
    # Shared post-processing of the raw scale parameters, previously
    # duplicated verbatim in both branches below:
    # `sigma_scale * max(softmax(raw + raw_sigma_bias), sigma_min)`.
    return (tf.maximum(tf.nn.softmax(raw_scale + raw_sigma_bias),
                       sigma_min)
            * sigma_scale)

  if use_triangular_cov:
    # Lower-triangular scale parameterized by its latent_dim*(latent_dim+1)/2
    # free entries.
    z0_scale = tfp.math.fill_triangular(
        tf.Variable(
            initial_value=glorot_initializer(
                shape=[int(latent_dim * (latent_dim + 1) / 2)],
                dtype=dtype),
            name="{}_scale".format(name),
            trainable=use_trainable_cov))
    return_dist = tfd.Independent(
        distribution=tfd.MultivariateNormalTriL(
            loc=z0_mean, scale_tril=_transform_scale(z0_scale)),
        reinterpreted_batch_ndims=0)
  else:
    # Diagonal scale with one entry per latent dimension.
    z0_scale = tf.Variable(
        initial_value=glorot_initializer(
            shape=[latent_dim],
            dtype=dtype),
        name="{}_scale".format(name),
        trainable=use_trainable_cov)
    return_dist = tfd.Independent(
        distribution=tfd.MultivariateNormalDiag(
            loc=z0_mean, scale_diag=_transform_scale(z0_scale)),
        reinterpreted_batch_ndims=0)
  return tfp.experimental.as_composite(return_dist)
class ContinuousStateTransition(tf.keras.Model):
  """Transition for `p(z[t] | z[t-1], s[t])`."""

  def __init__(self,
               transition_mean_networks,
               distribution_dim,
               num_categories=1,
               cov_mat=None,
               use_triangular_cov=False,
               use_trainable_cov=True,
               raw_sigma_bias=0.0,
               sigma_min=1e-5,
               sigma_scale=0.05,
               dtype=tf.float32,
               name="ContinuousStateTransition"):
    """Construct a `ContinuousStateTransition` instance.

    Args:
      transition_mean_networks: a list of `callable` networks, with the length
        of list same as `num_categories`. Each one of the networks will take
        previous step hidden state, `z[t-1]`, and returns the mean of
        transition distribution, `p(z[t] | z[t-1], s[t]=i)` for each
        discrete state `i`.
      distribution_dim: an `int` scalar for dimension of continuous hidden
        states, `z`.
      num_categories: an `int` scalar for number of discrete states, `s`.
      cov_mat: an optional `float` Tensor for predefined covariance matrix.
        Default to `None`, in which case, a `cov` variable will be created.
      use_triangular_cov: a `bool` scalar indicating whether to use triangular
        covariance matrices and `tfp.distributions.MultivariateNormalTriL` for
        distribution. Otherwise, a diagonal covariance matrices and
        `tfp.distributions.MultivariateNormalDiag` will be used.
      use_trainable_cov: a `bool` scalar indicating whether the scale of
        the distribution is trainable. Default to False.
      raw_sigma_bias: a `float` scalar to be added to the raw sigma, which is
        standard deviation of the distribution. Default to `0.`.
      sigma_min: a `float` scalar for minimal level of sigma to prevent
        underflow. Default to `1e-5`.
      sigma_scale: a `float` scalar for scaling the sigma. Default to `0.05`.
        The above three arguments are used as
        `sigma_scale * max(softmax(raw_sigma + raw_sigma_bias), sigma_min))`.
      dtype: data type for variables within the scope. Default to `tf.float32`.
      name: a `str` to construct names of variables.
    """
    super(ContinuousStateTransition, self).__init__()
    assertion_str = (
        "There has to be one transition mean networks for each discrete state")
    assert len(transition_mean_networks) == num_categories, assertion_str
    self.z_trans_networks = transition_mean_networks
    self.num_categ = num_categories
    self.use_triangular_cov = use_triangular_cov
    self.distribution_dim = distribution_dim

    def _transform_cov(raw_cov):
      # Shared transform for the raw covariance parameters:
      # `sigma_scale * max(softmax(raw + raw_sigma_bias), sigma_min)`.
      return tf.maximum(tf.nn.softmax(raw_cov + raw_sigma_bias),
                        sigma_min) * sigma_scale

    # BUG FIX: the original `if cov_mat:` evaluates tensor truthiness, which
    # raises for predefined covariance tensors with more than one element.
    # Per the docstring the sentinel for "create a variable" is `None`.
    if cov_mat is not None:
      self.cov_mat = cov_mat
    elif self.use_triangular_cov:
      raw_cov = tf.Variable(
          tf.random.uniform(
              shape=[
                  int(self.distribution_dim
                      * (self.distribution_dim + 1) / 2)],
              minval=0., maxval=1.,
              dtype=dtype),
          name="{}_cov".format(name),
          dtype=dtype,
          trainable=use_trainable_cov)
      self.cov_mat = _transform_cov(tfp.math.fill_triangular(raw_cov))
    else:
      raw_cov = tf.Variable(
          tf.random.uniform(shape=[self.distribution_dim],
                            minval=0.0, maxval=1.,
                            dtype=dtype),
          name="{}_cov".format(name),
          dtype=dtype,
          trainable=use_trainable_cov)
      self.cov_mat = _transform_cov(raw_cov)

  def call(self, input_tensor, dtype=tf.float32):
    """Return `p(z[t] | z[t-1], s[t])` for a batch of previous states.

    Args:
      input_tensor: a float Tensor of shape
        [batch_size, num_steps, distribution_dim] holding `z[t-1]`.
      dtype: data type used to convert `input_tensor`.

    Returns:
      A `tfp.distributions` instance whose `loc` has shape
      [batch_size, num_steps, num_categ, distribution_dim].
    """
    input_tensor = tf.convert_to_tensor(input_tensor, dtype_hint=dtype)
    batch_size, num_steps, distribution_dim = tf.unstack(tf.shape(input_tensor))
    # Stack the per-category means and move the category axis inward; the
    # shape after tf.stack is [num_categ, batch_size, num_steps, dim].
    mean_tensor = tf.transpose(
        tf.stack([
            z_net(input_tensor) for z_net in self.z_trans_networks]),
        [1, 2, 0, 3])
    # Reshape to pin the static shape [batch, steps, num_categ, dim].
    mean_tensor = tf.reshape(mean_tensor,
                             [batch_size, num_steps,
                              self.num_categ, distribution_dim])

    if self.use_triangular_cov:
      output_dist = tfd.MultivariateNormalTriL(
          loc=mean_tensor,
          scale_tril=self.cov_mat)
    else:
      output_dist = tfd.MultivariateNormalDiag(
          loc=mean_tensor,
          scale_diag=self.cov_mat)

    return tfp.experimental.as_composite(output_dist)

  @property
  def output_event_dims(self):
    # Event dimensionality of the continuous hidden state `z`.
    return self.distribution_dim
class DiscreteStateTransition(tf.keras.Model):
  """Discrete state transition p(s[t] | s[t-1], x[t-1])."""

  def __init__(self,
               transition_network,
               num_categories):
    """Construct a `DiscreteStateTransition` instance.

    Args:
      transition_network: a `callable` network that maps batch conditional
        inputs `x[t-1]` to the (flattened) discrete transition matrices
        `log p(s[t] | s[t-1], x[t-1])`.
      num_categories: an `int` scalar for number of discrete states, `s`.
    """
    super(DiscreteStateTransition, self).__init__()
    self.dense_net = transition_network
    self.num_categ = num_categories

  def call(self, input_tensor, dtype=tf.float32):
    """Return [batch, steps, num_categ, num_categ] transition tensors."""
    input_tensor = tf.convert_to_tensor(input_tensor, dtype_hint=dtype)
    input_shape = tf.shape(input_tensor)
    batch_size, num_steps = input_shape[0], input_shape[1]
    # The conditioning network emits one flattened transition matrix per
    # (batch, step); unflatten it into a square [num_categ, num_categ] block.
    return tf.reshape(
        self.dense_net(input_tensor),
        [batch_size, num_steps, self.num_categ, self.num_categ])

  @property
  def output_event_dims(self):
    # Number of discrete states.
    return self.num_categ
class GaussianDistributionFromMean(tf.keras.Model):
  """Emission model p(x[t] | z[t])."""

  def __init__(self,
               emission_mean_network,
               observation_dim,
               cov_mat=None,
               use_triangular_cov=False,
               use_trainable_cov=True,
               raw_sigma_bias=0.0,
               sigma_min=1e-5,
               sigma_scale=0.05,
               dtype=tf.float32,
               name="GaussianDistributionFromMean"):
    """Construct a `GaussianDistributionFromMean` instance.

    Args:
      emission_mean_network: a `callable` network taking continuous hidden
        states, `z[t]`, and returning the mean of emission distribution,
        `p(x[t] | z[t])`.
      observation_dim: an `int` scalar for dimension of observations, `x`.
      cov_mat: an optional `float` Tensor for predefined covariance matrix.
        Default to `None`, in which case, a `cov` variable will be created.
      use_triangular_cov: a `bool` scalar indicating whether to use triangular
        covariance matrices and `tfp.distributions.MultivariateNormalTriL` for
        distribution. Otherwise, a diagonal covariance matrices and
        `tfp.distributions.MultivariateNormalDiag` will be used.
      use_trainable_cov: a `bool` scalar indicating whether the scale of
        the distribution is trainable. Default to False.
      raw_sigma_bias: a `float` scalar to be added to the raw sigma, which is
        standard deviation of the distribution. Default to `0.`.
      sigma_min: a `float` scalar for minimal level of sigma to prevent
        underflow. Default to `1e-5`.
      sigma_scale: a `float` scalar for scaling the sigma. Default to `0.05`.
        The above three arguments are used as
        `sigma_scale * max(softmax(raw_sigma + raw_sigma_bias), sigma_min))`.
      dtype: data type for variables within the scope. Default to `tf.float32`.
      name: a `str` to construct names of variables.
    """
    super(GaussianDistributionFromMean, self).__init__()
    self.observation_dim = observation_dim
    self.x_emission_net = emission_mean_network
    self.use_triangular_cov = use_triangular_cov

    def _transform_cov(raw_cov):
      # Shared transform for the raw covariance parameters:
      # `sigma_scale * max(softmax(raw + raw_sigma_bias), sigma_min)`.
      return tf.maximum(tf.nn.softmax(raw_cov + raw_sigma_bias),
                        sigma_min) * sigma_scale

    # BUG FIX: the original `if cov_mat:` evaluates tensor truthiness, which
    # raises for predefined covariance tensors with more than one element.
    # Per the docstring the sentinel for "create a variable" is `None`.
    if cov_mat is not None:
      self.cov_mat = cov_mat
    elif self.use_triangular_cov:
      local_variable = tf.Variable(
          tf.random.uniform(
              shape=[int(self.observation_dim*(self.observation_dim+1)/2)],
              minval=0., maxval=1.,
              dtype=dtype),
          name="{}_cov".format(name),
          dtype=dtype,
          trainable=use_trainable_cov)
      self.cov_mat = _transform_cov(tfp.math.fill_triangular(local_variable))
    else:
      raw_cov = tf.Variable(
          initial_value=tf.random.uniform(shape=[self.observation_dim],
                                          minval=0.0, maxval=1.,
                                          dtype=dtype),
          name="{}_cov".format(name),
          dtype=dtype,
          trainable=use_trainable_cov)
      self.cov_mat = _transform_cov(raw_cov)

  def call(self, input_tensor, dtype=tf.float32):
    """Return the emission distribution `p(x[t] | z[t])` for `input_tensor`."""
    input_tensor = tf.convert_to_tensor(input_tensor, dtype_hint=dtype)
    mean_tensor = self.x_emission_net(input_tensor)

    if self.use_triangular_cov:
      output_dist = tfd.MultivariateNormalTriL(
          loc=mean_tensor,
          scale_tril=self.cov_mat)
    else:
      output_dist = tfd.MultivariateNormalDiag(
          loc=mean_tensor,
          scale_diag=self.cov_mat)

    return tfp.experimental.as_composite(output_dist)

  @property
  def output_event_dims(self):
    # Event dimensionality of the emitted observations.
    return self.observation_dim
class RnnInferenceNetwork(tf.keras.Model):
  """Inference network for posterior q(z[1:T] | x[1:T])."""

  def __init__(self,
               posterior_rnn,
               posterior_dist,
               latent_dim,
               embedding_network=None):
    """Construct a `RnnInferenceNetwork` instance.

    Args:
      posterior_rnn: a RNN cell, `h[t]=f_RNN(h[t-1], z[t-1], input[t])`,
        which recursively takes previous step RNN states `h`, previous step
        sampled dynamical state `z[t-1]`, and conditioned input `input[t]`.
      posterior_dist: a distribution instance for `p(z[t] | h[t])`,
        where h[t] is the output of `posterior_rnn`.
      latent_dim: an `int` scalar for dimension of continuous hidden
        states, `z`.
      embedding_network: an optional network to embed the observations, `x[t]`.
        Default to `None`, in which case, no embedding is applied.
    """
    super(RnnInferenceNetwork, self).__init__()
    self.latent_dim = latent_dim
    self.posterior_rnn = posterior_rnn
    self.posterior_dist = posterior_dist

    # BUG FIX: the identity fallback used to be unconditionally overwritten
    # by `self.embedding_network = embedding_network`, so passing `None`
    # stored `None` and `call` would crash when invoking it.
    if embedding_network is None:
      self.embedding_network = lambda x: x
    else:
      self.embedding_network = embedding_network

  def call(self,
           inputs,
           num_samples=1,
           dtype=tf.float32,
           random_seed=RANDOM_SEED,
           parallel_iterations=10):
    """Recursively sample z[t] ~ q(z[t]|h[t]=f_RNN(h[t-1], z[t-1], h[t]^b)).

    Args:
      inputs: a float `Tensor` of size [batch_size, num_steps, obs_dim], where
        each observation should be flattened.
      num_samples: an `int` scalar for number of samples per time-step, for
        posterior inference networks, `z[i] ~ q(z[1:T] | x[1:T])`.
      dtype: The data type of input data.
      random_seed: an `Int` as the seed for random number generator.
      parallel_iterations: a positive `Int` indicates the number of iterations
        allowed to run in parallel in `tf.while_loop`, where `tf.while_loop`
        defaults it to be 10.

    Returns:
      sampled_z: a float 3-D `Tensor` of size [num_samples, batch_size,
        num_steps, latent_dim], which stores the z_t sampled from posterior.
      entropies: a float 2-D `Tensor` of size [num_samples, batch_size,
        num_steps], which stores the entropies of posterior distributions.
      log_probs: a float 2-D `Tensor` of size [num_samples. batch_size,
        num_steps], which stores the log posterior probabilities.
    """
    inputs = tf.convert_to_tensor(inputs, dtype_hint=dtype)
    batch_size, num_steps = tf.unstack(tf.shape(inputs)[:2])
    latent_dim = self.latent_dim

    ## passing through embedding_network, e.g. bidirectional RNN
    inputs = self.embedding_network(inputs)

    ## passing through forward RNN
    ta_names = ["rnn_states", "latent_states", "entropies", "log_probs"]
    tas = [tf.TensorArray(tf.float32, num_steps, name=n) for n in ta_names]

    t0 = tf.constant(0, tf.int32)
    loopstate = namedtuple("LoopState", "rnn_state latent_encoded")

    initial_rnn_state = self.posterior_rnn.get_initial_state(
        batch_size=batch_size * num_samples,
        dtype=dtype)
    # GRU/SimpleRNN cells return a single state tensor; wrap it in a list so
    # the loop body can treat all cell types uniformly.
    if (isinstance(self.posterior_rnn, layers.GRUCell)
        or isinstance(self.posterior_rnn, layers.SimpleRNNCell)):
      initial_rnn_state = [initial_rnn_state]

    init_state = (t0,
                  loopstate(
                      rnn_state=initial_rnn_state,
                      latent_encoded=tf.zeros(
                          [batch_size * num_samples, latent_dim],
                          dtype=tf.float32)), tas)

    def _cond(t, *unused_args):
      return t < num_steps

    def _step(t, loop_state, tas):
      """One step in tf.while_loop."""
      prev_latent_state = loop_state.latent_encoded
      prev_rnn_state = loop_state.rnn_state
      current_input = inputs[:, t, :]

      # Duplicate current observation to sample multiple trajectories.
      current_input = tf.tile(current_input, [num_samples, 1])
      rnn_input = tf.concat([current_input, prev_latent_state],
                            axis=-1)  # num_samples * BS, latent_dim+input_dim
      rnn_out, rnn_state = self.posterior_rnn(
          inputs=rnn_input,
          states=prev_rnn_state)
      dist = self.posterior_dist(rnn_out)
      latent_state = dist.sample(seed=random_seed)

      ## rnn_state is a list of [batch_size, rnn_hidden_dim],
      ## after TA.stack(), the dimension will be
      ## [num_steps, 1 for GRU/2 for LSTM, batch, rnn_dim]
      tas_updates = [rnn_state,
                     latent_state,
                     dist.entropy(),
                     dist.log_prob(latent_state)]
      tas = utils.write_updates_to_tas(tas, t, tas_updates)
      return (t+1,
              loopstate(rnn_state=rnn_state,
                        latent_encoded=latent_state),
              tas)
    ## end of _step function

    _, _, tas_final = tf.while_loop(
        _cond, _step, init_state, parallel_iterations=parallel_iterations)

    sampled_z, entropies, log_probs = [
        utils.tensor_for_ta(ta, swap_batch_time=True) for ta in tas_final[1:]
    ]
    # Split the tiled sample axis back out of the batch dimension.
    sampled_z = tf.reshape(sampled_z,
                           [num_samples, batch_size, num_steps, latent_dim])
    entropies = tf.reshape(entropies, [num_samples, batch_size, num_steps])
    log_probs = tf.reshape(log_probs, [num_samples, batch_size, num_steps])
    return sampled_z, entropies, log_probs
def create_model(num_categ,
                 hidden_dim,
                 observation_dim,
                 config_emission,
                 config_inference,
                 config_z_initial,
                 config_z_transition,
                 network_emission,
                 network_input_embedding,
                 network_posterior_rnn,
                 network_s_transition,
                 networks_z_transition,
                 network_posterior_mlp=lambda x: x,
                 name="snlds"):
  """Construct SNLDS model.

  Wires the four sub-networks of the switching non-linear dynamical system
  into a `model_base.SwitchingNLDS` instance:

  Args:
    num_categ: an `int` number of discrete states, `s`.
    hidden_dim: an `int` dimension of continuous hidden states, `z`.
    observation_dim: an `int` dimension of observations, `x`.
    config_emission: a `dict` configuring the emission `p(x[t] | z[t])`.
    config_inference: a `dict` configuring the posterior
      `q(z[t] | h[t] = f_RNN(h[t-1], z[t-1], h[t]^b))`.
    config_z_initial: a `dict` configuring the initial distribution `p(z[0])`.
    config_z_transition: a `dict` configuring `p(z[t] | z[t-1], s[t])`.
    network_emission: a `callable` mapping `z[t]` to the emission mean.
    network_input_embedding: a `callable` embedding the observations `x[t]`,
      e.g. a bidirectional RNN over `x[1:T]`.
    network_posterior_rnn: a RNN cell `h[t] = f_RNN(h[t-1], z[t-1], input[t])`.
    network_s_transition: a `callable` mapping `x[t-1]` to the discrete state
      transition matrices `log p(s[t] | s[t-1], x[t-1])`.
    networks_z_transition: a list (length `num_categ`) of `callable`s, each
      mapping `z[t-1]` to the transition mean for one discrete state.
    network_posterior_mlp: an optional `callable` applied to the inference RNN
      output before it parameterizes the posterior mean. Defaults to identity.
    name: a `str` used to construct variable names.

  Returns:
    An instance of instantiated `model_base.SwitchingNLDS` model.
  """
  # Discrete state transition `p(s[t] | s[t-1], x[t-1])`.
  discrete_transition = DiscreteStateTransition(
      transition_network=network_s_transition,
      num_categories=num_categ)

  # Continuous state transition `p(z[t] | z[t-1], s[t])`.
  continuous_transition = ContinuousStateTransition(
      transition_mean_networks=networks_z_transition,
      distribution_dim=hidden_dim,
      num_categories=num_categ,
      cov_mat=config_z_transition.cov_mat,
      use_triangular_cov=config_z_transition.use_triangular_cov,
      use_trainable_cov=config_z_transition.use_trainable_cov,
      raw_sigma_bias=config_z_transition.raw_sigma_bias,
      sigma_min=config_z_transition.sigma_min,
      sigma_scale=config_z_transition.sigma_scale,
      name=name+"_z_trans")

  # Emission distribution `p(x[t] | z[t])`.
  x_emission = GaussianDistributionFromMean(
      emission_mean_network=network_emission,
      observation_dim=observation_dim,
      cov_mat=config_emission.cov_mat,
      use_triangular_cov=config_emission.use_triangular_cov,
      use_trainable_cov=config_emission.use_trainable_cov,
      raw_sigma_bias=config_emission.raw_sigma_bias,
      sigma_min=config_emission.sigma_min,
      sigma_scale=config_emission.sigma_scale,
      name=name+"_x_emit")

  # Posterior head `q(z[t] | mlp(h[t]))` reuses the Gaussian wrapper with the
  # latent dimension as its "observation" size.
  posterior_head = GaussianDistributionFromMean(
      emission_mean_network=network_posterior_mlp,
      observation_dim=hidden_dim,
      cov_mat=config_inference.cov_mat,
      use_triangular_cov=config_inference.use_triangular_cov,
      use_trainable_cov=config_inference.use_trainable_cov,
      raw_sigma_bias=config_inference.raw_sigma_bias,
      sigma_min=config_inference.sigma_min,
      sigma_scale=config_inference.sigma_scale,
      name=name+"_posterior")

  # Amortized inference network producing samples of `q(z[1:T] | x[1:T])`.
  inference_net = RnnInferenceNetwork(
      posterior_rnn=network_posterior_rnn,
      posterior_dist=posterior_head,
      latent_dim=hidden_dim,
      embedding_network=network_input_embedding)

  # Initial continuous-state distribution `p(z[0])`.
  initial_z_dist = construct_initial_state_distribution(
      latent_dim=hidden_dim,
      num_categ=num_categ,
      use_trainable_cov=config_z_initial.use_trainable_cov,
      use_triangular_cov=config_z_initial.use_triangular_cov,
      raw_sigma_bias=config_z_initial.raw_sigma_bias,
      sigma_min=config_z_initial.sigma_min,
      sigma_scale=config_z_initial.sigma_scale,
      name="init_dist")

  return model_base.SwitchingNLDS(
      continuous_transition_network=continuous_transition,
      discrete_transition_network=discrete_transition,
      emission_network=x_emission,
      inference_network=inference_net,
      initial_distribution=initial_z_dist,
      continuous_state_dim=None,
      num_categories=None,
      discrete_state_prior=None)
| 40.770358
| 80
| 0.667878
| 15,104
| 0.603364
| 0
| 0
| 211
| 0.008429
| 0
| 0
| 11,161
| 0.445851
|
0b7d1f1e2fd547f10391c4be9766498485799dc7
| 1,581
|
py
|
Python
|
grid_search/mlp_gridsearch.py
|
RiboswitchClassifier/RiboswitchClassification
|
4a4ab0590aa50aa765638b2bd8aa0cfd84054ac7
|
[
"MIT"
] | 2
|
2019-12-16T13:08:28.000Z
|
2021-02-23T03:03:18.000Z
|
grid_search/mlp_gridsearch.py
|
RiboswitchClassifier/RiboswitchClassification
|
4a4ab0590aa50aa765638b2bd8aa0cfd84054ac7
|
[
"MIT"
] | null | null | null |
grid_search/mlp_gridsearch.py
|
RiboswitchClassifier/RiboswitchClassification
|
4a4ab0590aa50aa765638b2bd8aa0cfd84054ac7
|
[
"MIT"
] | 3
|
2019-01-01T06:00:20.000Z
|
2020-01-28T13:57:49.000Z
|
from sklearn.model_selection import cross_val_score, GridSearchCV, cross_validate, train_test_split
from sklearn.metrics import accuracy_score, classification_report
from sklearn.neural_network import MLPClassifier
import pandas as pd
import csv
from sklearn.preprocessing import label_binarize
from sklearn.preprocessing import StandardScaler
import numpy as np

data = pd.read_csv('processed_datasets/final_32classes.csv')

# Separate out the x_data and y_data, dropping the label and the raw
# sequence string columns from the feature matrix.
x_data = data.loc[:, data.columns != "Type"]
x_data = x_data.loc[:, x_data.columns != "Sequence"]
y_data = data.loc[:, "Type"]

# FIX: the seed constant was defined but the literal `100` was hard-coded in
# the split call; use the variable so changing the seed takes effect.
random_state = 100
x_train, x_test, y_train, y_test = train_test_split(
    x_data, y_data, test_size=0.7, random_state=random_state, stratify=y_data)

# Standardize features: fit on the training split only, then apply to both.
scaler = StandardScaler()
scaler.fit(x_train)
x_train = scaler.transform(x_train)
x_test = scaler.transform(x_test)

# Baseline MLP with default hyperparameters, evaluated before the grid search.
mlp = MLPClassifier()
mlp.fit(x_train, y_train)
# Predict once and reuse; the original recomputed predict() for the report.
y_pred_test = mlp.predict(x_test)
print("classifier", mlp)
print ("Accuracy on Train Set")
print (mlp.score(x_train, y_train))
print ("MLP Classifier")
print ("Accuracy on Test Set")
print (mlp.score(x_test, y_test))
print ("Report")
print (classification_report(y_test, y_pred_test))

# Hyperparameter grid for the exhaustive search (10-fold CV).
param_grid = {
    'activation': ['tanh', 'relu'],
    'solver': ['sgd', 'adam'],
    'alpha': [0.0001, 0.01, 0.05, 0.1, 1.0],
    'learning_rate': ['constant', 'adaptive'],
}
grid_search = GridSearchCV(mlp, param_grid=param_grid, n_jobs=-1, cv=10)
grid_search.fit(x_train, y_train)
print(grid_search.best_params_)
print(grid_search.best_score_)
| 27.258621
| 116
| 0.759646
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 275
| 0.173941
|
0b7e90a5769d5e45dae418f0e034fd90269bdc99
| 711
|
py
|
Python
|
bermuda/demos/shape_options.py
|
glue-viz/bermuda
|
0bc26bac376d4f08a4964481d1f737f6deb86270
|
[
"BSD-3-Clause"
] | 1
|
2018-07-20T21:09:46.000Z
|
2018-07-20T21:09:46.000Z
|
bermuda/demos/shape_options.py
|
glue-viz/bermuda
|
0bc26bac376d4f08a4964481d1f737f6deb86270
|
[
"BSD-3-Clause"
] | null | null | null |
bermuda/demos/shape_options.py
|
glue-viz/bermuda
|
0bc26bac376d4f08a4964481d1f737f6deb86270
|
[
"BSD-3-Clause"
] | 1
|
2018-07-20T21:15:41.000Z
|
2018-07-20T21:15:41.000Z
|
# Demo sketch of the bermuda shape-construction API.
import matplotlib.pyplot as plt  # FIX: was `matplotlib.pyplt` (typo)

from bermuda import ellipse, polygon, rectangle

plt.plot([1, 2, 3], [2, 3, 4])
ax = plt.gca()  # FIX: was `plg.gca()` (typo; `plg` was never defined)

# Example geometry for the API sketches below; these names were previously
# used without ever being defined.
x, y, w, h, theta = 0.0, 0.0, 1.0, 2.0, 0.0

# default choices for everything
e = ellipse(ax)

# custom position, generic interface for all shapes
e = ellipse(ax, bbox=(x, y, w, h, theta))
e = ellipse(ax, cen=(x, y), width=w, height=h, theta=theta)

# force square/circle?
e = ellipse(ax, aspect_equal=True)

# freeze properties?
e = ellipse(ax, width=1, height=2, aspect_frozen=True)
e = ellipse(ax, rotation_frozen=True)
e = ellipse(ax, center_frozen=True)
e = ellipse(ax, size_frozen=True)

# all of these kwargs should be settable properties as well
e.bbox = (x, y, w, h, theta)
e.aspect_equal = True
e.aspect_frozen = True
| 24.517241
| 59
| 0.707454
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 183
| 0.257384
|
0b7fbd1451d21df02b8ac7806cf7eef5c4dcbb14
| 5,605
|
py
|
Python
|
porthole/contact_management.py
|
speedyturkey/porthole
|
5d47bb00d33d5aa93c3d2e84af993b5387b66be6
|
[
"MIT"
] | 3
|
2017-06-22T01:52:10.000Z
|
2019-09-25T22:52:56.000Z
|
porthole/contact_management.py
|
speedyturkey/porthole
|
5d47bb00d33d5aa93c3d2e84af993b5387b66be6
|
[
"MIT"
] | 48
|
2017-06-22T23:36:03.000Z
|
2019-11-26T02:51:54.000Z
|
porthole/contact_management.py
|
speedyturkey/porthole
|
5d47bb00d33d5aa93c3d2e84af993b5387b66be6
|
[
"MIT"
] | 1
|
2019-02-27T13:59:07.000Z
|
2019-02-27T13:59:07.000Z
|
from sqlalchemy.orm.exc import NoResultFound
from porthole.app import Session
from .logger import PortholeLogger
from porthole.models import AutomatedReport, AutomatedReportContact, AutomatedReportRecipient
class AutomatedReportContactManager(object):
    """CRUD helper for automated reports, contacts, and report recipients.

    All mutating methods log their outcome and are no-ops (with a warning)
    when the requested change is already in place.
    """

    def __init__(self, session=None):
        # Fall back to a fresh session so the manager can be used standalone.
        self.session = session or Session()
        self.logger = PortholeLogger(name=__name__)

    def get_report_by_name(self, report_name, should_exist=False):
        """Return the AutomatedReport with the given name, or None.

        Raises NoResultFound when `should_exist` is True and no row matches.
        """
        report = self.session.query(AutomatedReport).filter_by(report_name=report_name).one_or_none()
        if report is None and should_exist:
            raise NoResultFound(f"Report {report_name} should exist but no record was found.")
        return report

    def report_exists(self, report_name):
        """Return True if a report with the given name exists."""
        return self.get_report_by_name(report_name) is not None

    def add_report(self, report_name: str, active: int = 1):
        """Create a report unless one with the same name already exists."""
        if self.report_exists(report_name):
            self.logger.warning(f"{report_name} already exists")
            return None
        report = AutomatedReport(report_name=report_name, active=active)
        self.session.add(report)
        self.session.commit()
        self.logger.info(f"Report '{report_name}' created successfully")

    def report_is_active(self, report_name):
        """Return True if the named report is active; raise if it is missing."""
        report = self.get_report_by_name(report_name, should_exist=True)
        return bool(report.active)

    def activate_report(self, report_name):
        """Mark the named report active; warn and no-op if it already is."""
        # Fetch once: the original queried via report_is_active and then
        # ran the identical query again to get the row to mutate.
        report = self.get_report_by_name(report_name, should_exist=True)
        if report.active:
            self.logger.warning(f"Report '{report_name}' is already active")
            return None
        report.active = 1
        self.session.commit()
        self.logger.info(f"Report '{report_name}' is now active")

    def deactivate_report(self, report_name):
        """Mark the named report inactive; warn and no-op if it already is."""
        # Fetch once (see activate_report).
        report = self.get_report_by_name(report_name, should_exist=True)
        if not report.active:
            self.logger.warning(f"Report '{report_name}' is already inactive")
            return None
        report.active = 0
        self.session.commit()
        self.logger.info(f"Report '{report_name}' is now inactive")

    def get_contact_by_email_address(self, email_address, should_exist=False):
        """Return the AutomatedReportContact with the given email, or None.

        Raises NoResultFound when `should_exist` is True and no row matches.
        """
        contact = self.session.query(AutomatedReportContact).filter_by(
            email_address=email_address
        ).one_or_none()
        if contact is None and should_exist:
            raise NoResultFound(f"Contact with email {email_address} should exist but no record was found.")
        return contact

    def contact_exists(self, email_address: str):
        """Return True if a contact with the given email address exists."""
        return self.get_contact_by_email_address(email_address) is not None

    def add_contact(self, last_name: str = None, first_name: str = None, email_address: str = None):
        """Create a contact unless one with the same email already exists."""
        if self.contact_exists(email_address):
            self.logger.warning(f"Contact {last_name}, {first_name} ({email_address}) already exists ")
            return None
        contact = AutomatedReportContact(last_name=last_name, first_name=first_name, email_address=email_address)
        self.session.add(contact)
        self.session.commit()
        self.logger.info(f"Contact {last_name}, {first_name} ({email_address}) created successfully")

    def get_report_recipient(self, report_name: str, email_address: str, should_exist: bool = False):
        """Return the recipient linking the report and contact, or None.

        Both the report and the contact must already exist (raises
        NoResultFound otherwise); `should_exist` additionally requires the
        recipient link itself to exist.
        """
        report = self.get_report_by_name(report_name, should_exist=True)
        contact = self.get_contact_by_email_address(email_address, should_exist=True)
        recipient = self.session.query(AutomatedReportRecipient).filter_by(
            report_id=report.report_id, contact_id=contact.contact_id
        ).one_or_none()
        if recipient is None and should_exist:
            raise NoResultFound(
                f"Recipient for report {report_name} with email {email_address} should exist but no record was found."
            )
        return recipient

    def report_recipient_exists(self, report_name: str, email_address: str):
        """Return True if the contact is already a recipient of the report."""
        return self.get_report_recipient(report_name, email_address) is not None

    def add_report_recipient(self, report_name: str, email_address: str, recipient_type: str):
        """Subscribe a contact to a report as a 'to' or 'cc' recipient."""
        if recipient_type not in ['to', 'cc']:
            raise ValueError("Recipient type must be either `to` or `cc`.")
        # Fetch report and contact once; the original looked them both up
        # inside report_recipient_exists and then queried them again here.
        report = self.get_report_by_name(report_name, should_exist=True)
        contact = self.get_contact_by_email_address(email_address, should_exist=True)
        existing = self.session.query(AutomatedReportRecipient).filter_by(
            report_id=report.report_id, contact_id=contact.contact_id
        ).one_or_none()
        if existing is not None:
            self.logger.warning(f"Recipient '{email_address}' already exists for report '{report_name}'")
            return None
        recipient = AutomatedReportRecipient(
            report_id=report.report_id, contact_id=contact.contact_id, recipient_type=recipient_type
        )
        self.session.add(recipient)
        self.session.commit()
        self.logger.info(f"{recipient_type} recipient '{email_address}' added successfully to report '{report_name}'")

    def remove_report_recipient(self, report_name: str, email_address: str):
        """Unsubscribe a contact from a report; warn if not subscribed."""
        # Fetch once: the original ran the exists-query and then repeated
        # the identical lookup to get the row to delete.
        recipient = self.get_report_recipient(report_name, email_address)
        if recipient is None:
            self.logger.warning(f"Recipient '{email_address}' does not exist for report '{report_name}'")
            return None
        self.session.delete(recipient)
        self.session.commit()
        self.logger.info(f"Recipient '{email_address}' removed successfully from report '{report_name}'")
| 49.60177
| 118
| 0.702587
| 5,395
| 0.962533
| 0
| 0
| 0
| 0
| 0
| 0
| 996
| 0.177698
|
0b8210f4f1d6486c1ca027ea81ba3795882b8a8f
| 3,433
|
py
|
Python
|
tests/python/benchmarks/two_neighborhood_bench.py
|
sid17/weaver
|
f9074397ca854a777a873eaf409621de679f9749
|
[
"BSD-3-Clause"
] | 163
|
2015-01-02T03:51:38.000Z
|
2022-03-21T23:06:39.000Z
|
tests/python/benchmarks/two_neighborhood_bench.py
|
sid17/weaver
|
f9074397ca854a777a873eaf409621de679f9749
|
[
"BSD-3-Clause"
] | 1
|
2015-04-08T23:17:06.000Z
|
2015-04-24T15:25:26.000Z
|
tests/python/benchmarks/two_neighborhood_bench.py
|
sid17/weaver
|
f9074397ca854a777a873eaf409621de679f9749
|
[
"BSD-3-Clause"
] | 20
|
2015-02-17T19:24:05.000Z
|
2020-10-29T01:59:18.000Z
|
#! /usr/bin/env python
#
# ===============================================================
# Description: Two neighborhood benchmark
#
# Created: 2014-03-21 13:39:06
#
# Author: Ayush Dubey, dubey@cs.cornell.edu
#
# Copyright (C) 2013-2014, Cornell University, see the LICENSE
# file for licensing agreement
# ===============================================================
#
import random
import sys
import time
import threading
import weaver.client as client
import simple_client
# Fixed seed so benchmark runs are repeatable.
random.seed(42)
# Edge count of the input edge-list file; sources are filled in below.
num_edges = 1768149
edge_sources = [None] * num_edges
def choose_random_pair():
    """Return a pair of uniformly chosen edge-source node handles."""
    global edge_sources
    first = edge_sources[random.randint(0, num_edges-1)]
    second = edge_sources[random.randint(0, num_edges-1)]
    return (first, second)
if (len(sys.argv) != 2):
print "want single extra arg for file to open"
assert(False)
f = open(sys.argv[1])
i = 0
for line in f:
if (line[0] is '#'):
continue
edge_sources[i] = int(line.split(" ")[0])
i += 1
print "done loading file"
num_started = 0
num_finished = 0
cv = threading.Condition()
num_nodes = 81306 # snap twitter-combined
read_percent = 95
# node handles are range(0, num_nodes)
num_vts = 1
num_clients = 100
requests_per_client = 200
def add_labels(c, idx):
global num_nodes
tx_id = c.begin_tx()
for i in range(num_nodes):
if i % num_clients is idx:
c.set_node_property(tx_id, i, 'name', str(i))
assert(c.end_tx(tx_id))
print "writing labels finished for client " + str(idx)
def exec_reads(reqs, sc, c, exec_time, idx):
    """Run this client's mixed read/write workload and record its wall time.

    Blocks on the shared condition variable until the main thread releases
    all clients at once, then issues `reqs` requests: ~read_percent% are
    two-neighborhood reads, the rest are edge-creation transactions.
    """
    global num_started
    global cv
    global num_clients
    global num_finished
    # Barrier: wait until the main thread sets num_started = num_clients.
    with cv:
        while num_started < num_clients:
            cv.wait()
    start = time.time()
    cnt = 0
    for pair in reqs:
        cnt += 1
        # Coin flip per request: write with (100 - read_percent)% chance.
        if (random.randint(1,100) > read_percent) :
            tx_id = c.begin_tx()
            c.create_edge(tx_id, pair[0], pair[1])
            assert(c.end_tx(tx_id))
        else:
            two_neighborhood = sc.two_neighborhood(pair[0], "name", caching = True)
    end = time.time()
    # Signal completion so the main thread's wait loop can finish.
    with cv:
        num_finished += 1
        cv.notify_all()
    # Per-client elapsed time, written into the shared results list.
    exec_time[idx] = end - start
# One weaver client (and simple-client wrapper) per worker thread, spread
# round-robin across the vector timestampers.
clients = []
simple_clients = []
for i in range(num_clients):
    clients.append(client.Client(client._CLIENT_ID + i, i % num_vts))
    simple_clients.append(simple_client.simple_client(clients[i]))
# Pre-generate each client's request pairs so RNG work is outside the
# timed section.
reqs = []
for i in range(num_clients):
    cl_reqs = []
    for _ in range(requests_per_client):
        cl_reqs.append(choose_random_pair())
    reqs.append(cl_reqs)
exec_time = [0] * num_clients
threads = []
print "starting writes"
# Phase 1: label all nodes, one writer thread per client.
for i in range(num_clients):
    thr = threading.Thread(target=add_labels, args=(clients[i], i))
    thr.start()
    threads.append(thr)
for thr in threads:
    thr.join()
print "starting requests"
# Phase 2: launch the read/write workload threads. NOTE(review): `threads`
# still holds the already-joined writer threads; re-joining them below is
# harmless but the list is not reset.
for i in range(num_clients):
    thr = threading.Thread(target=exec_reads, args=(reqs[i], simple_clients[i], clients[i], exec_time, i))
    thr.start()
    threads.append(thr)
start_time = time.time()
# Release all workers at once (they wait on num_started), then wait until
# every one reports completion.
with cv:
    num_started = num_clients
    cv.notify_all()
    while num_finished < num_clients:
        cv.wait()
end_time = time.time()
total_time = end_time-start_time
for thr in threads:
    thr.join()
print 'Total time for ' + str(num_clients * requests_per_client) + 'requests = ' + str(total_time)
throughput = (num_clients * requests_per_client) / total_time
print 'Throughput = ' + str(throughput)
| 26.206107
| 106
| 0.633265
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 665
| 0.193708
|
0b829ab3570dda786322f71fd0c867093ba7b3dc
| 1,705
|
py
|
Python
|
composer/datasets/brats_hparams.py
|
growlix/composer
|
27418a3c65dca26d90ac09c6ae67cbd5d0202ccf
|
[
"Apache-2.0"
] | null | null | null |
composer/datasets/brats_hparams.py
|
growlix/composer
|
27418a3c65dca26d90ac09c6ae67cbd5d0202ccf
|
[
"Apache-2.0"
] | null | null | null |
composer/datasets/brats_hparams.py
|
growlix/composer
|
27418a3c65dca26d90ac09c6ae67cbd5d0202ccf
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2022 MosaicML Composer authors
# SPDX-License-Identifier: Apache-2.0
"""BraTS (Brain Tumor Segmentation) dataset hyperparameters."""
from dataclasses import dataclass
import torch
import yahp as hp
from composer.datasets.brats import PytTrain, PytVal, get_data_split
from composer.datasets.dataset_hparams import DataLoaderHparams, DatasetHparams
from composer.utils import dist
def _my_collate(batch):
    """Collate ``(image, label)`` samples into a two-element batch list.

    All first elements are stacked into one ``torch.Tensor`` and all second
    elements into another; the result is ``[images, labels]``.
    """
    images = []
    labels = []
    for sample in batch:
        images.append(sample[0])
        labels.append(sample[1])
    return [torch.Tensor(images), torch.Tensor(labels)]
@dataclass
class BratsDatasetHparams(DatasetHparams):
    """Hyperparameters for the BraTS image-segmentation dataset.

    Args:
        oversampling (float): The oversampling ratio to use. Default: ``0.33``.
    """

    oversampling: float = hp.optional("oversampling", default=0.33)

    def initialize_object(self, batch_size: int, dataloader_hparams: DataLoaderHparams):
        """Build a dataloader over the train or validation split.

        Raises:
            ValueError: if ``datadir`` was not set.
        """
        if self.datadir is None:
            raise ValueError("datadir must be specified.")
        x_train, y_train, x_val, y_val = get_data_split(self.datadir)
        if self.is_train:
            split = PytTrain(x_train, y_train, self.oversampling)
            collate = None
        else:
            split = PytVal(x_val, y_val)
            # Validation volumes may have differing depths, so the custom
            # collate function is used instead of the default stacker.
            collate = _my_collate
        sampler = dist.get_sampler(split, drop_last=self.drop_last, shuffle=self.shuffle)
        return dataloader_hparams.initialize_object(
            dataset=split,
            batch_size=batch_size,
            sampler=sampler,
            drop_last=self.drop_last,
            collate_fn=collate,
        )
| 32.788462
| 101
| 0.71261
| 1,061
| 0.622287
| 0
| 0
| 1,072
| 0.628739
| 0
| 0
| 419
| 0.245748
|
0b83bfc7e85aab893f830a54d4b1eb6b31224483
| 43
|
py
|
Python
|
examples/getchar.py
|
scalabli/quo
|
70b6d4129ee705930f1f8a792fc4c9247d973f9d
|
[
"MIT"
] | 3
|
2022-03-13T13:22:35.000Z
|
2022-03-18T08:22:51.000Z
|
examples/getchar.py
|
scalabli/quo
|
70b6d4129ee705930f1f8a792fc4c9247d973f9d
|
[
"MIT"
] | 1
|
2022-03-21T16:29:54.000Z
|
2022-03-21T16:29:54.000Z
|
examples/getchar.py
|
scalabli/quo
|
70b6d4129ee705930f1f8a792fc4c9247d973f9d
|
[
"MIT"
] | null | null | null |
# Minimal quo example: block until a single keypress is read from the terminal.
from quo.getchar import getchar
getchar()
| 10.75
| 31
| 0.790698
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0b83f0ab273b13a1a169d3aa5355aab90ac31ca1
| 313
|
py
|
Python
|
setup.py
|
cfbolz/syntaxerrors
|
1c7ecc8fd0d05253d5c55dee39802cfb86fb69f7
|
[
"Apache-2.0",
"OpenSSL"
] | 5
|
2018-04-11T15:19:53.000Z
|
2020-10-27T15:23:18.000Z
|
setup.py
|
cfbolz/syntaxerrors
|
1c7ecc8fd0d05253d5c55dee39802cfb86fb69f7
|
[
"Apache-2.0",
"OpenSSL"
] | null | null | null |
setup.py
|
cfbolz/syntaxerrors
|
1c7ecc8fd0d05253d5c55dee39802cfb86fb69f7
|
[
"Apache-2.0",
"OpenSSL"
] | null | null | null |
from setuptools import setup, find_packages

# Package metadata, gathered in one mapping so each field is easy to audit.
_SETUP_KWARGS = dict(
    name='syntaxerrors',
    version='0.0.1',
    description='Report better SyntaxErrors',
    author='Carl Friedrich Bolz-Tereick',
    author_email='cfbolz@gmx.de',
    packages=['syntaxerrors'],
    package_dir={'': 'src'},
    include_package_data=True,
)

setup(**_SETUP_KWARGS)
| 24.076923
| 45
| 0.686901
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 114
| 0.364217
|
0b843ec57c40e34a7b0ee2c71349c26723ef8771
| 1,492
|
py
|
Python
|
unit/either_spec.py
|
tek/amino
|
51b314933e047a45587a24ecff02c836706d27ff
|
[
"MIT"
] | 33
|
2016-12-21T07:05:46.000Z
|
2020-04-29T04:26:46.000Z
|
unit/either_spec.py
|
tek/amino
|
51b314933e047a45587a24ecff02c836706d27ff
|
[
"MIT"
] | 1
|
2019-04-19T17:15:52.000Z
|
2019-04-20T18:28:23.000Z
|
unit/either_spec.py
|
tek/amino
|
51b314933e047a45587a24ecff02c836706d27ff
|
[
"MIT"
] | 4
|
2017-09-04T18:46:23.000Z
|
2021-11-02T04:18:13.000Z
|
import operator
from amino.either import Left, Right
from amino import Empty, Just, Maybe, List, Either, _
from amino.test.spec_spec import Spec
from amino.list import Lists
class EitherSpec(Spec):
    """Behavioural spec for amino's ``Either`` (``Right``/``Left``) type."""

    def map(self) -> None:
        """``map`` transforms a ``Right`` payload and leaves a ``Left`` as-is."""
        a = 'a'
        b = 'b'
        Right(a).map(_ + b).value.should.equal(a + b)
        Left(a).map(_ + b).value.should.equal(a)

    def optional(self) -> None:
        """Conversions to ``Maybe`` and to ``Either`` preserve the side."""
        a = 'a'
        b = 'b'
        Right(a).to_maybe.should.just_contain(a)
        Left(a).to_maybe.should.be.a(Empty)
        Right(a).to_either(b).should.equal(Right(a))
        Left(a).to_either(b).should.equal(Left(a))

    def ap2(self) -> None:
        """``ap2`` combines two ``Right`` values with a binary function."""
        a = 'a'
        b = 'b'
        Right(a).ap2(Right(b), operator.add).should.equal(Right(a + b))

    def traverse(self) -> None:
        """``sequence`` swaps the nesting of ``Either`` with another functor;
        a single ``Left`` element turns the whole result into that ``Left``."""
        a = 'a'
        Right(Just(a)).sequence(Maybe).should.equal(Just(Right(a)))
        Left(Just(a)).sequence(Maybe).should.equal(Just(Left(Just(a))))
        List(Right(a)).sequence(Either).should.equal(Right(List(a)))
        List(Right(a), Left(a)).sequence(Either).should.equal(Left(a))

    def fold_m(self) -> None:
        """``fold_m`` accumulates while ``f`` yields ``Right`` and produces a
        ``Left`` once ``f`` fails (here: when an element reaches 5)."""
        def f(z: int, a: int) -> Either[str, int]:
            return Right(z + a) if a < 5 else Left('too large')
        Lists.range(5).fold_m(Right(8))(f).should.contain(18)
        Lists.range(6).fold_m(Right(8))(f).should.be.left

    def list_flat_map(self) -> None:
        """``join`` on a list of eithers keeps only the ``Right`` payloads."""
        (List(Right(1), Left(2), Right(3)).join).should.equal(List(1, 3))

__all__ = ('EitherSpec',)
| 31.744681
| 73
| 0.586461
| 1,287
| 0.862601
| 0
| 0
| 0
| 0
| 0
| 0
| 44
| 0.029491
|
0b84b4636dfd6d734d772cca8a444833fce6d004
| 221
|
py
|
Python
|
Modulo_1/semana2/variables_contantes/sentencia-global.py
|
rubens233/cocid_python
|
492ebdf21817e693e5eb330ee006397272f2e0cc
|
[
"MIT"
] | null | null | null |
Modulo_1/semana2/variables_contantes/sentencia-global.py
|
rubens233/cocid_python
|
492ebdf21817e693e5eb330ee006397272f2e0cc
|
[
"MIT"
] | null | null | null |
Modulo_1/semana2/variables_contantes/sentencia-global.py
|
rubens233/cocid_python
|
492ebdf21817e693e5eb330ee006397272f2e0cc
|
[
"MIT"
] | 1
|
2022-03-04T00:57:18.000Z
|
2022-03-04T00:57:18.000Z
|
variable1 = "variable original"


def variable_global():
    """Rebind the module-level ``variable1`` via the ``global`` statement."""
    global variable1
    variable1 = "variable global modificada"


print(variable1)  # prints: variable original
variable_global()
print(variable1)  # prints: variable global modificada
| 20.090909
| 44
| 0.78733
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 92
| 0.41629
|
0b84d29786d1202df0158d5a5b88910f8c8196a5
| 1,314
|
py
|
Python
|
weather_alarm/main.py
|
Cs4r/weather_alarm
|
b78b6f11f91e3b81aa43a1bfaa55074a0626a036
|
[
"MIT"
] | null | null | null |
weather_alarm/main.py
|
Cs4r/weather_alarm
|
b78b6f11f91e3b81aa43a1bfaa55074a0626a036
|
[
"MIT"
] | null | null | null |
weather_alarm/main.py
|
Cs4r/weather_alarm
|
b78b6f11f91e3b81aa43a1bfaa55074a0626a036
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import datetime
import os
from apscheduler.schedulers.blocking import BlockingScheduler
from weather_alarm.constants import *
from weather_alarm.forecaster import Forecaster
from weather_alarm.sender import NotificationSender
# Module-level singletons: one Telegram sender and one OWM forecaster,
# configured entirely from weather_alarm.constants (wildcard-imported above).
sender = NotificationSender(BOT_TOKEN, TELEGRAM_USER_ID)
forecaster = Forecaster(OWM_API_KEY)

def send_tomorrow_forecast(hour, time):
    """Send tomorrow's forecast for CITY at the given hour/time via Telegram."""
    # NOTE(review): the parameter name ``time`` shadows any ``time`` name the
    # wildcard constants import might provide -- harmless here, but confirm.
    sender.send_message(forecaster.tomorrow_forecast_at(CITY, hour, time))

def send_current_observed_weather():
    """Send the currently observed weather for CITY via Telegram."""
    sender.send_message(forecaster.current_observed_weather(CITY))

# Anchor both recurring jobs to today's date at the configured alarm times.
now = datetime.datetime.now()
nightly_alarm_time = datetime.datetime(now.year, now.month, now.day, *NIGHTLY_ALARM_TIME)
daily_alarm_time = datetime.datetime(now.year, now.month, now.day, *DAILY_ALARM_TIME)
scheduler = BlockingScheduler()
# Each job repeats daily; misfire_grace_time lets a job fire up to 30 s late
# before APScheduler skips that run.
scheduler.add_job(func=send_tomorrow_forecast, args=FORECAST_TIME, trigger='interval', next_run_time=nightly_alarm_time,
                  misfire_grace_time=30, days=1)
scheduler.add_job(func=send_current_observed_weather, trigger='interval', next_run_time=daily_alarm_time,
                  misfire_grace_time=30, days=1)
print('Press Ctrl+{0} to exit'.format('Break' if os.name == 'nt' else 'C'))
try:
    # BlockingScheduler.start() runs the scheduling loop until interrupted.
    scheduler.start()
except (KeyboardInterrupt, SystemExit):
    pass
| 32.04878
| 120
| 0.780822
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 103
| 0.078387
|
0b88cc0b918db3b0b9bc55668bf46c025033b785
| 2,237
|
py
|
Python
|
authlib/oauth2/rfc6749/__init__.py
|
geoffwhittington/authlib
|
096f2a41f4fb18f9850427f07d556d4b9ab97383
|
[
"BSD-3-Clause"
] | null | null | null |
authlib/oauth2/rfc6749/__init__.py
|
geoffwhittington/authlib
|
096f2a41f4fb18f9850427f07d556d4b9ab97383
|
[
"BSD-3-Clause"
] | null | null | null |
authlib/oauth2/rfc6749/__init__.py
|
geoffwhittington/authlib
|
096f2a41f4fb18f9850427f07d556d4b9ab97383
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
authlib.oauth2.rfc6749
~~~~~~~~~~~~~~~~~~~~~~
This module represents a direct implementation of
The OAuth 2.0 Authorization Framework.
https://tools.ietf.org/html/rfc6749
"""
from .wrappers import OAuth2Request, OAuth2Token, HttpRequest
from .errors import (
OAuth2Error,
AccessDeniedError,
MissingAuthorizationError,
InvalidGrantError,
InvalidClientError,
InvalidRequestError,
InvalidScopeError,
InsecureTransportError,
UnauthorizedClientError,
UnsupportedResponseTypeError,
UnsupportedGrantTypeError,
UnsupportedTokenTypeError,
# exceptions for clients
MissingCodeException,
MissingTokenException,
MissingTokenTypeException,
MismatchingStateException,
)
from .models import ClientMixin, AuthorizationCodeMixin, TokenMixin
from .authenticate_client import ClientAuthentication
from .authorization_server import AuthorizationServer
from .resource_protector import ResourceProtector, TokenValidator
from .token_endpoint import TokenEndpoint
from .grants import (
BaseGrant,
AuthorizationEndpointMixin,
TokenEndpointMixin,
AuthorizationCodeGrant,
ImplicitGrant,
ResourceOwnerPasswordCredentialsGrant,
ClientCredentialsGrant,
RefreshTokenGrant,
)
__all__ = [
'OAuth2Request', 'OAuth2Token', 'HttpRequest',
'OAuth2Error',
'AccessDeniedError',
'MissingAuthorizationError',
'InvalidGrantError',
'InvalidClientError',
'InvalidRequestError',
'InvalidScopeError',
'InsecureTransportError',
'UnauthorizedClientError',
'UnsupportedResponseTypeError',
'UnsupportedGrantTypeError',
'UnsupportedTokenTypeError',
'MissingCodeException',
'MissingTokenException',
'MissingTokenTypeException',
'MismatchingStateException',
'ClientMixin', 'AuthorizationCodeMixin', 'TokenMixin',
'ClientAuthentication',
'AuthorizationServer',
'ResourceProtector',
'TokenValidator',
'TokenEndpoint',
'BaseGrant',
'AuthorizationEndpointMixin',
'TokenEndpointMixin',
'AuthorizationCodeGrant',
'ImplicitGrant',
'ResourceOwnerPasswordCredentialsGrant',
'ClientCredentialsGrant',
'RefreshTokenGrant',
]
| 27.617284
| 67
| 0.743406
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 980
| 0.438087
|
0b88fa702aed7c893ac61d3d5a2bd66384c1a59d
| 1,613
|
py
|
Python
|
scripts/serial_command.py
|
philip-long/singletact-python-wrapper
|
659796f614116db77f31d6b0cc1e0c963104948e
|
[
"MIT"
] | null | null | null |
scripts/serial_command.py
|
philip-long/singletact-python-wrapper
|
659796f614116db77f31d6b0cc1e0c963104948e
|
[
"MIT"
] | null | null | null |
scripts/serial_command.py
|
philip-long/singletact-python-wrapper
|
659796f614116db77f31d6b0cc1e0c963104948e
|
[
"MIT"
] | null | null | null |
# Timeout byte embedded in every command frame (module default; the write and
# read builders historically hard-code the same value locally).
TIMEOUT = 100


def GenerateWriteCommand(i2cAddress, ID, writeLocation, data, *args):
    """Build a write frame.

    Layout: 4 x 0xFF header, address, timeout, id, opcode 2, write location,
    payload length, payload bytes, one 0xFF separator, 4 x 0xFE trailer.
    ``*args`` is accepted for backward compatibility and ignored.
    """
    frame = bytearray(len(data) + 15)
    frame[0:4] = b'\xff' * 4
    frame[4] = i2cAddress
    frame[5] = 100  # the original shadows TIMEOUT with a local 100 here
    frame[6] = ID
    frame[7] = 2  # opcode: write
    frame[8] = writeLocation
    frame[9] = len(data)
    for offset, value in enumerate(data):
        frame[10 + offset] = value
    frame[10 + len(data)] = 0xFF
    frame[11 + len(data):] = b'\xfe' * 4
    return frame


def GenerateReadCommand(i2cAddress, ID, readLocation, numToRead):
    """Build a 16-byte read frame.

    Layout: 4 x 0xFF header, address, timeout, id, opcode 1, read location,
    byte count to read, one 0xFF separator, 4 x 0xFE trailer, plus one unused
    trailing zero byte at index 15 (preserved from the original).
    """
    frame = bytearray(16)
    frame[0:4] = b'\xff' * 4
    frame[4] = i2cAddress
    frame[5] = 100  # the original shadows TIMEOUT with a local 100 here
    frame[6] = ID
    frame[7] = 0x01  # opcode: read
    frame[8] = readLocation
    frame[9] = numToRead
    frame[10] = 0xFF
    frame[11:15] = b'\xfe' * 4
    # frame[15] stays 0 -- NOTE(review): looks like an off-by-one in the
    # original frame length; kept byte-for-byte for wire compatibility.
    return frame


def GenerateToggleCommand(i2cAddress, ID, writeLocation, data):
    """Build a 31-byte toggle frame carrying a fixed 16-byte payload of 7s.

    NOTE(review): ``writeLocation`` is accepted but never used, and ``data``
    occupies the location slot (index 8) -- behaviour preserved as-is.
    """
    frame = bytearray(16 + 15)
    frame[0:4] = b'\xff' * 4
    frame[4] = i2cAddress
    frame[5] = TIMEOUT  # module-level constant, unlike the other builders
    frame[6] = ID
    frame[7] = 3  # opcode: toggle
    frame[8] = data
    frame[9] = 16
    frame[10:26] = bytes([7]) * 16
    frame[26] = 0xFF
    frame[27:31] = b'\xfe' * 4
    return frame
| 23.042857
| 66
| 0.49349
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0b89d5110511e9a326a0adf1605527ae76c9199c
| 1,220
|
py
|
Python
|
1SiteRanking/create_kernel_density_map_arcpy.py
|
HCH2CHO/EmotionMap
|
bc572b4182637dcdd65e9a13c92f2fa0d9a3d680
|
[
"MIT"
] | 3
|
2021-07-15T15:58:52.000Z
|
2021-07-16T13:22:47.000Z
|
1SiteRanking/create_kernel_density_map_arcpy.py
|
HCH2CHO/EmotionMap
|
bc572b4182637dcdd65e9a13c92f2fa0d9a3d680
|
[
"MIT"
] | null | null | null |
1SiteRanking/create_kernel_density_map_arcpy.py
|
HCH2CHO/EmotionMap
|
bc572b4182637dcdd65e9a13c92f2fa0d9a3d680
|
[
"MIT"
] | 4
|
2017-08-04T12:41:06.000Z
|
2019-01-31T14:55:10.000Z
|
# coding:utf-8
# version:python2.7.3
# author:kyh
# import x,y data from txt and create kernel density map
import arcpy
from arcpy.sa import *
from arcpy import env
def read_point_data(filepath,i):
# Read data file and create shp file
with open(filepath, 'r') as pt_file:
pt=arcpy.Point()
ptGeoms=[]
i=0
for line in pt_file.readlines():
i=i+1
pt.X = float(line.split('\t')[7])
pt.Y = float(line.split('\t')[8])
ptGeoms.append(arcpy.PointGeometry(pt))
arcpy.CopyFeatures_management(ptGeoms, "D://Users//KYH//Documents//ArcGIS//FlickrPhoto//World_Flickr{0}.shp".format(i))
if __name__ == '__main__':
arcpy.CheckOutExtension('Spatial')
env.workspace=("D:\Users\KYH\Documents\ArcGIS\FlickrPhoto")
for i in range(0,25):
if (i==5) or (i==22):
continue
read_point_data("D:\\Users\\KYH\\Desktop\\EmotionMap\\FlickrEmotionData\\3faces_emotion\\faceflickr{0}.txt".format(i))
# Kernel Density Analysis
out_kernel_density=KernelDensity("World_Flickr{0}.shp".format(i),"NONE")
out_kernel_density.save("D:\Users\KYH\Documents\ArcGIS\FlickrPhoto\kd_Face{0}".format(i))
| 33.888889
| 127
| 0.645902
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 478
| 0.391803
|
0b8ab81b9a756ab917e11852711a0c75ca7514c2
| 6,676
|
py
|
Python
|
src/explore.py
|
dngo13/enpm808x_inspection_robot
|
41f598d97d6526d4e85d1b738cd0bf1bce781b08
|
[
"MIT"
] | null | null | null |
src/explore.py
|
dngo13/enpm808x_inspection_robot
|
41f598d97d6526d4e85d1b738cd0bf1bce781b08
|
[
"MIT"
] | null | null | null |
src/explore.py
|
dngo13/enpm808x_inspection_robot
|
41f598d97d6526d4e85d1b738cd0bf1bce781b08
|
[
"MIT"
] | 2
|
2021-12-05T23:39:56.000Z
|
2021-12-06T17:54:54.000Z
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""*******************************************************************************
* MIT License
* Copyright (c) Charu Sharma 2021
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
********************************************************************************"""
"""
* @file explore.py
* @author Charu Sharma
* @date 12/14/2021
* @version 1.0
*
* @brief Main source file
*
* @section DESCRIPTION
*
* file to let the bot explore to the target locations and sending confirmation flag when reached
*
"""
#!/usr/bin/env python
#importing Libraries
import rospy
import math
import tf
from geometry_msgs.msg import Twist, Point
from sensor_msgs.msg import LaserScan
from tf.transformations import euler_from_quaternion
from enpm808x_inspection_robot.msg import location, flag_array, flag
rospy.init_node("move_robot")
pub = rospy.Publisher("cmd_vel", Twist, queue_size=1)
# create another publisher for location
# rospy.init_node('location_node')
# loc_pub = rospy.Publisher('location', array, queue_size=1)
loc_pub = rospy.Publisher('/flag', flag_array, queue_size=1)
rate = rospy.Rate(1)
velocity_msg = Twist()
# NOTE(review): rate is immediately rebound from 1 Hz to 4 Hz; the
# rospy.Rate(1) above is dead.
rate = rospy.Rate(4)
tf_listener = tf.TransformListener()
parent_frame = 'odom'
child_frame = 'base_footprint'
# Gains used in go_to_goal: k_h_gain scales forward speed with distance,
# k_v_gain scales the angular correction.
k_h_gain = 1
k_v_gain = 1
distance_to_goal = 0.0
# locations = location()
flagged = flag()
flagged_arrays = flag_array()
flagged.check = "false"
# Shut the node down early if the odom -> base_footprint transform never
# becomes available within one second.
try:
    tf_listener.waitForTransform(parent_frame, child_frame, rospy.Time(), rospy.Duration(1.0))
except (tf.Exception, tf.ConnectivityException, tf.LookupException):
    rospy.loginfo("Cannot find transform between {p} and {c}".format(p=parent_frame, c=child_frame))
    rospy.signal_shutdown("tf Exception")
def get_odom_data():
    """Return (position Point, yaw) of the robot in the odom frame; returns
    None implicitly when the transform lookup fails."""
    try:
        (trans, rot) = tf_listener.lookupTransform(parent_frame, child_frame, rospy.Time(0))
        rotation = euler_from_quaternion(rot)
    except (tf.Exception, tf.ConnectivityException, tf.LookupException):
        rospy.loginfo("TF Exception")
        return
    return Point(*trans), rotation[2]
def compute_distance(x1, y1, x2, y2):
    """Straight-line (Euclidean) distance between (x1, y1) and (x2, y2)."""
    dx = x2 - x1
    dy = y2 - y1
    return math.sqrt(dx ** 2 + dy ** 2)
def go_to_goal(goal_x, goal_y):
    """Drive the robot to (goal_x, goal_y) in the odom frame with a simple
    proportional controller, stopping within 0.05 m of the goal."""
    global velocity_msg
    (position, rotation) = get_odom_data()
    last_rotation = 0
    # locations.loc_x = position.x
    # locations.loc_y = position.y
    distance_to_goal = compute_distance(position.x, position.y, goal_x, goal_y)
    while distance_to_goal > 0.05:
        (position, rotation) = get_odom_data()
        x_start = position.x
        y_start = position.y
        rospy.loginfo("x = {0}, y = {1}".format(x_start, y_start))
        angle_to_goal = math.atan2(goal_y - y_start, goal_x - x_start)
        # Unwrap the goal bearing so it stays on the same turn of the circle
        # as the robot's heading.
        if angle_to_goal < -math.pi / 4 or angle_to_goal > math.pi / 4:
            if 0 > goal_y > y_start:
                angle_to_goal = -2 * math.pi + angle_to_goal
            elif 0 <= goal_y < y_start:
                angle_to_goal = 2 * math.pi + angle_to_goal
        # Unwrap the measured yaw when it crosses the +/-pi seam.
        if last_rotation > math.pi - 0.1 and rotation <= 0:
            rotation = 2 * math.pi + rotation
        elif last_rotation < -math.pi + 0.1 and rotation > 0:
            rotation = -2 * math.pi + rotation
        # NOTE(review): precedence makes this (k_v_gain * angle) - rotation;
        # k_v_gain * (angle - rotation) looks intended -- identical only
        # while k_v_gain is 1. Confirm before changing the gain.
        velocity_msg.angular.z = k_v_gain * angle_to_goal - rotation
        distance_to_goal = compute_distance(position.x, position.y, goal_x, goal_y)
        velocity_msg.linear.x = min(k_h_gain * distance_to_goal, 0.2)
        # Clamp the angular command to +/-0.6 rad/s.
        if velocity_msg.angular.z > 0:
            velocity_msg.angular.z = min(velocity_msg.angular.z, 0.6)
        else:
            velocity_msg.angular.z = max(velocity_msg.angular.z, -0.6)
        last_rotation = rotation
        # NOTE(review): a fresh Subscriber and Publisher are created on every
        # iteration; hoisting them out of the loop looks intended.
        sub = rospy.Subscriber('scan', LaserScan, sensor_callback)
        pub = rospy.Publisher("cmd_vel", Twist, queue_size=1)
        pub.publish(velocity_msg)
        rate.sleep()
    # Goal reached: command a full stop.
    velocity_msg.linear.x = 0.0
    velocity_msg.angular.z = 0.0
    pub.publish(velocity_msg)
    rate.sleep()
def sensor_callback(msg):
    # Samples the laser scan ahead and to the sides; the values are unused --
    # NOTE(review): dead assignments, possibly a stub for obstacle avoidance.
    front = msg.ranges[0]
    left = msg.ranges[90]
    right = msg.ranges[270]
def read_scan():
    """Subscribe to the laser scan and block until the node shuts down."""
    rospy.Subscriber("scan", LaserScan, sensor_callback)
    rospy.spin()
# while not rospy.is_shutdown():
#     loc_pub.publish(arrays.loc)
if __name__ == "__main__":
    action = ""  # NOTE(review): never used afterwards
    # Visit the three inspection stations in sequence.
    go_to_goal(-3, -1)
    flagged.check = 'true'
    print("The robot has reached the Chiller")
    print("Commencing the pressure Detection")
    # print("locations.loc_x = ", locations.loc_x)
    # print("locations.loc_y = ", locations.loc_y)
    flagged.check = 'false'
    go_to_goal(0, 3)
    print("The robot has reached the Boiler")
    print("Commencing the pressure Detection")
    # locations.loc_x = 0.0
    # locations.loc_y = 3.0
    # send message for boiler
    go_to_goal(1, 3)
    print("The robot has reached the Air Handling Units")
    print("Commencing the pressure Detection")
    # send message to AHU
    # arrays.id.insert(0,flag)
    # Publish the flag list forever. Note the list is published *before* the
    # first insert (so the first message is empty) and grows by one entry per
    # cycle -- NOTE(review): looks unintended, confirm.
    while not rospy.is_shutdown():
        loc_pub.publish(flagged_arrays.id)
        flagged_arrays.id.insert(0,flagged)
        rate.sleep()
    exit()
| 34.061224
| 100
| 0.673457
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 3,062
| 0.458658
|
0b8b1ecbeb3c81d6b86bae93b7d7c89aca388a29
| 893
|
py
|
Python
|
mapclientplugins/loadcsvstep/utils/processCSV.py
|
mahyar-osn/mapclientplugins.loadcsvstep
|
2d483d4054f4c30247303e8d4eba706b70364158
|
[
"Apache-2.0"
] | null | null | null |
mapclientplugins/loadcsvstep/utils/processCSV.py
|
mahyar-osn/mapclientplugins.loadcsvstep
|
2d483d4054f4c30247303e8d4eba706b70364158
|
[
"Apache-2.0"
] | null | null | null |
mapclientplugins/loadcsvstep/utils/processCSV.py
|
mahyar-osn/mapclientplugins.loadcsvstep
|
2d483d4054f4c30247303e8d4eba706b70364158
|
[
"Apache-2.0"
] | null | null | null |
import pandas as pd
class ProcessCSV:
    """Read a delimited text file with pandas and expose typed views of it.

    The parsed frame is cached on construction. NaN cells are replaced with
    0, optionally after dropping rows that are entirely empty.
    """

    def __init__(self, filename, delim=',', header=None, usecols=None, dtype=None, ignore=False, *args):
        self._filename = filename
        self._args = args
        self._df = self._readFile(delim=delim, header=header, usecols=usecols, dtype=dtype, ignore=ignore)

    def _readFile(self, delim=',', header=None, usecols=None, dtype=None, ignore=False):
        """Parse the file; drop all-NaN rows when ``ignore`` is set, then fill NaN with 0."""
        frame = pd.read_csv(self._filename, sep=delim, header=header, usecols=usecols, dtype=dtype)
        if ignore:
            frame.dropna(how="all", inplace=True)
        return frame.fillna(0)

    def getCoordinates(self):
        """Return the 'map X'/'map Y'/'map Z' columns as a DataFrame."""
        return self._df[['map X', 'map Y', 'map Z']]

    def getGene(self):
        """Return every column from position 8 onward."""
        return self._df.iloc[:, 8:]

    def getID(self):
        """Return the 'Sample Name' column."""
        return self._df['Sample Name']
| 28.806452
| 106
| 0.631579
| 866
| 0.969765
| 0
| 0
| 0
| 0
| 0
| 0
| 131
| 0.146697
|
0b8bbb57f61438a9cdd497a599dabe456d4ca928
| 2,521
|
py
|
Python
|
src/api/handlers/projects/tokens.py
|
sap-steffen/InfraBox
|
36c8b626b517415e4363c99037c5d2c118966e56
|
[
"Apache-2.0"
] | 50
|
2017-09-03T15:54:08.000Z
|
2019-03-13T16:53:15.000Z
|
src/api/handlers/projects/tokens.py
|
sap-steffen/InfraBox
|
36c8b626b517415e4363c99037c5d2c118966e56
|
[
"Apache-2.0"
] | 241
|
2017-09-03T14:40:08.000Z
|
2022-03-02T02:32:26.000Z
|
src/api/handlers/projects/tokens.py
|
sap-steffen/InfraBox
|
36c8b626b517415e4363c99037c5d2c118966e56
|
[
"Apache-2.0"
] | 17
|
2017-09-03T11:28:01.000Z
|
2018-04-30T15:58:18.000Z
|
from flask import request, g, abort
from flask_restplus import Resource, fields
from pyinfrabox.utils import validate_uuid4
from pyinfraboxutils.ibflask import auth_required, OK
from pyinfraboxutils.ibrestplus import api
from pyinfraboxutils.token import encode_project_token
from api.namespaces import project as ns
# OpenAPI model for a project auth token; 'id' is optional because it is
# server-assigned and only appears in responses.
project_token_model = api.model('ProjectToken', {
    'description': fields.String(required=True),
    'scope_push': fields.Boolean(required=True),
    'scope_pull': fields.Boolean(required=True),
    'id': fields.String(required=False)
})
@ns.route('/<project_id>/tokens')
class Tokens(Resource):
    @auth_required(['user'])
    @api.marshal_list_with(project_token_model)
    def get(self, project_id):
        # List the project's auth tokens (metadata only; no signed token
        # string is returned here).
        p = g.db.execute_many_dict('''
            SELECT description, scope_push, scope_pull, id
            FROM auth_token
            WHERE project_id = %s
        ''', [project_id])
        return p
    @auth_required(['user'])
    @api.expect(project_token_model)
    def post(self, project_id):
        # Create a new token; descriptions must be unique per project.
        b = request.get_json()
        result = g.db.execute_one("""
            SELECT COUNT(*) FROM auth_token
            WHERE project_id = %s AND description = %s
        """, [project_id, b['description']])[0]
        if result != 0:
            return abort(400, 'Token with such a description already exists.')
        result = g.db.execute_one_dict("""
            INSERT INTO auth_token (description, scope_push, scope_pull, project_id)
            VALUES (%s, %s, %s, %s) RETURNING id
        """, [b['description'], b['scope_push'], b['scope_pull'], project_id])
        token_id = result['id']
        # The signed token string is derived from the new row's id and
        # returned in this response only.
        token = encode_project_token(token_id, project_id)
        g.db.commit()
        return OK('Successfully added token', {'token': token})
@ns.route('/<project_id>/tokens/<token_id>')
class Token(Resource):
    @auth_required(['user'])
    def delete(self, project_id, token_id):
        # Validate the uuid up front so malformed input yields a 400 instead
        # of a database error.
        if not validate_uuid4(token_id):
            abort(400, "Invalid project-token uuid")
        num_tokens = g.db.execute_one("""
            SELECT COUNT(*) FROM auth_token
            WHERE project_id = %s and id = %s
        """, [project_id, token_id])[0]
        if num_tokens == 0:
            return abort(400, 'Such token does not exist.')
        g.db.execute("""
            DELETE FROM auth_token
            WHERE project_id = %s and id = %s
        """, [project_id, token_id])
        g.db.commit()
        return OK('Successfully deleted token')
| 31.911392
| 84
| 0.624752
| 1,879
| 0.745339
| 0
| 0
| 1,958
| 0.776676
| 0
| 0
| 964
| 0.382388
|
0b8c281f2be1b5f006c8dd22b32012df4fb6d732
| 2,716
|
py
|
Python
|
tigergraph/benchmark.py
|
yczhang1017/ldbc_snb_bi
|
5b97da8b2596e88bc460d5568fc7b31587695b62
|
[
"Apache-2.0"
] | null | null | null |
tigergraph/benchmark.py
|
yczhang1017/ldbc_snb_bi
|
5b97da8b2596e88bc460d5568fc7b31587695b62
|
[
"Apache-2.0"
] | null | null | null |
tigergraph/benchmark.py
|
yczhang1017/ldbc_snb_bi
|
5b97da8b2596e88bc460d5568fc7b31587695b62
|
[
"Apache-2.0"
] | null | null | null |
import argparse
from pathlib import Path
from datetime import datetime, date, timedelta
from queries import run_queries, precompute, cleanup
from batches import run_batch_update
import os
import time
import re
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='LDBC TigerGraph BI workload Benchmark')
    parser.add_argument('data_dir', type=Path, help='The directory to load data from')
    parser.add_argument('--header', action='store_true', help='whether data has the header')
    parser.add_argument('--cluster', action='store_true', help='load concurrently on cluster')
    parser.add_argument('--skip', action='store_true', help='skip precompute')
    parser.add_argument('--para', type=Path, default=Path('../parameters'), help='parameter folder')
    parser.add_argument('--test', action='store_true', help='test mode only run one time')
    parser.add_argument('--nruns', '-n', type=int, default=10, help='number of runs')
    parser.add_argument('--endpoint', type=str, default = 'http://127.0.0.1:9000', help='tigergraph rest port')
    args = parser.parse_args()
    # Scale factor comes from the environment and only labels the timing rows.
    sf = os.environ.get("SF")
    results_file = open('output/results.csv', 'w')
    timings_file = open('output/timings.csv', 'w')
    timings_file.write(f"tool|sf|q|parameters|time\n")
    # All BI query variants; query_nums strips the a/b suffixes so variants of
    # the same query share one number.
    query_variants = ["1", "2a", "2b", "3", "4", "5", "6", "7", "8a", "8b", "9", "10a", "10b", "11", "12", "13", "14a", "14b", "15a", "15b", "16a", "16b", "17", "18", "19a", "19b", "20"]
    query_nums = [int(re.sub("[^0-9]", "", query_variant)) for query_variant in query_variants]
    # One benchmark iteration (daily batch update + full query run) per day
    # in [2012-11-29, 2013-01-01).
    start_date = date(2012, 11, 29)
    end_date = date(2013, 1, 1)
    batch_size = timedelta(days=1)
    needClean = False
    batch_date = start_date
    while batch_date < end_date:
        start = time.time()
        # NOTE(review): duration is otherwise unused; it only feeds the
        # commented-out sleep below.
        duration = run_batch_update(batch_date, args)
        # For SF-10k and larger, sleep time may be needed after batch update to release memory
        # time.sleep(duration * 0.2)
        # Queries 19/20 are cleaned before re-precomputing; needClean is
        # False only on the very first day, when nothing was precomputed yet.
        if needClean:
            for query_num in [19,20]:
                if query_num in query_nums:
                    cleanup(query_num, args.endpoint)
            needClean = False
        for query_num in [4,6,19,20]:
            if query_num in query_nums:
                precompute(query_num, args.endpoint)
        needClean = True
        # 'writes' timing covers the batch update plus cleanup/precompute.
        writes_time = time.time() - start
        timings_file.write(f"TigerGraph|{sf}|writes|{batch_date}|{writes_time:.6f}\n")
        reads_time = run_queries(query_variants, results_file, timings_file, args)
        timings_file.write(f"TigerGraph|{sf}|reads|{batch_date}|{reads_time:.6f}\n")
        batch_date = batch_date + batch_size
    results_file.close()
    timings_file.close()
| 48.5
| 186
| 0.654639
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 838
| 0.308542
|
0b8cb9211db86e8a3c8e8b138c17ac41f7b2fae4
| 3,001
|
py
|
Python
|
wgan/updater.py
|
Aixile/chainer-gan-experiments
|
4371e8369d2805e8ace6d7aacc397aa6e62680a6
|
[
"MIT"
] | 70
|
2017-06-24T10:55:57.000Z
|
2021-11-23T22:52:37.000Z
|
wgan/updater.py
|
Aixile/chainer-gan-experiments
|
4371e8369d2805e8ace6d7aacc397aa6e62680a6
|
[
"MIT"
] | 1
|
2017-08-21T06:19:31.000Z
|
2017-08-21T07:54:28.000Z
|
wgan/updater.py
|
Aixile/chainer-gan-experiments
|
4371e8369d2805e8ace6d7aacc397aa6e62680a6
|
[
"MIT"
] | 16
|
2017-08-22T07:00:16.000Z
|
2018-11-18T16:15:21.000Z
|
import numpy as np
import chainer
import chainer.functions as F
import chainer.links as L
from chainer import cuda, optimizers, serializers, Variable
import sys
sys.path.insert(0, '../')
from common.loss_functions import *
class Updater(chainer.training.StandardUpdater):
    """WGAN updater: trains the critic ``dis`` for ``dis_iter`` steps per
    generator step, enforcing the Lipschitz constraint either by weight
    clipping (mode 'clip') or a gradient penalty (mode 'gp')."""

    def __init__(self, *args, **kwargs):
        # 'models' and 'params' are consumed here; everything else is
        # forwarded to chainer's StandardUpdater.
        self.gen, self.dis = kwargs.pop('models')
        self._iter = 0
        params = kwargs.pop('params')
        self._img_size = params['img_size']
        self._img_chan = params['img_chan']
        self._latent_len = params['latent_len']
        self._dis_iter = params['dis_iter']
        self._batch_size = params['batch_size']
        self._lambda_gp = params['lambda_gp']
        self._mode = params['mode']
        super(Updater, self).__init__(*args, **kwargs)

    def get_real_image_batch(self):
        """Copy the next iterator batch into one (B, C, H, W) float32 array
        on the generator's device (xp is numpy or cupy)."""
        xp = self.gen.xp
        batch = self.get_iterator('main').next()
        t_out = xp.zeros((self._batch_size, self._img_chan, self._img_size, self._img_size)).astype("f")
        for i in range(self._batch_size):
            t_out[i, :] = xp.asarray(batch[i])
        return t_out

    def get_fake_image_batch(self):
        """Generate one batch of images from random latent codes; the raw
        array (.data) is returned, so no graph is kept."""
        z = self.get_latent_code_batch()
        x_out = self.gen(Variable(z, volatile=True), test=True).data
        return x_out

    def get_latent_code_batch(self):
        """Sample a (B, latent_len) standard-normal latent batch as float32."""
        xp = self.gen.xp
        z_in = xp.random.normal(size=(self._batch_size, self._latent_len)).astype("f")
        return z_in

    def update_core(self):
        """One WGAN update: ``dis_iter`` critic steps, then one generator step."""
        xp = self.gen.xp
        self._iter += 1
        opt_d = self.get_optimizer('dis')
        for i in range(self._dis_iter):
            d_fake = self.get_fake_image_batch()
            d_real = self.get_real_image_batch()
            y_fake = self.dis(Variable(d_fake), test=False)
            y_real = self.dis(Variable(d_real), test=False)
            # Critic loss: E[D(fake)] - E[D(real)] (minimised).
            w1 = F.average(y_fake-y_real)
            loss_dis = w1
            if self._mode == 'gp':
                # Gradient penalty on a real/fake interpolate; the critic's
                # gradient norm at that point is pushed toward 1.
                # NOTE(review): eta is a single scalar for the whole batch,
                # not one per sample -- confirm this is intended.
                eta = np.random.rand()
                c = (d_real * eta + (1.0 - eta) * d_fake).astype('f')
                y = self.dis(Variable(c), test=False, retain_forward=True)
                g = xp.ones_like(y.data)
                grad_c = self.dis.differentiable_backward(Variable(g))
                grad_c_l2 = F.sqrt(F.sum(grad_c**2, axis=(1, 2, 3)))
                loss_gp = loss_l2(grad_c_l2, 1.0)
                loss_dis += self._lambda_gp * loss_gp
            opt_d.zero_grads()
            loss_dis.backward()
            opt_d.update()
            if self._mode == 'clip':
                # Classic WGAN: clip critic weights after each update.
                self.dis.clip()
        # Only the final critic iteration's losses are reported.
        chainer.report({'loss': loss_dis,'loss_w1': w1}, self.dis)
        z_in = self.get_latent_code_batch()
        x_out = self.gen(Variable(z_in), test=False)
        opt_g = self.get_optimizer('gen')
        y_fake = self.dis(x_out, test=False)
        # Generator maximises D(fake), i.e. minimises -E[D(fake)].
        loss_gen = - F.average(y_fake)
        chainer.report({'loss': loss_gen}, self.gen)
        opt_g.zero_grads()
        loss_gen.backward()
        opt_g.update()
| 31.589474
| 104
| 0.587471
| 2,776
| 0.925025
| 0
| 0
| 0
| 0
| 0
| 0
| 148
| 0.049317
|
0b8d785a697c0b3da778b64fbf2cc6f8d4ea2d37
| 19,032
|
py
|
Python
|
tools/ig/definitions.py
|
grahamegrieve/vocab-poc
|
9f8b6c29b32f15c9513f16f148fdf2a441ba3897
|
[
"BSD-3-Clause"
] | 2
|
2017-06-25T22:15:18.000Z
|
2017-09-15T05:12:50.000Z
|
tools/ig/definitions.py
|
grahamegrieve/vocab-poc
|
9f8b6c29b32f15c9513f16f148fdf2a441ba3897
|
[
"BSD-3-Clause"
] | null | null | null |
tools/ig/definitions.py
|
grahamegrieve/vocab-poc
|
9f8b6c29b32f15c9513f16f148fdf2a441ba3897
|
[
"BSD-3-Clause"
] | null | null | null |
#! /usr/bin/env python3.
# create ig definition file with all value sets in the /resources directory
import json, os, sys, logging, re, csv
from lxml import etree
#logging.disable(logging.CRITICAL)
logging.basicConfig(level=logging.DEBUG, format=' %(asctime)s - %(levelname)s- %(message)s')
logging.info('Start of program')
logging.info('The logging module is working.')
# create the ig.json file template as dictoinary
logging.info('create the ig.json file template as dictionary')
# globals
dir = os.getcwd() + '/' # current dir
logging.info('cwd = ' + dir)
''' this is the definitions file skeleton you need to modify as needed see ig publisher documenentation at f http://wiki.hl7.org/index.php?title=IG_Publisher_Documentation or more information.'''
# ig.json control-file skeleton; init_igpy() overlays values read from
# definitions.csv onto these defaults before the file is written out.
igpy = {
    "broken-links": "warning",
    "canonicalBase": "http://www.fhir.org/guides/ig-template",
    # Per-resource-type default page templates used by the IG publisher.
    "defaults": {
        "Any": {
            "template-base": "base.html",
            "template-format": "format.html"
        },
        "CapabilityStatement": {
            "template-base": "capst.html"
        },
        "CodeSystem": {
            "template-base": "codesys.html"
        },
        "ConceptMap": {
            "template-base": "cm.html"
        },
        "OperationDefinition": {
            "template-base": "op.html"
        },
        "StructureDefinition": {
            "template-base": "sd.html",
            "template-defns": "sd-definitions.html",
            "template-mappings": "sd-mappings.html"
        },
        "ValueSet": {
            "template-base": "vs.html"
        }
    },
    "dependencyList": [{}],
    "do-transforms": "false",
    "extraTemplates": [
        "mappings"
    ],
    "fixed-business-version": "0.0.0",
    "gen-examples": "false",
    "jurisdiction": "US",
    "no-inactive-codes": "false",
    # Build paths, relative to the working directory.
    "paths": {
        "output": "output",
        "pages": [],
        "qa": "qa",
        "resources": [],
        "specification": "http://build.fhir.org",
        "temp": "temp",
        "txCache": "txCache"
    },
    # Filled in by update_igjson() as resources are discovered.
    "resources": {},
    "sct-edition": "http://snomed.info/sct/731000124108",
    "source": "ig.xml",
    "special-urls": [],
    "spreadsheets": [],
    "tool": "jekyll",
    "version": "3.1.0",
    "working-dir": None,
    "title": "Implementation Guide Template",
    "status": "draft",
    "publisher": "Health eData Inc",
    # Lists populated from the definitions CSV (pipe-separated cells).
    "extensions": [],
    "searches": [],
    "codesystems": [],
    "valuesets": [],
    "structuremaps": []
}
logging.info('create the ig.xml file template as string')
''' this is the ig.xml file skeleton may need to modify as needed see ig publisher documenentation at f http://wiki.hl7.org/index.php?title=IG_Publisher_Documentation or more information. The Cap Case words are variables that are replaced by variables in the definitions file'''
# Minimal ImplementationGuide resource; the ALL-CAPS tokens are substituted by init_igxml().
igxml ='''<?xml version="1.0" encoding="UTF-8"?><!--Hidden IG for de facto IG publishing--><ImplementationGuide xmlns="http://hl7.org/fhir"><id value="ig"/><url value="BASE/ImplementationGuide/ig"/><name value="TITLE"/><status value="STATUS"/><experimental value="true"/><publisher value="PUBLISHER"/><package><name value="base"/></package><page><source value="index.html"/><title value="TITLE Homepage"/><kind value="page"/></page></ImplementationGuide>'''
# Function definitions here
def init_igpy():
    """Overlay values from definitions.csv onto the igpy skeleton dict.

    Each CSV column name is either a plain key ("title") or a dotted path
    ("paths.output"). Cell values may be pipe-separated lists. Dispatch
    between scalar / list / nested-dict / list-of-objects targets is done
    by letting the assignment fail and catching the resulting exception
    (EAFP) — the except branches below are the normal control flow.
    """
    # read non array csv file
    with open('definitions.csv') as defnfile:  # grab a CSV file and make a dict file 'reader
        reader = csv.DictReader(defnfile, dialect='excel')
        for row in reader:  # each row equal row of csv file as a dict
            for row_key in row.keys():  # get keys in row
                logging.info('row_key: ' + row_key)
                if row[row_key] == 'FALSE' or row[row_key] == 'TRUE':  # clean up excel propensity to change the string true/false to TRUE/FALSE
                    row[row_key] = row[row_key].lower()
                if row[row_key] != "":
                    logging.info('row_key: ' + row_key)
                    try:  # deal with nested elements first
                        row_key0 = row_key.split(".")[0]
                        row_key1 = row_key.split(".")[1]  # IndexError here means the key is not dotted
                        # deal with lists first : append csv element to dict value
                        for itemz in row[row_key].split('|'):
                            igpy[row_key0][row_key1].append(itemz)
                            logging.info('updating ig.json with this: { "' + row_key0 + '" { "' + row_key1 + '": ["' + itemz + '",...] } }')
                    except IndexError:  # unnested dict elements
                        # deal with lists first : append csv element to dict value
                        for (itemz) in (row[row_key].split('|')):  # loop over list of dependencies
                            try:  # deal with lists first : append csv element to dict value
                                igpy[row_key].append(itemz)
                                logging.info('updating ig.json with this: { "' + row_key + '": [..."' + itemz + '",...] }')
                            except AttributeError:  # simple key value pairs
                                igpy[row_key] = itemz  # add/replace csv element to existing dict file
                                logging.info('updating ig.json with this: { "' + row_key + '": "' + itemz + '" }')
                    except AttributeError:  # nested dict elements
                        # todo - deal with nested list elements
                        igpy[row_key0][row_key1] = row[row_key]  # add/replace csv element to existing dict fil
                        logging.info('updating ig.json with this: { "' + row_key0 + '" { "' + row_key1 + '": "' + row[row_key] + '" } }')
                    except TypeError:  # unnested list of objects
                        for (item,itemz) in enumerate(row[row_key].split('|')):  # loop over list of dependencies
                            try:
                                igpy[row_key0][item][row_key1]=itemz  # create an object for each item in cell
                            except IndexError:
                                igpy[row_key0].append({row_key1:itemz})  # create an object for each item in cell
                            logging.info('updating ig.json with this: { "' + row_key0 + '"[' + str(item) + ']' +':{ "' + row_key1 + '": "' + itemz + '",... }')
    return
def init_igxml():
    """Substitute the placeholder tokens in the ig.xml skeleton.

    Replaces TITLE, STATUS, BASE and PUBLISHER with the corresponding values
    from the igpy definitions dict. Updates the module-level igxml string in
    place and also returns it.
    """
    global igxml
    # placeholder token in the skeleton -> key in the igpy definitions dict
    substitutions = (
        ('TITLE', 'title'),
        ('STATUS', 'status'),
        ('BASE', 'canonicalBase'),
        ('PUBLISHER', 'publisher'),
    )
    for token, key in substitutions:
        value = igpy[key]
        # fix: the original copy-pasted log lines misspelled "definitions"
        # ("defintions") in three of the four messages.
        logging.info('replace variables in igxml with definitions file value: '
                     + token.capitalize() + ': = ' + value)
        igxml = igxml.replace(token, value)
    return igxml
def make_op_frag(frag_id):  # create [id].md file for new operations
    """Create the pages/_includes/<frag_id>.md fragment for an OperationDefinition page."""
    # default content for files
    op_frag = '''
This is the markdown file that gets inserted into the op.html template.
'''
    # TODO: check if the file already exists before overwriting it.
    frag = dir + 'pages/_includes/' + frag_id
    # fix: use a context manager so the handle is closed even on error
    # (the original leaked the open file object).
    with open(frag + '.md', 'w') as fragf:
        fragf.write(frag_id + '.md file\n' + op_frag)
    logging.info('added file: ' + frag + '.md')
    return
def make_frags(frag_id):  # create [id]-intro.md, [id]-search.md and [id]-summary.md files
    """Create the -intro/-summary/-search markdown fragments pulled into sd.html pages."""
    # default content for files
    intro = '''
This is the introduction markdown file that gets inserted into the sd.html template.
This profile sets minimum expectations for blah blah blah
##### Mandatory Data Elements and Terminology
The following data-elements are mandatory (i.e data MUST be present). blah blah blah
**must have:**
1. blah
1. blah
1. blah
**Additional Profile specific implementation guidance:**
#### Examples
'''
    srch = '''
This is the search markdown file that gets inserted into the sd.html Quick Start section for explanation of the search requirements.
'''
    sumry = '''
This is the summary markdown file that gets inserted into the sd.html template. for a more formal narrative summary of constraints. in future hope to automate this to computer generated code.
#### Complete Summary of the Mandatory Requirements
1.
1.
1.
'''
    # TODO: check if files already exist before overwriting them.
    frag = dir + 'pages/_includes/'+ frag_id
    # fix: the original reused one file variable and never closed any of the
    # three handles; context managers close each one deterministically.
    with open(frag + '-intro.md', 'w') as fragf:
        fragf.write(frag_id + '-intro.md file\n' + intro)
    logging.info('added file: ' + frag + '-intro.md')
    with open(frag + '-summary.md', 'w') as fragf:
        fragf.write(frag_id + '-summary.md' + sumry)
    logging.info('added file: ' + frag + '-summary.md')
    with open(frag + '-search.md', 'w') as fragf:
        fragf.write(frag_id + '-search.md file\n' + srch)
    logging.info('added file: ' + frag +'-search.md')
    return
def update_sd(i, type, logical):
    """Register a StructureDefinition authored as an Excel-XML spreadsheet.

    i is the spreadsheet filename under resources/; logical selects the
    logical-model layout (id lives in a different worksheet cell and keeps
    its case). Side effects: appends to igpy['spreadsheets'], registers
    pages in ig.json/ig.xml and creates missing page fragments.
    """
    # Namespace map for Microsoft Office SpreadsheetML documents.
    namespaces = {'o': 'urn:schemas-microsoft-com:office:office',
                  'x': 'urn:schemas-microsoft-com:office:excel',
                  'ss': 'urn:schemas-microsoft-com:office:spreadsheet', }
    igpy['spreadsheets'].append(i)
    logging.info('cwd = ' + dir)
    logging.info('adding ' + i + ' to spreadsheets array')
    # NOTE(review): this handle is never closed — consider a with-block.
    sd_file = open(dir + 'resources/' + i)  # for each spreadsheet in /resources open value and read SD id and create and append dict struct to definiions file
    sdxml = etree.parse(sd_file)  # lxml module to parse excel xml
    if logical:  # Get the id from the data element row2 column "element"
        sdid = sdxml.xpath('/ss:Workbook/ss:Worksheet[3]/ss:Table/ss:Row[2]/ss:Cell[2]/ss:Data', namespaces=namespaces)  # use xpath to get the id from the spreadsheet and retain case
        temp_id = sdid[0].text  # retain case
        update_igxml('StructureDefinition','logical' , temp_id)  # add to ig.xml as an SD
    else:
        sdid = sdxml.xpath('/ss:Workbook/ss:Worksheet[2]/ss:Table/ss:Row[11]/ss:Cell[2]/ss:Data',
                           namespaces=namespaces)  # use xpath to get the id from the spreadsheet and lower case
        temp_id = sdid[0].text.lower()  # use lower case
    update_igjson(type, temp_id)  # add base to definitions file
    update_igjson(type, temp_id, 'defns')  # add base to definitions file
    if not os.path.exists(dir + 'pages/_includes/'+ temp_id + '-intro.md'):  # if intro fragment is missing then create new page fragments for extension
        make_frags(temp_id)
    return
def update_igxml(type, purpose, id):
    """Insert a <resource> entry for type/id into the module-level ig.xml string.

    The entry is spliced in right after the <package><name value="base"/>
    element; purpose == "example" marks the resource as an example.
    """
    global igxml
    ev = 'true' if purpose == "example" else 'false'
    # Build the resource element for this type/id pair.
    vsxml = ('<resource><example value="' + ev + '"/><sourceReference><reference value="'
             + type + '/' + id + '"/></sourceReference></resource>')  # concat id into appropriate string
    # Anchor on the package name element and append the new resource after it.
    igxml = igxml.replace('name value="base"/>',
                          'name value="base"/>' + vsxml)  # add valueset base def to ig resource
    logging.info('adding ' + type + vsxml + ' to resources in ig.xml')
    return
def update_igjson(type, id, template = 'base', filename = "blah"):  # add base to ig.json - can extend for other templates if needed with extra 'template' param
    """Record a resource page entry in the igpy (ig.json) dict.

    template selects what is written for the "type/id" resource key:
    'base' creates the entry with its generated page name, 'source'
    records the source filename, and 'defns' records the definitions page
    (StructureDefinitions only).
    """
    resource_key = type + '/' + id
    if template == 'base':
        # Create the resource entry pointing at its generated html page.
        igpy['resources'][resource_key] = {template: type + '-' + id + '.html'}
        logging.info('adding ' + type + ' ' + id + ' base to resources ig.json')
    if template == 'source':
        # Record which file under resources/ the entry came from.
        igpy['resources'][resource_key][template] = filename
        logging.info('adding ' + id + ' source filename to resources ig.json')
    if template == 'defns':
        # Definitions page is only meaningful for StructureDefinitions.
        igpy['resources'][resource_key][template] = type + '-' + id + '-definitions.html'
        logging.info('adding ' + type + ' ' + id + ' definitions to resources ig.json')
    return
def update_def(filename, type, purpose):
    """Register a FHIR resource XML file found under resources/.

    Extracts the resource id via regex, records the page entries in ig.json,
    creates any missing page fragments, and adds the resource to ig.xml.
    """
    vsid_re = re.compile(r'<id value="(.*)"/>')  # regex for finding the index in vs
    # fix: close the file deterministically (the original leaked the handle);
    # regex is used instead of an XML parser to keep dependencies small.
    with open(dir + 'resources/' + filename) as vs_file:
        vsxml = vs_file.read()  # convert to string
    vsmo = vsid_re.search(vsxml)  # get match object which contains id
    vsid = vsmo.group(1)  # get id as string
    update_igjson(type, vsid)  # add base to definitions file
    update_igjson(type, vsid, 'source', filename)  # add source filename to definitions file
    if type == 'StructureDefinition':
        update_igjson(type, vsid, 'defns')  # add definitions page to definitions file
        if not os.path.exists(dir + 'pages/_includes/' + vsid + '-intro.md'):  # create page fragments if missing
            make_frags(vsid)
    if type == 'OperationDefinition':
        if not os.path.exists(dir + 'pages/_includes/' + vsid + '.md'):  # create op page fragment if missing
            make_op_frag(vsid)
    update_igxml(type, purpose, vsid)
    return
def update_example(type, id, filename):
    """Register one example resource in both ig.xml and ig.json.

    type/id come from the example file itself; filename is its name under
    examples/. Also forces the 'ex.html' page template for this resource type.
    """
    update_igxml(type, 'example', id)  # add example to ig.xml file
    update_igjson(type, id )  # add example base to definitions file
    update_igjson(type, id,'source', filename)  # add source filename to definitions file
    igpy['defaults'][type] = {'template-base': 'ex.html'}  # add example template for type
    logging.info('adding example template to type ' +type + ' in ig.json')
    return
def get_file(e):
    """Open examples/<e> and return the OPEN file object (caller consumes it).

    NOTE(review): callers never close the returned handle — relies on GC.
    """
    ex_file = open(dir + 'examples/' + e)  # for each example in /examples open
    logging.info('load example xml file ' + dir + 'examples/' + e)
    return ex_file
def main():
    """Build the IG publisher control files.

    Reads definitions.csv, scans resources/ and examples/, then writes
    ig.json and resources/ig.xml. File classification is by SUBSTRING of the
    filename ('valueset', 'codesystem', ...), so naming conventions matter.
    """
    init_igpy()  # read CSV file and update the configuration data
    init_igxml()  # add title, publisher etc to ig.xml
    global dir
    if igpy['working-dir']:
        dir = igpy['working-dir']  # change to the local path name specified in the csv file if present
    logging.info('cwd = ' + dir)
    resources = os.listdir(dir + 'resources')  # get all the files in the resource directory
    for i in range(len(resources)):  # run through all the files looking for spreadsheets and valuesets
        if 'spreadsheet' in resources[i]:  # for spreadsheets append to the igpy[spreadsheet] array.
            if 'logical' in resources[i]:  # check if logical model
                logical = True  # these need to be handled differently
            else:
                logical = False
            update_sd(resources[i], 'StructureDefinition', logical)  # append to the igpy[spreadsheet] array.
        # Each XML resource kind is registered via update_def with the purpose
        # ('terminology'/'conformance') that drives the example flag in ig.xml.
        if 'valueset' in resources[i]:
            update_def(resources[i], 'ValueSet', 'terminology')
        if 'codesystem' in resources[i]:
            update_def(resources[i], 'CodeSystem', 'terminology')
        if 'conceptmap' in resources[i]:
            update_def(resources[i], 'ConceptMap', 'terminology')
        if 'capabilitystatement' in resources[i]:
            update_def(resources[i], 'CapabilityStatement', 'conformance')
        if 'operationdefinition' in resources[i]:
            update_def(resources[i], 'OperationDefinition', 'conformance')
        if 'structuredefinition' in resources[i]:
            update_def(resources[i], 'StructureDefinition', 'conformance')
        if 'searchparameter' in resources[i]:
            update_def(resources[i], 'SearchParameter', 'conformance')
    # add spreadsheet extensions
    for extension in igpy['extensions']:
        update_igjson('StructureDefinition', extension, 'base')
        update_igjson('StructureDefinition', extension, 'defns')
        if not os.path.exists(dir + 'pages/_includes/'+ extension + '-intro.md'):  # if intro fragment is missing then create new page fragments for extension
            make_frags(extension)
    # add spreadsheet search parameters
    for search in igpy['searches']:
        update_igjson('SearchParameter', search, 'base')
    # add spreadsheet code systems (each implies a matching value set page)
    for codesystem in igpy['codesystems']:
        update_igjson('CodeSystem', codesystem, 'base')
        update_igjson('ValueSet', codesystem, 'base')
    # add spreadsheet valuesets
    for valueset in igpy['valuesets']:
        update_igjson('ValueSet', valueset, 'base')
    # add spreadsheet structuremaps
    for structuremap in igpy['structuremaps']:
        update_igjson('StructureMap', structuremap, 'base')
    examples = os.listdir(dir + 'examples')  # get all the examples in the examples directory assuming are in json or xml
    for i in range(len(examples)):  # run through all the examples and get id and resource type
        # NOTE(review): substring test — a file named "xml-notes.json" would
        # match BOTH branches; relies on well-behaved example filenames.
        if 'json' in examples[i]:
            exjson = json.load(get_file(examples[i]))
            extype = exjson['resourceType']
            ex_id = exjson['id']
            update_example(extype, ex_id, examples[i])
        if 'xml' in examples[i]:
            ex_xml = etree.parse(get_file(examples[i]))  # lxml module to parse example xml
            ex_id = ex_xml.xpath('//f:id/@value', namespaces={'f': 'http://hl7.org/fhir'})  # use xpath to get the id
            extype = ex_xml.xpath('name(/*)')  # use xpath to get the type: root element name
            update_example(extype, ex_id[0], examples[i])
    # write files (handles rely on interpreter exit to flush/close)
    ig_file = open(dir + 'ig.json', 'w')
    ig_file.write(json.dumps(igpy))  # convert dict to json and replace ig.json with this file
    logging.info('ig.json now looks like : ' + json.dumps(igpy))
    ig_file = open(dir + 'resources/ig.xml', 'w')
    ig_file.write(igxml)  # replace ig.xml with this file
    logging.info('ig.xml now looks like : ' + igxml)
    return
# Script entry point.
if __name__ == '__main__':
    main()
    logging.info('End of program')
| 49.5625
| 457
| 0.620954
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 10,782
| 0.56652
|
0b8f097eaf823137c79c39a4fcd3c6e49316ae19
| 8,456
|
py
|
Python
|
src/sadie/renumbering/clients/g3.py
|
jwillis0720/pybody
|
2d7c68650ac1ef5f3003ccb67171898eac1f63eb
|
[
"MIT"
] | null | null | null |
src/sadie/renumbering/clients/g3.py
|
jwillis0720/pybody
|
2d7c68650ac1ef5f3003ccb67171898eac1f63eb
|
[
"MIT"
] | null | null | null |
src/sadie/renumbering/clients/g3.py
|
jwillis0720/pybody
|
2d7c68650ac1ef5f3003ccb67171898eac1f63eb
|
[
"MIT"
] | null | null | null |
from functools import lru_cache
from itertools import product
from pathlib import Path
from typing import Optional, List, Tuple
from pydantic import validate_arguments
import pyhmmer
import requests as r
from yarl import URL
from sadie.typing import Species, Chain, Source
class G3:
    """API Wrapper with OpenAPI found here https://g3.jordanrwillis.com/docs

    Fetches germline gene data from the G3 REST API, builds 128-column
    Stockholm alignments of V/J combinations, and compiles them into
    profile HMMs with pyhmmer. Stockholm and HMM files are cached under
    data_folder.
    """

    # TODO: most likely make this an import
    # Package-relative directory where stockholm/HMM artefacts are cached.
    data_folder = Path(__file__).parent.parent / "data"
    # Valid gene segment codes accepted by __get_gene_resp.
    segments = {"V", "D", "J"}
    # Valid antibody chain codes.
    chains = {"H", "K", "L"}

    def __init__(self):
        self.base_url = URL("https://g3.jordanrwillis.com/api/v1")
        # Species G3 serves but this client deliberately excludes.
        self.not_usable_species = [
            "pig",
            "cow",
            "cat",  # missing L
            "alpaca",  # missing L and K
            "rhesus",  # TODO: breaks tests; fix and fall back on numbering for now
            "dog",  # TODO: viable but does not match. Need to check if diff species of dog from G3
        ]
        # pyhmmer machinery for building profile HMMs from hand-architected MSAs.
        self.alphabet = pyhmmer.easel.Alphabet.amino()
        self.builder = pyhmmer.plan7.Builder(self.alphabet, architecture="hand")
        self.background = pyhmmer.plan7.Background(self.alphabet)

    # NOTE(review): lru_cache on instance methods keeps the instance alive for
    # the cache's lifetime (ruff B019); consider functools.cached_property for
    # the two properties below — confirm before changing.
    @property
    @lru_cache(maxsize=1)
    def sources(self):
        """Gene sources advertised by the API's OpenAPI schema (fetched once)."""
        resp = r.get(self.base_url)
        resp.raise_for_status()
        return resp.json()["components"]["schemas"]["SourceName"]["enum"]

    @property
    @lru_cache(maxsize=1)
    def species(self):
        """Usable species: the API's CommonName enum minus not_usable_species."""
        resp = r.get(self.base_url)
        resp.raise_for_status()
        species = resp.json()["components"]["schemas"]["CommonName"]["enum"]
        return [single_species for single_species in species if single_species not in self.not_usable_species]

    @lru_cache(maxsize=None)
    @validate_arguments
    def __get_gene_resp(
        self,
        source: Source = "imgt",
        species: Species = "human",
        segment: str = "V",
        limit: Optional[int] = None,
    ) -> r.Response:  # annotation fixed: this returns the Response object, not str
        """GET /genes for one source/species/segment; raises for HTTP errors."""
        segment = segment.upper()
        if segment not in self.segments:
            raise ValueError(f"{segment} is not a valid segment from {self.segments}")
        params = {
            "source": source,
            "common": species,
            "segment": segment,
            "limit": limit if limit else "-1",  # "-1" asks the server for everything
        }
        resp = r.get(self.base_url / "genes", params=params)
        resp.raise_for_status()
        return resp

    @validate_arguments
    def get_gene(
        self,
        source: Source = "imgt",
        species: Species = "human",
        chain: Chain = "H",
        segment: str = "V",
        limit: Optional[int] = None,
    ) -> List[dict]:  # annotation fixed: returns the filtered gene records
        """Return gene records whose name encodes the requested chain.

        The third character of the gene name (e.g. IGHV...) is the chain letter.
        """
        resp = self.__get_gene_resp(source=source, species=species, segment=segment, limit=limit)
        return [x for x in resp.json() if x["gene"][2].lower() == chain.lower()]

    def get_stockholm_pairs(
        self,
        source: Source = "imgt",
        chain: Chain = "H",
        species: Species = "human",
        limit: Optional[int] = None,
    ) -> List[Tuple[str, str]]:
        """Build (name, alignment) pairs for every functional V x J combination.

        Each alignment is padded to exactly 128 columns: the first 108 IMGT
        columns of V, gap padding, then the last 3 CDR3 residues plus FWR4
        from J.
        """
        sub_v = self.get_gene(source=source, species=species, chain=chain, segment="V", limit=limit)
        sub_j = self.get_gene(source=source, species=species, chain=chain, segment="J", limit=limit)
        stockholm_pairs = []
        # Cartesian product: pair every V gene with every J gene.
        for merge in product(sub_v, sub_j):
            v_seg = merge[0]
            j_seg = merge[1]
            if v_seg["receptor"] not in ["IG"]:  # immunoglobulin genes only
                continue
            functional = v_seg["imgt"]["imgt_functional"]
            # First 108 IMGT columns of V, '-' for both gaps and right-padding.
            v_part = v_seg["imgt"]["sequence_gapped_aa"].replace(".", "-")[:108].ljust(108).replace(" ", "-")
            # if v_part[0] == "-":
            #     continue
            cdr3_part = j_seg["imgt"]["cdr3_aa"]
            fwr4_part = j_seg["imgt"]["fwr4_aa"]
            v_name = v_seg["gene"]
            j_name = j_seg["gene"]
            name = f"{species}_{v_name}_{j_name}"
            # why?
            if functional != "F":  # keep functional genes only
                continue
            # H rules
            # NOTE(review): chain is lower-cased but tested against "H" — "h" in "H"
            # is False, so this branch only matches an empty chain string; looks
            # like a bug, confirm intended comparison before relying on it.
            if chain.strip().lower() in "H":
                if len(cdr3_part[-3:] + fwr4_part) == 13:
                    fwr4_part += "-"
            # K rules
            if chain.strip().lower() in "k":
                if len(cdr3_part[-3:] + fwr4_part) in [12, 13]:
                    fwr4_part += "-"
            # # L rules
            if chain.strip().lower() == "l":
                if len(cdr3_part[-3:] + fwr4_part) == 12:
                    fwr4_part += "-"
            # todo: alt fwr4_part based on it's size and who's askin
            # Gap-fill the middle so the total alignment is exactly 128 columns.
            multiplier = 128 - (len(v_part) + len(cdr3_part[-3:] + fwr4_part))
            align = v_part + "-" * multiplier + cdr3_part[-3:] + fwr4_part
            # sanity check if chains rules are working
            assert len(align) == 128
            stockholm_pairs.append((name, align))
        return stockholm_pairs

    # def get_msa(
    #     self,
    #     source: Source = "imgt",
    #     species: Species = "human",
    #     chain: Chain = "H",
    #     limit: Optional[int] = None,
    # ) -> str:
    #     stockholm_pairs = self.get_stockholm_pairs(source=source, chain=chain, species=species, limit=limit)
    #     sequences = []
    #     for name, align in stockholm_pairs:
    #         sequence = pyhmmer.easel.TextSequence(name=name.encode(), sequence=align)
    #         sequences.append(sequence)
    #     if not sequences:
    #         return None
    #     return pyhmmer.easel.TextMSA(name=f"{species}_{chain}".encode(), sequences=sequences).digitize(self.alphabet)

    @lru_cache(maxsize=None)
    def build_stockholm(
        self,
        source: Source = "imgt",
        species: Species = "human",
        chain: Chain = "H",
        limit: Optional[int] = None,
    ) -> Optional[Path]:  # annotation fixed: returns None when there are no pairs
        """
        Get a stockholm file in string format for the given species and chain.

        Parameters
        ----------
        source : str, optional
            Source of gene data, by default "imgt"
            options: 'imgt' or 'custom'
        species : str, optional
            species selected from avaliabe, by default "human"
        chain : str, optional
            chain for seq, by default "H"
            options: 'H', 'k', 'l' -> heavy, kappa light

        Returns
        -------
        Optional[Path]
            path to the written stockholm file, or None when no V/J pairs exist
        """
        sto_path = self.data_folder / f"stockholms/{species}_{chain}.sto"
        sto_pairs = self.get_stockholm_pairs(source=source, chain=chain, species=species, limit=limit)
        if not sto_pairs:
            return
        head = f"# STOCKHOLM 1.0\n#=GF ID {species}_{chain}\n"
        body = "\n".join([f"{name}\t{ali}" for name, ali in sto_pairs])
        # The RF reference line marks all 128 columns as match states.
        tail = "\n#=GC RF" + "\t" + "x" * 128 + "\n//\n"
        # TODO: hand arch needs a parsed file -- will be refactored to handle digital directly
        with open(sto_path, "w") as outfile:
            outfile.write(head + body + tail)
        return sto_path

    @lru_cache(maxsize=None)
    def build_hmm(
        self,
        source: Source = "imgt",
        species: Species = "human",
        chain: Chain = "H",
        limit: Optional[int] = None,
    ) -> Optional[Path]:  # annotation fixed: returns None when no stockholm could be built
        """Compile the species/chain Stockholm MSA into a profile HMM file."""
        sto_path = self.build_stockholm(source=source, chain=chain, species=species, limit=limit)
        if not sto_path:
            return
        hmm_path = self.data_folder / f"hmms/{species}_{chain}.hmm"
        with pyhmmer.easel.MSAFile(sto_path, digital=True, alphabet=self.alphabet, format="stockholm") as msa_file:
            msa = next(msa_file)
        # "hand" architecture builder uses the #=GC RF line for match columns.
        hmm, _, _ = self.builder.build_msa(msa, self.background)
        with open(hmm_path, "wb") as output_file:
            hmm.write(output_file)
        return hmm_path

    @lru_cache(maxsize=None)
    def get_hmm(
        self,
        source: Source = "imgt",
        species: Species = "human",
        chain: Chain = "H",
        limit: Optional[int] = None,
        prioritize_cached_hmm: bool = False,
    ):
        """Return the loaded profile HMM for species/chain, or None.

        With prioritize_cached_hmm=True an existing .hmm file on disk is
        reused; otherwise the HMM is (re)built from API data.
        """
        hmm_path = self.data_folder / f"hmms/{species}_{chain}.hmm"
        if prioritize_cached_hmm is True:
            if hmm_path.is_file() is False:
                hmm_path = self.build_hmm(source=source, chain=chain, species=species, limit=limit)
        else:
            hmm_path = self.build_hmm(source=source, chain=chain, species=species, limit=limit)
        if not hmm_path:
            return
        with pyhmmer.plan7.HMMFile(hmm_path) as hmm_file:
            hmm = next(hmm_file)
        return hmm
| 33.555556
| 119
| 0.561968
| 8,178
| 0.967124
| 0
| 0
| 4,525
| 0.535123
| 0
| 0
| 2,472
| 0.292337
|
0b904a57fdd0e7c89f250e4ba02b11a25b8c89d4
| 1,683
|
py
|
Python
|
projects/shadow/kmap-builder-jython27/MapReduce/mappers/__init__.py
|
zaqwes8811/smart-vocabulary-cards
|
abeab5c86b1c6f68d8796475cba80c4f2c6055ff
|
[
"Apache-2.0"
] | null | null | null |
projects/shadow/kmap-builder-jython27/MapReduce/mappers/__init__.py
|
zaqwes8811/smart-vocabulary-cards
|
abeab5c86b1c6f68d8796475cba80c4f2c6055ff
|
[
"Apache-2.0"
] | 11
|
2015-01-25T14:22:52.000Z
|
2015-09-08T09:59:38.000Z
|
projects/shadow/kmap-builder-jython27/MapReduce/mappers/__init__.py
|
zaqwes8811/vocabulary-cards
|
abeab5c86b1c6f68d8796475cba80c4f2c6055ff
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
from nlp_components.content_items_processors import process_list_content_sentences
from nlp_components.content_items_processors import process_list_content_sentences_real
import dals.os_io.io_wrapper as dal
import json
# NO DRY!!
def read_utf_txt_file(fname):
    """Read a UTF-8 text file via the io DAL and return it as a list of lines."""
    settings = dal.get_utf8_template()
    settings['name'] = fname
    return dal.file2list(settings)
def mapper(job):
    """Map step over a cached page dump.

    job is (node_name, path_to_temp_file); the first line of the file is a
    JSON metadata header with the source url and language, the remaining
    lines are the page content.

    Returns (node_name, index, [count_sents, summ_sents_len], (url, lang)).
    """
    url_tmp_file = job[1]
    node_name = job[0]
    # Get the text.
    file_content = read_utf_txt_file(url_tmp_file)
    metadata = file_content[0]  # first line is the JSON header
    settings = json.loads(metadata)
    url = settings['url']
    lang = settings['lang']
    list_content_items = file_content[1:]
    # Now the index can be built.
    index, (count_sents, summ_sents_len) = process_list_content_sentences(
        list_content_items, lang)
    parallel_pkg = (node_name, index, [count_sents, summ_sents_len], (url, lang))
    return parallel_pkg
def mapper_real(job):
    """Map step that fetches and tokenizes live content.

    job is (url, text_extractor, tokenizer, node_name); tokenizer may be
    falsy, in which case the whole extracted text is treated as one item.

    Returns (node_name, index, [count_sents, summ_sents_len], url).
    """
    url = job[0]
    text_extractor = job[1]
    tokenizer = job[2]
    node_name = job[3]
    # Get the text.
    text = text_extractor(url)
    # Tokenization.
    lits_content_items = []
    if tokenizer:
        lits_content_items = tokenizer(text)
    else:
        lits_content_items = [text]
    # Now the index can be built.
    index, (count_sents, summ_sents_len) = \
        process_list_content_sentences_real(
            lits_content_items,
            tokenizer)
    parallel_pkg = (node_name, index, [count_sents, summ_sents_len], url)
    return parallel_pkg
| 24.75
| 87
| 0.673203
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 340
| 0.191874
|
0b9068ae3299f03b04a8be28a4b732a299e46459
| 1,132
|
py
|
Python
|
config.py
|
xXAligatorXx/repostAlert
|
74f450b577fa0971632a57c7d1f599eea4808427
|
[
"MIT"
] | 25
|
2018-10-18T15:16:39.000Z
|
2019-06-06T04:33:20.000Z
|
config.py
|
xXAligatorXx/repostAlert
|
74f450b577fa0971632a57c7d1f599eea4808427
|
[
"MIT"
] | 6
|
2018-10-17T01:34:13.000Z
|
2019-06-08T18:31:41.000Z
|
config.py
|
xXAligatorXx/repostAlert
|
74f450b577fa0971632a57c7d1f599eea4808427
|
[
"MIT"
] | 2
|
2018-10-25T19:42:45.000Z
|
2018-12-05T23:09:06.000Z
|
import os

# Reddit API credentials — all required; fail fast at import time if missing.
client_id = os.environ['BOT_CLIENT_ID']
client_secret = os.environ['BOT_CLIENT_SECRET']
user_agent = os.environ['BOT_USER_AGENT']
username = os.environ['BOT_USERNAME']
password = os.environ['BOT_PASSWORD']

# Number of subreddit config groups (BOT_SUBREDDIT0, BOT_SUBREDDIT1, ...).
num_subs = int(os.environ['BOT_SUB_COUNT'])


def _env_int(name, default):
    """Return int(os.environ[name]), or *default* when the variable is unset."""
    return int(os.environ[name]) if name in os.environ else default


def _env_bool(name, default=False):
    """Parse an environment flag into a real boolean.

    Bug fix: the original used bool(os.environ[...]), which is True for ANY
    non-empty string — including "False" and "0".
    """
    if name not in os.environ:
        return default
    return os.environ[name].strip().lower() in ('1', 'true', 'yes', 'on')


# Per-subreddit settings: [name, top_days, hot_days, new_days,
#                          top_num_posts, hot_num_posts, new_num_posts,
#                          thresh, text_in_image]
sub_settings = [[
    os.environ['BOT_SUBREDDIT' + i],
    _env_int('BOT_TOP_DAYS' + i, None),
    _env_int('BOT_HOT_DAYS' + i, None),
    _env_int('BOT_NEW_DAYS' + i, None),
    _env_int('BOT_TOP_NUM_POSTS' + i, 1000),
    _env_int('BOT_HOT_NUM_POSTS' + i, 1000),
    _env_int('BOT_NEW_NUM_POSTS' + i, 1000),
    _env_int('BOT_THRESH' + i, 5),
    _env_bool('BOT_TEXT_IN_IMAGE' + i),
] for i in [str(x) for x in range(num_subs)]]
| 56.6
| 102
| 0.682862
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 368
| 0.325088
|
0b906bf27fc67aeba61a035efc941b80ca56e405
| 4,305
|
py
|
Python
|
lib/aquilon/worker/formats/list.py
|
ned21/aquilon
|
6562ea0f224cda33b72a6f7664f48d65f96bd41a
|
[
"Apache-2.0"
] | 7
|
2015-07-31T05:57:30.000Z
|
2021-09-07T15:18:56.000Z
|
lib/aquilon/worker/formats/list.py
|
ned21/aquilon
|
6562ea0f224cda33b72a6f7664f48d65f96bd41a
|
[
"Apache-2.0"
] | 115
|
2015-03-03T13:11:46.000Z
|
2021-09-20T12:42:24.000Z
|
lib/aquilon/worker/formats/list.py
|
ned21/aquilon
|
6562ea0f224cda33b72a6f7664f48d65f96bd41a
|
[
"Apache-2.0"
] | 13
|
2015-03-03T11:17:59.000Z
|
2021-09-09T09:16:41.000Z
|
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2008,2009,2010,2011,2012,2013,2014,2015,2017 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""List formatter."""
from operator import attrgetter
from six import string_types
from sqlalchemy.orm.collections import InstrumentedList
from sqlalchemy.orm.query import Query
from sqlalchemy.ext.associationproxy import _AssociationList
from aquilon.worker.formats.formatters import ObjectFormatter
class ListFormatter(ObjectFormatter):
    """Render a collection by dispatching each element to its own formatter."""

    def format_raw(self, result, indent="", embedded=True,
                   indirect_attrs=True):
        # A subclass that defines template_raw renders the list as a whole;
        # otherwise each element is rendered individually and joined.
        if hasattr(self, "template_raw"):
            return ObjectFormatter.format_raw(self, result, indent,
                                              embedded=embedded,
                                              indirect_attrs=indirect_attrs)
        rendered = [self.redirect_raw(entry, indent, embedded=embedded,
                                      indirect_attrs=indirect_attrs)
                    for entry in result]
        return "\n".join(rendered)

    def format_csv(self, result, writer):
        # One CSV row (or row group) per element.
        for entry in result:
            self.redirect_csv(entry, writer)

    def format_djb(self, result):
        rendered = [self.redirect_djb(entry) for entry in result]
        return "\n".join(rendered)

    def format_proto(self, result, container, embedded=True, indirect_attrs=True):
        # Append one protobuf message per element to the repeated container.
        for entry in result:
            skeleton = container.add()
            ObjectFormatter.redirect_proto(entry, skeleton, embedded=embedded,
                                           indirect_attrs=indirect_attrs)
# Register the list formatter for every list-like result type produced by the
# broker: plain lists, SQLAlchemy Query results, instrumented relationship
# collections, and association-proxy lists.
ObjectFormatter.handlers[list] = ListFormatter()
ObjectFormatter.handlers[Query] = ListFormatter()
ObjectFormatter.handlers[InstrumentedList] = ListFormatter()
ObjectFormatter.handlers[_AssociationList] = ListFormatter()
class StringList(list):
    # Marker type: wrap a list in StringList to have every element rendered
    # via str(), regardless of its actual type (see StringListFormatter).
    pass
class StringListFormatter(ListFormatter):
    """Render every element of the list via str(), ignoring its actual type."""

    def format_raw(self, objects, indent="", embedded=True, indirect_attrs=True):
        lines = [indent + str(entry) for entry in objects]
        return "\n".join(lines)

    def format_csv(self, objects, writer):
        # One single-column row per element.
        for entry in objects:
            writer.writerow((str(entry),))


ObjectFormatter.handlers[StringList] = StringListFormatter()
class StringAttributeList(list):
    """A list that remembers how to extract a display attribute from each item."""

    def __init__(self, items, attr):
        # Accept either an attribute name or a ready-made accessor callable.
        if isinstance(attr, string_types):
            self.getter = attrgetter(attr)
        else:
            self.getter = attr
        super(StringAttributeList, self).__init__(items)
class StringAttributeListFormatter(ListFormatter):
    """ Format a single attribute of every object as a string """

    def format_raw(self, objects, indent="", embedded=True, indirect_attrs=True):
        # objects is a StringAttributeList; its getter extracts the attribute.
        return "\n".join(indent + str(objects.getter(obj)) for obj in objects)

    def format_csv(self, objects, writer):
        # One single-column row per element.
        for obj in objects:
            writer.writerow((str(objects.getter(obj)),))

    def format_proto(self, objects, container, embedded=True, indirect_attrs=True):
        # This method always populates the first field of the protobuf message,
        # regardless of how that field is called.
        field_name = None
        for obj in objects:
            msg = container.add()
            if not field_name:
                # Discover the field name once via the message descriptor.
                field_name = msg.DESCRIPTOR.fields[0].name
            setattr(msg, field_name, str(objects.getter(obj)))
            # TODO: if obj is really the full DB object rather than just a
            # string, and it has other attributes already loaded, then we could
            # add those attributes to the protobuf message "for free". Let's see
            # if a usecase comes up.


ObjectFormatter.handlers[StringAttributeList] = StringAttributeListFormatter()
| 38.783784
| 83
| 0.673635
| 2,899
| 0.673403
| 0
| 0
| 0
| 0
| 0
| 0
| 1,220
| 0.283391
|
0b9113a200832679e9fc55536bc662bb2d860b4c
| 228
|
py
|
Python
|
satyrus/sat/types/string.py
|
lucasvg/Satyrus3-FinalProject-EspTopsOTM
|
024785752abdc46e3463d8c94df7c3da873c354d
|
[
"MIT"
] | null | null | null |
satyrus/sat/types/string.py
|
lucasvg/Satyrus3-FinalProject-EspTopsOTM
|
024785752abdc46e3463d8c94df7c3da873c354d
|
[
"MIT"
] | null | null | null |
satyrus/sat/types/string.py
|
lucasvg/Satyrus3-FinalProject-EspTopsOTM
|
024785752abdc46e3463d8c94df7c3da873c354d
|
[
"MIT"
] | null | null | null |
from .main import SatType
class String(SatType, str):
def __new__(cls, *args, **kwargs):
return str.__new__(cls, *args, **kwargs)
def __init__(self, *args, **kwargs):
SatType.__init__(self)
| 25.333333
| 49
| 0.605263
| 199
| 0.872807
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0b944182e57c98d2c412133b9ff0a2ad81333fdb
| 737
|
py
|
Python
|
setup.py
|
ZeroCater/zerocaterpy
|
824af8613db0c5f203c0b2f7cebd830ee80eea5d
|
[
"MIT"
] | null | null | null |
setup.py
|
ZeroCater/zerocaterpy
|
824af8613db0c5f203c0b2f7cebd830ee80eea5d
|
[
"MIT"
] | null | null | null |
setup.py
|
ZeroCater/zerocaterpy
|
824af8613db0c5f203c0b2f7cebd830ee80eea5d
|
[
"MIT"
] | null | null | null |
from setuptools import setup
setup(name='zerocater',
version='0.0.1',
description="Python interface to ZeroCater",
long_description='',
keywords='zerocater food delivery meal planning catering lunch',
author='ZeroCater',
author_email='tech@zerocater.com',
url='https://github.com/ZeroCater/PyZeroCater',
download_url='https://github.com/ZeroCater/PyZeroCater/tarball/0.0.1',
license='MIT',
packages=['zerocater'],
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Topic :: Internet :: WWW/HTTP",
]
)
| 35.095238
| 76
| 0.63365
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 431
| 0.584803
|
0b95ab4e62401288fe9f479867e2cab6f6c5d09c
| 373
|
py
|
Python
|
tests/conftest.py
|
scottmanderson/minerva
|
fe6a6857d892d9c7d881701c91990d9697bde00e
|
[
"MIT"
] | null | null | null |
tests/conftest.py
|
scottmanderson/minerva
|
fe6a6857d892d9c7d881701c91990d9697bde00e
|
[
"MIT"
] | null | null | null |
tests/conftest.py
|
scottmanderson/minerva
|
fe6a6857d892d9c7d881701c91990d9697bde00e
|
[
"MIT"
] | null | null | null |
import pytest
from app import create_app, db
from config import TestConfig
@pytest.fixture
def test_client():
flask_app = create_app(TestConfig)
with flask_app.test_client() as testing_client:
with flask_app.app_context():
yield testing_client
@pytest.fixture
def init_database(test_client):
db.create_all()
yield
db.drop_all()
| 18.65
| 51
| 0.72118
| 0
| 0
| 259
| 0.69437
| 291
| 0.780161
| 0
| 0
| 0
| 0
|
0b96aaa21f422ac0c7d22576279c69b61dd42c95
| 154
|
py
|
Python
|
Test/two/payments/momo/urls.py
|
titan256/Python-Django-Assignment
|
9f56f69ea7182456729116e27435231925d24d11
|
[
"MIT"
] | null | null | null |
Test/two/payments/momo/urls.py
|
titan256/Python-Django-Assignment
|
9f56f69ea7182456729116e27435231925d24d11
|
[
"MIT"
] | 9
|
2020-06-05T23:53:04.000Z
|
2022-02-10T08:33:32.000Z
|
Test/two/payments/momo/urls.py
|
titan256/Python-Django-Assignment
|
9f56f69ea7182456729116e27435231925d24d11
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from django.urls import path , include
from . import views
urlpatterns = [
path('',views.index,name='index')
]
| 19.25
| 38
| 0.701299
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 9
| 0.058442
|
0b975c6ddf1a134fa942ba06d2fe6a39b749365f
| 6,435
|
py
|
Python
|
pdsensorvis/sensors/models.py
|
mickeykkim/masters-project-sphere
|
6dbe0be877058e647f5e3822932e5a70f181bb53
|
[
"MIT"
] | 2
|
2019-10-05T20:59:41.000Z
|
2019-11-01T20:25:39.000Z
|
pdsensorvis/sensors/models.py
|
mickeykkim/masters-project-sphere
|
6dbe0be877058e647f5e3822932e5a70f181bb53
|
[
"MIT"
] | 6
|
2019-10-24T12:28:02.000Z
|
2021-08-09T09:56:26.000Z
|
pdsensorvis/sensors/models.py
|
mickeykkim/masters-project-sphere
|
6dbe0be877058e647f5e3822932e5a70f181bb53
|
[
"MIT"
] | null | null | null |
from django.db import models
from django.urls import reverse
from django.contrib.auth.models import User
from django.utils import timezone
import uuid
ANNOTATION = (
('asm', 'Asymmetry'),
('dst', 'Dystonia'),
('dsk', 'Dyskensia'),
('ebt', 'En Bloc Turning'),
('str', 'Short Stride Length'),
('mov', 'Slow/Hesitant Movement'),
('pos', 'Stooped Posture'),
('trm', 'Tremor'),
('oth', 'Other/Activity')
)
FRAME_RATES = (
('NTSC_Film', 23.98),
('Film', 24),
('PAL', 25),
('NTSC', 29.97),
('Web', 30),
('PAL_HD', 50),
('NTSC_HD', 59.94),
('High', 60),
)
class PatientData(models.Model):
id = models.AutoField(primary_key=True)
first_name = models.CharField(max_length=50, help_text='Patient first name')
last_name = models.CharField(max_length=50, help_text='Patient last name')
date_of_birth = models.DateField(help_text='Patient date of birth')
notes = models.CharField(max_length=500, help_text='Notes regarding patient')
class Meta:
ordering = ['last_name']
permissions = (("can_alter_patientdata", "Can create or edit patient data entries."),)
def get_absolute_url(self):
return reverse('patientdata-detail', args=[str(self.id)])
def __str__(self):
return f'{self.last_name}, {self.first_name}'
class WearableData(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, help_text='Unique ID for this wearable data')
patient = models.ForeignKey('PatientData', on_delete=models.CASCADE, null=True, related_name='wearables')
filename = models.FileField(upload_to='wearable/', help_text='Wearable data file')
time = models.DateTimeField(help_text='Session date & time')
note = models.CharField(max_length=500, help_text='Note regarding wearable data')
class Meta:
ordering = ['patient', '-time']
permissions = (("can_alter_wearabledata", "Can create or edit wearable data entries."),)
def get_absolute_url(self):
return reverse('wearabledata-detail', args=[str(self.id)])
def __str__(self):
return f'{self.patient} ({self.time})'
class CameraData(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, help_text='Unique ID for this wearable data')
patient = models.ForeignKey('PatientData', on_delete=models.CASCADE, null=True, related_name='cameras')
filename = models.FileField(upload_to='camera/', help_text='Camera video file')
framerate = models.CharField(
max_length=9,
choices=FRAME_RATES,
default='Film',
help_text='Video framerate',
)
time = models.DateTimeField(help_text='Session date & time')
note = models.CharField(max_length=500, help_text='Note regarding camera data')
class Meta:
ordering = ['patient', '-time']
permissions = (("can_alter_cameradata", "Can create or edit camera data entries."),)
def get_absolute_url(self):
return reverse('cameradata-detail', args=[str(self.id)])
def __str__(self):
return f'{self.patient} ({self.time})'
def get_user_annotations(self):
return self.c_annotations.filter(annotator=User)
class WearableAnnotation(models.Model):
id = models.AutoField(primary_key=True)
wearable = models.ForeignKey('WearableData', on_delete=models.CASCADE, null=True, related_name='w_annotations')
frame_begin = models.PositiveIntegerField()
frame_end = models.PositiveIntegerField()
annotator = models.ForeignKey(User, on_delete=models.SET_NULL, null=True)
annotation = models.CharField(
max_length=3,
choices=ANNOTATION,
default='oth',
help_text='PD Symptom',
)
note = models.CharField(max_length=500, help_text='Note regarding annotation', null=True, blank=True)
class Meta:
ordering = ['frame_begin']
permissions = (("can_alter_wearableannotation", "Can create or edit wearable annotations."),)
def get_absolute_url(self):
return reverse('wearableannotation-detail', args=[str(self.wearable.id), str(self.id)])
def __str__(self):
return f'{self.wearable} - ({self.frame_begin}-{self.frame_end}) - {self.get_annotation_display()}'
class CameraAnnotation(models.Model):
id = models.AutoField(primary_key=True)
camera = models.ForeignKey('CameraData', on_delete=models.CASCADE, null=True, related_name='c_annotations')
time_begin = models.CharField(max_length=11, help_text='hh:mm:ss:ff')
time_end = models.CharField(max_length=11, help_text='hh:mm:ss:ff')
annotator = models.ForeignKey(User, on_delete=models.SET_NULL, null=True)
annotation = models.CharField(
max_length=3,
choices=ANNOTATION,
default='oth',
help_text='PD Symptom',
)
note = models.CharField(max_length=500, help_text='Note regarding annotation', null=True, blank=True)
class Meta:
ordering = ['camera', 'time_begin']
permissions = (("can_alter_cameraannotation", "Can create or edit camera annotations."),)
def get_absolute_url(self):
return reverse('cameraannotation-detail', args=[str(self.camera.id), str(self.id)])
def __str__(self):
return f'{self.camera} - ({self.time_begin}-{self.time_end}) - {self.get_annotation_display()}'
class CameraAnnotationComment(models.Model):
id = models.AutoField(primary_key=True)
annotation = models.ForeignKey('CameraAnnotation', on_delete=models.CASCADE, related_name='comments')
author = models.ForeignKey(User, on_delete=models.SET_NULL, null=True)
timestamp = models.DateTimeField(default=timezone.now)
text = models.TextField()
class Meta:
ordering = ['annotation', 'timestamp']
permissions = (("can_alter_cameraannotation_comment", "Can create or edit camera annotation comments."),)
def __str__(self):
return self.text
class WearableDataPoint(models.Model):
id = models.AutoField(primary_key=True)
wearable = models.ForeignKey('WearableData', on_delete=models.CASCADE, null=True, related_name='data_point')
frame = models.PositiveIntegerField()
magnitude = models.FloatField()
class Meta:
ordering = ['frame']
permissions = (("can_alter_wearabledata_point", "Can create or edit wearable data point."),)
def __str__(self):
return f'{self.wearable.id} - ({self.frame}, {self.magnitude})'
| 37.631579
| 116
| 0.685004
| 5,796
| 0.900699
| 0
| 0
| 0
| 0
| 0
| 0
| 1,887
| 0.29324
|
0b9809b2c18e28f3af61ecc6021ff494abd1e0f4
| 533
|
py
|
Python
|
setup.py
|
soumyarani/mopac
|
72f10fdd3ea3c9c61b6c808ca07ee9031b7d4aa8
|
[
"MIT"
] | 20
|
2021-03-16T08:18:01.000Z
|
2022-03-12T13:46:43.000Z
|
setup.py
|
soumyarani/mopac
|
72f10fdd3ea3c9c61b6c808ca07ee9031b7d4aa8
|
[
"MIT"
] | 1
|
2021-05-13T14:49:25.000Z
|
2021-05-13T19:45:26.000Z
|
setup.py
|
soumyarani/mopac
|
72f10fdd3ea3c9c61b6c808ca07ee9031b7d4aa8
|
[
"MIT"
] | 5
|
2020-11-01T15:46:39.000Z
|
2021-07-30T13:12:06.000Z
|
from distutils.core import setup
from setuptools import find_packages
setup(
name='mopac',
packages=find_packages(),
version='0.1',
description='Model-based policy optimization',
long_description=open('./README.md').read(),
author='',
author_email='',
url='',
entry_points={
'console_scripts': (
'mopac=softlearning.scripts.console_scripts:main',
'viskit=mopac.scripts.console_scripts:main'
)
},
requires=(),
zip_safe=True,
license='MIT'
)
| 23.173913
| 62
| 0.626642
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 178
| 0.333959
|
0b9851847b18a4b7b38e82d6bd87af07dc1c57a9
| 1,531
|
py
|
Python
|
examples/imu.py
|
dan-stone/canal
|
8a6b03a46102f7e5ca457538eb03ab9526eec095
|
[
"MIT"
] | 2
|
2017-02-08T20:27:39.000Z
|
2019-07-15T00:34:05.000Z
|
examples/imu.py
|
dan-stone/canal
|
8a6b03a46102f7e5ca457538eb03ab9526eec095
|
[
"MIT"
] | null | null | null |
examples/imu.py
|
dan-stone/canal
|
8a6b03a46102f7e5ca457538eb03ab9526eec095
|
[
"MIT"
] | 1
|
2018-05-31T14:09:19.000Z
|
2018-05-31T14:09:19.000Z
|
import datetime
import canal
from influxdb import InfluxDBClient
class IMU(canal.Measurement):
accelerometer_x = canal.IntegerField()
accelerometer_y = canal.IntegerField()
accelerometer_z = canal.IntegerField()
gyroscope_x = canal.IntegerField()
gyroscope_y = canal.IntegerField()
gyroscope_z = canal.IntegerField()
user_id = canal.Tag()
if __name__ == "__main__":
start_date = datetime.datetime.now(datetime.timezone.utc)
duration = datetime.timedelta(seconds=60)
user_id = 12345678
client = InfluxDBClient(
host="localhost",
port=8086,
database="canal"
)
# Write some dummy IMU data, sampled once per second
num_imu_samples = int(duration.total_seconds())
imu = IMU(
time=[start_date + datetime.timedelta(seconds=d) for d in
range(num_imu_samples)],
acc_x=range(0, 1 * num_imu_samples, 1),
acc_y=range(0, 2 * num_imu_samples, 2),
acc_z=range(0, 3 * num_imu_samples, 3),
gyro_x=range(0, 4 * num_imu_samples, 4),
gyro_y=range(0, 5 * num_imu_samples, 5),
gyro_z=range(0, 6 * num_imu_samples, 6),
user_id=user_id
)
client.write(
data=imu.to_line_protocol(),
params=dict(
db="canal"
)
)
# Read back the IMU data
imu_resp = client.query(IMU.make_query_string(
time__gte=start_date,
time__lte=start_date + duration,
user_id=user_id
))
assert imu == IMU.from_json(imu_resp.raw)
| 27.836364
| 65
| 0.640758
| 301
| 0.196604
| 0
| 0
| 0
| 0
| 0
| 0
| 111
| 0.072502
|
0b98688189c3ac958636f3a3393afa2872fb1f5c
| 2,820
|
py
|
Python
|
lib/ravstack/runtime.py
|
geertj/raviron
|
7920c6b71757eddcca16b60051c1cf08706ae11b
|
[
"MIT"
] | 1
|
2015-05-11T21:39:35.000Z
|
2015-05-11T21:39:35.000Z
|
lib/ravstack/runtime.py
|
geertj/raviron
|
7920c6b71757eddcca16b60051c1cf08706ae11b
|
[
"MIT"
] | null | null | null |
lib/ravstack/runtime.py
|
geertj/raviron
|
7920c6b71757eddcca16b60051c1cf08706ae11b
|
[
"MIT"
] | null | null | null |
#
# This file is part of ravstack. Ravstack is free software available under
# the terms of the MIT license. See the file "LICENSE" that was provided
# together with this source file for the licensing terms.
#
# Copyright (c) 2015 the ravstack authors. See the file "AUTHORS" for a
# complete list.
from __future__ import absolute_import, print_function
import sys
import logging
from . import config, defaults, util
prog_name = __name__.split('.')[0]
LOG = logging.getLogger(prog_name)
CONF = config.Config()
DEBUG = util.EnvInt('DEBUG')
VERBOSE = util.EnvInt('VERBOSE')
LOG_STDERR = util.EnvInt('LOG_STDERR')
log_context = ''
log_datetime = '%(asctime)s '
log_template = '%(levelname)s [%(name)s] %(message)s'
log_ctx_template = '%(levelname)s [{}] [%(name)s] %(message)s'
def setup_config():
"""Return the configuration object."""
CONF.set_schema(defaults.config_schema)
CONF.read_file(defaults.config_file)
CONF.update_from_env()
meta = util.get_ravello_metadata()
if 'appName' in meta and CONF['ravello']['application'] == '<None>':
CONF['ravello']['application'] = meta['appName']
CONF.update_to_env()
def setup_logging(context=None):
"""Set up or reconfigure logging."""
root = logging.getLogger()
if root.handlers:
del root.handlers[:]
global log_context
if context is not None:
log_context = context
template = log_ctx_template.format(log_context) if log_context else log_template
# Log to stderr?
if LOG_STDERR:
handler = logging.StreamHandler(sys.stderr)
handler.setFormatter(logging.Formatter(template))
root.addHandler(handler)
else:
root.addHandler(logging.NullHandler())
# Available log file?
logfile = defaults.log_file
if util.can_open(logfile, 'a'):
handler = logging.FileHandler(logfile)
handler.setFormatter(logging.Formatter(log_datetime + template))
root.addHandler(handler)
root.setLevel(logging.DEBUG if DEBUG else logging.INFO if VERBOSE else logging.ERROR)
# A little less verbosity for requests.
logger = logging.getLogger('requests.packages.urllib3.connectionpool')
logger.setLevel(logging.DEBUG if DEBUG else logging.ERROR)
# Silence "insecure platform" warning for requests module on Py2.7.x under
# default verbosity.
logging.captureWarnings(True)
logger = logging.getLogger('py.warnings')
logger.setLevel(logging.DEBUG if DEBUG else logging.ERROR)
# Run a main function
def run_main(func):
"""Run a main function."""
setup_config()
setup_logging()
# Run the provided main function.
try:
func()
except Exception as e:
LOG.error('Uncaught exception:', exc_info=True)
if DEBUG:
raise
print('Error: {!s}'.format(e))
| 30.989011
| 89
| 0.693617
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 906
| 0.321277
|
0b9897a43237e684b6c66f4d6a3b18dc5aaad9da
| 1,217
|
py
|
Python
|
onetouch.py
|
kakoni/insulaudit
|
18fe0802bafe5764882ac4e65e472fdc840baa45
|
[
"MIT"
] | 1
|
2020-11-28T13:23:58.000Z
|
2020-11-28T13:23:58.000Z
|
onetouch.py
|
kakoni/insulaudit
|
18fe0802bafe5764882ac4e65e472fdc840baa45
|
[
"MIT"
] | null | null | null |
onetouch.py
|
kakoni/insulaudit
|
18fe0802bafe5764882ac4e65e472fdc840baa45
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
import user
import serial
from pprint import pprint, pformat
import insulaudit
from insulaudit.data import glucose
from insulaudit.log import io
from insulaudit.devices import onetouch2
import sys
PORT = '/dev/ttyUSB0'
def get_serial( port, timeout=2 ):
return serial.Serial( port, timeout=timeout )
def init( ):
mini = onetouch2.OneTouchUltra2( PORT, 5 )
print "is open? %s\n timeout: %s" % ( mini.serial.isOpen( ), mini.serial.getTimeout() )
print ""
print "read serial number"
serial = mini.execute( onetouch2.ReadSerial( ) )
print "serial number: %s" % serial
print ""
if serial == "":
print "could not connect"
sys.exit(1)
print ""
print "read firmware number"
firmware = mini.execute( onetouch2.ReadFirmware( ) )
print "firmware: %s" % firmware
print ""
print "RFID"
print mini.execute( onetouch2.ReadRFID( ) )
print "GLUCOSE"
data = mini.read_glucose( )
print data
print "len glucose: %s" % len( data )
head, body = data
output = open( 'sugars-debug.txt', 'w' )
output.write( glucose.format_records( body ) )
output.write( '\n' )
output.close( )
return mini
if __name__ == '__main__':
port = init()
io.info( port )
| 22.537037
| 89
| 0.67461
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 229
| 0.188168
|
0b9a8528a7dd0b2f831662e5079ebdffe6991f3a
| 3,743
|
py
|
Python
|
src/graphdb_builder/databases/parsers/smpdbParser.py
|
hhefzi/CKG
|
e117502a05f870174372da275e22ac3f8735d65a
|
[
"MIT"
] | null | null | null |
src/graphdb_builder/databases/parsers/smpdbParser.py
|
hhefzi/CKG
|
e117502a05f870174372da275e22ac3f8735d65a
|
[
"MIT"
] | 1
|
2020-06-11T11:59:42.000Z
|
2020-07-01T11:13:51.000Z
|
src/graphdb_builder/databases/parsers/smpdbParser.py
|
vemonet/CKG
|
c9e15c4c8ec8d81ca05c67e9a6f346ca385d8fbe
|
[
"MIT"
] | 1
|
2022-02-02T10:56:32.000Z
|
2022-02-02T10:56:32.000Z
|
import os.path
import zipfile
import pandas as pd
from collections import defaultdict
from graphdb_builder import builder_utils
#########################
# SMPDB database #
#########################
def parser(databases_directory, download=True):
config = builder_utils.get_config(config_name="smpdbConfig.yml", data_type='databases')
urls = config['smpdb_urls']
entities = set()
relationships = defaultdict(set)
entities_header = config['pathway_header']
relationships_headers = config['relationships_header']
directory = os.path.join(databases_directory, "SMPDB")
builder_utils.checkDirectory(directory)
for dataset in urls:
url = urls[dataset]
file_name = url.split('/')[-1]
if download:
builder_utils.downloadDB(url, directory)
zipped_file = os.path.join(directory, file_name)
with zipfile.ZipFile(zipped_file) as rf:
if dataset == "pathway":
entities = parsePathways(config, rf)
elif dataset == "protein":
relationships.update(parsePathwayProteinRelationships(rf))
elif dataset == "metabolite":
relationships.update(parsePathwayMetaboliteDrugRelationships(rf))
builder_utils.remove_directory(directory)
return entities, relationships, entities_header, relationships_headers
def parsePathways(config, fhandler):
entities = set()
url = config['linkout_url']
organism = 9606
for filename in fhandler.namelist():
if not os.path.isdir(filename):
with fhandler.open(filename) as f:
df = pd.read_csv(f, sep=',', error_bad_lines=False, low_memory=False)
for index, row in df.iterrows():
identifier = row[0]
name = row[1]
description = row[3]
linkout = url.replace("PATHWAY", identifier)
entities.add((identifier, "Pathway", name, description, organism, linkout, "SMPDB"))
return entities
def parsePathwayProteinRelationships(fhandler):
relationships = defaultdict(set)
loc = "unspecified"
evidence = "unspecified"
organism = 9606
for filename in fhandler.namelist():
if not os.path.isdir(filename):
with fhandler.open(filename) as f:
df = pd.read_csv(f, sep=',', error_bad_lines=False, low_memory=False)
for index, row in df.iterrows():
identifier = row[0]
protein = row[3]
if protein != '':
relationships[("protein", "annotated_to_pathway")].add((protein, identifier, "ANNOTATED_TO_PATHWAY", evidence, organism, loc, "SMPDB"))
return relationships
def parsePathwayMetaboliteDrugRelationships(fhandler):
relationships = defaultdict(set)
loc = "unspecified"
evidence = "unspecified"
organism = 9606
for filename in fhandler.namelist():
if not os.path.isdir(filename):
with fhandler.open(filename) as f:
df = pd.read_csv(f, sep=',', error_bad_lines=False, low_memory=False)
for index, row in df.iterrows():
identifier = row[0]
metabolite = row[5]
drug = row[8]
if metabolite != '':
relationships[("metabolite", "annotated_to_pathway")].add((metabolite, identifier, "ANNOTATED_TO_PATHWAY", evidence, organism, loc, "SMPDB"))
if drug != "":
relationships[("drug", "annotated_to_pathway")].add((drug, identifier, "ANNOTATED_TO_PATHWAY", evidence, organism, loc, "SMPDB"))
return relationships
| 39.819149
| 165
| 0.608068
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 478
| 0.127705
|
0b9b80c225b518a078b36396f1fbccc56916e124
| 738
|
py
|
Python
|
server/waitFramerate.py
|
mboerwinkle/RingGame
|
5a9b6a6ea394c1e88689fa062d4d348383ab406a
|
[
"MIT"
] | null | null | null |
server/waitFramerate.py
|
mboerwinkle/RingGame
|
5a9b6a6ea394c1e88689fa062d4d348383ab406a
|
[
"MIT"
] | null | null | null |
server/waitFramerate.py
|
mboerwinkle/RingGame
|
5a9b6a6ea394c1e88689fa062d4d348383ab406a
|
[
"MIT"
] | null | null | null |
import time
#Timing stuff
lastTime = None
prevFrameTime = 0;
def waitFramerate(T): #TODO if we have enough time, call the garbage collector
global lastTime, prevFrameTime
ctime = time.monotonic()
if lastTime:
frameTime = ctime-lastTime #how long the last frame took
sleepTime = T-frameTime #how much time is remaining in target framerate
if prevFrameTime > frameTime and prevFrameTime > 1.2*T:
print("Peak frame took "+str(prevFrameTime)[:5]+"/"+str(int(1.0/prevFrameTime))+" FPS (Target "+str(T)[:5]+")")
if(sleepTime <= 0): #we went overtime. set start of next frame to now, and continue
lastTime = ctime
else:
lastTime = lastTime+T
time.sleep(sleepTime)
prevFrameTime = frameTime
else:
lastTime = ctime
| 32.086957
| 114
| 0.720867
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 247
| 0.334688
|
0b9de1232f56d34e55746d5d53165b3e1ae67c6c
| 9,988
|
py
|
Python
|
shellbot/spaces/local.py
|
bernard357/shellbot
|
daf64fbab4085d1591bf9a1aecd06b4fc615d132
|
[
"Apache-2.0"
] | 11
|
2017-04-30T18:10:27.000Z
|
2021-11-07T16:59:29.000Z
|
shellbot/spaces/local.py
|
DataCraft-AI/shellbot
|
daf64fbab4085d1591bf9a1aecd06b4fc615d132
|
[
"Apache-2.0"
] | 38
|
2017-04-20T17:33:05.000Z
|
2017-11-10T20:19:07.000Z
|
shellbot/spaces/local.py
|
DataCraft-AI/shellbot
|
daf64fbab4085d1591bf9a1aecd06b4fc615d132
|
[
"Apache-2.0"
] | 3
|
2017-04-21T21:14:53.000Z
|
2021-07-27T22:01:21.000Z
|
# -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from multiprocessing import Process, Queue
import os
from six import string_types
import sys
import time
from shellbot.channel import Channel
from shellbot.events import Message
from shellbot.i18n import _
from .base import Space
class LocalSpace(Space):
"""
Handles chat locally
This class allows developers to test their commands interface
locally, without the need for a real API back-end.
If a list of commands is provided as input, then the space will consume
all of them and then it will stop. All kinds of automated tests and
scenarios can be build with this approach.
Example of automated interaction with some commands::
engine = Engine(command=Hello(), type='local')
engine.space.push(['help', 'hello', 'help help'])
engine.configure()
engine.run()
If no input is provided, then the space provides a command-line interface
so that you can play interactively with your bot. This setup is handy
since it does not require access to a real chat back-end.
"""
DEFAULT_PROMPT = u'> '
def on_init(self, input=None, **kwargs):
"""
Handles extended initialisation parameters
:param input: Lines of text to be submitted to the chat
:type input: str or list of str
Example::
space = LocalSpace(input='hello world')
Here we create a new local space, and simulate a user
typing 'hello world' in the chat space.
"""
self.input = []
self.push(input)
self.prompt = self.DEFAULT_PROMPT
self.participants = []
def on_start(self):
"""
Adds processing on engine start
"""
sys.stdout.write(_(u"Type 'help' for guidance, or Ctl-C to exit.")+'\n')
sys.stdout.flush()
def push(self, input):
"""
Adds more input to this space
:parameter input: Simulated user input
:type input: str or list of str
This function is used to simulate input user to the bot.
"""
if not input:
return
if isinstance(input, string_types):
input = [input]
self.input += input
def check(self):
"""
Check settings
This function reads key ``local`` and below, and update
the context accordingly.
This function also selects the right input for this local space.
If some content has been provided during initialisation, it is used
to simulate user input. Else stdin is read one line at a time.
"""
self.context.check('space.title',
_(u'Collaboration space'), filter=True)
self.context.check('space.participants',
'$CHANNEL_DEFAULT_PARTICIPANTS', filter=True)
self.context.set('server.binding', None) # no web server at all
if self.input:
def read_list():
for line in self.input:
sys.stdout.write(line+'\n')
sys.stdout.flush()
yield line
self._lines = read_list() # yield creates an iterator
else:
def read_stdin():
readline = sys.stdin.readline()
while readline:
yield readline.rstrip('\n')
readline = sys.stdin.readline()
self._lines = read_stdin() # yield creates an iterator
def list_group_channels(self, **kwargs):
"""
Lists available channels
:return: list of Channel
"""
attributes = {
'id': '*local',
'title': self.configured_title(),
}
return [Channel(attributes)]
def create(self, title, **kwargs):
"""
Creates a channel
:param title: title of a new channel
:type title: str
:return: Channel
This function returns a representation of the local channel.
"""
assert title
attributes = {
'id': '*local',
'title': title,
}
return Channel(attributes)
def get_by_title(self, title, **kwargs):
"""
Looks for an existing channel by title
:param title: title of the target channel
:type title: str
:return: Channel instance or None
"""
assert title
attributes = {
'id': '*local',
'title': title,
}
return Channel(attributes)
def get_by_id(self, id, **kwargs):
"""
Looks for an existing channel by id
:param id: identifier of the target channel
:type id: str
:return: Channel instance or None
"""
assert id
attributes = {
'id': id,
'title': self.configured_title(),
}
return Channel(attributes)
def update(self, channel, **kwargs):
"""
Updates an existing channel
:param channel: a representation of the updated channel
:type channel: Channel
"""
pass
def delete(self, id, **kwargs):
"""
Deletes a channel
:param id: the unique id of an existing channel
:type id: str
"""
pass
def list_participants(self, id):
"""
Lists participants to a channel
:param id: the unique id of an existing channel
:type id: str
:return: a list of persons
:rtype: list of str
Note: this function returns all participants, except the bot itself.
"""
assert id # target channel is required
return self.participants
def add_participant(self, id, person, is_moderator=False):
"""
Adds one participant
:param id: the unique id of an existing channel
:type id: str
:param person: e-mail address of the person to add
:type person: str
:param is_moderator: if this person has special powers on this channel
:type is_moderator: True or False
"""
assert id # target channel is required
assert person
assert is_moderator in (True, False)
self.participants.append(person)
def remove_participant(self, id, person):
"""
Removes one participant
:param id: the unique id of an existing channel
:type id: str
:param person: e-mail address of the person to remove
:type person: str
"""
assert id # target channel is required
assert person
self.participants.remove(person)
def walk_messages(self,
id=None,
**kwargs):
"""
Walk messages
:param id: the unique id of an existing channel
:type id: str
:return: a iterator of Message objects
This function returns messages from a channel, from the newest to
the oldest.
"""
return iter([])
def post_message(self,
id=None,
text=None,
content=None,
file=None,
person=None,
**kwargs):
"""
Posts a message
:param id: the unique id of an existing channel
:type id: str
:param person: address for a direct message
:type person: str
:param text: message in plain text
:type text: str
:param content: rich format, such as MArkdown or HTML
:type content: str
:param file: URL or local path for an attachment
:type file: str
"""
assert id or person # need a recipient
assert id is None or person is None # only one recipient
if content:
logging.debug(u"- rich content is not supported")
if file:
logging.debug(u"- file attachment is not supported")
sys.stdout.write(text+'\n')
sys.stdout.flush()
def pull(self):
"""
Fetches updates
This function senses most recent item, and pushes it
to the listening queue.
"""
sys.stdout.write(self.prompt)
sys.stdout.flush()
try:
line = next(self._lines)
self.on_message({'text': line}, self.ears)
except StopIteration:
sys.stdout.write(u'^C\n')
sys.stdout.flush()
time.sleep(1.0)
self.context.set('general.switch', 'off')
def on_message(self, item, queue):
"""
Normalizes message for the listener
:param item: attributes of the inbound message
:type item: dict
:param queue: the processing queue
:type queue: Queue
This function prepares a Message and push it to the provided queue.
"""
message = Message(item)
message.from_id = '*user'
message.mentioned_ids = [self.context.get('bot.id')]
message.channel_id = '*local'
logging.debug(u"- putting message to ears")
queue.put(str(message))
| 26.634667
| 80
| 0.580396
| 8,932
| 0.894273
| 1,264
| 0.126552
| 0
| 0
| 0
| 0
| 6,015
| 0.602223
|
0b9e17c3c6711c5899263cca3e86df88aba125ad
| 13,497
|
py
|
Python
|
src/warp/yul/AstTools.py
|
sambarnes/warp
|
f841afa22e665d5554587eaa866c4790698bfc22
|
[
"Apache-2.0"
] | 414
|
2021-07-17T13:06:55.000Z
|
2022-03-31T14:57:10.000Z
|
src/warp/yul/AstTools.py
|
sambarnes/warp
|
f841afa22e665d5554587eaa866c4790698bfc22
|
[
"Apache-2.0"
] | 78
|
2021-07-19T12:33:56.000Z
|
2022-03-29T17:16:27.000Z
|
src/warp/yul/AstTools.py
|
sambarnes/warp
|
f841afa22e665d5554587eaa866c4790698bfc22
|
[
"Apache-2.0"
] | 19
|
2021-08-18T03:55:54.000Z
|
2022-03-29T15:29:48.000Z
|
from __future__ import annotations
import re
from typing import Union
import warp.yul.ast as ast
from warp.yul.AstVisitor import AstVisitor
from warp.yul.WarpException import WarpException
class AstParser:
    """Parses the indented textual dump of a Yul AST back into ``ast`` nodes.

    The expected input format is one node per line, with nesting expressed
    by leading tab characters (see ``get_tabs``).  ``self.pos`` is the index
    of the next line to consume.
    """

    def __init__(self, text: str):
        self.lines = text.splitlines()
        if len(self.lines) == 0:
            raise WarpException("Text should not be empty")
        # Index of the next line to consume.
        self.pos = 0

    def parse_typed_name(self) -> ast.TypedName:
        """Parse a two-line ``TypedName:`` node ("name:type" on the child line)."""
        tabs = self.get_tabs()
        node_type_name = self.get_word(tabs)
        assert node_type_name == "TypedName:", "This node should be of type TypedNode"
        self.pos += 1
        assert self.get_tabs() == tabs + 1, "Wrong indentation"
        node_name, node_type = self.get_word(tabs + 1).split(":")
        self.pos += 1
        return ast.TypedName(name=node_name, type=node_type)

    def parse_literal(self) -> ast.Literal:
        """Parse a ``Literal:<value>`` line; integral values become ``int``."""
        tabs = self.get_tabs()
        assert self.get_word(tabs).startswith(
            "Literal:"
        ), "This node should be of type Literal"
        # 8 == len("Literal:"): slice off the tag to get the raw value text.
        value = self.get_word(tabs + 8)
        self.pos += 1
        try:
            value = int(value)
        except ValueError:
            # Not a number: keep the literal as its string form.
            pass
        return ast.Literal(value=value)

    def parse_identifier(self) -> ast.Identifier:
        """Parse an ``Identifier:<name>`` line."""
        tabs = self.get_tabs()
        assert self.get_word(tabs).startswith(
            "Identifier:"
        ), "This node should be of type Identifier"
        # 11 == len("Identifier:").
        name = self.get_word(tabs + 11)
        self.pos += 1
        return ast.Identifier(name=name)

    def parse_assignment(self) -> ast.Assignment:
        """Parse ``Assignment:`` with a ``Variables:`` list and a ``Value:``."""
        tabs = self.get_tabs()
        assert (
            self.get_word(tabs) == "Assignment:"
        ), "This node should be of type Assignment"
        self.pos += 1
        assert self.get_word(tabs + 1) == "Variables:"
        self.pos += 1
        variables_list = self.parse_list(tabs + 1, self.parse_identifier)
        assert self.get_word(tabs + 1) == "Value:"
        self.pos += 1
        return ast.Assignment(
            variable_names=variables_list, value=self.parse_expression()
        )

    def parse_function_call(self) -> ast.FunctionCall:
        """Parse ``FunctionCall:``: a callee identifier then argument expressions."""
        tabs = self.get_tabs()
        assert (
            self.get_word(tabs) == "FunctionCall:"
        ), "This node should be of type FunctionCall"
        self.pos += 1
        return ast.FunctionCall(
            function_name=self.parse_identifier(),
            arguments=self.parse_list(tabs, self.parse_expression),
        )

    def parse_expression_statement(self) -> ast.Statement:
        """Parse ``ExpressionStatement:`` wrapping a single expression."""
        tabs = self.get_tabs()
        assert (
            self.get_word(tabs) == "ExpressionStatement:"
        ), "This node should be of type ExpressionStatement"
        self.pos += 1
        return ast.ExpressionStatement(expression=self.parse_expression())

    def parse_variable_declaration(self) -> ast.VariableDeclaration:
        """Parse ``VariableDeclaration:``; the ``Value`` child may be ``None``."""
        tabs = self.get_tabs()
        assert (
            self.get_word(tabs) == "VariableDeclaration:"
        ), "This node should be of type VariableDeclaration"
        self.pos += 1
        assert self.get_tabs() == tabs + 1
        assert self.get_word(tabs + 1) == "Variables:"
        self.pos += 1
        variables = self.parse_list(tabs + 1, self.parse_typed_name)
        assert self.get_tabs() == tabs + 1
        word = self.get_word(tabs + 1)
        self.pos += 1
        assert word.startswith("Value")
        # "Value = None" (or similar) means a declaration without initializer.
        if word.endswith("None"):
            value = None
        else:
            value = self.parse_expression()
        return ast.VariableDeclaration(variables=variables, value=value)

    def parse_block(self) -> ast.Block:
        """Parse ``Block:`` followed by its (possibly empty) statement list."""
        tabs = self.get_tabs()
        assert self.get_word(tabs) == "Block:", "This node should be of type Block"
        self.pos += 1
        return ast.Block(statements=tuple(self.parse_list(tabs, self.parse_statement)))

    def parse_function_definition(self) -> ast.FunctionDefinition:
        """Parse ``FunctionDefinition:`` with name, params, returns and body."""
        tabs = self.get_tabs()
        assert (
            self.get_word(tabs) == "FunctionDefinition:"
        ), "This node should be of type FunctionDefinition"
        self.pos += 1
        assert self.get_tabs() == tabs + 1 and self.get_word(tabs + 1).startswith(
            "Name:"
        )
        # tabs + 7 == (tabs + 1) tab chars + 6 — presumably len("Name: "); confirm
        # against the dump format if this ever misparses names.
        fun_name = self.get_word(tabs + 7)
        self.pos += 1
        assert self.get_tabs() == tabs + 1 and self.get_word(tabs + 1) == "Parameters:"
        self.pos += 1
        params = self.parse_list(tabs + 1, self.parse_typed_name)
        assert (
            self.get_tabs() == tabs + 1
            and self.get_word(tabs + 1) == "Return Variables:"
        )
        self.pos += 1
        returns = self.parse_list(tabs + 1, self.parse_typed_name)
        assert self.get_tabs() == tabs + 1 and self.get_word(tabs + 1) == "Body:"
        self.pos += 1
        body = self.parse_block()
        return ast.FunctionDefinition(
            name=fun_name, parameters=params, return_variables=returns, body=body
        )

    def parse_if(self) -> ast.If:
        """Parse ``If:``: condition, body, and an optional else block."""
        tabs = self.get_tabs()
        assert self.get_word(tabs) == "If:", "This node should be of type If"
        self.pos += 1
        condition = self.parse_expression()
        body = self.parse_block()
        else_body = None
        # A deeper-indented block after the body is the else branch.
        if self.get_tabs() > tabs:
            else_body = self.parse_block()
        return ast.If(condition=condition, body=body, else_body=else_body)

    def parse_case(self) -> ast.Case:
        """Parse ``Case:``; value is a literal, or ``None`` for the default case."""
        tabs = self.get_tabs()
        assert self.get_word(tabs) == "Case:", "This node should be of type Case"
        self.pos += 1
        try:
            value = self.parse_literal()
        except AssertionError:
            # Not a literal: must be the "Default" marker line.
            assert (
                self.get_tabs() == tabs + 1 and self.get_word(tabs + 1) == "Default"
            ), "The value must be a literal or None (when it's the default case)"
            value = None
            self.pos += 1
        return ast.Case(value=value, body=self.parse_block())

    def parse_switch(self) -> ast.Switch:
        """Parse ``Switch:``: the scrutinee expression then its cases."""
        tabs = self.get_tabs()
        assert self.get_word(tabs) == "Switch:", "This node should be of type Switch"
        self.pos += 1
        return ast.Switch(
            expression=self.parse_expression(),
            cases=self.parse_list(tabs, self.parse_case),
        )

    def parse_for_loop(self) -> ast.ForLoop:
        """Parse ``ForLoop:``: pre-block, condition, post-block, body — in order."""
        tabs = self.get_tabs()
        assert self.get_word(tabs) == "ForLoop:", "This node should be of type ForLoop"
        self.pos += 1
        return ast.ForLoop(
            pre=self.parse_block(),
            condition=self.parse_expression(),
            post=self.parse_block(),
            body=self.parse_block(),
        )

    def parse_break(self) -> ast.Break:
        """Parse the single-line ``Break`` node."""
        tabs = self.get_tabs()
        assert self.get_word(tabs) == "Break", "This node should be of type Break"
        self.pos += 1
        return ast.Break()

    def parse_continue(self) -> ast.Continue:
        """Parse the single-line ``Continue`` node."""
        tabs = self.get_tabs()
        assert self.get_word(tabs) == "Continue", "This node should be of type Continue"
        self.pos += 1
        return ast.Continue()

    def parse_leave(self) -> ast.Leave:
        """Parse the single-line ``Leave`` node (returns the shared singleton)."""
        tabs = self.get_tabs()
        assert self.get_word(tabs) == "Leave", "This node should be of type Leave"
        self.pos += 1
        return ast.LEAVE

    def parse_node(self) -> ast.Node:
        """Dispatch on the node tag to the matching ``parse_<snake_case>`` method."""
        tabs = self.get_tabs()
        node_type_name = self.get_word(tabs).split(":")[0]
        parser_name = f"parse_{self.get_name(node_type_name)}"
        parser = getattr(self, parser_name, None)
        if parser is None:
            raise WarpException("Wrong node type name!")
        return parser()

    def parse_statement(self) -> ast.Statement:
        """Parse a node after checking its tag is one of the statement kinds."""
        statements = [
            "ExpressionStatement",
            "Assignment",
            "VariableDeclaration",
            "FunctionDefinition",
            "If",
            "Switch",
            "ForLoop",
            "Break",
            "Continue",
            "Leave",
            "Block",
        ]
        tabs = self.get_tabs()
        node_type_name = self.get_word(tabs).split(":")[0]
        assert node_type_name in statements, "Not a valid statement"
        return ast.assert_statement(self.parse_node())

    def parse_expression(self) -> ast.Expression:
        """Parse a node after checking its tag is one of the expression kinds."""
        tabs = self.get_tabs()
        node_type_name = self.get_word(tabs).split(":")[0]
        assert node_type_name in [
            "Literal",
            "Identifier",
            "FunctionCall",
        ], "Node type must be an expression"
        return ast.assert_expression(self.parse_node())

    def parse_list(self, tabs, parser):
        """Repeatedly apply *parser* while lines stay indented deeper than *tabs*."""
        items = []
        while self.pos < len(self.lines) and self.get_tabs() > tabs:
            item = parser()
            items.append(item)
        return items

    def get_tabs(self):
        """Count the leading tabs of the current line (0 once input is exhausted).

        Raises ``WarpException`` if a line consists solely of tab characters.
        """
        tabs = 0
        if self.pos < len(self.lines):
            for c in self.lines[self.pos]:
                if not c == "\t":
                    break
                tabs += 1
            else:
                raise WarpException(
                    "Lines are not supposed to be filled only with tabs"
                )
        return tabs

    def get_word(self, start: int) -> str:
        """Return the current line from column *start* onward."""
        return self.lines[self.pos][start:]

    def get_name(self, name):
        """Convert a CamelCase tag to snake_case for ``parse_node`` dispatch."""
        name = "_".join(re.findall("[A-Z][^A-Z]*", name))
        return name.lower()
class YulPrinter(AstVisitor):
    """Renders a Yul AST back into Yul source text.

    Every visit method returns a string; ``tabs`` is the indentation depth
    (in tab characters) of the enclosing block.
    """

    def format(self, node: ast.Node, tabs: int = 0) -> str:
        """Entry point: pretty-print *node* at indentation depth *tabs*."""
        return self.visit(node, tabs)

    def visit_typed_name(self, node: ast.TypedName, tabs: int = 0) -> str:
        # Types are dropped on output; only the name is printed.
        return f"{node.name}"

    def visit_literal(self, node: ast.Literal, tabs: int = 0) -> str:
        return f"{node.value}"

    def visit_identifier(self, node: ast.Identifier, tabs: int = 0) -> str:
        return f"{node.name}"

    def visit_assignment(self, node: ast.Assignment, tabs: int = 0) -> str:
        """Print ``a, b := value``."""
        variables = ", ".join(self.visit_list(node.variable_names))
        value = self.visit(node.value, 0)
        return f"{variables} := {value}"

    def visit_function_call(self, node: ast.FunctionCall, tabs: int = 0) -> str:
        """Print ``name(arg1, arg2, ...)``."""
        name = self.visit(node.function_name)
        args = ", ".join(self.visit_list(node.arguments))
        return f"{name}({args})"

    def visit_expression_statement(
        self, node: ast.ExpressionStatement, tabs: int = 0
    ) -> str:
        return self.visit(node.expression, tabs)

    def visit_variable_declaration(
        self, node: ast.VariableDeclaration, tabs: int = 0
    ) -> str:
        """Print ``let a, b`` with an optional `` := value`` initializer."""
        variables = ", ".join(self.visit_list(node.variables))
        value = ""
        if node.value is not None:
            value = f" := {self.visit(node.value)}"
        return f"let {variables}{value}"

    def visit_block(self, node: ast.Block, tabs: int = 0) -> str:
        """Print a block: one line when trivially short, otherwise indented."""
        open_block = "{"
        close_block = "}"
        if self.is_short(node.statements):
            statements = "".join(self.visit_list(node.statements))
            return " ".join([open_block, statements, close_block])
        statements = self.visit_list(node.statements, tabs + 1)
        statements = ["\t" * (tabs + 1) + stmt for stmt in statements]
        statements = "\n".join(statements)
        close_block = "\t" * tabs + close_block
        res = "\n".join([open_block, statements, close_block])
        return res

    def visit_function_definition(
        self, node: ast.FunctionDefinition, tabs: int = 0
    ) -> str:
        """Print ``function name(params) -> rets { body }`` (arrow only if needed)."""
        parameters = ", ".join(self.visit_list(node.parameters, 0))
        ret_vars = ", ".join(self.visit_list(node.return_variables, 0))
        body = self.visit(node.body, tabs)
        res = f"function {node.name}({parameters})"
        if len(node.return_variables) > 0:
            res += f" -> {ret_vars}"
        res += f" {body}"
        return res

    def visit_if(self, node: ast.If, tabs: int = 0) -> str:
        """Print ``if cond { ... }`` with an optional else block on a new line."""
        res = f"if {self.visit(node.condition)} "
        res += self.visit(node.body, tabs)
        if node.else_body is not None:
            res += "\n" + "\t" * tabs + "else "
            res += self.visit(node.else_body, tabs)
        return res

    def visit_case(self, node: ast.Case, tabs: int = 0) -> str:
        """Print ``case <literal> { ... }`` or ``default { ... }``."""
        res = "\t" * tabs
        if node.value is not None:
            res += f"case {self.visit(node.value)} "
        else:
            res += "default "
        res += self.visit(node.body, tabs)
        return res

    def visit_switch(self, node: ast.Switch, tabs: int = 0) -> str:
        """Print the switch header followed by one case per line."""
        res = f"switch {self.visit(node.expression)}\n"
        res += "\n".join(self.visit_list(node.cases, tabs))
        return res

    def visit_for_loop(self, node: ast.ForLoop, tabs: int = 0) -> str:
        """Print ``for <pre> cond <post>`` with the body on the next line."""
        res = "for "
        res += self.visit(node.pre, tabs)
        res += f" {self.visit(node.condition)} "
        res += self.visit(node.post, tabs)
        res += f"\n{self.visit(node.body, tabs)}"
        return res

    def visit_break(self, node: ast.Break, tabs: int = 0) -> str:
        return "break"

    def visit_continue(self, node: ast.Continue, tabs: int = 0) -> str:
        return "continue"

    def visit_leave(self, node: ast.Leave, tabs: int = 0) -> str:
        return "leave"

    def is_short(self, stmts: tuple) -> bool:
        """True when *stmts* can be printed inline: empty, or one simple statement."""
        if len(stmts) == 0:
            return True
        return len(stmts) == 1 and type(stmts[0]).__name__ not in [
            "Block",
            "FunctionDefinition",
            "If",
            "Switch",
            "ForLoop",
        ]
| 32.601449
| 88
| 0.572127
| 13,300
| 0.985404
| 0
| 0
| 0
| 0
| 0
| 0
| 1,934
| 0.143291
|
0b9fa6b8eac70139650145aa00e7cb7eb8455c1b
| 5,911
|
py
|
Python
|
srfnef/tools/doc_gen/doc_generator.py
|
twj2417/srf
|
63365cfd75199d70eea2273214a4fa580a9fdf2a
|
[
"Apache-2.0"
] | null | null | null |
srfnef/tools/doc_gen/doc_generator.py
|
twj2417/srf
|
63365cfd75199d70eea2273214a4fa580a9fdf2a
|
[
"Apache-2.0"
] | null | null | null |
srfnef/tools/doc_gen/doc_generator.py
|
twj2417/srf
|
63365cfd75199d70eea2273214a4fa580a9fdf2a
|
[
"Apache-2.0"
] | null | null | null |
# encoding: utf-8
'''
@author: Minghao Guo
@contact: mh.guo0111@gmail.com
@software: basenef
@file: doc_generator.py
@date: 4/13/2019
@desc:
'''
import os
import sys
import time
from getpass import getuser
import matplotlib
import numpy as np
import json
from srfnef import Image, MlemFull
matplotlib.use('Agg')
author = getuser()
def title_block_gen():
    """Return the Markdown title block for an auto-generated report.

    The block embeds the current local date/time, the module-level
    ``author`` (login name captured at import time), the platform, and
    the OS language.  Returns the block as a single string.
    """
    timestamp = time.time()
    datetime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(int(timestamp)))
    # Use .get() so a missing LANG variable (common on Windows / minimal
    # containers) degrades to 'unknown' instead of raising KeyError.
    title_block = f'''
# NEF AutoDoc {datetime}
- Author: {author}
- Generation time: {datetime}
- Operation system: {sys.platform}
- OS language: {os.environ.get('LANG', 'unknown')}
- Duration: 0.0 sec
- Total errors: 0
- Total warning: 0
- Description:
'''
    return title_block
def _text_gen_as_table(dct: dict = {}):
out_text = ['|key|values|\n|:---|:---|\n']
for key, val in dct.items():
if key == 'data':
out_text.append(f"| {key} | Ignored |\n")
elif not isinstance(val, dict):
if isinstance(val, str) and len(val) > 30:
out_text.append(f"| {key} | Ignored |\n")
else:
out_text.append(f"| {key} | {val} |\n")
else:
out_text.append(f"| {key} | {'Ignored'} |\n")
return out_text
def json_block_gen(dct: dict = None):
    """Return a Markdown section embedding the recon config as fenced JSON.

    *dct* is either a config dict or its JSON string form; it must contain
    ``image_config`` (with a ``size`` entry) and ``emap`` keys.  NOTE: the
    rounding below mutates a dict argument in place.
    """
    # Avoid the mutable-default-argument pitfall: default to a fresh dict.
    if dct is None:
        dct = {}
    if isinstance(dct, str):
        dct = json.loads(dct)
    # Round the geometry sizes so the JSON dump stays readable.
    dct['image_config']['size'] = np.round(dct['image_config']['size'], decimals = 3).tolist()
    if dct['emap'] is not None:
        dct['emap']['size'] = np.round(dct['emap']['size'], decimals = 3).tolist()
    json_str = json.dumps(dct, indent = 4)
    out_text = "## RECON JSON\n"
    out_text += "```javascript\n"
    out_text += json_str + '\n'
    out_text += "```\n"
    return out_text
def image_block_gen(img: Image, path: str):
    """Plot central slices and profiles of *img* and save a PNG under *path*.

    *img* is assumed to be a 3-D volume (indexed on three axes below —
    TODO confirm axis meaning against the Image type).  The figure is
    written to ``<path>/out_img<timestamp>.png``.
    """
    print('Generating text blocks...')
    from matplotlib import pyplot as plt
    # Clip the display range at the 99.99th percentile to suppress hot voxels.
    vmax = np.percentile(img.data, 99.99)
    # Central index along each of the three axes.
    midind = [int(img.shape[i] / 2) for i in range(3)]
    plt.figure(figsize = (30, 10))
    # Top row: the three orthogonal central slices.
    plt.subplot(231)
    plt.imshow(img.data[midind[0], :, :], vmax = vmax)
    plt.subplot(232)
    plt.imshow(img.data[:, midind[1], :].transpose(), vmax = vmax)
    plt.subplot(233)
    plt.imshow(img.data[:, :, midind[2]].transpose(), vmax = vmax)
    # Bottom row: 1-D profiles through the volume centre along each axis.
    plt.subplot(234)
    plt.plot(img.data[midind[0], midind[1], :])
    plt.subplot(235)
    plt.plot(img.data[midind[0], :, midind[2]])
    plt.subplot(236)
    plt.plot(img.data[:, midind[1], midind[2]])
    timestamp = time.time()
    datetime_str = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime(int(timestamp)))
    plt.savefig(path + f'/out_img{datetime_str}.png')
    # NOTE(review): this f-string has no placeholders and returns just "\n".
    # It looks like a Markdown image link to the saved PNG was intended here
    # (doc_gen concatenates this return value into the report) — confirm.
    out_text = f'\n'
    return out_text
def statistic_block_gen(dct: dict = None):
    """Render ``{name: {key: value}}`` as a Markdown table, one row per name.

    Only keys whose values are short strings (< 30 chars) become columns;
    a name missing a column's key shows ``-``.  Column order follows set
    iteration order and is therefore not guaranteed stable across runs.
    Returns a list of Markdown lines (header, separator, then rows).
    """
    # Avoid the mutable-default-argument pitfall: default to a fresh dict.
    if dct is None:
        dct = {}
    out_text = []
    # Union of the short-string keys across all rows.
    key_set = set()
    for name, sub_dct in dct.items():
        for key, val in sub_dct.items():
            if isinstance(val, str) and len(val) < 30:
                key_set.add(key)
    # Build the header and separator in one pass (the original used a
    # confusing no-break for/else here; this is the straightforward form).
    header = '|name '
    separator = '|:---'
    for key in key_set:
        header += '|' + key
        separator += '|:---'
    header += '|\n'
    separator += '|\n'
    out_text += [header, separator]
    for name, sub_dct in dct.items():
        row = '| ' + name + ' '
        for key in key_set:
            if key in sub_dct:
                row += '|' + str(sub_dct[key])
            else:
                row += '|-'
        row += '|\n'
        out_text += [row]
    return out_text
def metric_block_gen(mask: np.ndarray, img: Image):
    """Compute image-quality metrics of *img* over the ROI *mask*.

    Returns a Markdown section embedding all metrics as fenced JSON.
    The metric semantics live in ``srfnef.image_metric``.
    """
    from srfnef import image_metric as metric
    dct = {}
    # Per-ROI contrast metrics: dict of ROI-index -> value.
    dct.update(
        contrast_hot = {str(ind_): float(val_) for ind_, val_ in metric.contrast_hot(mask, img)})
    dct.update(
        contrast_cold = {str(ind_): float(val_) for ind_, val_ in metric.contrast_cold(mask, img)})
    # Scalar metrics (two variants where the package defines both).
    dct.update(contrast_noise_ratio1 = metric.cnr1(mask, img))
    dct.update(contrast_noise_ratio2 = metric.cnr2(mask, img))
    dct.update(contrast_recovery_coefficiency1 = metric.crc1(mask, img))
    dct.update(contrast_recovery_coefficiency2 = metric.crc2(mask, img))
    dct.update(standard_error = metric.standard_error(mask, img))
    dct.update(normalized_standard_error = metric.nsd(mask, img))
    dct.update(standard_deviation = metric.sd(mask, img))
    dct.update(background_visibility = metric.bg_visibility(mask, img))
    dct.update(noise1 = metric.noise1(mask, img))
    dct.update(noise2 = metric.noise2(mask, img))
    dct.update(signal_noise_ratio1 = metric.snr1(mask, img))
    dct.update(signal_noise_ratio2 = metric.snr2(mask, img))
    dct.update(positive_deviation = metric.pos_dev(mask, img))
    # Cast scalars (possibly numpy floats) to plain float so json.dumps works.
    for ind, val in dct.items():
        if not isinstance(val, dict):
            dct[ind] = float(val)
    json_str = json.dumps(dct, indent = 4)
    out_text = "## IMAGE METRIC JSON\n"
    out_text += "```javascript\n"
    out_text += json_str + '\n'
    out_text += "```\n"
    return out_text
def doc_gen(mlem_obj: MlemFull, img: Image, path: str, filename: str = None,
            mask: np.ndarray = None):
    """Assemble and write the Markdown report for one reconstruction run.

    Concatenates title, image, recon-config JSON and (when *mask* is
    given) the metric section, then writes the result to *filename*
    (auto-timestamped when omitted).  *mask* may be an ndarray or a path
    to a ``.npy`` file.  Returns the file name written.
    """
    timestamp = time.time()
    datetime_str = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime(int(timestamp)))
    if filename is None:
        filename = 'doc_gen-' + datetime_str + '.md'
    out_text = title_block_gen()
    out_text += image_block_gen(img, path)
    out_text += json_block_gen(mlem_obj.asdict(recurse = True))
    if mask is not None:
        if isinstance(mask, str):
            # Allow passing the mask as a .npy file path.
            mask = np.load(mask)
        out_text += metric_block_gen(mask, img)
    # out_text += statistic_block_gen(dct)
    with open(filename, 'w') as fout:
        fout.writelines(out_text)
    # PDF conversion is disabled; re-enable if pandoc is available.
    # print('Converting MD to PDF...')
    # import pypandoc
    # print(filename)
    # pypandoc.convert_file(filename, 'pdf', outputfile = filename + '.pdf')
    return filename
| 31.110526
| 99
| 0.607511
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,102
| 0.186432
|
0b9ff21662fde4d991d952e6a81287147181af9f
| 760
|
py
|
Python
|
prep_scripts/0_join_data.py
|
linas-p/EVDPEP
|
2062e20ef784a76eebaf71ebbe4f9006cde5bbd5
|
[
"CC0-1.0"
] | 5
|
2021-10-05T14:02:52.000Z
|
2021-11-23T07:59:06.000Z
|
prep_scripts/0_join_data.py
|
patrickiswwgp/EVDPEP
|
2062e20ef784a76eebaf71ebbe4f9006cde5bbd5
|
[
"CC0-1.0"
] | 1
|
2021-10-31T14:41:48.000Z
|
2021-10-31T16:23:45.000Z
|
prep_scripts/0_join_data.py
|
patrickiswwgp/EVDPEP
|
2062e20ef784a76eebaf71ebbe4f9006cde5bbd5
|
[
"CC0-1.0"
] | 3
|
2021-11-23T07:59:17.000Z
|
2022-03-31T09:09:03.000Z
|
# Join raw EV trip records with weather and OSM road-segment attributes,
# writing the merged table back under DATA_PATH.
import pandas as pd
import numpy as np  # NOTE(review): imported but unused in this script.
DATA_PATH = "./data/EVconsumption/"
# Dimension tables: weather measurements and the OSM road-network snapshot.
weather = pd.read_csv(DATA_PATH + "dimweathermeasure.csv", sep = "|")
osm = pd.read_csv(DATA_PATH + "osm_dk_20140101.csv", sep = "|")
# Three Viterbi map-matched trip extracts, stacked into one frame.
data0 = pd.read_csv(DATA_PATH + "2020_11_25_aal_viterbi.csv", sep = ",")
data1 = pd.read_csv(DATA_PATH + "2021_04_06_aal_north_viterbi.csv", sep = ",")
data2 = pd.read_csv(DATA_PATH + "2021_04_06_aal_south_viterbi.csv", sep = ",")
data = pd.concat([data0, data1, data2], axis=0)
data = data.drop_duplicates()
# Left-join weather on (weathermeasurekey, datekey), then OSM on segmentkey,
# keeping every trip row even when no match exists.
result = pd.merge(data, weather, how="left", on=["weathermeasurekey", "datekey"])
result = pd.merge(result, osm, how="left", on=["segmentkey"])
result.to_csv(DATA_PATH + "data_0_joined_data.csv")
print("Results {}".format(result.shape))
| 38
| 81
| 0.711842
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 266
| 0.35
|
0ba17e31d084e4b9249ccb1a58a413d758400527
| 964
|
py
|
Python
|
csuibot/utils/kbbi.py
|
chadmadna/CSUIBot
|
a6f54639c256a3c86a9aa7c3fc094e69ce96a1b8
|
[
"Apache-2.0"
] | null | null | null |
csuibot/utils/kbbi.py
|
chadmadna/CSUIBot
|
a6f54639c256a3c86a9aa7c3fc094e69ce96a1b8
|
[
"Apache-2.0"
] | null | null | null |
csuibot/utils/kbbi.py
|
chadmadna/CSUIBot
|
a6f54639c256a3c86a9aa7c3fc094e69ce96a1b8
|
[
"Apache-2.0"
] | null | null | null |
import requests
import json
class WordDefinition:
    """Looks up Indonesian word definitions via the kateglo.com API."""

    def __init__(self, word):
        # Word to look up; the formatted result is cached on the instance.
        self.word = word
        self.definisi = None
        self.json_data = None

    def url_data(self):
        """Fetch the raw JSON payload for ``self.word``.

        Returns the decoded JSON dict, or an error string when the
        response body is not valid JSON (e.g. an unknown word).
        """
        api_url = 'http://kateglo.com/api.php'
        r = requests.get(api_url, params={
            'format': 'json', 'phrase': self.word})
        try:
            self.json_data = r.json()
            return self.json_data
        # NOTE(review): newer requests versions raise
        # requests.exceptions.JSONDecodeError (a ValueError subclass) here;
        # this catch works only while it also subclasses json.JSONDecodeError.
        except json.decoder.JSONDecodeError:
            return 'Oooopss, It looks like you type the wrong word!'

    @staticmethod
    def format_def(data):
        """Join definition entries into numbered lines: "(1)...", "(2)..."."""
        # enumerate instead of range(len(...)): same output, clearer intent.
        def_texts = ['({}){}'.format(i + 1, entry['def_text'])
                     for i, entry in enumerate(data)]
        return '\n'.join(def_texts)

    def definition(self):
        """Return the formatted definitions, or the API error string."""
        try:
            all_definisi = self.url_data()["kateglo"]["definition"]
            self.definisi = self.format_def(all_definisi)
            return self.definisi
        except TypeError:
            # url_data() returned the error string; indexing it with
            # ["kateglo"] raises TypeError, so re-fetch and pass it through.
            return self.url_data()
| 28.352941
| 89
| 0.580913
| 933
| 0.967842
| 0
| 0
| 165
| 0.171162
| 0
| 0
| 142
| 0.147303
|
0ba1dc47fec515daa7ce78ab6cbd344fd812af6f
| 113,179
|
py
|
Python
|
networking_vsphere/tests/unit/agent/test_ovsvapp_agent.py
|
Mirantis/vmware-dvs
|
37b874f9bf40b47d0de231c640367275fb3afb9b
|
[
"Apache-2.0"
] | 8
|
2015-04-23T15:36:56.000Z
|
2019-03-06T13:23:28.000Z
|
networking_vsphere/tests/unit/agent/test_ovsvapp_agent.py
|
Mirantis/vmware-dvs
|
37b874f9bf40b47d0de231c640367275fb3afb9b
|
[
"Apache-2.0"
] | 1
|
2016-10-04T13:24:50.000Z
|
2016-10-04T13:24:50.000Z
|
networking_vsphere/tests/unit/agent/test_ovsvapp_agent.py
|
Mirantis/vmware-dvs
|
37b874f9bf40b47d0de231c640367275fb3afb9b
|
[
"Apache-2.0"
] | 19
|
2015-09-15T13:25:01.000Z
|
2019-09-03T08:23:21.000Z
|
# Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import time
import logging
from oslo_config import cfg
from networking_vsphere.agent import ovsvapp_agent
from networking_vsphere.common import constants as ovsvapp_const
from networking_vsphere.common import error
from networking_vsphere.tests import base
from networking_vsphere.tests.unit.drivers import fake_manager
from networking_vsphere.utils import resource_util
from neutron.agent.common import ovs_lib
from neutron.common import utils as n_utils
from neutron.plugins.common import constants as p_const
from neutron.plugins.common import utils as p_utils
from neutron.plugins.ml2.drivers.openvswitch.agent import ovs_neutron_agent as ovs_agent # noqa
from neutron.plugins.ml2.drivers.openvswitch.agent import vlanmanager
NETWORK_ID = 'fake_net_id'
VNIC_ADDED = 'VNIC_ADDED'
FAKE_DEVICE_ID = 'fake_device_id'
FAKE_VM = 'fake_vm'
FAKE_HOST_1 = 'fake_host_1'
FAKE_HOST_2 = 'fake_host_2'
FAKE_CLUSTER_MOID = 'fake_cluster_moid'
FAKE_CLUSTER_1 = 'fake_cluster_1'
FAKE_CLUSTER_2 = 'fake_cluster_2'
FAKE_VCENTER = 'fake_vcenter'
FAKE_PORT_1 = 'fake_port_1'
FAKE_PORT_2 = 'fake_port_2'
FAKE_PORT_3 = 'fake_port_3'
FAKE_PORT_4 = 'fake_port_4'
MAC_ADDRESS = '01:02:03:04:05:06'
FAKE_CONTEXT = 'fake_context'
FAKE_SG = {'fake_sg': 'fake_sg_rule'}
FAKE_SG_RULE = {'security_group_source_groups': ['fake_rule_1',
'fake_rule_2',
'fake_rule_3'],
'security_group_rules': [
{'ethertype': 'IPv4',
'direction': 'egress',
'security_group_id': 'fake_id'
}],
'sg_provider_rules': [
{'ethertype': 'IPv4',
'direction': 'egress',
'source_port_range_min': 67,
'source_port_range_max': 67,
'port_range_min': 68,
'port_range_max': 68
}]
}
FAKE_SG_RULES = {FAKE_PORT_1: FAKE_SG_RULE}
FAKE_SG_RULES_MULTI_PORTS = {FAKE_PORT_1: FAKE_SG_RULE,
FAKE_PORT_2: FAKE_SG_RULE
}
FAKE_SG_RULES_MISSING = {FAKE_PORT_1: {'security_group_source_groups': [
'fake_rule_1',
'fake_rule_2',
'fake_rule_3'],
'sg_provider_rules': [],
'security_group_rules': [
{'ethertype': 'IPv4',
'direction': 'egress'
}]
}
}
FAKE_SG_RULES_PARTIAL = {FAKE_PORT_1: {'security_group_source_groups': [
'fake_rule_1',
'fake_rule_2',
'fake_rule_3'],
'sg_provider_rules': [],
'security_group_rules': [
{'ethertype': 'IPv4',
'direction': 'egress',
'port_range_min': 22,
'port_range_max': 22
}]
}
}
DEVICE = {'id': FAKE_DEVICE_ID,
'cluster_id': FAKE_CLUSTER_1,
'host': FAKE_HOST_1,
'vcenter': FAKE_VCENTER}
class SampleEvent(object):
    """Test double mimicking an OVSvApp driver event object."""

    def __init__(self, type, host, cluster, srcobj, host_changed=False):
        # Store the event description verbatim; no validation, test-only.
        self.src_obj = srcobj
        self.cluster_id = cluster
        self.host_name = host
        self.event_type = type
        self.host_changed = host_changed
class VM(object):
    """Minimal stand-in for a virtual machine: a UUID plus its vNICs."""

    def __init__(self, uuid, vnics):
        self.vnics = vnics
        self.uuid = uuid
class SamplePort(object):
    """Test double for a port: UUID plus optional MAC and portgroup id."""

    def __init__(self, port_uuid, mac_address=None, pg_id=None):
        self.pg_id = pg_id
        self.mac_address = mac_address
        self.port_uuid = port_uuid
class SamplePortUIDMac(object):
    """Test double pairing a port UUID with its MAC address."""

    def __init__(self, port_uuid, mac_address):
        self.mac_address = mac_address
        self.port_uuid = port_uuid
class TestOVSvAppAgentRestart(base.TestCase):
    """Tests for OVSvAppAgent.check_ovsvapp_agent_restart().

    setUp patches every external dependency touched while constructing
    an OVSvAppAgent (including __init__ itself) so the agent can be
    built without live OVS/neutron services.
    """

    @mock.patch('neutron.common.config.init')
    @mock.patch('neutron.common.config.setup_logging')
    @mock.patch('neutron.agent.ovsdb.api.'
                'API.get')
    @mock.patch('networking_vsphere.agent.ovsvapp_agent.RpcPluginApi')
    @mock.patch('neutron.agent.securitygroups_rpc.SecurityGroupServerRpcApi')
    @mock.patch('neutron.agent.rpc.PluginReportStateAPI')
    @mock.patch('networking_vsphere.agent.ovsvapp_agent.OVSvAppPluginApi')
    @mock.patch('neutron.context.get_admin_context_without_session')
    @mock.patch('neutron.agent.rpc.create_consumers')
    @mock.patch('neutron.plugins.ml2.drivers.openvswitch.agent.'
                'ovs_neutron_agent.OVSNeutronAgent.setup_integration_br')
    @mock.patch('networking_vsphere.agent.ovsvapp_agent.'
                'OVSvAppAgent.setup_ovs_bridges')
    @mock.patch('networking_vsphere.agent.ovsvapp_agent.'
                'OVSvAppAgent.setup_security_br')
    @mock.patch('networking_vsphere.agent.ovsvapp_agent.'
                'OVSvAppAgent._init_ovs_flows')
    @mock.patch('networking_vsphere.drivers.ovs_firewall.OVSFirewallDriver.'
                'check_ovs_firewall_restart')
    @mock.patch('networking_vsphere.drivers.ovs_firewall.'
                'OVSFirewallDriver.setup_base_flows')
    @mock.patch('neutron.agent.common.ovs_lib.OVSBridge.create')
    @mock.patch('neutron.agent.common.ovs_lib.OVSBridge.set_secure_mode')
    @mock.patch('neutron.agent.common.ovs_lib.OVSBridge.get_port_ofport')
    @mock.patch('networking_vsphere.agent.ovsvapp_agent.OVSvAppAgent.__init__')
    def setUp(self, mock_ovs_init, mock_get_port_ofport,
              mock_set_secure_mode, mock_create_ovs_bridge,
              mock_setup_base_flows, mock_check_ovs_firewall_restart,
              mock_init_ovs_flows, mock_setup_security_br,
              mock_setup_ovs_bridges,
              mock_setup_integration_br, mock_create_consumers,
              mock_get_admin_context_without_session, mock_ovsvapp_pluginapi,
              mock_plugin_report_stateapi, mock_securitygroup_server_rpcapi,
              mock_rpc_pluginapi, mock_ovsdb_api, mock_setup_logging,
              mock_init):
        # Mock arguments arrive bottom-up: the innermost (lowest) decorator
        # becomes the first parameter after self.
        super(TestOVSvAppAgentRestart, self).setUp()
        cfg.CONF.set_override('security_bridge_mapping',
                              "fake_sec_br:fake_if", 'SECURITYGROUP')
        mock_get_port_ofport.return_value = 5
        # __init__ is patched out entirely, so required attributes are
        # assigned by hand below.
        mock_ovs_init.return_value = None
        self.agent = ovsvapp_agent.OVSvAppAgent()
        self.agent.run_refresh_firewall_loop = False
        self.LOG = ovsvapp_agent.LOG
        self.agent.monitor_log = logging.getLogger('monitor')

    def test_check_ovsvapp_agent_restart(self):
        """A restart is detected iff flows survive on the integration bridge."""
        self.agent.int_br = mock.Mock()
        with mock.patch.object(self.agent.int_br, 'bridge_exists',
                               return_value=True) as mock_br_exists, \
                mock.patch.object(self.agent.int_br, 'dump_flows_for_table',
                                  return_value='') as mock_dump_flows:
            # Empty flow dump -> fresh start, not a restart.
            self.assertFalse(self.agent.check_ovsvapp_agent_restart())
            self.assertTrue(mock_br_exists.called)
            self.assertTrue(mock_dump_flows.called)
            # A cookie in the dump means flows survived -> agent restart.
            mock_dump_flows.return_value = 'cookie = 0x0'
            self.assertTrue(self.agent.check_ovsvapp_agent_restart())
            self.assertTrue(mock_br_exists.called)
            self.assertTrue(mock_dump_flows.called)
class TestOVSvAppAgent(base.TestCase):
@mock.patch('neutron.common.config.init')
@mock.patch('neutron.common.config.setup_logging')
@mock.patch('neutron.agent.ovsdb.api.'
'API.get')
@mock.patch('networking_vsphere.agent.ovsvapp_agent.RpcPluginApi')
@mock.patch('neutron.agent.securitygroups_rpc.SecurityGroupServerRpcApi')
@mock.patch('neutron.agent.rpc.PluginReportStateAPI')
@mock.patch('networking_vsphere.agent.ovsvapp_agent.OVSvAppPluginApi')
@mock.patch('neutron.context.get_admin_context_without_session')
@mock.patch('neutron.agent.rpc.create_consumers')
@mock.patch('neutron.plugins.ml2.drivers.openvswitch.agent.'
'ovs_neutron_agent.OVSNeutronAgent.setup_integration_br')
@mock.patch('networking_vsphere.agent.ovsvapp_agent.'
'OVSvAppAgent.check_ovsvapp_agent_restart')
@mock.patch('networking_vsphere.agent.ovsvapp_agent.'
'OVSvAppAgent.setup_ovs_bridges')
@mock.patch('networking_vsphere.agent.ovsvapp_agent.'
'OVSvAppAgent.setup_security_br')
@mock.patch('networking_vsphere.agent.ovsvapp_agent.'
'OVSvAppAgent._init_ovs_flows')
@mock.patch('networking_vsphere.drivers.ovs_firewall.OVSFirewallDriver.'
'check_ovs_firewall_restart')
@mock.patch('networking_vsphere.drivers.ovs_firewall.'
'OVSFirewallDriver.setup_base_flows')
@mock.patch('neutron.agent.common.ovs_lib.OVSBridge.create')
@mock.patch('neutron.agent.common.ovs_lib.OVSBridge.set_secure_mode')
@mock.patch('neutron.agent.common.ovs_lib.OVSBridge.get_port_ofport')
def setUp(self, mock_get_port_ofport,
mock_set_secure_mode, mock_create_ovs_bridge,
mock_setup_base_flows, mock_check_ovs_firewall_restart,
mock_init_ovs_flows, mock_setup_security_br,
mock_setup_ovs_bridges, mock_check_ovsvapp_agent_restart,
mock_setup_integration_br, mock_create_consumers,
mock_get_admin_context_without_session, mock_ovsvapp_pluginapi,
mock_plugin_report_stateapi, mock_securitygroup_server_rpcapi,
mock_rpc_pluginapi, mock_ovsdb_api, mock_setup_logging,
mock_init):
super(TestOVSvAppAgent, self).setUp()
cfg.CONF.set_override('security_bridge_mapping',
"fake_sec_br:fake_if", 'SECURITYGROUP')
mock_check_ovsvapp_agent_restart.return_value = False
mock_get_port_ofport.return_value = 5
self.agent = ovsvapp_agent.OVSvAppAgent()
self.agent.run_refresh_firewall_loop = False
self.LOG = ovsvapp_agent.LOG
self.agent.monitor_log = logging.getLogger('monitor')
def _build_port(self, port):
port = {'admin_state_up': False,
'id': port,
'device': DEVICE,
'network_id': NETWORK_ID,
'physical_network': 'physnet1',
'segmentation_id': '1001',
'lvid': 1,
'network_type': 'vlan',
'fixed_ips': [{'subnet_id': 'subnet_uuid',
'ip_address': '1.1.1.1'}],
'device_owner': 'compute:None',
'security_groups': FAKE_SG,
'mac_address': MAC_ADDRESS,
'device_id': FAKE_DEVICE_ID
}
return port
def _build_update_port(self, port):
port = {'admin_state_up': False,
'id': port,
'network_id': NETWORK_ID,
'fixed_ips': [{'subnet_id': 'subnet_uuid',
'ip_address': '1.1.1.1'}],
'device_owner': 'compute:None',
'security_groups': FAKE_SG,
'mac_address': MAC_ADDRESS,
'device_id': FAKE_DEVICE_ID
}
return port
def test_setup_security_br_none(self):
cfg.CONF.set_override('security_bridge_mapping',
None, 'SECURITYGROUP')
self.agent.sec_br = mock.Mock()
with mock.patch.object(self.LOG, 'warning') as mock_logger_warn,\
mock.patch.object(self.agent.sec_br, 'bridge_exists'
) as mock_ovs_bridge:
self.assertRaises(SystemExit,
self.agent.setup_security_br)
self.assertTrue(mock_logger_warn.called)
self.assertFalse(mock_ovs_bridge.called)
def test_setup_security_br(self):
cfg.CONF.set_override('security_bridge_mapping',
"br-fake:fake_if", 'SECURITYGROUP')
self.agent.sec_br = mock.Mock()
self.agent.int_br = mock.Mock()
with mock.patch.object(self.LOG, 'info') as mock_logger_info, \
mock.patch.object(ovs_lib, "OVSBridge") as mock_ovs_br, \
mock.patch.object(self.agent.sec_br,
"add_patch_port",
return_value=5), \
mock.patch.object(self.agent.int_br,
"add_patch_port",
return_value=6):
self.agent.setup_security_br()
self.assertTrue(mock_ovs_br.called)
self.assertTrue(self.agent.sec_br.add_patch_port.called)
self.assertTrue(mock_logger_info.called)
def test_recover_security_br_none(self):
cfg.CONF.set_override('security_bridge_mapping',
None, 'SECURITYGROUP')
self.agent.sec_br = mock.Mock()
with mock.patch.object(self.LOG, 'warning') as mock_logger_warn, \
mock.patch.object(self.agent.sec_br, 'bridge_exists'
) as mock_ovs_bridge:
self.assertRaises(SystemExit,
self.agent.recover_security_br)
self.assertTrue(mock_logger_warn.called)
self.assertFalse(mock_ovs_bridge.called)
@mock.patch('neutron.agent.common.ovs_lib.OVSBridge')
def test_recover_security_br(self, mock_ovs_bridge):
cfg.CONF.set_override('security_bridge_mapping',
"br-sec:physnet1", 'SECURITYGROUP')
self.agent.int_br = mock.Mock()
self.agent.sec_br = mock.Mock()
mock_br = mock_ovs_bridge.return_value
with mock.patch.object(self.LOG, 'info') as mock_logger_info, \
mock.patch.object(mock_br, 'bridge_exists'), \
mock.patch.object(mock_br, 'add_patch_port') as mock_add_patch_port, \
mock.patch.object(self.agent.int_br,
"get_port_ofport",
return_value=6), \
mock.patch.object(mock_br,
"get_port_ofport",
return_value=6), \
mock.patch.object(mock_br,
"delete_port") as mock_delete_port:
mock_br.get_bridge_for_iface.return_value = 'br-sec'
self.agent.recover_security_br()
self.assertTrue(mock_logger_info.called)
self.assertFalse(mock_delete_port.called)
self.assertFalse(mock_add_patch_port.called)
mock_br.get_bridge_for_iface.return_value = 'br-fake'
self.agent.recover_security_br()
self.assertTrue(mock_logger_info.called)
self.assertTrue(mock_delete_port.called)
self.assertTrue(mock_add_patch_port.called)
@mock.patch('neutron.agent.ovsdb.api.'
'API.get')
def test_recover_physical_bridges(self, mock_ovsdb_api):
cfg.CONF.set_override('bridge_mappings',
["physnet1:br-eth1"], 'OVSVAPP')
self.agent.bridge_mappings = n_utils.parse_mappings(
cfg.CONF.OVSVAPP.bridge_mappings)
with mock.patch.object(self.LOG, 'info') as mock_logger_info, \
mock.patch.object(self.LOG, 'error') as mock_logger_error, \
mock.patch.object(self.agent, "br_phys_cls") as mock_ovs_br, \
mock.patch.object(ovs_lib.BaseOVS,
"get_bridges",
return_value=['br-eth1']
), \
mock.patch.object(p_utils, 'get_interface_name'
) as mock_int_name, \
mock.patch.object(self.agent.int_br,
"get_port_ofport",
return_value=6) as mock_get_ofport:
self.agent.recover_physical_bridges(self.agent.bridge_mappings)
self.assertTrue(mock_logger_info.called)
self.assertFalse(mock_logger_error.called)
self.assertTrue(mock_ovs_br.called)
self.assertTrue(mock_get_ofport.called)
self.assertTrue(mock_int_name.called)
self.assertEqual(self.agent.int_ofports['physnet1'], 6)
    def test_init_ovs_flows(self):
        """_init_ovs_flows resets integration-bridge flows and programs the
        physical bridges for the configured bridge_mappings.

        NOTE(review): _build_phys_brs itself invokes add_flows/delete_flows
        on the bridge mock, so the two br.* assertions below pass even if
        _init_ovs_flows never touches the bridge — consider br.reset_mock()
        after setup to make them meaningful.
        """
        cfg.CONF.set_override('bridge_mappings',
                              ["physnet1:br-eth1"], 'OVSVAPP')
        self.agent.bridge_mappings = n_utils.parse_mappings(
            cfg.CONF.OVSVAPP.bridge_mappings)
        self.agent.patch_sec_ofport = 5
        self.agent.int_ofports = {'physnet1': 'br-eth1'}
        self.agent.phys_ofports = {"physnet1": "br-eth1"}
        port = self._build_port(FAKE_PORT_1)
        br = self._build_phys_brs(port)
        self.agent.br = mock.Mock()
        with mock.patch.object(self.agent.int_br,
                               "delete_flows"
                               ) as mock_int_br_delete_flows, \
                mock.patch.object(self.agent,
                                  "br_phys_cls") as mock_ovs_br, \
                mock.patch.object(self.agent.int_br,
                                  "add_flow") as mock_int_br_add_flow:
            self.agent._init_ovs_flows(self.agent.bridge_mappings)
            self.assertTrue(mock_int_br_delete_flows.called)
            self.assertTrue(mock_ovs_br.called)
            self.assertTrue(br.delete_flows.called)
            self.assertTrue(br.add_flows.called)
            self.assertTrue(mock_int_br_add_flow.called)
def test_update_port_bindings(self):
self.agent.ports_to_bind.add("fake_port")
with mock.patch.object(self.agent.ovsvapp_rpc,
"update_ports_binding",
return_value=set(["fake_port"])
) as mock_update_ports_binding, \
mock.patch.object(self.LOG, 'exception'
) as mock_log_exception:
self.agent._update_port_bindings()
self.assertTrue(mock_update_ports_binding.called)
self.assertFalse(self.agent.ports_to_bind)
self.assertFalse(mock_log_exception.called)
def test_update_port_bindings_rpc_exception(self):
self.agent.ports_to_bind.add("fake_port")
with mock.patch.object(self.agent.ovsvapp_rpc,
"update_ports_binding",
side_effect=Exception()
) as mock_update_port_binding, \
mock.patch.object(self.LOG, 'exception'
) as mock_log_exception:
self.assertRaises(
error.OVSvAppNeutronAgentError,
self.agent._update_port_bindings)
self.assertTrue(mock_update_port_binding.called)
self.assertTrue(mock_log_exception.called)
self.assertEqual(set(['fake_port']),
self.agent.ports_to_bind)
def test_update_port_bindings_partial(self):
self.agent.ports_to_bind.add("fake_port1")
self.agent.ports_to_bind.add("fake_port2")
self.agent.ports_to_bind.add("fake_port3")
with mock.patch.object(self.agent.ovsvapp_rpc,
"update_ports_binding",
return_value=set(["fake_port1",
"fake_port2"])
) as mock_update_port_binding, \
mock.patch.object(self.LOG, 'exception'):
self.agent._update_port_bindings()
self.assertTrue(mock_update_port_binding.called)
self.assertEqual(set(["fake_port3"]),
self.agent.ports_to_bind)
def test_setup_ovs_bridges_vlan(self):
cfg.CONF.set_override('tenant_network_types',
"vlan", 'OVSVAPP')
cfg.CONF.set_override('bridge_mappings',
["physnet1:br-eth1"], 'OVSVAPP')
with mock.patch.object(self.agent, 'setup_physical_bridges'
) as mock_phys_brs, \
mock.patch.object(self.agent, '_init_ovs_flows'
) as mock_init_ovs_flows:
self.agent.setup_ovs_bridges()
mock_phys_brs.assert_called_with(self.agent.bridge_mappings)
mock_init_ovs_flows.assert_called_with(self.agent.bridge_mappings)
    @mock.patch('neutron.agent.ovsdb.api.'
                'API.get')
    def test_setup_ovs_bridges_vxlan(self, mock_ovsdb_api):
        """VXLAN mode sets up the tunnel bridge ("br-tun") and its flows."""
        self.agent.local_ip = "10.10.10.10"
        self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
        with mock.patch.object(self.agent, 'setup_tunnel_br'
                               ) as mock_setup_tunnel_br, \
                mock.patch.object(self.agent, 'setup_tunnel_br_flows'
                                  ) as mock_setup_tunnel_br_flows:
            self.agent.setup_ovs_bridges()
            mock_setup_tunnel_br.assert_called_with("br-tun")
            self.assertTrue(mock_setup_tunnel_br_flows.called)
    def test_setup_ovs_bridges_vxlan_ofport(self):
        """The patch-port ofports returned by tun_br/int_br add_patch_port
        (5 and 6 here) are stored as patch_int_ofport/patch_tun_ofport.
        """
        cfg.CONF.set_override('tenant_network_types',
                              "vxlan", 'OVSVAPP')
        cfg.CONF.set_override('local_ip',
                              "10.10.10.10", 'OVSVAPP')
        cfg.CONF.set_override('tunnel_bridge',
                              "br-tun", 'OVSVAPP')
        self.agent.tun_br = mock.Mock()
        self.agent.int_br = mock.Mock()
        self.agent.local_ip = "10.10.10.10"
        self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
        with mock.patch.object(self.agent.tun_br,
                               "add_patch_port",
                               return_value=5), \
                mock.patch.object(self.agent.int_br,
                                  "add_patch_port",
                                  return_value=6), \
                mock.patch.object(self.agent, 'setup_tunnel_br_flows'
                                  ) as mock_setup_tunnel_br_flows:
            self.agent.setup_ovs_bridges()
            self.assertTrue(self.agent.tun_br.add_patch_port.called)
            # int_br's patch port (6) faces the tunnel bridge and vice versa.
            self.assertEqual(self.agent.patch_tun_ofport, 6)
            self.assertEqual(self.agent.patch_int_ofport, 5)
            self.assertTrue(mock_setup_tunnel_br_flows.called)
    def test_mitigate_ovs_restart_vlan(self):
        """In VLAN mode an OVS restart rebuilds int/phys/sec bridges and the
        firewall, schedules all cluster ports for refiltering, and never
        touches the tunnel bridge.
        """
        self.agent.refresh_firewall_required = False
        self.agent.devices_to_filter = set(['1111'])
        self.agent.cluster_host_ports = set(['1111'])
        self.agent.cluster_other_ports = set(['2222'])
        with mock.patch.object(self.LOG, 'info') as mock_logger_info, \
                mock.patch.object(self.agent, "setup_integration_br"
                                  ) as mock_int_br, \
                mock.patch.object(self.agent, "setup_physical_bridges"
                                  ) as mock_phys_brs, \
                mock.patch.object(self.agent, "setup_security_br"
                                  ) as mock_sec_br, \
                mock.patch.object(self.agent.sg_agent, "init_firewall"
                                  ) as mock_init_fw, \
                mock.patch.object(self.agent, "setup_tunnel_br"
                                  ) as mock_setup_tunnel_br,\
                mock.patch.object(self.agent, 'setup_tunnel_br_flows'
                                  ) as mock_setup_tunnel_br_flows, \
                mock.patch.object(self.agent, "_init_ovs_flows"
                                  ) as mock_init_flows, \
                mock.patch.object(self.agent.monitor_log, "warning"
                                  ) as monitor_warning, \
                mock.patch.object(self.agent.monitor_log, "info"
                                  ) as monitor_info:
            self.agent.mitigate_ovs_restart()
            self.assertTrue(mock_int_br.called)
            self.assertTrue(mock_phys_brs.called)
            self.assertTrue(mock_sec_br.called)
            self.assertFalse(mock_setup_tunnel_br.called)
            self.assertFalse(mock_setup_tunnel_br_flows.called)
            self.assertTrue(mock_init_fw.called)
            self.assertTrue(mock_init_flows.called)
            self.assertTrue(self.agent.refresh_firewall_required)
            # Host + other cluster ports are both queued for refiltering.
            self.assertEqual(2, len(self.agent.devices_to_filter))
            monitor_warning.assert_called_with("ovs: broken")
            monitor_info.assert_called_with("ovs: ok")
            self.assertTrue(mock_logger_info.called)
    def test_mitigate_ovs_restart_vxlan(self):
        """With tunneling enabled an OVS restart rebuilds the tunnel bridge
        and resyncs tunnels instead of the physical bridges.
        """
        self.agent.enable_tunneling = True
        self.agent.refresh_firewall_required = False
        self.agent.devices_to_filter = set(['1111'])
        self.agent.cluster_host_ports = set(['1111'])
        self.agent.cluster_other_ports = set(['2222'])
        with mock.patch.object(self.LOG, 'info') as mock_logger_info, \
                mock.patch.object(self.agent, "setup_integration_br"), \
                mock.patch.object(self.agent, "setup_physical_bridges"
                                  ) as mock_phys_brs, \
                mock.patch.object(self.agent, "setup_security_br"), \
                mock.patch.object(self.agent.sg_agent, "init_firewall"
                                  ), \
                mock.patch.object(self.agent, "setup_tunnel_br"
                                  ) as mock_setup_tunnel_br,\
                mock.patch.object(self.agent, 'setup_tunnel_br_flows'
                                  ) as mock_setup_tunnel_br_flows, \
                mock.patch.object(self.agent, "tunnel_sync"
                                  ) as mock_tun_sync, \
                mock.patch.object(self.agent, "_init_ovs_flows"), \
                mock.patch.object(self.agent.monitor_log, "warning"
                                  ) as monitor_warning, \
                mock.patch.object(self.agent.monitor_log, "info"
                                  ) as monitor_info:
            self.agent.mitigate_ovs_restart()
            self.assertTrue(mock_setup_tunnel_br.called)
            self.assertTrue(mock_setup_tunnel_br_flows.called)
            self.assertFalse(mock_phys_brs.called)
            self.assertTrue(mock_tun_sync.called)
            self.assertTrue(self.agent.refresh_firewall_required)
            self.assertEqual(len(self.agent.devices_to_filter), 2)
            monitor_warning.assert_called_with("ovs: broken")
            monitor_info.assert_called_with("ovs: ok")
            self.assertTrue(mock_logger_info.called)
    def test_mitigate_ovs_restart_exception(self):
        """If bridge recovery raises, mitigation logs the exception, leaves
        agent state untouched, and never reports "ovs: ok".
        """
        self.agent.enable_tunneling = False
        self.agent.refresh_firewall_required = False
        self.agent.devices_to_filter = set()
        self.agent.cluster_host_ports = set(['1111'])
        self.agent.cluster_other_ports = set(['2222'])
        with mock.patch.object(self.LOG, "info") as mock_logger_info, \
                mock.patch.object(self.agent, "setup_integration_br",
                                  side_effect=Exception()) as mock_int_br, \
                mock.patch.object(self.agent, "setup_physical_bridges"
                                  ) as mock_phys_brs, \
                mock.patch.object(self.agent, "setup_tunnel_br"
                                  ) as mock_setup_tunnel_br,\
                mock.patch.object(self.agent, 'setup_tunnel_br_flows'
                                  ) as mock_setup_tunnel_br_flows, \
                mock.patch.object(self.LOG, "exception"
                                  ) as mock_exception_log, \
                mock.patch.object(self.agent.monitor_log, "warning"
                                  ) as monitor_warning, \
                mock.patch.object(self.agent.monitor_log, "info"
                                  ) as monitor_info:
            self.agent.mitigate_ovs_restart()
            self.assertTrue(mock_int_br.called)
            self.assertFalse(mock_phys_brs.called)
            self.assertFalse(mock_setup_tunnel_br.called)
            self.assertFalse(mock_setup_tunnel_br_flows.called)
            self.assertFalse(mock_logger_info.called)
            self.assertTrue(mock_exception_log.called)
            self.assertFalse(self.agent.refresh_firewall_required)
            self.assertEqual(0, len(self.agent.devices_to_filter))
            monitor_warning.assert_called_with("ovs: broken")
            self.assertFalse(monitor_info.called)
def _get_fake_port(self, port_id):
return {'id': port_id,
'port_id': port_id,
'mac_address': MAC_ADDRESS,
'fixed_ips': [{'subnet_id': 'subnet_uuid',
'ip_address': '1.1.1.1'}],
'security_groups': FAKE_SG,
'segmentation_id': 1232,
'lvid': 1,
'network_id': 'fake_network',
'device_id': FAKE_DEVICE_ID,
'admin_state_up': True,
'physical_network': 'physnet1',
'network_type': 'vlan'}
    def _build_phys_brs(self, port):
        """Register a mocked physical bridge for the port's physical network
        and return it.

        NOTE(review): the add_flows/delete_flows calls below are made on the
        mock itself, so any later ``br.add_flows.called`` /
        ``br.delete_flows.called`` assertion is satisfied by this setup
        alone, not by the code under test.
        """
        phys_net = port['physical_network']
        self.agent.phys_brs[phys_net] = {}
        self.agent.phys_brs[phys_net]['eth_ofport'] = 5
        br = self.agent.phys_brs[phys_net]['br'] = mock.Mock()
        br.add_flows(port['segmentation_id'],
                     port['mac_address'],
                     5)
        br.delete_flows(port['mac_address'],
                        port['segmentation_id'])
        return br
    def test_process_port(self):
        """_process_port on a new VLAN network caches the port, adds it to
        the firewall filter, provisions a local VLAN, and drops vnic_info.
        """
        fakeport = self._get_fake_port(FAKE_PORT_1)
        self.agent.ports_dict = {}
        self.agent.vlan_manager.mapping = {}
        br = self._build_phys_brs(fakeport)
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        self.agent.cluster_host_ports.add(FAKE_PORT_1)
        self.agent.vnic_info[FAKE_PORT_1] = fakeport
        with mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
                               ) as mock_add_devices, \
                mock.patch.object(self.agent, '_provision_local_vlan'
                                  ) as mock_prov_local_vlan:
            status = self.agent._process_port(fakeport)
            self.assertIn(FAKE_PORT_1, self.agent.ports_dict)
            self.assertTrue(status)
            mock_add_devices.assert_called_with([fakeport])
            mock_prov_local_vlan.assert_called_with(fakeport)
            self.assertTrue(br.add_flows.called)
            self.assertNotIn(FAKE_PORT_1, self.agent.vnic_info)
    def test_process_port_existing_network(self):
        """When the network already has a local VLAN mapping (via
        _build_lvm), _process_port must not provision a new one.
        """
        fakeport = self._get_fake_port(FAKE_PORT_1)
        self.agent.ports_dict = {}
        self.agent.vlan_manager.mapping = {}
        br = self._build_phys_brs(fakeport)
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        self.agent.cluster_host_ports.add(FAKE_PORT_1)
        self.agent.vnic_info[FAKE_PORT_1] = {}
        self._build_lvm(fakeport)
        with mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
                               ) as mock_add_devices, \
                mock.patch.object(self.agent, '_provision_local_vlan'
                                  ) as mock_prov_local_vlan:
            status = self.agent._process_port(fakeport)
            self.assertIn(FAKE_PORT_1, self.agent.ports_dict)
            self.assertTrue(status)
            mock_add_devices.assert_called_with([fakeport])
            self.assertFalse(mock_prov_local_vlan.called)
            self.assertTrue(br.add_flows.called)
def test_process_uncached_devices_with_few_devices(self):
devices = set(['123', '234', '345', '456', '567', '678',
'1123', '1234', '1345', '1456', '1567', '1678'])
with mock.patch('eventlet.GreenPool.spawn_n') as mock_spawn_thread, \
mock.patch.object(self.LOG, 'exception') as mock_log_exception:
self.agent._process_uncached_devices(devices)
self.assertTrue(mock_spawn_thread.called)
self.assertEqual(1, mock_spawn_thread.call_count)
self.assertFalse(mock_log_exception.called)
def test_process_uncached_devices_with_more_devices(self):
devices = set(['123', '234', '345', '456', '567', '678',
'1123', '1234', '1345', '1456', '1567', '1678',
'2123', '2234', '2345', '2456', '2567', '2678',
'3123', '3234', '3345', '3456', '3567', '3678',
'4123', '4234', '4345', '4456', '4567', '4678',
'5123', '5234', '5345', '5456', '5567', '5678',
'6123', '6234', '6345', '6456', '6567', '6678'])
with mock.patch('eventlet.GreenPool.spawn_n') as mock_spawn_thread, \
mock.patch.object(self.LOG, 'exception') as mock_log_exception:
self.agent._process_uncached_devices(devices)
self.assertTrue(mock_spawn_thread.called)
self.assertEqual(2, mock_spawn_thread.call_count)
self.assertFalse(mock_log_exception.called)
    def test_process_uncached_devices_sublist_single_port_vlan(self):
        """One uncached VLAN port: fetched via RPC, filtered, firewall
        refreshed, local VLAN provisioned, and its vnic_info entry dropped.
        """
        fakeport_1 = self._get_fake_port(FAKE_PORT_1)
        self.agent.ports_dict = {}
        br = self._build_phys_brs(fakeport_1)
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        self.agent.cluster_host_ports.add(FAKE_PORT_1)
        self.agent.vnic_info[FAKE_PORT_1] = fakeport_1
        devices = [FAKE_PORT_1]
        self.agent.vlan_manager.mapping = {}
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               'get_ports_details_list',
                               return_value=[fakeport_1]
                               ) as mock_get_ports_details_list, \
                mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
                                  ) as mock_add_devices_to_filter, \
                mock.patch.object(self.agent.sg_agent, 'refresh_firewall'
                                  )as mock_refresh_firewall, \
                mock.patch.object(self.agent, '_provision_local_vlan'
                                  ) as mock_provision_local_vlan, \
                mock.patch.object(self.LOG, 'exception') as mock_log_exception:
            self.agent._process_uncached_devices_sublist(devices)
            self.assertTrue(mock_get_ports_details_list.called)
            self.assertEqual(1, mock_add_devices_to_filter.call_count)
            self.assertTrue(mock_refresh_firewall.called)
            self.assertTrue(mock_provision_local_vlan.called)
            self.assertFalse(mock_log_exception.called)
            self.assertNotIn(FAKE_PORT_1, self.agent.vnic_info)
            self.assertTrue(br.add_flows.called)
    def test_process_uncached_devices_sublist_multiple_port_vlan(self):
        """Two uncached VLAN ports are each filtered and their vnic_info
        entries removed after one sublist pass.
        """
        fakeport_1 = self._get_fake_port(FAKE_PORT_1)
        fakeport_2 = self._get_fake_port(FAKE_PORT_2)
        self.agent.ports_dict = {}
        self.agent.vlan_manager.mapping = {}
        br = self._build_phys_brs(fakeport_1)
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        self.agent.cluster_host_ports.add(FAKE_PORT_1)
        self.agent.cluster_host_ports.add(FAKE_PORT_2)
        self.agent.vnic_info[FAKE_PORT_1] = fakeport_1
        self.agent.vnic_info[FAKE_PORT_2] = fakeport_2
        devices = [FAKE_PORT_1, FAKE_PORT_2]
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               'get_ports_details_list',
                               return_value=[fakeport_1, fakeport_2]
                               ) as mock_get_ports_details_list, \
                mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
                                  ) as mock_add_devices_to_filter, \
                mock.patch.object(self.agent.sg_agent, 'refresh_firewall'
                                  )as mock_refresh_firewall, \
                mock.patch.object(self.agent, '_provision_local_vlan'
                                  ) as mock_prov_local_vlan, \
                mock.patch.object(self.LOG, 'exception') as mock_log_exception:
            self.agent._process_uncached_devices_sublist(devices)
            self.assertTrue(mock_get_ports_details_list.called)
            self.assertEqual(2, mock_add_devices_to_filter.call_count)
            self.assertTrue(mock_refresh_firewall.called)
            self.assertTrue(mock_prov_local_vlan.called)
            self.assertFalse(mock_log_exception.called)
            self.assertNotIn(FAKE_PORT_1, self.agent.vnic_info)
            self.assertNotIn(FAKE_PORT_2, self.agent.vnic_info)
            self.assertTrue(br.add_flows.called)
    def test_process_uncached_devices_sublist_single_port_vxlan(self):
        """One uncached VXLAN port: same flow as the VLAN case but with
        _populate_lvm patched out and no physical bridge involved.
        """
        fakeport_1 = self._get_fake_port(FAKE_PORT_1)
        fakeport_1["network_type"] = p_const.TYPE_VXLAN
        self.agent.ports_dict = {}
        self.agent.vlan_manager.mapping = {}
        self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
        self.agent.cluster_host_ports.add(FAKE_PORT_1)
        self.agent.vnic_info[FAKE_PORT_1] = fakeport_1
        devices = [FAKE_PORT_1]
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               'get_ports_details_list',
                               return_value=[fakeport_1]
                               ) as mock_get_ports_details_list, \
                mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
                                  ) as mock_add_devices_to_filter, \
                mock.patch.object(self.agent.sg_agent, 'refresh_firewall'
                                  )as mock_refresh_firewall, \
                mock.patch.object(self.agent, '_populate_lvm'), \
                mock.patch.object(self.agent, '_provision_local_vlan'
                                  ) as mock_prov_local_vlan, \
                mock.patch.object(self.LOG, 'exception') as mock_log_exception:
            self.agent._process_uncached_devices_sublist(devices)
            self.assertTrue(mock_get_ports_details_list.called)
            self.assertTrue(mock_prov_local_vlan.called)
            self.assertEqual(1, mock_add_devices_to_filter.call_count)
            self.assertTrue(mock_refresh_firewall.called)
            self.assertFalse(mock_log_exception.called)
            self.assertNotIn(FAKE_PORT_1, self.agent.vnic_info)
    def test_process_uncached_devices_sublist_multiple_port_vxlan(self):
        """Two uncached VXLAN ports are filtered once each and removed from
        vnic_info after one sublist pass.
        """
        fakeport_1 = self._get_fake_port(FAKE_PORT_1)
        fakeport_2 = self._get_fake_port(FAKE_PORT_2)
        fakeport_1["network_type"] = p_const.TYPE_VXLAN
        fakeport_2["network_type"] = p_const.TYPE_VXLAN
        self.agent.ports_dict = {}
        self.agent.vlan_manager.mapping = {}
        self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
        self.agent.cluster_host_ports.add(FAKE_PORT_1)
        self.agent.cluster_host_ports.add(FAKE_PORT_2)
        self.agent.vnic_info[FAKE_PORT_1] = fakeport_1
        self.agent.vnic_info[FAKE_PORT_2] = fakeport_2
        devices = [FAKE_PORT_1, FAKE_PORT_2]
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               'get_ports_details_list',
                               return_value=[fakeport_1, fakeport_2]
                               ) as mock_get_ports_details_list, \
                mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
                                  ) as mock_add_devices_to_filter, \
                mock.patch.object(self.agent.sg_agent, 'refresh_firewall'
                                  )as mock_refresh_firewall, \
                mock.patch.object(self.agent, '_populate_lvm'), \
                mock.patch.object(self.agent, '_provision_local_vlan'
                                  ) as mock_prov_local_vlan, \
                mock.patch.object(self.LOG, 'exception') as mock_log_exception:
            self.agent._process_uncached_devices_sublist(devices)
            self.assertTrue(mock_get_ports_details_list.called)
            self.assertTrue(mock_prov_local_vlan.called)
            self.assertEqual(2, mock_add_devices_to_filter.call_count)
            self.assertTrue(mock_refresh_firewall.called)
            self.assertFalse(mock_log_exception.called)
            self.assertNotIn(FAKE_PORT_1, self.agent.vnic_info)
            self.assertNotIn(FAKE_PORT_2, self.agent.vnic_info)
    def test_process_uncached_devices_sublist_stale_vm_port(self):
        """A device the server no longer reports (FAKE_PORT_3) is treated as
        stale: its filter is removed and it leaves ports_to_bind/vnic_info,
        while an untouched queued port (FAKE_PORT_4) stays in ports_to_bind.
        """
        fakeport_1 = self._get_fake_port(FAKE_PORT_1)
        fakeport_2 = self._get_fake_port(FAKE_PORT_2)
        fakeport_3 = self._get_fake_port(FAKE_PORT_3)
        self.agent.ports_dict = {}
        self.agent.vlan_manager.mapping = {}
        self._build_phys_brs(fakeport_1)
        self._build_phys_brs(fakeport_2)
        self._build_phys_brs(fakeport_3)
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        self.agent.cluster_host_ports.add(FAKE_PORT_1)
        self.agent.cluster_host_ports.add(FAKE_PORT_2)
        self.agent.ports_to_bind = set([FAKE_PORT_3, FAKE_PORT_4])
        self.agent.vnic_info[FAKE_PORT_1] = fakeport_1
        self.agent.vnic_info[FAKE_PORT_2] = fakeport_2
        self.agent.vnic_info[FAKE_PORT_3] = fakeport_3
        devices = [FAKE_PORT_1, FAKE_PORT_2, FAKE_PORT_3]
        self.agent.sg_agent.remove_devices_filter = mock.Mock()
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               'get_ports_details_list',
                               return_value=[fakeport_1, fakeport_2]
                               ) as mock_get_ports_details_list, \
                mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
                                  ) as mock_add_devices_to_filter, \
                mock.patch.object(self.agent.sg_agent, 'refresh_firewall'
                                  )as mock_refresh_firewall, \
                mock.patch.object(self.agent.sg_agent,
                                  'remove_devices_filter'
                                  )as mock_remove_device_filter, \
                mock.patch.object(self.agent, '_provision_local_vlan'
                                  ) as mock_prov_local_vlan, \
                mock.patch.object(self.agent, '_remove_stale_ports_flows'), \
                mock.patch.object(self.agent, '_block_stale_ports'), \
                mock.patch.object(self.LOG, 'exception') as mock_log_exception:
            self.agent._process_uncached_devices_sublist(devices)
            self.assertTrue(mock_get_ports_details_list.called)
            self.assertEqual(2, mock_add_devices_to_filter.call_count)
            self.assertTrue(mock_refresh_firewall.called)
            self.assertTrue(mock_prov_local_vlan.called)
            self.assertFalse(mock_log_exception.called)
            self.assertNotIn(FAKE_PORT_3, self.agent.ports_to_bind)
            self.assertIn(FAKE_PORT_4, self.agent.ports_to_bind)
            self.assertNotIn(FAKE_PORT_1, self.agent.vnic_info)
            self.assertNotIn(FAKE_PORT_2, self.agent.vnic_info)
            self.assertNotIn(FAKE_PORT_3, self.agent.vnic_info)
            mock_remove_device_filter.assert_called_with(FAKE_PORT_3)
    def test_update_firewall(self):
        """_update_firewall fetches details only for ports missing from
        ports_dict (FAKE_PORT_2), refreshes the firewall for all queued
        devices, and clears the refresh flag and queue.
        """
        fakeport_1 = self._get_fake_port(FAKE_PORT_1)
        fakeport_2 = self._get_fake_port(FAKE_PORT_2)
        self._build_phys_brs(fakeport_1)
        self._build_phys_brs(fakeport_2)
        self.agent.devices_to_filter = set([FAKE_PORT_1,
                                            FAKE_PORT_2])
        self.agent.ports_dict = {FAKE_PORT_1: fakeport_1}
        self.agent.vnic_info[FAKE_PORT_1] = {}
        self.agent.vnic_info[FAKE_PORT_2] = {}
        self.agent.refresh_firewall_required = True
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        self.agent.vcenter_id = FAKE_VCENTER
        self.agent.cluster_id = FAKE_CLUSTER_1
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               'get_ports_details_list',
                               return_value=[fakeport_1, fakeport_2]
                               ) as mock_get_ports_details_list, \
                mock.patch.object(self.agent.sg_agent, 'refresh_firewall'
                                  ) as mock_refresh_firewall, \
                mock.patch.object(self.agent, '_provision_local_vlan'
                                  ), \
                mock.patch.object(self.agent, '_remove_stale_ports_flows'), \
                mock.patch.object(self.agent, '_block_stale_ports'), \
                mock.patch.object(self.agent.monitor_log, "warning"
                                  ) as monitor_warning, \
                mock.patch.object(self.agent.monitor_log, "info"
                                  ) as monitor_info:
            self.agent._update_firewall()
            self.assertFalse(self.agent.refresh_firewall_required)
            self.assertFalse(self.agent.devices_to_filter)
            self.assertIn(FAKE_PORT_2, self.agent.ports_dict)
            mock_get_ports_details_list.assert_called_with(
                self.agent.context,
                [FAKE_PORT_2],
                self.agent.agent_id,
                self.agent.vcenter_id,
                self.agent.cluster_id)
            mock_refresh_firewall.assert_called_with(set([FAKE_PORT_1,
                                                          FAKE_PORT_2]))
            self.assertEqual(2, monitor_warning.call_count)
            self.assertEqual(2, monitor_info.call_count)
    def test_update_firewall_get_ports_exception(self):
        """If the port-details RPC fails, the refresh flag stays set, the
        unfetched port remains queued, and only the cached port is refreshed.
        """
        fakeport_1 = self._get_fake_port(FAKE_PORT_1)
        self.agent.devices_to_filter = set([FAKE_PORT_1,
                                            FAKE_PORT_2])
        self.agent.ports_dict = {FAKE_PORT_1: fakeport_1}
        self.agent.refresh_firewall_required = True
        self.agent.vcenter_id = FAKE_VCENTER
        self.agent.cluster_id = FAKE_CLUSTER_1
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               'get_ports_details_list',
                               side_effect=Exception()
                               ) as mock_get_ports_details_list, \
                mock.patch.object(self.agent.sg_agent, 'refresh_firewall'
                                  ) as mock_refresh_firewall, \
                mock.patch.object(self.agent.monitor_log, "warning"
                                  ) as monitor_warning, \
                mock.patch.object(self.agent.monitor_log, "info"
                                  ) as monitor_info:
            self.agent._update_firewall()
            self.assertTrue(self.agent.refresh_firewall_required)
            self.assertEqual(set([FAKE_PORT_2]), self.agent.devices_to_filter)
            self.assertNotIn(FAKE_PORT_2, self.agent.ports_dict)
            mock_get_ports_details_list.assert_called_with(
                self.agent.context,
                [FAKE_PORT_2],
                self.agent.agent_id,
                self.agent.vcenter_id,
                self.agent.cluster_id)
            mock_refresh_firewall.assert_called_with(set([FAKE_PORT_1]))
            self.assertEqual(2, monitor_warning.call_count)
            self.assertEqual(1, monitor_info.call_count)
    def test_check_for_updates_no_updates(self):
        """With a healthy OVS (status 4) and nothing pending, the check loop
        performs no firewall or binding work.
        """
        self.agent.refresh_firewall_required = False
        self.agent.ports_to_bind = None
        with mock.patch.object(self.agent, 'check_ovs_status',
                               return_value=4) as mock_check_ovs, \
                mock.patch.object(self.agent, '_update_firewall'
                                  ) as mock_update_firewall, \
                mock.patch.object(self.agent.sg_agent,
                                  'firewall_refresh_needed',
                                  return_value=False
                                  ) as mock_firewall_refresh, \
                mock.patch.object(self.agent.sg_agent, 'refresh_port_filters'
                                  ) as mock_refresh_port_filters, \
                mock.patch.object(self.agent, '_update_port_bindings'
                                  ) as mock_update_port_bindings:
            self.agent._check_for_updates()
            self.assertTrue(mock_check_ovs.called)
            self.assertFalse(mock_update_firewall.called)
            self.assertTrue(mock_firewall_refresh.called)
            self.assertFalse(mock_refresh_port_filters.called)
            self.assertFalse(mock_update_port_bindings.called)
    def test_check_for_updates_ovs_restarted(self):
        """An OVS restart indication (status 0) triggers mitigation."""
        self.agent.refresh_firewall_required = False
        self.agent.ports_to_bind = None
        with mock.patch.object(self.agent, 'check_ovs_status',
                               return_value=0) as mock_check_ovs, \
                mock.patch.object(self.agent, 'mitigate_ovs_restart'
                                  ) as mock_mitigate, \
                mock.patch.object(self.agent, '_update_firewall'
                                  ) as mock_update_firewall, \
                mock.patch.object(self.agent.sg_agent,
                                  'firewall_refresh_needed',
                                  return_value=False
                                  ) as mock_firewall_refresh, \
                mock.patch.object(self.agent, '_update_port_bindings'
                                  ) as mock_update_port_bindings:
            self.agent._check_for_updates()
            self.assertTrue(mock_check_ovs.called)
            self.assertTrue(mock_mitigate.called)
            self.assertFalse(mock_update_firewall.called)
            self.assertTrue(mock_firewall_refresh.called)
            self.assertFalse(mock_update_port_bindings.called)
    @mock.patch.object(ovsvapp_agent.OVSvAppAgent, 'check_ovs_status')
    def test_check_for_updates_ovs_dead(self, check_ovs_status):
        """A dead OVS (status 2) only flags mitigation as required; the
        mitigation itself runs on a later pass once status recovers to 1,
        after which the flag is cleared.
        """
        check_ovs_status.return_value = 2
        self.agent.refresh_firewall_required = False
        self.agent.ports_to_bind = None
        with mock.patch.object(self.agent, 'mitigate_ovs_restart'
                               ) as mock_mitigate, \
                mock.patch.object(self.agent, '_update_firewall'
                                  ) as mock_update_firewall, \
                mock.patch.object(self.agent.sg_agent,
                                  'firewall_refresh_needed',
                                  return_value=False
                                  ) as mock_firewall_refresh, \
                mock.patch.object(self.agent, '_update_port_bindings'
                                  ) as mock_update_port_bindings:
            self.agent._check_for_updates()
            self.assertTrue(self.agent.ovsvapp_mitigation_required)
            self.assertTrue(check_ovs_status.called)
            self.assertFalse(mock_mitigate.called)
            self.assertTrue(mock_firewall_refresh.called)
            self.assertFalse(mock_update_port_bindings.called)
            # Second pass: OVS came back (status 1) -> mitigation runs.
            check_ovs_status.return_value = 1
            self.agent._check_for_updates()
            self.assertTrue(check_ovs_status.called)
            self.assertTrue(mock_mitigate.called)
            self.assertFalse(mock_update_firewall.called)
            self.assertTrue(mock_firewall_refresh.called)
            self.assertFalse(mock_update_port_bindings.called)
            self.assertFalse(self.agent.ovsvapp_mitigation_required)
    def test_check_for_updates_devices_to_filter(self):
        """A pending refresh_firewall_required flag drives _update_firewall
        on the next check pass.
        """
        self.agent.refresh_firewall_required = True
        self.agent.ports_to_bind = None
        with mock.patch.object(self.agent, 'check_ovs_status',
                               return_value=4) as mock_check_ovs, \
                mock.patch.object(self.agent, 'mitigate_ovs_restart'
                                  ) as mock_mitigate, \
                mock.patch.object(self.agent, '_update_firewall'
                                  ) as mock_update_firewall,\
                mock.patch.object(self.agent.sg_agent,
                                  'firewall_refresh_needed',
                                  return_value=False
                                  ) as mock_firewall_refresh, \
                mock.patch.object(self.agent, '_update_port_bindings'
                                  ) as mock_update_port_bindings:
            self.agent._check_for_updates()
            self.assertTrue(mock_check_ovs.called)
            self.assertFalse(mock_mitigate.called)
            self.assertTrue(mock_update_firewall.called)
            self.assertTrue(mock_firewall_refresh.called)
            self.assertFalse(mock_update_port_bindings.called)
    def test_check_for_updates_firewall_refresh(self):
        """When the sg_agent reports a refresh is needed, port filters are
        refreshed on the check pass.
        """
        self.agent.refresh_firewall_required = False
        self.agent.ports_to_bind = None
        with mock.patch.object(self.agent, 'check_ovs_status',
                               return_value=4) as mock_check_ovs, \
                mock.patch.object(self.agent, '_update_firewall'
                                  ) as mock_update_firewall, \
                mock.patch.object(self.agent.sg_agent,
                                  'firewall_refresh_needed',
                                  return_value=True
                                  ) as mock_firewall_refresh,\
                mock.patch.object(self.agent.sg_agent, 'refresh_port_filters'
                                  ) as mock_refresh_port_filters, \
                mock.patch.object(self.agent, '_update_port_bindings'
                                  ) as mock_update_port_bindings:
            self.agent._check_for_updates()
            self.assertTrue(mock_check_ovs.called)
            self.assertFalse(mock_update_firewall.called)
            self.assertTrue(mock_firewall_refresh.called)
            self.assertTrue(mock_refresh_port_filters.called)
            self.assertFalse(mock_update_port_bindings.called)
    def test_check_for_updates_port_bindings(self):
        """A non-empty ports_to_bind queue triggers _update_port_bindings."""
        self.agent.refresh_firewall_required = False
        self.agent.ports_to_bind.add("fake_port")
        with mock.patch.object(self.agent, 'check_ovs_status',
                               return_value=4) as mock_check_ovs, \
                mock.patch.object(self.agent, '_update_firewall'
                                  ) as mock_update_firewall, \
                mock.patch.object(self.agent.sg_agent,
                                  'firewall_refresh_needed',
                                  return_value=False
                                  ) as mock_firewall_refresh, \
                mock.patch.object(self.agent, '_update_port_bindings'
                                  ) as mock_update_port_bindings:
            self.agent._check_for_updates()
            self.assertTrue(mock_check_ovs.called)
            self.assertFalse(mock_update_firewall.called)
            self.assertTrue(mock_firewall_refresh.called)
            self.assertTrue(mock_update_port_bindings.called)
def test_update_devices_up(self):
self.agent.devices_up_list.append(FAKE_PORT_1)
ret_value = {'devices_up': [FAKE_PORT_1],
'failed_devices_up': []}
with mock.patch.object(self.agent.ovsvapp_rpc,
"update_devices_up",
return_value=ret_value
) as update_devices_up, \
mock.patch.object(self.LOG, 'exception'
) as log_exception:
self.agent._update_devices_up()
self.assertTrue(update_devices_up.called)
self.assertFalse(self.agent.devices_up_list)
self.assertFalse(log_exception.called)
def test_update_devices_up_rpc_exception(self):
self.agent.devices_up_list.append(FAKE_PORT_1)
with mock.patch.object(self.agent.ovsvapp_rpc,
"update_devices_up",
side_effect=Exception()
) as update_devices_up, \
mock.patch.object(self.LOG, 'exception'
) as log_exception:
self.agent._update_devices_up()
self.assertTrue(update_devices_up.called)
self.assertEqual([FAKE_PORT_1], self.agent.devices_up_list)
self.assertTrue(log_exception.called)
def test_update_devices_up_partial(self):
self.agent.devices_up_list = [FAKE_PORT_1, FAKE_PORT_2, FAKE_PORT_3]
ret_value = {'devices_up': [FAKE_PORT_1, FAKE_PORT_2],
'failed_devices_up': [FAKE_PORT_3]}
with mock.patch.object(self.agent.ovsvapp_rpc,
"update_devices_up",
return_value=ret_value
) as update_devices_up, \
mock.patch.object(self.LOG, 'exception'
) as log_exception:
self.agent._update_devices_up()
self.assertTrue(update_devices_up.called)
self.assertEqual([FAKE_PORT_3], self.agent.devices_up_list)
self.assertFalse(log_exception.called)
def test_update_devices_down(self):
self.agent.devices_down_list.append(FAKE_PORT_1)
ret_value = {'devices_down': [FAKE_PORT_1],
'failed_devices_down': []}
with mock.patch.object(self.agent.ovsvapp_rpc,
"update_devices_down",
return_value=ret_value
) as update_devices_down, \
mock.patch.object(self.LOG, 'exception'
) as log_exception:
self.agent._update_devices_down()
self.assertTrue(update_devices_down.called)
self.assertFalse(self.agent.devices_down_list)
self.assertFalse(log_exception.called)
def test_update_devices_down_rpc_exception(self):
self.agent.devices_down_list.append(FAKE_PORT_1)
with mock.patch.object(self.agent.ovsvapp_rpc,
"update_devices_down",
side_effect=Exception()
) as update_devices_down, \
mock.patch.object(self.LOG, 'exception'
) as log_exception:
self.agent._update_devices_down()
self.assertTrue(update_devices_down.called)
self.assertEqual([FAKE_PORT_1], self.agent.devices_down_list)
self.assertTrue(log_exception.called)
def test_update_devices_down_partial(self):
self.agent.devices_down_list = [FAKE_PORT_1, FAKE_PORT_2, FAKE_PORT_3]
ret_value = {'devices_down': [FAKE_PORT_1, FAKE_PORT_2],
'failed_devices_down': [FAKE_PORT_3]}
with mock.patch.object(self.agent.ovsvapp_rpc,
"update_devices_down",
return_value=ret_value
) as update_devices_down, \
mock.patch.object(self.LOG, 'exception'
) as log_exception:
self.agent._update_devices_down()
self.assertTrue(update_devices_down.called)
self.assertEqual([FAKE_PORT_3], self.agent.devices_down_list)
self.assertFalse(log_exception.called)
def test_report_state(self):
with mock.patch.object(self.agent.state_rpc,
"report_state") as report_st:
self.agent._report_state()
report_st.assert_called_with(self.agent.context,
self.agent.agent_state,
True)
self.assertNotIn("start_flag", self.agent.agent_state)
self.assertFalse(self.agent.use_call)
self.assertEqual(cfg.CONF.host,
self.agent.agent_state["host"])
def test_report_state_fail(self):
with mock.patch.object(self.agent.state_rpc,
"report_state",
side_effect=Exception()) as mock_report_st, \
mock.patch.object(self.LOG, 'exception'
) as mock_log_exception:
self.agent._report_state()
mock_report_st.assert_called_with(self.agent.context,
self.agent.agent_state,
True)
self.assertTrue(mock_log_exception.called)
    def test_process_event_ignore_event(self):
        """An event type the agent does not handle (VNIC_ADDED) is only
        logged at debug level; no notify path is invoked.
        """
        vm = VM(FAKE_VM, [])
        event = SampleEvent(VNIC_ADDED, FAKE_HOST_1,
                            FAKE_CLUSTER_MOID, vm)
        with mock.patch.object(self.agent,
                               "_notify_device_added") as mock_add_vm, \
                mock.patch.object(self.agent,
                                  "_notify_device_updated") as mock_update_vm, \
                mock.patch.object(self.agent,
                                  "_notify_device_deleted") as mock_del_vm, \
                mock.patch.object(self.LOG, 'debug') as mock_log_debug:
            self.agent.process_event(event)
            self.assertFalse(mock_add_vm.called)
            self.assertFalse(mock_update_vm.called)
            self.assertFalse(mock_del_vm.called)
            self.assertTrue(mock_log_debug.called)
def test_process_event_exception(self):
    """Exceptions raised by an event handler are caught and logged."""
    vm = VM(FAKE_VM, [])
    event = SampleEvent(ovsvapp_const.VM_CREATED,
                        FAKE_HOST_1, FAKE_CLUSTER_MOID, vm)
    with mock.patch.object(self.agent,
                           "_notify_device_added",
                           side_effect=Exception()) as mock_add_vm, \
            mock.patch.object(self.LOG, 'exception'
                              ) as mock_log_exception, \
            mock.patch.object(self.LOG, 'error') as mock_log_error:
        # process_event must swallow the handler failure rather than raise.
        self.agent.process_event(event)
        self.assertTrue(mock_add_vm.called)
        self.assertTrue(mock_log_error.called)
        self.assertTrue(mock_log_exception.called)
def test_process_event_vm_create_nonics_non_host_non_cluster(self):
    """VM_CREATED for a NIC-less VM on another host still notifies add."""
    # The agent's own ESX host differs from the event's source host.
    self.agent.esx_hostname = FAKE_HOST_2
    vm = VM(FAKE_VM, [])
    event = SampleEvent(ovsvapp_const.VM_CREATED,
                        FAKE_HOST_1, FAKE_CLUSTER_MOID, vm)
    self.agent.state = ovsvapp_const.AGENT_RUNNING
    with mock.patch.object(self.agent,
                           "_notify_device_added") as device_added:
        self.agent.process_event(event)
        self.assertTrue(device_added.called)
def test_process_event_vm_create_nonics_non_host(self):
    """VM_CREATED also records the event's cluster moid on the agent."""
    self.agent.esx_hostname = FAKE_HOST_2
    vm = VM(FAKE_VM, [])
    event = SampleEvent(ovsvapp_const.VM_CREATED,
                        FAKE_HOST_1, FAKE_CLUSTER_MOID, vm)
    self.agent.state = ovsvapp_const.AGENT_RUNNING
    with mock.patch.object(self.agent,
                           "_notify_device_added") as device_added:
        self.agent.process_event(event)
        self.assertTrue(device_added.called)
        # The cluster moid carried by the event is adopted by the agent.
        self.assertEqual(FAKE_CLUSTER_MOID, self.agent.cluster_moid)
def test_process_event_vm_create_nics_non_host(self):
    """VNICs of a VM created on another host become 'other' ports."""
    self.agent.esx_hostname = FAKE_HOST_2
    vm_port1 = SamplePort(FAKE_PORT_1)
    vm_port2 = SamplePort(FAKE_PORT_2)
    vm = VM(FAKE_VM, ([vm_port1, vm_port2]))
    event = SampleEvent(ovsvapp_const.VM_CREATED,
                        FAKE_HOST_1, FAKE_CLUSTER_MOID, vm)
    self.agent.state = ovsvapp_const.AGENT_RUNNING
    self.agent.sec_br = mock.Mock()
    with mock.patch.object(self.agent.sec_br, 'dump_flows_for',
                           return_value='mock_flow') as mock_dump_flows:
        self.agent.process_event(event)
        self.assertTrue(mock_dump_flows.called)
    for vnic in vm.vnics:
        # Each vNIC is queued for filtering and tracked as belonging to
        # another host in the cluster, never as a host-local port.
        self.assertIn(vnic.port_uuid, self.agent.devices_to_filter)
        self.assertIn(vnic.port_uuid, self.agent.cluster_other_ports)
        self.assertNotIn(vnic.port_uuid, self.agent.cluster_host_ports)
def test_process_event_vm_create_nics_host(self):
    """VNICs of a VM created on this host become host ports.

    Phase 1: existing security-bridge flows mean no port fetch is needed.
    Phase 2: with no flows present, the agent fetches port details over RPC.
    """
    self.agent.esx_hostname = FAKE_HOST_1
    vm_port1 = SamplePort(FAKE_PORT_1)
    vm_port2 = SamplePort(FAKE_PORT_2)
    vm = VM(FAKE_VM, ([vm_port1, vm_port2]))
    event = SampleEvent(ovsvapp_const.VM_CREATED,
                        FAKE_HOST_1, FAKE_CLUSTER_MOID, vm)
    self.agent.state = ovsvapp_const.AGENT_RUNNING
    self.agent.sec_br = mock.Mock()
    with mock.patch.object(self.agent.sec_br, 'dump_flows_for',
                           return_value='mock_flow') as mock_dump_flows:
        self.agent.process_event(event)
        self.assertTrue(mock_dump_flows.called)
    for vnic in vm.vnics:
        self.assertIn(vnic.port_uuid, self.agent.devices_to_filter)
        self.assertIn(vnic.port_uuid, self.agent.cluster_host_ports)
        self.assertNotIn(vnic.port_uuid, self.agent.cluster_other_ports)
    # Empty flow dump: the agent must recover port info via RPC.
    with mock.patch.object(self.agent.sec_br, 'dump_flows_for',
                           return_value='') as mock_dump_flows, \
            mock.patch.object(self.agent.ovsvapp_rpc,
                              "get_ports_for_device",
                              return_value=True) as mock_get_ports:
        self.agent.process_event(event)
        self.assertTrue(mock_dump_flows.called)
        self.assertTrue(mock_get_ports.called)
def test_process_event_vm_updated_nonhost(self):
    """VM_UPDATED on another host files the port as a cluster-other port."""
    self.agent.esx_hostname = FAKE_HOST_2
    vm_port1 = SamplePort(FAKE_PORT_1)
    port = self._build_port(FAKE_PORT_1)
    self.agent.ports_dict[port['id']] = self.agent._build_port_info(
        port)
    vm = VM(FAKE_VM, [vm_port1])
    # Final True flag marks this update as a host-change (migration) event.
    event = SampleEvent(ovsvapp_const.VM_UPDATED,
                        FAKE_HOST_1, FAKE_CLUSTER_MOID, vm, True)
    self.agent.state = ovsvapp_const.AGENT_RUNNING
    self.agent.tenant_network_types = [p_const.TYPE_VLAN]
    self.agent.process_event(event)
    self.assertIn(FAKE_PORT_1, self.agent.cluster_other_ports)
def test_process_event_vm_delete_hosted_vm_vlan(self):
    """Deleting a hosted VLAN VM untracks its ports and drops flows.

    The network itself must survive (delete_network not called) since
    other ports may still use it; only the per-port flows are removed.
    """
    self.agent.esx_hostname = FAKE_HOST_1
    self.agent.cluster_moid = FAKE_CLUSTER_MOID
    self.agent.cluster_host_ports.add(FAKE_PORT_1)
    self.agent.tenant_network_types = [p_const.TYPE_VLAN]
    port = self._build_port(FAKE_PORT_1)
    br = self._build_phys_brs(port)
    self.agent.ports_dict[port['id']] = self.agent._build_port_info(
        port)
    vm_port = SamplePortUIDMac(FAKE_PORT_1, MAC_ADDRESS)
    vm = VM(FAKE_VM, ([vm_port]))
    event = SampleEvent(ovsvapp_const.VM_DELETED,
                        FAKE_HOST_1, FAKE_CLUSTER_MOID, vm)
    self.agent.state = ovsvapp_const.AGENT_RUNNING
    self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
    self._build_lvm(port)
    self.agent.net_mgr.initialize_driver()
    with mock.patch.object(self.agent.net_mgr.get_driver(),
                           "post_delete_vm",
                           ) as mock_post_del_vm, \
            mock.patch.object(self.LOG, 'debug'), \
            mock.patch.object(self.agent.net_mgr.get_driver(),
                              "delete_network") as mock_del_net:
        self.agent.process_event(event)
        for vnic in vm.vnics:
            self.assertNotIn(vnic.port_uuid, self.agent.cluster_host_ports)
        self.assertTrue(mock_post_del_vm.called)
        self.assertFalse(mock_del_net.called)
        # Physical bridge flows for the deleted port must be removed.
        self.assertTrue(br.delete_flows.called)
def test_process_event_vm_delete_hosted_vm_vxlan(self):
    """Deleting a hosted VXLAN VM untracks its ports via post_delete_vm."""
    self.agent.esx_hostname = FAKE_HOST_1
    self.agent.cluster_host_ports.add(FAKE_PORT_1)
    self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
    port = self._build_port(FAKE_PORT_1)
    # Override the default (VLAN) network type from the port fixture.
    port['network_type'] = p_const.TYPE_VXLAN
    self.agent.ports_dict[port['id']] = self.agent._build_port_info(
        port)
    vm_port = SamplePortUIDMac(FAKE_PORT_1, MAC_ADDRESS)
    vm = VM(FAKE_VM, ([vm_port]))
    event = SampleEvent(ovsvapp_const.VM_DELETED,
                        FAKE_HOST_1, FAKE_CLUSTER_MOID, vm)
    self.agent.state = ovsvapp_const.AGENT_RUNNING
    self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
    self.agent.net_mgr.initialize_driver()
    with mock.patch.object(self.agent.net_mgr.get_driver(),
                           "post_delete_vm",
                           return_value=True) as (post_del_vm):
        self.agent.process_event(event)
        for vnic in vm.vnics:
            self.assertNotIn(vnic.port_uuid, self.agent.cluster_host_ports)
        self.assertTrue(post_del_vm.called)
def test_process_event_vm_delete_non_hosted_vm(self):
    """Deleting a VM hosted elsewhere cleans cluster_other_ports only."""
    self.agent.esx_hostname = FAKE_HOST_2
    self.agent.cluster_other_ports.add(FAKE_PORT_1)
    self.agent.tenant_network_types = [p_const.TYPE_VLAN]
    port = self._build_port(FAKE_PORT_1)
    self.agent.ports_dict[port['id']] = self.agent._build_port_info(
        port)
    vm_port = SamplePortUIDMac(FAKE_PORT_1, MAC_ADDRESS)
    vm = VM(FAKE_VM, ([vm_port]))
    event = SampleEvent(ovsvapp_const.VM_DELETED,
                        FAKE_HOST_1, FAKE_CLUSTER_MOID, vm)
    self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
    self.agent.net_mgr.initialize_driver()
    self.agent.state = ovsvapp_const.AGENT_RUNNING
    with mock.patch.object(self.agent.net_mgr.get_driver(),
                           "post_delete_vm",
                           return_value=True) as mock_post_del_vm, \
            mock.patch.object(self.agent.net_mgr.get_driver(),
                              "delete_network") as mock_del_net:
        self.agent.process_event(event)
        for vnic in vm.vnics:
            self.assertNotIn(vnic.port_uuid,
                             self.agent.cluster_other_ports)
        self.assertTrue(mock_post_del_vm.called)
        # The network outlives the VM; only port tracking is removed.
        self.assertFalse(mock_del_net.called)
def test_notify_device_added_with_hosted_vm(self):
    """A successful port fetch for a hosted VM needs no retry sleep."""
    vm = VM(FAKE_VM, [])
    host = FAKE_HOST_1
    self.agent.esx_hostname = host
    self.agent.state = ovsvapp_const.AGENT_RUNNING
    with mock.patch.object(self.agent.ovsvapp_rpc,
                           "get_ports_for_device",
                           return_value=True) as mock_get_ports, \
            mock.patch.object(self.LOG, 'exception'
                              ) as mock_log_exception, \
            mock.patch.object(time, "sleep") as mock_time_sleep:
        self.agent._notify_device_added(vm, host)
        self.assertTrue(mock_get_ports.called)
        # RPC returned True on the first try: no retry backoff, no error.
        self.assertFalse(mock_time_sleep.called)
        self.assertFalse(mock_log_exception.called)
def test_notify_device_added_rpc_exception(self):
    """An RPC failure in _notify_device_added raises the agent error."""
    vm = VM(FAKE_VM, [])
    host = FAKE_HOST_1
    self.agent.esx_hostname = host
    self.agent.state = ovsvapp_const.AGENT_RUNNING
    with mock.patch.object(self.agent.ovsvapp_rpc,
                           "get_ports_for_device",
                           side_effect=Exception()) as mock_get_ports, \
            mock.patch.object(self.LOG, 'exception'
                              ) as mock_log_exception, \
            mock.patch.object(time, "sleep") as mock_time_sleep:
        # The raw exception is wrapped in OVSvAppNeutronAgentError.
        self.assertRaises(
            error.OVSvAppNeutronAgentError,
            self.agent._notify_device_added, vm, host)
        self.assertTrue(mock_log_exception.called)
        self.assertTrue(mock_get_ports.called)
        self.assertFalse(mock_time_sleep.called)
def test_notify_device_added_with_retry(self):
    """A False port-fetch result triggers a sleep-and-retry cycle."""
    vm = VM(FAKE_VM, [])
    host = FAKE_HOST_1
    self.agent.esx_hostname = host
    self.agent.state = ovsvapp_const.AGENT_RUNNING
    with mock.patch.object(self.agent.ovsvapp_rpc,
                           "get_ports_for_device",
                           return_value=False) as mock_get_ports, \
            mock.patch.object(self.LOG, 'exception'
                              ) as mock_log_exception, \
            mock.patch.object(time, "sleep") as mock_time_sleep:
        self.agent._notify_device_added(vm, host)
        self.assertTrue(mock_get_ports.called)
        # RPC returned False: the agent backs off before retrying.
        self.assertTrue(mock_time_sleep.called)
        self.assertFalse(mock_log_exception.called)
def test_notify_device_updated_migration_vlan(self):
    """Migration away from this host drops the port from host ports."""
    host = FAKE_HOST_1
    self.agent.esx_hostname = host
    vm_port1 = SamplePort(FAKE_PORT_1)
    vm = VM(FAKE_VM, [vm_port1])
    port = self._build_port(FAKE_PORT_1)
    self._build_phys_brs(port)
    self.agent.ports_dict[port['id']] = self.agent._build_port_info(port)
    self._build_lvm(port)
    self.agent.state = ovsvapp_const.AGENT_RUNNING
    self.agent.tenant_network_types = [p_const.TYPE_VLAN]
    self.agent._add_ports_to_host_ports([FAKE_PORT_1])
    with mock.patch.object(self.agent.ovsvapp_rpc,
                           "update_device_binding"
                           ) as mock_update_device_binding, \
            mock.patch.object(self.LOG, 'exception'
                              ) as mock_log_exception:
        # The VM now reports FAKE_HOST_2 as its host: an outbound move.
        self.agent._notify_device_updated(vm, FAKE_HOST_2, True)
        self.assertNotIn(FAKE_PORT_1, self.agent.cluster_host_ports)
        # Binding updates are the destination agent's job, not ours.
        self.assertFalse(mock_update_device_binding.called)
        self.assertFalse(mock_log_exception.called)
def test_notify_device_update_not_found(self):
    """Drop flows are added only once the port is known in ports_dict.

    Phase 1: the port is absent from ports_dict, so no drop flows.
    Phase 2: after registering the port, drop flows are installed.
    """
    host = FAKE_HOST_1
    self.agent.esx_hostname = host
    vm_port1 = SamplePort(FAKE_PORT_1)
    vm = VM(FAKE_VM, [vm_port1])
    port = self._build_port(FAKE_PORT_1)
    self._build_phys_brs(port)
    self._build_lvm(port)
    self.agent.state = ovsvapp_const.AGENT_RUNNING
    self.agent.tenant_network_types = [p_const.TYPE_VLAN]
    br = self.agent.phys_brs[port['physical_network']]['br']
    with mock.patch.object(self.agent.ovsvapp_rpc,
                           "update_device_binding"
                           ):
        self.agent._notify_device_updated(vm, host, True)
        self.assertFalse(br.add_drop_flows.called)
    self.agent.ports_dict[port['id']] = self.agent._build_port_info(port)
    with mock.patch.object(self.agent.ovsvapp_rpc,
                           "update_device_binding"
                           ):
        self.agent._notify_device_updated(vm, host, True)
        self.assertTrue(br.add_drop_flows.called)
def test_notify_device_updated_host_vlan(self):
    """An inbound VLAN migration binds the device and installs flows."""
    host = FAKE_HOST_1
    self.agent.esx_hostname = host
    vm_port1 = SamplePort(FAKE_PORT_1)
    vm = VM(FAKE_VM, [vm_port1])
    port = self._build_port(FAKE_PORT_1)
    self._build_phys_brs(port)
    self.agent.ports_dict[port['id']] = self.agent._build_port_info(port)
    self._build_lvm(port)
    self.agent.state = ovsvapp_const.AGENT_RUNNING
    self.agent.tenant_network_types = [p_const.TYPE_VLAN]
    br = self.agent.phys_brs[port['physical_network']]['br']
    with mock.patch.object(self.agent.ovsvapp_rpc,
                           "update_device_binding"
                           ) as mock_update_device_binding:
        # Event host matches this agent's ESX host: we own the port now.
        self.agent._notify_device_updated(vm, host, True)
        self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
        self.assertTrue(mock_update_device_binding.called)
        self.assertTrue(br.add_flows.called)
def test_notify_device_updated_vlan_rpc_exception(self):
    """A binding-RPC failure is logged and re-raised as an agent error."""
    host = FAKE_HOST_1
    self.agent.esx_hostname = host
    vm_port1 = SamplePort(FAKE_PORT_1)
    vm = VM(FAKE_VM, [vm_port1])
    port = self._build_port(FAKE_PORT_1)
    br = self._build_phys_brs(port)
    self.agent.ports_dict[port['id']] = self.agent._build_port_info(port)
    self.agent.state = ovsvapp_const.AGENT_RUNNING
    self.agent.tenant_network_types = [p_const.TYPE_VLAN]
    with mock.patch.object(self.agent.ovsvapp_rpc,
                           "update_device_binding",
                           side_effect=Exception()
                           ) as mock_update_device_binding, \
            mock.patch.object(self.LOG, 'exception'
                              ) as mock_log_exception:
        self.assertRaises(
            error.OVSvAppNeutronAgentError,
            self.agent._notify_device_updated, vm, host, True)
        # Port tracking and flow setup happen before the failing RPC.
        self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
        self.assertTrue(br.add_flows.called)
        self.assertTrue(mock_update_device_binding.called)
        self.assertTrue(mock_log_exception.called)
def test_notify_device_updated_host_vlan_multiple_nic(self):
    """A multi-NIC VM triggers one binding RPC but flows on each bridge."""
    host = FAKE_HOST_1
    self.agent.esx_hostname = host
    vm_port1 = SamplePort(FAKE_PORT_1)
    vm_port2 = SamplePort(FAKE_PORT_2)
    vm = VM(FAKE_VM, ([vm_port1, vm_port2]))
    port1 = self._build_port(FAKE_PORT_1)
    port2 = self._build_port(FAKE_PORT_2)
    br1 = self._build_phys_brs(port1)
    br2 = self._build_phys_brs(port2)
    self.agent.ports_dict[port1['id']] = self.agent._build_port_info(port1)
    self.agent.ports_dict[port2['id']] = self.agent._build_port_info(port2)
    self._build_lvm(port1)
    self._build_lvm(port2)
    self.agent.state = ovsvapp_const.AGENT_RUNNING
    self.agent.tenant_network_types = [p_const.TYPE_VLAN]
    with mock.patch.object(self.agent.ovsvapp_rpc,
                           "update_device_binding"
                           ) as mock_update_device_binding, \
            mock.patch.object(self.LOG, 'exception'
                              ) as mock_log_exception:
        self.agent._notify_device_updated(vm, host, True)
        self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
        self.assertTrue(mock_update_device_binding.called)
        self.assertFalse(mock_log_exception.called)
        # Binding is per-device, so exactly one RPC despite two NICs.
        self.assertEqual(1, mock_update_device_binding.call_count)
        self.assertTrue(br1.add_flows.called)
        self.assertTrue(br2.add_flows.called)
def _build_lvm(self, port):
    """Register the port's network with the VLAN manager.

    A mapping that already exists is silently ignored so fixtures can
    be built repeatedly within a test.
    """
    try:
        self.agent.vlan_manager.add(
            port['network_id'], port['lvid'], port['network_type'],
            port['physical_network'], '1234')
    except vlanmanager.MappingAlreadyExists:
        pass
def test_notify_device_updated_host_vxlan(self):
    """An inbound VXLAN migration binds the device without errors."""
    host = FAKE_HOST_1
    self.agent.esx_hostname = host
    vm_port1 = SamplePort(FAKE_PORT_1)
    port1 = self._build_port(FAKE_PORT_1)
    port1['network_type'] = p_const.TYPE_VXLAN
    self.agent.ports_dict[port1['id']] = self.agent._build_port_info(port1)
    vm = VM(FAKE_VM, [vm_port1])
    self.agent.state = ovsvapp_const.AGENT_RUNNING
    self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
    with mock.patch.object(self.agent.ovsvapp_rpc,
                           "update_device_binding"
                           ) as mock_update_device_binding, \
            mock.patch.object(self.LOG, 'exception'
                              ) as mock_log_exception:
        self.agent._notify_device_updated(vm, host, True)
        self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
        self.assertTrue(mock_update_device_binding.called)
        self.assertFalse(mock_log_exception.called)
def test_notify_device_updated_vxlan_rpc_exception(self):
    """A VXLAN binding-RPC failure raises the wrapped agent error."""
    host = FAKE_HOST_1
    self.agent.esx_hostname = host
    vm_port1 = SamplePort(FAKE_PORT_1)
    vm = VM(FAKE_VM, [vm_port1])
    self.agent.state = ovsvapp_const.AGENT_RUNNING
    self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
    with mock.patch.object(self.agent.ovsvapp_rpc,
                           "update_device_binding",
                           side_effect=Exception()
                           ) as mock_update_device_binding, \
            mock.patch.object(self.LOG, 'exception'
                              ) as mock_log_exception:
        self.assertRaises(
            error.OVSvAppNeutronAgentError,
            self.agent._notify_device_updated, vm, host, True)
        # The port is tracked before the RPC, so it remains tracked.
        self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
        self.assertTrue(mock_update_device_binding.called)
        self.assertTrue(mock_log_exception.called)
def test_map_port_to_common_model_vlan(self):
    """A VLAN port maps to a network named <network_id>-<cluster_moid>."""
    src_port = self._build_port(FAKE_PORT_1)
    self.agent.cluster_moid = FAKE_CLUSTER_MOID
    self.agent.tenant_network_types = [p_const.TYPE_VLAN]
    network, port = self.agent._map_port_to_common_model(src_port)
    self.assertEqual(src_port['network_id'] + "-" + FAKE_CLUSTER_MOID,
                     network.name)
    self.assertEqual(src_port['id'], port.uuid)
def test_map_port_to_common_model_vxlan(self):
    """A VXLAN port (with local vlan id) maps to the same naming scheme."""
    src_port = self._build_port(FAKE_PORT_1)
    self.agent.cluster_moid = FAKE_CLUSTER_MOID
    self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
    # The second argument is the local VLAN id used for VXLAN networks.
    network, port = self.agent._map_port_to_common_model(src_port, 1)
    self.assertEqual(src_port['network_id'] + "-" + FAKE_CLUSTER_MOID,
                     network.name)
    self.assertEqual(src_port['id'], port.uuid)
def test_device_create_cluster_mismatch(self):
    """device_create for a foreign cluster is logged and skipped."""
    self.agent.vcenter_id = FAKE_VCENTER
    self.agent.cluster_id = FAKE_CLUSTER_2
    patch_create = mock.patch.object(self.agent, '_process_create_ports',
                                     return_value=True)
    patch_debug = mock.patch.object(self.LOG, 'debug')
    with patch_create as mock_create_ports, patch_debug as mock_debug:
        self.agent.device_create(FAKE_CONTEXT, device=DEVICE)
    self.assertTrue(mock_debug.called)
    # The device belongs to another cluster: no port processing happens.
    self.assertFalse(mock_create_ports.called)
def test_device_create_non_hosted_vm(self):
    """device_create for a VM on another host tracks it as 'other'.

    The port is still filtered and the local VLAN provisioned, but it
    is never queued for device-up reporting by this agent.
    """
    ports = [self._build_port(FAKE_PORT_1)]
    self._build_phys_brs(ports[0])
    self.agent.vcenter_id = FAKE_VCENTER
    self.agent.cluster_id = FAKE_CLUSTER_1
    self.agent.esx_hostname = FAKE_HOST_2
    self.agent.tenant_network_types = [p_const.TYPE_VLAN]
    self.agent.devices_up_list = []
    self.agent.vlan_manager.mapping = {}
    with mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
                           ) as mock_add_devices_fn, \
            mock.patch.object(self.agent.sg_agent, 'ovsvapp_sg_update'
                              ) as mock_sg_update_fn, \
            mock.patch.object(self.agent.sg_agent, 'expand_sg_rules',
                              return_value=FAKE_SG_RULES
                              ) as mock_expand_sg_rules, \
            mock.patch.object(self.agent, '_provision_local_vlan'
                              ) as mock_prov_local_vlan, \
            mock.patch.object(self.LOG, 'debug') as mock_logger_debug:
        self.agent.device_create(FAKE_CONTEXT,
                                 device=DEVICE,
                                 ports=ports,
                                 sg_rules=mock.MagicMock())
        self.assertTrue(mock_logger_debug.called)
        mock_add_devices_fn.assert_called_with(ports)
        self.assertIn(FAKE_PORT_1, self.agent.cluster_other_ports)
        self.assertNotIn(FAKE_PORT_1, self.agent.cluster_host_ports)
        # Not our host, so nothing is queued for device-up reporting.
        self.assertFalse(self.agent.devices_up_list)
        self.assertTrue(mock_sg_update_fn.called)
        self.assertTrue(mock_expand_sg_rules.called)
        self.assertTrue(mock_prov_local_vlan.called)
def test_device_create_hosted_vm_vlan(self):
    """device_create for a VLAN VM on this host queues it as up."""
    ports = [self._build_port(FAKE_PORT_1)]
    self._build_phys_brs(ports[0])
    self.agent.vcenter_id = FAKE_VCENTER
    self.agent.cluster_id = FAKE_CLUSTER_1
    self.agent.cluster_moid = FAKE_CLUSTER_MOID
    self.agent.esx_hostname = FAKE_HOST_1
    self.agent.tenant_network_types = [p_const.TYPE_VLAN]
    self.agent.devices_up_list = []
    self.agent.vlan_manager.mapping = {}
    self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
    self.agent.net_mgr.initialize_driver()
    with mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
                           ) as mock_add_devices_fn, \
            mock.patch.object(self.agent.sg_agent, 'ovsvapp_sg_update'
                              ) as mock_sg_update_fn, \
            mock.patch.object(self.agent.sg_agent, 'expand_sg_rules',
                              return_value=FAKE_SG_RULES
                              ) as mock_expand_sg_rules, \
            mock.patch.object(self.agent, '_provision_local_vlan'
                              ) as mock_prov_local_vlan, \
            mock.patch.object(self.LOG, 'debug') as mock_logger_debug:
        self.agent.device_create(FAKE_CONTEXT,
                                 device=DEVICE,
                                 ports=ports,
                                 sg_rules=mock.MagicMock())
        self.assertTrue(mock_logger_debug.called)
        self.assertNotIn(FAKE_PORT_1, self.agent.cluster_other_ports)
        self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
        # A hosted port is queued for device-up reporting.
        self.assertEqual([FAKE_PORT_1], self.agent.devices_up_list)
        mock_add_devices_fn.assert_called_with(ports)
        self.assertTrue(mock_sg_update_fn.called)
        self.assertTrue(mock_expand_sg_rules.called)
        self.assertTrue(mock_prov_local_vlan.called)
def test_device_create_hosted_vm_vlan_sg_rule_missing(self):
    """Missing SG rules defer the port to devices_to_filter (no update)."""
    ports = [self._build_port(FAKE_PORT_1)]
    self._build_phys_brs(ports[0])
    self.agent.vcenter_id = FAKE_VCENTER
    self.agent.cluster_id = FAKE_CLUSTER_1
    self.agent.cluster_moid = FAKE_CLUSTER_MOID
    self.agent.esx_hostname = FAKE_HOST_1
    self.agent.tenant_network_types = [p_const.TYPE_VLAN]
    self.agent.devices_up_list = []
    self.agent.vlan_manager.mapping = {}
    self.agent.devices_to_filter = set()
    self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
    self.agent.net_mgr.initialize_driver()
    with mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
                           ) as mock_add_devices_fn, \
            mock.patch.object(self.agent.sg_agent, 'ovsvapp_sg_update'
                              ) as mock_sg_update_fn, \
            mock.patch.object(self.agent.sg_agent, 'expand_sg_rules',
                              return_value=FAKE_SG_RULES_MISSING
                              ) as mock_expand_sg_rules, \
            mock.patch.object(self.agent, '_provision_local_vlan'
                              ) as mock_prov_local_vlan, \
            mock.patch.object(self.LOG, 'debug') as mock_logger_debug:
        self.agent.device_create(FAKE_CONTEXT,
                                 device=DEVICE,
                                 ports=ports,
                                 sg_rules=mock.MagicMock())
        self.assertTrue(mock_logger_debug.called)
        self.assertNotIn(FAKE_PORT_1, self.agent.cluster_other_ports)
        self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
        self.assertEqual([FAKE_PORT_1], self.agent.devices_up_list)
        # Incomplete rules: port queued for later filtering instead of
        # pushing a (partial) security-group update now.
        self.assertIn(FAKE_PORT_1, self.agent.devices_to_filter)
        mock_add_devices_fn.assert_called_with(ports)
        self.assertFalse(mock_sg_update_fn.called)
        self.assertTrue(mock_expand_sg_rules.called)
        self.assertTrue(mock_prov_local_vlan.called)
def test_device_create_hosted_vm_vlan_sg_rule_partial_missing(self):
    """Partially missing SG rules are treated like fully missing ones."""
    ports = [self._build_port(FAKE_PORT_1)]
    self._build_phys_brs(ports[0])
    self.agent.vcenter_id = FAKE_VCENTER
    self.agent.cluster_id = FAKE_CLUSTER_1
    self.agent.cluster_moid = FAKE_CLUSTER_MOID
    self.agent.esx_hostname = FAKE_HOST_1
    self.agent.tenant_network_types = [p_const.TYPE_VLAN]
    self.agent.devices_up_list = []
    self.agent.devices_to_filter = set()
    self.agent.vlan_manager.mapping = {}
    self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
    self.agent.net_mgr.initialize_driver()
    with mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
                           ) as mock_add_devices_fn, \
            mock.patch.object(self.agent.sg_agent, 'ovsvapp_sg_update'
                              ) as mock_sg_update_fn, \
            mock.patch.object(self.agent.sg_agent, 'expand_sg_rules',
                              return_value=FAKE_SG_RULES_PARTIAL
                              ) as mock_expand_sg_rules, \
            mock.patch.object(self.agent, '_provision_local_vlan'
                              ) as mock_prov_local_vlan, \
            mock.patch.object(self.LOG, 'debug') as mock_logger_debug:
        self.agent.device_create(FAKE_CONTEXT,
                                 device=DEVICE,
                                 ports=ports,
                                 sg_rules=mock.MagicMock())
        self.assertTrue(mock_logger_debug.called)
        self.assertNotIn(FAKE_PORT_1, self.agent.cluster_other_ports)
        self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
        self.assertEqual([FAKE_PORT_1], self.agent.devices_up_list)
        # Partial rules also defer the port rather than apply a subset.
        self.assertIn(FAKE_PORT_1, self.agent.devices_to_filter)
        mock_add_devices_fn.assert_called_with(ports)
        self.assertFalse(mock_sg_update_fn.called)
        self.assertTrue(mock_expand_sg_rules.called)
        self.assertTrue(mock_prov_local_vlan.called)
def test_device_create_hosted_vm_vxlan(self):
    """A hosted VXLAN port is reported up directly via plugin RPC."""
    port = self._build_port(FAKE_PORT_1)
    port['network_type'] = p_const.TYPE_VXLAN
    ports = [port]
    self.agent.vlan_manager.mapping = {}
    self.agent.vcenter_id = FAKE_VCENTER
    self.agent.cluster_id = FAKE_CLUSTER_1
    self.agent.cluster_moid = FAKE_CLUSTER_MOID
    self.agent.esx_hostname = FAKE_HOST_1
    self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
    self.agent.vlan_manager.mapping = {}
    self.agent.devices_to_filter = set()
    self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
    self.agent.net_mgr.initialize_driver()
    with mock.patch.object(self.agent, '_provision_local_vlan'
                           ) as mock_prov_local_vlan, \
            mock.patch.object(self.agent.sg_agent,
                              'add_devices_to_filter'
                              ) as mock_add_devices_fn, \
            mock.patch.object(self.agent.sg_agent, 'ovsvapp_sg_update'
                              ) as mock_sg_update_fn, \
            mock.patch.object(self.agent.sg_agent, 'expand_sg_rules',
                              return_value=FAKE_SG_RULES
                              ) as mock_expand_sg_rules, \
            mock.patch.object(self.agent.plugin_rpc, 'update_device_up'
                              ) as mock_update_device_up, \
            mock.patch.object(self.LOG, 'debug') as mock_logger_debug:
        self.agent.device_create(FAKE_CONTEXT,
                                 device=DEVICE,
                                 ports=ports,
                                 sg_rules=mock.MagicMock())
        self.assertTrue(mock_prov_local_vlan.called)
        self.assertTrue(mock_logger_debug.called)
        self.assertNotIn(FAKE_PORT_1, self.agent.cluster_other_ports)
        # Complete SG rules: nothing deferred to the filter queue.
        self.assertNotIn(FAKE_PORT_1, self.agent.devices_to_filter)
        self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
        mock_add_devices_fn.assert_called_with(ports)
        self.assertTrue(mock_sg_update_fn.called)
        self.assertTrue(mock_expand_sg_rules.called)
        self.assertTrue(mock_update_device_up.called)
def test_device_create_hosted_vm_vxlan_sg_rule_missing(self):
    """Missing SG rules on VXLAN defer filtering but still report up."""
    port = self._build_port(FAKE_PORT_1)
    port['network_type'] = p_const.TYPE_VXLAN
    ports = [port]
    self.agent.vcenter_id = FAKE_VCENTER
    self.agent.cluster_id = FAKE_CLUSTER_1
    self.agent.cluster_moid = FAKE_CLUSTER_MOID
    self.agent.esx_hostname = FAKE_HOST_1
    self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
    self.agent.vlan_manager.mapping = {}
    self.agent.devices_to_filter = set()
    self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
    self.agent.net_mgr.initialize_driver()
    with mock.patch.object(self.agent, '_provision_local_vlan'
                           ) as mock_prov_local_vlan, \
            mock.patch.object(self.agent.sg_agent,
                              'add_devices_to_filter'
                              ) as mock_add_devices_fn, \
            mock.patch.object(self.agent.sg_agent, 'ovsvapp_sg_update'
                              ) as mock_sg_update_fn, \
            mock.patch.object(self.agent.sg_agent, 'expand_sg_rules',
                              return_value=FAKE_SG_RULES_MISSING
                              ) as mock_expand_sg_rules, \
            mock.patch.object(self.agent.plugin_rpc, 'update_device_up'
                              ) as mock_update_device_up, \
            mock.patch.object(self.LOG, 'debug') as mock_logger_debug:
        self.agent.device_create(FAKE_CONTEXT,
                                 device=DEVICE,
                                 ports=ports,
                                 sg_rules=mock.MagicMock())
        self.assertTrue(mock_prov_local_vlan.called)
        self.assertTrue(mock_logger_debug.called)
        self.assertNotIn(FAKE_PORT_1, self.agent.cluster_other_ports)
        # Incomplete rules: queue the port for later filtering...
        self.assertIn(FAKE_PORT_1, self.agent.devices_to_filter)
        self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
        mock_add_devices_fn.assert_called_with(ports)
        # ...and skip the SG update, but still mark the device up.
        self.assertFalse(mock_sg_update_fn.called)
        self.assertTrue(mock_expand_sg_rules.called)
        self.assertTrue(mock_update_device_up.called)
def test_device_create_hosted_vm_create_port_exception(self):
    """A driver create_port failure surfaces as an agent error."""
    ports = [self._build_port(FAKE_PORT_1)]
    self.agent.vcenter_id = FAKE_VCENTER
    self.agent.cluster_id = FAKE_CLUSTER_1
    self.agent.cluster_moid = FAKE_CLUSTER_MOID
    self.agent.esx_hostname = FAKE_HOST_1
    self.agent.tenant_network_types = [p_const.TYPE_VLAN]
    self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
    self.agent.net_mgr.initialize_driver()
    # Force the network driver's create_port to blow up.
    self.agent.net_mgr.get_driver().create_port = mock.Mock(
        side_effect=Exception())
    with mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
                           ), \
            mock.patch.object(self.agent, '_provision_local_vlan'
                              ), \
            mock.patch.object(self.agent.sg_agent, 'ovsvapp_sg_update'
                              ) as mock_sg_update_fn, \
            mock.patch.object(self.agent.sg_agent, 'expand_sg_rules',
                              return_value=FAKE_SG_RULES
                              ) as mock_expand_sg_rules, \
            mock.patch.object(self.LOG, 'debug') as mock_logger_debug, \
            mock.patch.object(self.LOG, 'exception') as mock_log_excep:
        self.assertRaises(
            error.OVSvAppNeutronAgentError,
            self.agent.device_create,
            FAKE_CONTEXT, device=DEVICE,
            ports=ports, sg_rules=mock.MagicMock())
        self.assertTrue(mock_logger_debug.called)
        self.assertNotIn(FAKE_PORT_1, self.agent.cluster_other_ports)
        # Port tracking happens before the failing driver call.
        self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
        self.assertFalse(mock_sg_update_fn.called)
        self.assertTrue(mock_expand_sg_rules.called)
        self.assertTrue(mock_log_excep.called)
def test_port_update_admin_state_up(self):
    """port_update with admin_state_up=True queues the device as up.

    The updated admin state must be copied into the cached port info and
    the port id appended to the agent's devices_up_list.
    """
    port = self._build_port(FAKE_PORT_1)
    self.agent.ports_dict[port['id']] = self.agent._build_port_info(
        port)
    self.agent.cluster_moid = FAKE_CLUSTER_MOID
    self.agent.cluster_host_ports = set([port['id']])
    self.agent.tenant_network_types = [p_const.TYPE_VLAN]
    self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
    self.agent.net_mgr.initialize_driver()
    updated_port = self._build_update_port(FAKE_PORT_1)
    updated_port['admin_state_up'] = True
    # Reset the AGENT's pending list (the original test mistakenly set
    # self.devices_up_list on the test case, leaving agent state dirty),
    # so the assertion below checks exactly what this update queued.
    self.agent.devices_up_list = []
    neutron_port = {'port': updated_port,
                    'segmentation_id': port['segmentation_id']}
    with mock.patch.object(self.LOG, 'exception'
                           ) as mock_log_exception, \
            mock.patch.object(self.LOG, 'debug') as mock_logger_debug:
        self.agent.port_update(FAKE_CONTEXT, **neutron_port)
        self.assertEqual(neutron_port['port']['admin_state_up'],
                         self.agent.ports_dict[port['id']].
                         admin_state_up)
        self.assertEqual([FAKE_PORT_1], self.agent.devices_up_list)
        self.assertFalse(mock_log_exception.called)
        self.assertTrue(mock_logger_debug.called)
def test_device_update_maintenance_mode(self):
    """device_update powers off the OVSvApp VM and enters maintenance.

    With esx_maintenance_mode=True the agent powers off the appliance VM,
    puts the host into maintenance mode (never shutdown mode), and
    releases the cluster lock with success=True.
    """
    kwargs = {'device_data': {'ovsvapp_agent': 'fake_agent_host_1',
                              'esx_host_name': FAKE_HOST_1,
                              'assigned_agent_host': FAKE_HOST_2}}
    self.agent.hostname = FAKE_HOST_2
    self.agent.esx_maintenance_mode = True
    self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
    self.agent.net_mgr.initialize_driver()
    self.agent.net_mgr.get_driver().session = "fake_session"
    self.agent.cluster_id = FAKE_CLUSTER_1
    self.agent.vcenter_id = FAKE_VCENTER
    with mock.patch.object(resource_util,
                           "get_vm_mor_by_name",
                           return_value="vm_mor") as vm_mor_by_name, \
            mock.patch.object(resource_util,
                              "get_host_mor_by_name",
                              return_value="host_mor"
                              ) as host_mor_by_name, \
            mock.patch.object(resource_util,
                              "set_vm_poweroff") as power_off, \
            mock.patch.object(resource_util,
                              "set_host_into_maintenance_mode"
                              ) as maintenance_mode, \
            mock.patch.object(resource_util,
                              "set_host_into_shutdown_mode"
                              ) as shutdown_mode, \
            mock.patch.object(self.agent.ovsvapp_rpc,
                              "update_cluster_lock") as cluster_lock, \
            mock.patch.object(self.LOG, 'exception') as log_exception, \
            mock.patch.object(time, 'sleep'):
        self.agent.device_update(FAKE_CONTEXT, **kwargs)
        self.assertTrue(vm_mor_by_name.called)
        self.assertTrue(host_mor_by_name.called)
        self.assertTrue(power_off.called)
        self.assertTrue(maintenance_mode.called)
        self.assertFalse(shutdown_mode.called)
        self.assertTrue(cluster_lock.called)
        cluster_lock.assert_called_with(self.agent.context,
                                        cluster_id=self.agent.cluster_id,
                                        vcenter_id=self.agent.vcenter_id,
                                        success=True)
        self.assertFalse(log_exception.called)
def test_device_update_shutdown_mode(self):
    """With esx_maintenance_mode=False the host is shut down instead.

    No power-off or maintenance-mode call is made; the host goes into
    shutdown mode and the cluster lock is released with success=True.
    """
    kwargs = {'device_data': {'ovsvapp_agent': 'fake_agent_host_1',
                              'esx_host_name': FAKE_HOST_1,
                              'assigned_agent_host': FAKE_HOST_2}}
    self.agent.hostname = FAKE_HOST_2
    self.agent.esx_maintenance_mode = False
    self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
    self.agent.net_mgr.initialize_driver()
    self.agent.net_mgr.get_driver().session = "fake_session"
    self.agent.cluster_id = FAKE_CLUSTER_1
    self.agent.vcenter_id = FAKE_VCENTER
    with mock.patch.object(resource_util,
                           "get_vm_mor_by_name",
                           return_value="vm_mor") as vm_mor_by_name, \
            mock.patch.object(resource_util,
                              "get_host_mor_by_name",
                              return_value="host_mor"
                              ) as host_mor_by_name, \
            mock.patch.object(resource_util,
                              "set_vm_poweroff") as power_off, \
            mock.patch.object(resource_util,
                              "set_host_into_maintenance_mode"
                              ) as maintenance_mode, \
            mock.patch.object(resource_util,
                              "set_host_into_shutdown_mode"
                              ) as shutdown_mode, \
            mock.patch.object(self.agent.ovsvapp_rpc,
                              "update_cluster_lock") as cluster_lock, \
            mock.patch.object(self.LOG, 'exception') as log_exception, \
            mock.patch.object(time, 'sleep'):
        self.agent.device_update(FAKE_CONTEXT, **kwargs)
        self.assertTrue(vm_mor_by_name.called)
        self.assertTrue(host_mor_by_name.called)
        self.assertFalse(power_off.called)
        self.assertFalse(maintenance_mode.called)
        self.assertTrue(shutdown_mode.called)
        self.assertTrue(cluster_lock.called)
        cluster_lock.assert_called_with(self.agent.context,
                                        cluster_id=self.agent.cluster_id,
                                        vcenter_id=self.agent.vcenter_id,
                                        success=True)
        self.assertFalse(log_exception.called)
# NOTE: "alreadly" in the method name is a typo for "already"; kept as-is
# because the name is the test's discovered identifier.
def test_device_update_ovsvapp_alreadly_powered_off(self):
    """A power-off failure is logged but maintenance mode still proceeds.

    If the appliance VM is already powered off, set_vm_poweroff raises;
    the agent logs it, continues into maintenance mode, and still
    releases the cluster lock with success=True.
    """
    kwargs = {'device_data': {'ovsvapp_agent': 'fake_agent_host_1',
                              'esx_host_name': FAKE_HOST_1,
                              'assigned_agent_host': FAKE_HOST_2}}
    self.agent.hostname = FAKE_HOST_2
    self.agent.esx_maintenance_mode = True
    self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
    self.agent.net_mgr.initialize_driver()
    self.agent.net_mgr.get_driver().session = "fake_session"
    self.agent.cluster_id = FAKE_CLUSTER_1
    self.agent.vcenter_id = FAKE_VCENTER
    with mock.patch.object(resource_util,
                           "get_vm_mor_by_name",
                           return_value="vm_mor") as vm_mor_by_name, \
            mock.patch.object(resource_util,
                              "get_host_mor_by_name",
                              return_value="host_mor"
                              ) as host_mor_by_name, \
            mock.patch.object(resource_util,
                              "set_vm_poweroff",
                              side_effect=Exception()) as power_off, \
            mock.patch.object(resource_util,
                              "set_host_into_maintenance_mode"
                              ) as maintenance_mode, \
            mock.patch.object(resource_util,
                              "set_host_into_shutdown_mode"
                              ) as shutdown_mode, \
            mock.patch.object(self.agent.ovsvapp_rpc,
                              "update_cluster_lock") as cluster_lock, \
            mock.patch.object(self.LOG, 'exception') as log_exception, \
            mock.patch.object(time, 'sleep'):
        self.agent.device_update(FAKE_CONTEXT, **kwargs)
        self.assertTrue(vm_mor_by_name.called)
        self.assertTrue(host_mor_by_name.called)
        self.assertTrue(power_off.called)
        self.assertTrue(maintenance_mode.called)
        self.assertFalse(shutdown_mode.called)
        self.assertTrue(cluster_lock.called)
        cluster_lock.assert_called_with(self.agent.context,
                                        cluster_id=self.agent.cluster_id,
                                        vcenter_id=self.agent.vcenter_id,
                                        success=True)
        self.assertTrue(log_exception.called)
def test_device_update_maintenance_mode_exception(self):
    """Failover device_update when entering maintenance mode fails.

    Both set_vm_poweroff and set_host_into_maintenance_mode raise, so
    the agent retries (hence time.sleep) and finally releases the
    cluster lock with success=False while logging the exceptions.
    """
    kwargs = {'device_data': {'ovsvapp_agent': 'fake_agent_host_1',
                              'esx_host_name': FAKE_HOST_1,
                              'assigned_agent_host': FAKE_HOST_2}}
    self.agent.hostname = FAKE_HOST_2
    self.agent.esx_maintenance_mode = True
    self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
    self.agent.net_mgr.initialize_driver()
    self.agent.net_mgr.get_driver().session = "fake_session"
    self.agent.cluster_id = FAKE_CLUSTER_1
    self.agent.vcenter_id = FAKE_VCENTER
    with mock.patch.object(resource_util,
                           "get_vm_mor_by_name",
                           return_value="vm_mor") as vm_mor_by_name, \
            mock.patch.object(resource_util,
                              "get_host_mor_by_name",
                              return_value="host_mor"
                              ) as host_mor_by_name, \
            mock.patch.object(resource_util,
                              "set_vm_poweroff",
                              side_effect=Exception()) as power_off, \
            mock.patch.object(resource_util,
                              "set_host_into_maintenance_mode",
                              side_effect=Exception()
                              ) as maintenance_mode, \
            mock.patch.object(resource_util,
                              "set_host_into_shutdown_mode"
                              ) as shutdown_mode, \
            mock.patch.object(self.agent.ovsvapp_rpc,
                              "update_cluster_lock") as cluster_lock, \
            mock.patch.object(self.LOG, 'exception') as log_exception, \
            mock.patch.object(time, 'sleep') as time_sleep:
        self.agent.device_update(FAKE_CONTEXT, **kwargs)
        self.assertTrue(vm_mor_by_name.called)
        self.assertTrue(host_mor_by_name.called)
        self.assertTrue(power_off.called)
        self.assertTrue(maintenance_mode.called)
        self.assertFalse(shutdown_mode.called)
        self.assertTrue(cluster_lock.called)
        # the lock is released but flagged unsuccessful
        cluster_lock.assert_called_with(self.agent.context,
                                        cluster_id=self.agent.cluster_id,
                                        vcenter_id=self.agent.vcenter_id,
                                        success=False)
        self.assertTrue(log_exception.called)
        # retries imply at least one sleep between attempts
        self.assertTrue(time_sleep.called)
def test_enhanced_sg_provider_updated(self):
    """enhanced_sg_provider_updated logs and delegates to the SG agent."""
    with mock.patch.object(self.LOG, 'info') as info_log, \
            mock.patch.object(self.agent.sg_agent,
                              "sg_provider_updated") as sg_updated:
        self.agent.enhanced_sg_provider_updated(FAKE_CONTEXT,
                                                network_id=NETWORK_ID)
        self.assertTrue(info_log.called)
        sg_updated.assert_called_with(NETWORK_ID)
def test_device_create_hosted_vm_vlan_multiple_physnet(self):
    """device_create for a hosted VM with ports on two VLAN physnets.

    Each port must get its own local VLAN provisioned on the
    integration bridge, using the int_ofport that belongs to that
    port's physical network.
    """
    port1 = self._build_port(FAKE_PORT_1)
    port2 = self._build_port(FAKE_PORT_2)
    # second port lives on a different physical network / segment
    port2['physical_network'] = "physnet2"
    port2['segmentation_id'] = "2005"
    port2['network_id'] = "fake_net2"
    ports = [port1, port2]
    self._build_phys_brs(port1)
    self._build_phys_brs(port2)
    self.agent.phys_ofports = {}
    self.agent.phys_ofports[port1['physical_network']] = 4
    self.agent.phys_ofports[port2['physical_network']] = 5
    self.agent.vcenter_id = FAKE_VCENTER
    self.agent.cluster_id = FAKE_CLUSTER_1
    self.agent.cluster_moid = FAKE_CLUSTER_MOID
    self.agent.esx_hostname = FAKE_HOST_1
    self.agent.tenant_network_types = [p_const.TYPE_VLAN]
    self.agent.devices_up_list = []
    self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
    self.agent.net_mgr.initialize_driver()
    self.agent.int_br = mock.Mock()
    self.agent.vlan_manager.mapping = {}
    self.agent.patch_sec_ofport = 1
    # per-physnet patch ports on the integration bridge
    self.agent.int_ofports = {'physnet1': 2, 'physnet2': 3}
    with mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
                           ) as mock_add_devices_fn, \
            mock.patch.object(self.agent.sg_agent, 'ovsvapp_sg_update'
                              ), \
            mock.patch.object(self.agent.int_br, 'provision_local_vlan'
                              ) as mock_prov_local_vlan, \
            mock.patch.object(self.agent.sg_agent, 'expand_sg_rules',
                              return_value=FAKE_SG_RULES_MULTI_PORTS
                              ), \
            mock.patch.object(self.LOG, 'debug') as mock_logger_debug:
        self.agent.device_create(FAKE_CONTEXT,
                                 device=DEVICE,
                                 ports=ports,
                                 sg_rules=mock.MagicMock())
        self.assertTrue(mock_logger_debug.called)
        self.assertEqual([FAKE_PORT_1, FAKE_PORT_2],
                         self.agent.devices_up_list)
        mock_add_devices_fn.assert_called_with(ports)
        self.assertTrue(mock_prov_local_vlan.called)
        # one local-VLAN provisioning call per physical network
        mock_prov_local_vlan.assert_any_call(
            port1['network_type'],
            port1['lvid'],
            port1['segmentation_id'],
            self.agent.patch_sec_ofport,
            self.agent.int_ofports['physnet1'], None)
        mock_prov_local_vlan.assert_any_call(
            port2['network_type'],
            port2['lvid'],
            port2['segmentation_id'],
            self.agent.patch_sec_ofport,
            self.agent.int_ofports['physnet2'], None)
| 52.54364
| 96
| 0.594227
| 108,876
| 0.961981
| 0
| 0
| 10,667
| 0.094249
| 0
| 0
| 12,212
| 0.1079
|
0ba2dfd95ee79027d8c63a0c75d4bd279b8d3f02
| 30
|
py
|
Python
|
yolov3/utils/__init__.py
|
hysts/pytorch_yolov3
|
6d4c7a1e42d366894effac8ca52f7116f891b5ab
|
[
"MIT"
] | 13
|
2019-03-22T15:22:22.000Z
|
2021-09-30T21:15:37.000Z
|
yolov3/utils/__init__.py
|
hysts/pytorch_yolov3
|
6d4c7a1e42d366894effac8ca52f7116f891b5ab
|
[
"MIT"
] | null | null | null |
yolov3/utils/__init__.py
|
hysts/pytorch_yolov3
|
6d4c7a1e42d366894effac8ca52f7116f891b5ab
|
[
"MIT"
] | null | null | null |
from yolov3.utils import data
| 15
| 29
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0ba3c4d7d4d48cd32673696a0d4ce0dedcefcaca
| 21,354
|
py
|
Python
|
pootlestuff/watchables.py
|
pootle/pootles_utils
|
bb47103e71ccc4fa01269259b73ca1932184af84
|
[
"UPL-1.0"
] | null | null | null |
pootlestuff/watchables.py
|
pootle/pootles_utils
|
bb47103e71ccc4fa01269259b73ca1932184af84
|
[
"UPL-1.0"
] | null | null | null |
pootlestuff/watchables.py
|
pootle/pootles_utils
|
bb47103e71ccc4fa01269259b73ca1932184af84
|
[
"UPL-1.0"
] | null | null | null |
"""
This module provides classes that support observers, smart value handling and debug functions
All changes to values nominate an agent, and observers nominate the agent making changes they
are interested in.
It supersedes the pvars module
"""
import logging, sys, threading, pathlib, math, json
from enum import Enum, auto as enumauto, Flag
class loglvls(Enum):
    """Logging levels wrapped in an Enum so logged data is self-identifying."""
    VAST = logging.DEBUG - 1   # even chattier than DEBUG
    DEBUG = logging.DEBUG
    INFO = logging.INFO
    WARN = logging.WARN
    ERROR = logging.ERROR
    FATAL = logging.FATAL
    NONE = 0                   # logging disabled
class myagents(Flag):
    """Agents that may change watchable values (combinable bit flags)."""
    NONE = 0
    app = enumauto()
    user = enumauto()
class wflags(Flag):
    """Per-watchable state flags."""
    NONE = 0
    DISABLED = enumauto()
class watchable():
    """
    provides a 'smart' object that provides basic observer functionality around an object.

    Changes to the value can be policed, and updates have to provide an agent that is
    performing the update. Observers can then request to be notified when the value is changed
    by specific agents.
    """
    def __init__(self, value, app, flags=wflags.NONE, loglevel=loglvls.INFO):
        """
        creates a new watchable. Initialises the internal value and sets an empty observers list

        value   : the initial value for the object. Not validated!
        app     : the app instance for this. Used for logging and for validating agents
        flags   : wflags bitmask for this watchable
        loglevel: minimum loglvls member this object will forward to app.log
        """
        self._val=value
        self.app=app
        # observers is lazily created: None until the first addNotify,
        # thereafter a dict mapping agent -> list of callbacks
        self.observers=None
        self.oblock=threading.Lock()  # guards self.observers
        self.flags=flags
        self.loglevel=loglevel
        self.log(loglvls.DEBUG, 'watchable type %s setup with value %s' % (type(self).__name__, self._val))

    def setValue(self, value, agent):
        """
        Updates the value of a watchable or the loglevel.

        if not a loglevel, this validates and converts (if relevant) the requested value.
        If the value is valid and different from the current value, checks for and calls
        any observers interested in changes by the given agent.

        returns True if the stored value changed, else False
        """
        # passing a loglvls / wflags member updates meta state instead of the value
        if isinstance(value, loglvls):
            self.loglevel = value
            return False
        if isinstance(value, wflags):
            self.flags=value
            return False
        assert isinstance(agent, self.app.agentclass), 'unexpected value %s of type %s in setValue' % (value, type(value).__name__)
        newvalue=self.validValue(value, agent)
        if newvalue != self._val:
            self.notify(newvalue, agent)
            return True
        else:
            self.log(loglvls.DEBUG,'value unchanged (%s)' % self._val)
            return False

    def getValue(self):
        # raw accessor; subclasses may override to return a converted form
        return self._val

    def validValue(self, value, agent=None):
        """
        validates the given value and returns the canonical value which will be stored.

        Raise an exception if the value is invalid
        'Real' classes must implement this
        """
        raise NotImplementedError()

    def notify(self, newvalue, agent):
        # Store the new value and invoke the callbacks registered for this
        # agent. The lock is held only while the callback list is copied,
        # so the callbacks themselves run without holding oblock.
        if self.observers:
            clist=None
            with self.oblock:
                if agent in self.observers:
                    clist=self.observers[agent].copy()
            oldvalue=self._val
            self._val=newvalue
            if clist:
                for ob in clist:
                    ob(oldValue=oldvalue, newValue=newvalue, agent=agent, watched=self)
                self.log(loglvls.DEBUG,'value changed (%s)- observers called' % self._val)
        else:
            self._val=newvalue
            self.log(loglvls.DEBUG,'value changed (%s)- no observers' % self._val)

    def addNotify(self, callback, agent):
        """register callback to be invoked when this value is changed by agent"""
        assert callable(callback)
        assert isinstance(agent, self.app.agentclass)
        self.log(loglvls.DEBUG,'added watcher %s' % callback.__name__)
        with self.oblock:
            if self.observers is None:
                self.observers={agent:[callback]}
            elif agent in self.observers:
                self.observers[agent].append(callback)
            else:
                self.observers[agent]=[callback]

    def dropNotify(self, callback, agent):
        """remove a previously registered callback; raises if it is not present"""
        with self.oblock:
            aglist=self.observers[agent]
            ix = aglist.index(callback)
            aglist.pop(ix)

    def log(self, loglevel, *args, **kwargs):
        """
        request a logging operation. This does nothing if the given loglevel is < the loglevel set in the object
        """
        if loglevel.value >= self.loglevel.value:
            self.app.log(loglevel, *args, **kwargs)
class textWatch(watchable):
    """A watchable whose value is always stored as a str."""
    def validValue(self, value, agent):
        """Convert the requested value to str.

        value : anything str() accepts; None is rejected
        agent : who asked for the change (ignored here)
        returns the valid new value (always a str)
        raises any error str() can raise, or ValueError for None
        """
        if value is not None:
            return str(value)
        raise ValueError('None is not a valid textVar value')
class floatWatch(watchable):
    """
    A refinement of watchable that restricts the value to numbers - simple floating point.
    """
    def __init__(self, *, maxv=sys.float_info.max, minv=-sys.float_info.max, clamp=False, allowNaN=True, **kwargs):
        """
        Makes a float given min and max values. The value can be set clamped to prevent failures

        minv    : the lowest allowed value - use 0 to allow only positive numbers
        maxv    : the highest value allowed
        clamp   : if True all values that can float() are accepted for updating,
                  but are restricted to be between minv and maxv
        allowNaN: if True, NaN is accepted and stored unchanged
        """
        self.maxv=float(maxv)
        self.minv=float(minv)
        self.clamp=clamp==True
        self.allowNaN=allowNaN
        super().__init__(**kwargs)

    def validValue(self, value, agent):
        """
        value : anything float() accepts, between minv and maxv (any value if clamp)
        agent : who asked for the change (ignored here)
        returns the valid new value (always a float)
        raises ValueError if the provided value is invalid
        """
        av=float(value)
        if math.isnan(av):
            # Bug fix: previously a NaN with allowNaN=False and clamp=True
            # slipped through the clamp comparisons below (every comparison
            # with NaN is False) and was stored; NaN is now only accepted
            # when allowNaN is set.
            if self.allowNaN:
                return av
            raise ValueError('NaN is not a valid value for this watchable')
        if self.clamp:
            return self.minv if av < self.minv else self.maxv if av > self.maxv else av
        if self.minv <= av <= self.maxv:
            return av
        raise ValueError('value {} is outside range {} to {}'.format(value, self.minv, self.maxv))
class intWatch(watchable):
    """
    A refinement of watchable that restricts the field value to integers, optionally ranged.
    """
    def __init__(self, maxv=None, minv=None, clamp=False, **kwargs):
        """
        creates an integer var

        maxv : None if unbounded maximum else anything that int() accepts
        minv : None if unbounded minimum else anything that int() accepts
        clamp: if True the value is clamped to minv / maxv (either may be None)
        """
        self.maxv = maxv if maxv is None else int(maxv)
        self.minv = minv if minv is None else int(minv)
        self.clamp = clamp == True
        super().__init__(**kwargs)

    def validValue(self, value, agent):
        """
        value : anything int() accepts, between minv and maxv (any value if clamp)
        agent : who asked for the change (ignored here)
        returns the valid new value (always an int)
        raises ValueError if the provided value is out of range
        """
        av = int(value)
        if self.clamp:
            if self.minv is not None and av < self.minv:
                return self.minv
            if self.maxv is not None and av > self.maxv:
                return self.maxv
            return av
        below = self.minv is not None and av < self.minv
        above = self.maxv is not None and av > self.maxv
        if below or above:
            raise ValueError('value {} is outside range {} to {} for watchable'.format(value, self.minv, self.maxv))
        return av

    def increment(self, agent, count=1):
        """add count to the current value (via setValue) and return the new value"""
        step = int(count)
        updated = self.getValue() + step
        self.setValue(updated, agent)
        return updated
class enumWatch(watchable):
    """
    a watchable that can only take a specific set of values, and can wrap / clamp values.

    It also allows values to be cycled through
    """
    def __init__(self, vlist, wrap=True, clamp=False, **kwargs):
        """
        vlist: the list of allowed values
        wrap : if True, increment past either end wraps to the other end
        clamp: if wrap is False and this is True, increment sticks at the ends
        """
        self.wrap=wrap == True
        self.clamp=clamp == True
        self.vlist=vlist
        super().__init__(**kwargs)

    def validValue(self, value, agent):
        """accept only members of vlist"""
        if not value in self.vlist:
            raise ValueError('value (%s) not valid' % value)
        return value

    def getIndex(self):
        """return the index of the current value within vlist"""
        return self.vlist.index(self._val)

    def increment(self, agent, inc=1):
        """
        move inc entries through vlist, wrapping / clamping / raising per setup.

        returns setValue's result (True if the stored value changed)
        """
        newi=self.getIndex()+inc
        if 0 <= newi < len(self.vlist):
            return self.setValue(self.vlist[newi], agent)
        elif self.wrap:
            if newi < 0:
                useval = self.vlist[-1]
            else:
                useval = self.vlist[0]
        elif self.clamp:
            if newi < 0:
                useval = self.vlist[0]
            else:
                useval = self.vlist[-1]
        else:
            raise ValueError('operation exceeds list boundary')
        # Bug fix: the wrap / clamp branches previously fell through and
        # returned None; return setValue's result like the in-range path.
        return self.setValue(useval, agent)

    def setIndex(self, ival, agent):
        """set the value by its index in vlist"""
        if 0 <= ival < len(self.vlist):
            return self.setValue(self.vlist[ival], agent)
        else:
            raise ValueError('index out of range')
class btnWatch(watchable):
    """A watchable for simple click buttons: every setValue notifies observers."""
    def setValue(self, value, agent):
        """Notify observers unconditionally (the stored value never changes).

        loglvls / wflags members still update meta state, as in the base class.
        """
        if isinstance(value, (loglvls, wflags)):
            if isinstance(value, loglvls):
                self.loglevel = value
            else:
                self.flags = value
            return False
        assert isinstance(agent, self.app.agentclass)
        self.notify(self._val, agent)
        return True
class folderWatch(watchable):
    """
    Internally, the value is a pathlib path to a folder (subfolders are created automatically).
    """
    def __init__(self, value, **kwargs):
        # validate/create the folder before the base class stores the value
        super().__init__(value=self.validValue(value, None), **kwargs)

    def validValue(self, value, agent):
        """expand, check (and if necessary create) the folder; return its Path"""
        tp=pathlib.Path(value).expanduser()
        if tp.exists():
            if tp.is_dir():
                return tp
            else:
                raise ValueError('%s is not a folder' % str(tp))
        else:
            tp.mkdir(parents=True, exist_ok=True)
            return tp

    def getValue(self):
        # external representation is a plain string
        return str(self._val)

    def getFolder(self):
        """return the folder as a pathlib.Path"""
        return self._val

    def currentfilenames(self, includes=None, excludes=None):
        """
        returns names of files currently in this folder

        includes: if not None, only names ending with one of these suffixes are kept
        excludes: if not None, names ending with any of these suffixes are dropped

        Bug fixes: the original iterated getValue() (a str, which has no
        iterdir) and its excludes test kept a file whenever ANY exclude
        suffix failed to match, rather than dropping files that match any.
        """
        return [pp.name for pp in self.getFolder().iterdir()
                if pp.is_file()
                and (includes is None or any(pp.name.endswith(x) for x in includes))
                and (excludes is None or not any(pp.name.endswith(x) for x in excludes))]
class watchablegroup(object):
    """Builds a set of watchable attributes from a declarative definition list."""
    def __init__(self, value, wabledefs, loglevel=None):
        """
        value    : dict of preferred values for watchables in this activity
                   (e.g. from saved settings file), or None to use defaults
        wabledefs: a list of 4- or 5-tuples that define each watchable:
            0: name of the watchable (becomes an attribute of self)
            1: class of the watchable
            2: default value of the watchable
            3: True if the watchable is returned by fetchsettings (as a dict member)
            4: (optional) kwargs to use when setting up the watchable
        loglevel : loglvls member, defaults to loglvls.INFO
        """
        self.perslist=[]
        self.loglevel=loglvls.INFO if loglevel is None else loglevel
        for awable in wabledefs:
            ch=self.makeChild(defn=awable, value=awable[2] if value is None else value.get(awable[0], awable[2]))
            if ch is None:
                raise ValueError('child construction failed - see log')
            setattr(self, awable[0], ch)
            if awable[3]:
                self.perslist.append(awable[0])

    def makeChild(self, value, defn):
        """
        returns a new object with this object as the app, using a definition tuple

        value: value for the new child (None means use the definition default)
        defn : one entry of wabledefs (see __init__); returns None on failure
        """
        deflen=len(defn)
        if deflen==4:
            params={}
        elif deflen==5:
            params=defn[4]
        else:
            raise ValueError('there are not 4 or 5 entries in this definition for class %s: %s' % (type(self).__name__, defn))
        try:
            vv=defn[2] if value is None else value
            return defn[1](app=self, value=vv, **params)
        except Exception:
            print('Exception in makeChild for class %s' % defn[1], ('using defn value (%s)' % defn[2]) if value is None else str(vv))
            print('extra keyword args', params)
            print('input values:', value)
            # NOTE(review): relies on a log method supplied by subclasses -
            # watchablegroup itself does not define one
            self.log(loglvls.ERROR,'class %s exception making variable %s' % (type(self).__name__, defn[0]), exc_info=True, stack_info=True)
            return None

    def fetchsettings(self):
        """return a dict of current values for all persistent watchables"""
        return {kv: getattr(self,kv).getValue() for kv in self.perslist}

    def applysettings(self, settings, agent):
        """
        apply values to the persistent watchables

        settings: a dict (or an iterable of (name, value) pairs)
        agent   : the agent to attribute the changes to

        Bug fix: the original iterated a dict directly ('for k, v in
        settings'), which yields only the keys and fails to unpack -
        fetchsettings returns a dict, so the natural round-trip crashed.
        """
        items = settings.items() if hasattr(settings, 'items') else settings
        for k, v in items:
            if k in self.perslist:
                getattr(self, k).setValue(v, agent)
class watchablesmart(watchablegroup):
    """
    This class can act as a complete app, or as a part of an app.

    For a complete app it sets up logging for the app; as a component it
    passes logging calls up to the app.

    value: for the top level (app is None): a str naming a json file that
           yields a settings dict, or a dict of settings; lower levels
           always expect a dict
    app  : None when this node is the app, otherwise the app object (which
           provides logging and save / restore settings)
    """
    def __init__(self, value, app=None, loglevel=loglvls.INFO, **kwargs):
        if app is None:  # this is the real (top level) app
            if loglevel is None or loglevel is loglvls.NONE:
                # NOTE(review): on this path agentclass is never set -
                # preserved from the original; confirm callers never use
                # a no-logging app with agent validation
                self.logger=None
                print('%s no logging' % type(self).__name__)
            else:
                self.agentclass=myagents
                self.logger=logging.getLogger(__loader__.name+'.'+type(self).__name__)
                chandler=logging.StreamHandler()
                chandler.setFormatter(logging.Formatter(fmt= '%(asctime)s %(levelname)7s (%(process)d)%(threadName)12s %(module)s.%(funcName)s: %(message)s', datefmt= "%M:%S"))
                self.logger.addHandler(chandler)
                self.logger.setLevel(loglevel.value)
                self.log(loglvls.INFO,'logging level is %s' % loglevel)
            self.startsettings, lmsg, self.settingsfrom = loadsettings(value)
            self.log(loglvls.INFO, lmsg)
        else:
            self.app=app
            self.agentclass=app.agentclass
            self.startsettings=value
        super().__init__(value=self.startsettings, loglevel=loglevel, **kwargs)

    def log(self, level, msg, *args, **kwargs):
        """log via the app when we are a component, else via our own logger"""
        if hasattr(self,'app'):
            if self.loglevel.value <= level.value:
                self.app.log(level, msg, *args, **kwargs)
        else:
            if self.logger:
                self.logger.log(level.value, msg, *args, **kwargs)
            elif level.value >= loglvls.WARN.value:
                # Bug fix: was 'level.value >= loglvls.WARN' which raises
                # TypeError (int vs Enum) instead of printing the message
                print(msg)

    def savesettings(self, oldValue, newValue, agent, watched):
        """
        fetch the current persistent settings and write them as json.

        The signature matches a watchable observer callback, so this can
        be hooked directly to (for example) a save button; the arguments
        are otherwise unused.
        """
        if hasattr(self, 'app'):
            raise ValueError('only the app level can save settings')
        try:
            setts = self.fetchsettings()
        except Exception:
            self.log(loglvls.WARN,'fetchsettings failed', exc_info=True, stack_info=True)
            return
        try:
            settstr=json.dumps(setts, indent=4)
        except Exception:
            self.log(loglvls.WARN,'json conversion of these settings failed', exc_info=True, stack_info=True)
            self.log(loglvls.WARN,str(setts))
            return
        try:
            with self.settingsfrom.open('w') as sfo:
                sfo.write(settstr)
        except Exception:
            self.log(loglvls.WARN,'save settings failed to write file', exc_info=True, stack_info=True)
            return
        self.log(loglvls.INFO,'settings saved to file %s' % str(self.settingsfrom))
class watchablepigpio(watchablesmart):
    """
    a root class that adds pigpio setup to watchablesmart
    """
    def __init__(self, app=None, pigp=None, **kwargs):
        """
        Use the app's existing pigpio.pi instance if it has one, else the
        given pigp, else create (and own) a fresh one.
        """
        if app is not None and hasattr(app, 'pio'):
            # borrow the app's connection; we must not stop it in close()
            self.pio = app.pio
            self.mypio = False
        elif pigp is None:
            import pigpio
            newpio = pigpio.pi()
            if not newpio.connected:
                raise ValueError('pigpio failed to initialise')
            self.pio = newpio
            self.mypio = True
        else:
            self.pio = pigp
            self.mypio = False
            if not self.pio.connected:
                raise ValueError('pigpio is not connected')
        super().__init__(app=app, **kwargs)

    def close(self):
        """stop pigpio if we created it ourselves, then drop the reference"""
        if self.mypio:
            self.pio.stop()
        # NOTE(review): original indentation was mangled; reset is taken as
        # unconditional so a borrowed pio is also released - confirm intent
        self.mypio = False
        self.pio = None
class watchableAct(watchablegroup):
    """
    An app can have a number of optional activities (with their own threads,
    watched vars etc.). This class provides the common plumbing:
      - watchable variables built from a definition list with passed-in values
        (for example saved settings), falling back to defaults
      - automatic retrieval of a subset of values (e.g. to save a config)
      - logging forwarded to the parent app
    """
    def __init__(self, app, **kwargs):
        self.app = app
        self.agentclass = app.agentclass
        super().__init__(**kwargs)

    def log(self, loglevel, *args, **kwargs):
        """
        request a logging operation; dropped when below this object's loglevel
        """
        if loglevel.value >= self.loglevel.value:
            self.app.log(loglevel, *args, **kwargs)
class watchableApp(object):
    """Minimal app object supplying the agent class and logging for watchables."""
    def __init__(self, agentclass=myagents, loglevel=None):
        self.agentclass = agentclass
        if loglevel is None or loglevel is loglvls.NONE:
            self.logger = None
            print('%s no logging' % type(self).__name__)
            return
        self.logger = logging.getLogger(__loader__.name + '.' + type(self).__name__)
        chandler = logging.StreamHandler()
        chandler.setFormatter(logging.Formatter(
            fmt='%(asctime)s %(levelname)7s (%(process)d)%(threadName)12s %(module)s.%(funcName)s: %(message)s',
            datefmt="%M:%S"))
        self.logger.addHandler(chandler)
        self.logger.setLevel(loglevel.value)

    def log(self, level, msg, *args, **kwargs):
        """log the message; silently dropped when logging is disabled"""
        if self.logger:
            self.logger.log(level.value, msg, *args, **kwargs)
def loadsettings(value):
    """Resolve initial settings for an app.

    value: a filename (str) of a json file containing a settings dict,
           an existing mapping, or None.
    returns a 3-tuple:
        - the settings dict (empty on any failure),
        - a human-readable message describing what happened,
        - the pathlib.Path the settings came from (None if not from a file)
    """
    if isinstance(value, str):
        spath = pathlib.Path(value).expanduser()
        if spath.is_file():
            try:
                with spath.open('r') as spo:
                    startsettings = json.load(spo)
                return startsettings, 'app settings loaded from file %s' % spath, spath
            except Exception:
                return {}, 'failed to load settings from %s - default values used' % spath, spath
        return {}, 'app settings file %s not found - default values used' % str(spath), spath
    if hasattr(value, 'keys'):
        return value, 'using settings from passed object', None
    if value is None:
        return {}, 'settings not specified, default values used', None
    # Bug fix: the original referenced an undefined name 'values' here,
    # raising NameError instead of returning the fallback message (whose
    # 'setings' typo is also corrected).
    return {}, 'settings not processed from passed %s' % type(value).__name__, None
| 38.475676
| 177
| 0.594924
| 20,093
| 0.940948
| 0
| 0
| 0
| 0
| 0
| 0
| 8,184
| 0.383254
|
0ba40eb83c69821a416e50be4bddb8886aa2cb30
| 578
|
py
|
Python
|
tests/test_codecs.py
|
reece/et
|
41977444a95ac8b8af7a73706f1e18634914d37f
|
[
"MIT"
] | null | null | null |
tests/test_codecs.py
|
reece/et
|
41977444a95ac8b8af7a73706f1e18634914d37f
|
[
"MIT"
] | null | null | null |
tests/test_codecs.py
|
reece/et
|
41977444a95ac8b8af7a73706f1e18634914d37f
|
[
"MIT"
] | null | null | null |
import et.codecs
# Round-trip fixtures: for each plain value, the expected encoded bytes
# keyed by codec format number (presumably 1 = plain json and 2 =
# zlib-compressed json, given the x\x9c header - verify against et.codecs).
tests = [
    {
        "data": 0,
        "e_data": {
            1: b'\x00\x010',
            2: b'\x00\x02x\x9c3\x00\x00\x001\x001'
        }
    },
    {
        "data": {},
        "e_data": {
            1: b'\x00\x01{}',
            2: b'\x00\x02x\x9c\xab\xae\x05\x00\x01u\x00\xf9'
        }
    },
]
def test_all():
    """Check encode/decode round-trips against the recorded fixtures."""
    for case in tests:
        for fmt in sorted(case["e_data"]):
            encoded = case["e_data"][fmt]
            assert encoded == et.codecs.encode(case["data"], fmt)
            assert (case["data"], fmt) == et.codecs.decode(encoded)
| 19.931034
| 79
| 0.448097
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 169
| 0.292388
|
0ba563c5f1a8e8092fcd4bece03a89610c759cd4
| 2,801
|
py
|
Python
|
tests/test_sa.py
|
mariushelf/sa2django
|
936b0a70b0ccc8faf3ca26ff241b0b6dac13f204
|
[
"MIT"
] | null | null | null |
tests/test_sa.py
|
mariushelf/sa2django
|
936b0a70b0ccc8faf3ca26ff241b0b6dac13f204
|
[
"MIT"
] | null | null | null |
tests/test_sa.py
|
mariushelf/sa2django
|
936b0a70b0ccc8faf3ca26ff241b0b6dac13f204
|
[
"MIT"
] | null | null | null |
import sqlite3
import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
import tests.testsite.testapp.models as dm
from tests.sa_models import Base, Car, Child, Dog, Parent
@pytest.fixture(scope="session")
def engine():
    """Session-wide SQLAlchemy engine on a shared in-memory sqlite DB."""
    print("NEW ENGINE")

    def _connect():
        # shared cache so Django (same process) sees the same database
        return sqlite3.connect("file:memorydb?mode=memory&cache=shared", uri=True)

    eng = create_engine("sqlite://", creator=_connect)
    yield eng
    eng.dispose()
@pytest.fixture(scope="session")
def session(engine):
    """Create all tables once and yield a session bound to the engine."""
    print("CREATE TABLES")
    Base.metadata.create_all(engine)
    sa_session = sessionmaker(bind=engine)()
    yield sa_session
    sa_session.close()
@pytest.fixture(scope="session")
def mock_data_session(session):
    """Populate the database with a small family / car / dog fixture set."""
    peter = Parent(name="Peter")
    hugo = Parent(name="Hugo")
    hans = Child(name="Hans", age=3, parent=peter, boolfield=True)
    franz = Child(name="Franz", age=5, parent=peter, boolfield=False)
    rex = Dog(name="Rex")
    rex.owners = [franz]
    fast_car = Car(horsepower=560)
    slow_car = Car(horsepower=32)
    peter.cars = [fast_car, slow_car]
    # cars are reachable through peter, so they persist via the relationship
    session.add_all([peter, hugo, hans, franz, rex])
    session.commit()
    return session
def test_data(mock_data_session):
    """Sanity-check the fixture rows through SQLAlchemy."""
    parents = mock_data_session.query(Parent).all()
    children = mock_data_session.query(Child).all()
    assert len(parents) == 2
    assert len(children) == 2
@pytest.mark.django_db
def test_django_orm(mock_data_session):
    """The same rows are visible through the generated Django models."""
    names = [p.name for p in dm.Parent.objects.order_by("pk")]
    assert len(names) == 2
    assert names == ["Peter", "Hugo"]
def test_nullable(mock_data_session):
    """Nullability from the SQLAlchemy columns carries over to Django."""
    assert not dm.Child._meta.get_field("boolfield").null
    assert dm.Child._meta.get_field("citextfield").null
@pytest.mark.django_db
def test_fk(mock_data_session):
    """Foreign key and its reverse accessor work on the Django side."""
    peter = dm.Parent.objects.get(name="Peter")
    hans = dm.Child.objects.get(name="Hans")
    assert hans.parent_id == peter.id
    assert hans.parent == peter
    # reverse relation sees both of Peter's children
    children = peter.children.all()
    assert len(children) == 2
    assert hans in children
@pytest.mark.django_db
def test_pk(mock_data_session):
    """Primary key columns keep their original names in the Django models."""
    assert dm.Child._meta.pk.name == "key"
    assert dm.Parent._meta.pk.name == "id"
@pytest.mark.django_db
def test_many_to_many(mock_data_session):
    """m2m: Peter drives both cars and each car's drivers include Peter."""
    peter = dm.Parent.objects.get(name="Peter")
    assert len(peter.cars.all()) == 2
    all_cars = dm.Car.objects.all()
    assert all_cars[0].drivers.all()[0].name == "Peter"
    assert all_cars[1].drivers.all()[0].name == "Peter"
@pytest.mark.django_db
def test_relation_without_fk(mock_data_session):
    """A relation declared without an explicit FK column still maps through."""
    rex = dm.Dog.objects.get(name="Rex")
    franz = dm.Child.objects.get(name="Franz")
    assert franz.dog == rex
    assert list(rex.owners.all()) == [franz]
| 27.194175
| 71
| 0.687612
| 0
| 0
| 409
| 0.146019
| 2,248
| 0.802571
| 0
| 0
| 253
| 0.090325
|
0ba58112cd9b83adb66bbb157c35557326dccf99
| 10,409
|
py
|
Python
|
src/github4/session.py
|
staticdev/github3.py
|
b9af598dcf1771c083dcc512a2aa8e5008bf4ea8
|
[
"MIT"
] | null | null | null |
src/github4/session.py
|
staticdev/github3.py
|
b9af598dcf1771c083dcc512a2aa8e5008bf4ea8
|
[
"MIT"
] | 32
|
2021-02-17T19:46:21.000Z
|
2021-05-12T05:56:03.000Z
|
src/github4/session.py
|
staticdev/github3.py
|
b9af598dcf1771c083dcc512a2aa8e5008bf4ea8
|
[
"MIT"
] | null | null | null |
"""Module containing session and auth logic."""
import collections.abc as abc_collections
import datetime
from contextlib import contextmanager
from logging import getLogger
import dateutil.parser
import requests
from . import __version__
from . import exceptions as exc
# cache of joined URLs keyed by their path-segment tuples (see
# GitHubSession.build_url)
__url_cache__ = {}
# package-level logger for this module
__logs__ = getLogger(__package__)
def requires_2fa(response):
    """Determine whether a response requires us to prompt the user for 2FA."""
    otp_header = response.headers.get("X-GitHub-OTP", "")
    return response.status_code == 401 and "required" in otp_header
class BasicAuth(requests.auth.HTTPBasicAuth):
    """Sub-class requests's class so we have a nice repr."""

    def __repr__(self):
        """Show only the username, never the password."""
        return f"basic {self.username}"
class TokenAuth(requests.auth.AuthBase):
    """Auth class that handles simple tokens."""

    header_format_str = "token {}"

    def __init__(self, token):
        """Store our token."""
        self.token = token

    def __repr__(self):
        """Show only a short token prefix."""
        return "token {}...".format(self.token[:4])

    def __eq__(self, other):
        """Two token auths are equal when their tokens match."""
        return self.token == getattr(other, "token", None)

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not self.__eq__(other)

    def __call__(self, request):
        """Attach the formatted Authorization header to the request."""
        request.headers["Authorization"] = self.header_format_str.format(self.token)
        return request
class GitHubSession(requests.Session):
    """Our slightly specialized Session object.

    Normally this is created automatically by
    :class:`~github4.github.GitHub`. To use alternate values for
    network timeouts, this class can be instantiated directly and
    passed to the GitHub object. For example:

    .. code-block:: python

        gh = github.GitHub(session=session.GitHubSession(
            default_connect_timeout=T, default_read_timeout=N))

    :param default_connect_timeout:
        the number of seconds to wait when establishing a connection to
        GitHub
    :type default_connect_timeout:
        float
    :param default_read_timeout:
        the number of seconds to wait for a response from GitHub
    :type default_read_timeout:
        float
    """

    auth = None
    # attributes to include in the session's copied/pickled state
    __attrs__ = requests.Session.__attrs__ + [
        "base_url",
        "two_factor_auth_cb",
        "default_connect_timeout",
        "default_read_timeout",
        "request_counter",
    ]

    def __init__(self, default_connect_timeout=4, default_read_timeout=10):
        """Slightly modify how we initialize our session."""
        super(GitHubSession, self).__init__()
        self.default_connect_timeout = default_connect_timeout
        self.default_read_timeout = default_read_timeout
        self.headers.update(
            {
                # Only accept JSON responses
                "Accept": "application/vnd.github.v3.full+json",
                # Only accept UTF-8 encoded data
                "Accept-Charset": "utf-8",
                # Always sending JSON
                "Content-Type": "application/json",
                # Set our own custom User-Agent string
                "User-Agent": f"github4.py/{__version__}",
            }
        )
        self.base_url = "https://api.github.com"
        self.two_factor_auth_cb = None
        # incremented for every request made through this session
        self.request_counter = 0

    @property
    def timeout(self):
        """Return the timeout tuple as expected by Requests"""
        return (self.default_connect_timeout, self.default_read_timeout)

    def basic_auth(self, username, password):
        """Set the Basic Auth credentials on this Session.

        :param str username: Your GitHub username
        :param str password: Your GitHub password
        """
        # silently ignore incomplete credentials
        if not (username and password):
            return

        self.auth = BasicAuth(username, password)

    def build_url(self, *args, **kwargs):
        """Build a new API url from scratch.

        Positional args become path segments appended to base_url (or
        kwargs['base_url'] when given); results are memoized in the
        module-level __url_cache__.
        """
        parts = [kwargs.get("base_url") or self.base_url]
        parts.extend(args)
        parts = [str(p) for p in parts]
        key = tuple(parts)
        __logs__.info("Building a url from %s", key)
        if key not in __url_cache__:
            __logs__.info("Missed the cache building the url")
            __url_cache__[key] = "/".join(parts)
        return __url_cache__[key]

    def handle_two_factor_auth(self, args, kwargs):
        """Handler for when the user has 2FA turned on.

        Re-issues the original request with the one-time password
        obtained from the registered callback in the X-GitHub-OTP header.
        """
        headers = kwargs.pop("headers", {})
        headers.update({"X-GitHub-OTP": str(self.two_factor_auth_cb())})
        kwargs.update(headers=headers)
        return super(GitHubSession, self).request(*args, **kwargs)

    def has_auth(self):
        """Check for whether or not the user has authentication configured."""
        return self.auth or self.headers.get("Authorization")

    def oauth2_auth(self, client_id, client_secret):
        """Use OAuth2 for authentication.

        It is suggested you install requests-oauthlib to use this.

        :param str client_id: Client ID retrieved from GitHub
        :param str client_secret: Client secret retrieved from GitHub
        """
        raise NotImplementedError("These features are not implemented yet")

    def request(self, *args, **kwargs):
        """Make a request, count it, and handle 2FA if necessary."""
        kwargs.setdefault("timeout", self.timeout)
        response = super(GitHubSession, self).request(*args, **kwargs)
        self.request_counter += 1
        if requires_2fa(response) and self.two_factor_auth_cb:
            # No need to flatten and re-collect the args in
            # handle_two_factor_auth
            new_response = self.handle_two_factor_auth(args, kwargs)
            new_response.history.append(response)
            response = new_response
        return response

    def retrieve_client_credentials(self):
        """Return the client credentials.

        :returns: tuple(client_id, client_secret)
        """
        client_id = self.params.get("client_id")
        client_secret = self.params.get("client_secret")
        return (client_id, client_secret)

    def two_factor_auth_callback(self, callback):
        """Register our 2FA callback specified by the user."""
        if not callback:
            return

        if not isinstance(callback, abc_collections.Callable):
            raise ValueError("Your callback should be callable")

        self.two_factor_auth_cb = callback

    def token_auth(self, token):
        """Use an application token for authentication.

        :param str token: Application token retrieved from GitHub's
            /authorizations endpoint
        """
        if not token:
            return

        self.auth = TokenAuth(token)

    def app_bearer_token_auth(self, headers, expire_in):
        """Authenticate as an App to be able to view its metadata."""
        if not headers:
            return

        self.auth = AppBearerTokenAuth(headers, expire_in)

    def app_installation_token_auth(self, json):
        """Use an access token generated by an App's installation."""
        if not json:
            return

        self.auth = AppInstallationTokenAuth(json["token"], json["expires_at"])

    @contextmanager
    def temporary_basic_auth(self, *auth):
        """Allow us to temporarily swap out basic auth credentials."""
        old_basic_auth = self.auth
        old_token_auth = self.headers.get("Authorization")

        self.basic_auth(*auth)
        yield

        # restore whichever auth was active before
        self.auth = old_basic_auth
        if old_token_auth:
            self.headers["Authorization"] = old_token_auth

    @contextmanager
    def no_auth(self):
        """Unset authentication temporarily as a context manager."""
        old_basic_auth, self.auth = self.auth, None
        old_token_auth = self.headers.pop("Authorization", None)

        yield

        self.auth = old_basic_auth
        if old_token_auth:
            self.headers["Authorization"] = old_token_auth
def _utcnow():
    """Return the current time as a timezone-aware UTC datetime.

    Uses the stdlib ``datetime.timezone.utc`` instead of ``dateutil.tz.UTC``;
    aware-datetime comparisons elsewhere in this module are unaffected.
    """
    return datetime.datetime.now(datetime.timezone.utc)
class AppInstallationTokenAuth(TokenAuth):
    """Use token authentication but throw an exception on expiration."""
    def __init__(self, token, expires_at):
        """Set-up our authentication handler."""
        super(AppInstallationTokenAuth, self).__init__(token)
        # Keep the raw API string for repr/error text and the parsed,
        # timezone-aware datetime for comparisons in ``expired``.
        self.expires_at_str = expires_at
        self.expires_at = dateutil.parser.parse(expires_at)
    def __repr__(self):
        """Return a nice view of the token in use."""
        # Only the first four characters of the token are exposed.
        return "app installation token {}... expiring at {}".format(
            self.token[:4], self.expires_at_str
        )
    @property
    def expired(self):
        """Indicate whether our token is expired or not."""
        now = _utcnow()
        return now > self.expires_at
    def __call__(self, request):
        """Add the authorization header and format it."""
        # Fail loudly instead of sending a request doomed to 401.
        if self.expired:
            raise exc.AppInstallationTokenExpired(
                "Your app installation token expired at {}".format(self.expires_at_str)
            )
        return super(AppInstallationTokenAuth, self).__call__(request)
class AppBearerTokenAuth(TokenAuth):
    """Use JWT authentication but throw an exception on expiration."""
    # Bearer scheme (instead of the parent's token scheme).
    header_format_str = "Bearer {}"
    def __init__(self, token, expire_in):
        """Set-up our authentication handler."""
        super(AppBearerTokenAuth, self).__init__(token)
        # ``expire_in`` is a lifetime in seconds; convert to an absolute
        # aware-UTC deadline once, at construction time.
        expire_in = datetime.timedelta(seconds=expire_in)
        self.expires_at = _utcnow() + expire_in
    def __repr__(self):
        """Return a helpful view of the token."""
        return "app bearer token {} expiring at {}".format(
            self.token[:4], str(self.expires_at)
        )
    @property
    def expired(self):
        """Indicate whether our token is expired or not."""
        now = _utcnow()
        return now > self.expires_at
    def __call__(self, request):
        """Add the authorization header and format it."""
        # Fail loudly instead of sending a request doomed to 401.
        if self.expired:
            raise exc.AppTokenExpired(
                "Your app token expired at {}".format(str(self.expires_at))
            )
        return super(AppBearerTokenAuth, self).__call__(request)
| 33.149682
| 87
| 0.639639
| 9,695
| 0.931406
| 712
| 0.068402
| 1,226
| 0.117783
| 0
| 0
| 4,006
| 0.384859
|
0ba6ccc9869c36c54441983043be28e4255463c3
| 3,046
|
py
|
Python
|
models/ffn_ace.py
|
MilesQLi/Theano-Lights
|
59864f4a1b089c04ff0403a6036ee052078fcd7d
|
[
"MIT"
] | 313
|
2015-03-23T15:19:58.000Z
|
2021-05-17T15:40:09.000Z
|
models/ffn_ace.py
|
MilesQLi/Theano-Lights
|
59864f4a1b089c04ff0403a6036ee052078fcd7d
|
[
"MIT"
] | 2
|
2015-08-31T06:35:31.000Z
|
2016-04-04T11:55:43.000Z
|
models/ffn_ace.py
|
Ivaylo-Popov/Theano-Lights
|
3c9de807e42e3875b1e3f4c1e8d02ad1242ddc94
|
[
"MIT"
] | 68
|
2015-05-16T03:26:17.000Z
|
2018-08-19T08:40:18.000Z
|
import theano
import theano.tensor as T
from theano.sandbox.rng_mrg import MRG_RandomStreams
from theano.tensor.nnet.conv import conv2d
from theano.tensor.signal.downsample import max_pool_2d
from theano.tensor.shared_randomstreams import RandomStreams
import numpy as np
from toolbox import *
from modelbase import *
import itertools
class FFN_ace(ModelSLBase):
    """
    Auto-classifier-encoder (Georgiev, 2015)

    Four-hidden-layer feed-forward classifier whose training cost adds a
    "dual reconstruction" term computed from the first hidden layer.
    """
    def save(self):
        # NOTE(review): backslash path separator is Windows-only; consider
        # os.path.join('savedmodels', '') — TODO confirm target platform.
        if not os.path.exists('savedmodels\\'):
            os.makedirs('savedmodels\\')
        self.params.save(self.filename)
    def __init__(self, data, hp):
        super(FFN_ace, self).__init__(self.__class__.__name__, data, hp)
        # Reference hyperparameters used by the original experiments:
        # batch_size: 10000; learning_rate = 0.0015; lr_halflife = 200, 500
        self.epsilon = 0.0001
        self.params = Parameters()
        self.shared_vars = Parameters()
        n_x = self.data['n_x']
        n_y = self.data['n_y']
        # Hidden layer widths, input -> output.
        n_h1 = 1200
        n_h2 = 1000
        n_h3 = 800
        n_h4 = 800
        scale = hp.init_scale
        if hp.load_model and os.path.isfile(self.filename):
            self.params.load(self.filename)
        else:
            # Fresh weight/bias shared variables, registered on self.params.
            with self.params:
                w_h = shared_normal((n_x, n_h1), scale=scale)
                b_h = shared_zeros((n_h1,))
                w_h2 = shared_normal((n_h1, n_h2), scale=scale)
                b_h2 = shared_zeros((n_h2,))
                w_h3 = shared_normal((n_h2, n_h3), scale=scale)
                b_h3 = shared_zeros((n_h3,))
                w_h4 = shared_normal((n_h3, n_h4), scale=scale)
                b_h4 = shared_zeros((n_h4,))
                w_o = shared_normal((n_h4, n_y), scale=scale)
        def batch_norm(h):
            # Normalize each unit over the batch axis (no learned gain/bias).
            m = T.mean(h, axis=0, keepdims=True)
            std = T.sqrt(T.var(h, axis=0, keepdims=True) + self.epsilon)
            h = (h - m) / std
            return h
        def model(X, params, p_drop_input, p_drop_hidden):
            # Additive input noise during training (0 at evaluation below).
            X_noise = X + gaussian(X.shape, p_drop_input)
            h = batch_norm(dropout(rectify(T.dot(X_noise, params.w_h) + params.b_h), p_drop_hidden))
            # Dual reconstruction error
            phx = T.nnet.sigmoid(T.dot(h, T.dot(h.T, X_noise)) / self.hp.batch_size)
            log_phx = T.nnet.binary_crossentropy(phx, X_noise).sum()
            h2 = dropout(rectify(T.dot(h, params.w_h2) + params.b_h2), p_drop_hidden)
            h3 = batch_norm(dropout(rectify(T.dot(h2, params.w_h3) + params.b_h3), p_drop_hidden))
            h4 = dropout(rectify(T.dot(h3, params.w_h4) + params.b_h4), p_drop_hidden)
            py_x = softmax(T.dot(h4, params.w_o))
            return [py_x, log_phx]
        # Training graph: input noise 0.2, hidden dropout 0.5.
        noise_py_x, cost_recon = model(self.X, self.params, 0.2, 0.5)
        # Cross-entropy classification cost plus the reconstruction term.
        cost_y2 = -T.sum(self.Y * T.log(noise_py_x))
        cost = cost_y2 + cost_recon
        # Evaluation graph: no noise, no dropout.
        pyx, _ = model(self.X, self.params, 0., 0.)
        map_pyx = T.argmax(pyx, axis=1)
        # Count of misclassified examples in the batch.
        error_map_pyx = T.sum(T.neq(map_pyx, T.argmax(self.Y, axis=1)))
        self.compile(cost, error_map_pyx)
| 34.613636
| 97
| 0.591924
| 2,705
| 0.88805
| 0
| 0
| 0
| 0
| 0
| 0
| 190
| 0.062377
|
0ba8551d5076fefbf31d48a58d0338bdc2763c0b
| 181
|
py
|
Python
|
src/misc/helpers.py
|
dnmarkon/kaggle_elo_merchant
|
92ac552d72234455455c95f53e6091017f048504
|
[
"MIT"
] | null | null | null |
src/misc/helpers.py
|
dnmarkon/kaggle_elo_merchant
|
92ac552d72234455455c95f53e6091017f048504
|
[
"MIT"
] | null | null | null |
src/misc/helpers.py
|
dnmarkon/kaggle_elo_merchant
|
92ac552d72234455455c95f53e6091017f048504
|
[
"MIT"
] | null | null | null |
import pandas as pd
def one_hot(df, column):
    """Return *df* with *column* one-hot encoded and the original dropped.

    New columns are named ``<column>_<value>`` via ``pd.get_dummies``.
    """
    dummies = pd.get_dummies(df[column], prefix=column)
    encoded = pd.concat([df, dummies], axis=1)
    return encoded.drop(columns=[column])
| 22.625
| 75
| 0.662983
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0ba8bac551a05bebe5ab8cdbe7162fe74234100b
| 1,019
|
py
|
Python
|
1306_Jump_Game_III.py
|
imguozr/LC-Solutions
|
5e5e7098d2310c972314c9c9895aafd048047fe6
|
[
"WTFPL"
] | null | null | null |
1306_Jump_Game_III.py
|
imguozr/LC-Solutions
|
5e5e7098d2310c972314c9c9895aafd048047fe6
|
[
"WTFPL"
] | null | null | null |
1306_Jump_Game_III.py
|
imguozr/LC-Solutions
|
5e5e7098d2310c972314c9c9895aafd048047fe6
|
[
"WTFPL"
] | null | null | null |
from typing import List
class Solution:
    """
    Jump Game III: from ``start`` you may jump to ``i + arr[i]`` or
    ``i - arr[i]``; decide whether any index holding 0 is reachable.
    """
    def canReach_1(self, arr: List[int], start: int) -> bool:
        """
        Depth-first search, written recursively.
        """
        visited = set()
        def dfs(i):
            # Out of bounds or already explored: dead end.
            if i < 0 or i >= len(arr) or i in visited:
                return False
            if arr[i] == 0:
                return True
            visited.add(i)
            return dfs(i + arr[i]) or dfs(i - arr[i])
        return dfs(start)
    def canReach_2(self, arr: List[int], start: int) -> bool:
        """
        Breadth-first search, written iteratively.
        """
        from collections import deque
        frontier = deque([start])
        visited = {start}
        while frontier:
            pos = frontier.popleft()
            if arr[pos] == 0:
                return True
            for nxt in (pos + arr[pos], pos - arr[pos]):
                if 0 <= nxt < len(arr) and nxt not in visited:
                    visited.add(nxt)
                    frontier.append(nxt)
        return False
| 24.261905
| 67
| 0.459274
| 992
| 0.973503
| 0
| 0
| 0
| 0
| 0
| 0
| 102
| 0.100098
|
0baa80eb9ba40c6d66b4f05785427f91545460d9
| 81
|
py
|
Python
|
Extensions/BabaGUI/config.py
|
siva-msft/baba-is-auto
|
3237b5b70167130558827979bde7dcee14ef39f3
|
[
"MIT"
] | 108
|
2019-09-11T06:31:35.000Z
|
2022-03-28T13:02:56.000Z
|
Extensions/BabaGUI/config.py
|
siva-msft/baba-is-auto
|
3237b5b70167130558827979bde7dcee14ef39f3
|
[
"MIT"
] | 29
|
2019-09-12T00:28:04.000Z
|
2022-02-20T14:56:27.000Z
|
Extensions/BabaGUI/config.py
|
utilForever/baba-is-auto
|
11937742e25b37b1e27de87fe74d10f09062d6ce
|
[
"MIT"
] | 14
|
2020-02-24T05:41:43.000Z
|
2022-03-28T12:43:34.000Z
|
import pygame
# Target frame rate for the GUI main loop.
FPS = 60
# Edge length, in pixels, of one board cell.
BLOCK_SIZE = 48
# Window background fill color (black).
COLOR_BACKGROUND = pygame.Color(0, 0, 0)
| 13.5
| 40
| 0.728395
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0bab9c96a95c9a1b5bf24f9a433d56e2555e1a77
| 392
|
py
|
Python
|
simulation/strategies/bucketing.py
|
kantai/hyperbolic-caching
|
884c466c311bb5b9fbdd09791d829b04032f3947
|
[
"MIT"
] | 15
|
2017-07-13T17:30:01.000Z
|
2021-05-18T11:51:13.000Z
|
simulation/strategies/bucketing.py
|
kantai/hyperbolic-caching
|
884c466c311bb5b9fbdd09791d829b04032f3947
|
[
"MIT"
] | null | null | null |
simulation/strategies/bucketing.py
|
kantai/hyperbolic-caching
|
884c466c311bb5b9fbdd09791d829b04032f3947
|
[
"MIT"
] | 6
|
2017-07-13T21:09:04.000Z
|
2021-04-12T15:22:57.000Z
|
class AveragingBucketUpkeep:
    """Maintain a running average of the costs currently in a bucket."""
    def __init__(self):
        # Sum of costs and number of entries currently tracked.
        self.numer = 0.0
        self.denom = 0
    def add_cost(self, cost):
        """Account for *cost* entering the bucket; return the new average."""
        self.numer += cost
        self.denom += 1
        return self.numer / self.denom
    def rem_cost(self, cost):
        """Account for *cost* leaving the bucket; return the new average.

        Returns 0 when the bucket becomes empty.
        """
        self.numer -= cost
        self.denom -= 1
        return self.numer / self.denom if self.denom != 0 else 0
| 23.058824
| 38
| 0.543367
| 391
| 0.997449
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0bae78fa6080de85a0feb221980172f577f30cf7
| 42,674
|
py
|
Python
|
packstack/plugins/neutron_350.py
|
melroyr/havana-packstack
|
72cdb0e5e29df4cccb81844ec8b365dfededf4f7
|
[
"Apache-2.0"
] | null | null | null |
packstack/plugins/neutron_350.py
|
melroyr/havana-packstack
|
72cdb0e5e29df4cccb81844ec8b365dfededf4f7
|
[
"Apache-2.0"
] | null | null | null |
packstack/plugins/neutron_350.py
|
melroyr/havana-packstack
|
72cdb0e5e29df4cccb81844ec8b365dfededf4f7
|
[
"Apache-2.0"
] | null | null | null |
"""
Installs and configures neutron
"""
import logging
import os
import re
import uuid
from packstack.installer import utils
from packstack.installer import validators
from packstack.installer.utils import split_hosts
from packstack.modules.ospluginutils import getManifestTemplate, appendManifestFile
# Controller object will be initialized from main flow
controller = None
# Plugin name
PLUGIN_NAME = "OS-NEUTRON"
logging.debug("plugin %s loaded", __name__)
def initConfig(controllerObject):
global controller
controller = controllerObject
logging.debug("Adding OpenStack Neutron configuration")
conf_params = {
"NEUTRON" : [
{"CMD_OPTION" : "neutron-server-host",
"USAGE" : "The IP addresses of the server on which to install the Neutron server",
"PROMPT" : "Enter the IP address of the Neutron server",
"OPTION_LIST" : [],
"VALIDATORS" : [validators.validate_ip, validators.validate_ssh],
"DEFAULT_VALUE" : utils.get_localhost_ip(),
"MASK_INPUT" : False,
"LOOSE_VALIDATION": True,
"CONF_NAME" : "CONFIG_NEUTRON_SERVER_HOST",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-ks-password",
"USAGE" : "The password to use for Neutron to authenticate with Keystone",
"PROMPT" : "Enter the password for Neutron Keystone access",
"OPTION_LIST" : [],
"VALIDATORS" : [validators.validate_not_empty],
"DEFAULT_VALUE" : uuid.uuid4().hex[:16],
"MASK_INPUT" : True,
"LOOSE_VALIDATION": False,
"CONF_NAME" : "CONFIG_NEUTRON_KS_PW",
"USE_DEFAULT" : True,
"NEED_CONFIRM" : True,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-db-password",
"USAGE" : "The password to use for Neutron to access DB",
"PROMPT" : "Enter the password for Neutron DB access",
"OPTION_LIST" : [],
"VALIDATORS" : [validators.validate_not_empty],
"DEFAULT_VALUE" : uuid.uuid4().hex[:16],
"MASK_INPUT" : True,
"LOOSE_VALIDATION": False,
"CONF_NAME" : "CONFIG_NEUTRON_DB_PW",
"USE_DEFAULT" : True,
"NEED_CONFIRM" : True,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-l3-hosts",
"USAGE" : "A comma separated list of IP addresses on which to install Neutron L3 agent",
"PROMPT" : "Enter a comma separated list of IP addresses on which to install the Neutron L3 agent",
"OPTION_LIST" : [],
"VALIDATORS" : [validators.validate_multi_ssh],
"DEFAULT_VALUE" : utils.get_localhost_ip(),
"MASK_INPUT" : False,
"LOOSE_VALIDATION": True,
"CONF_NAME" : "CONFIG_NEUTRON_L3_HOSTS",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-l3-ext-bridge",
"USAGE" : "The name of the bridge that the Neutron L3 agent will use for external traffic, or 'provider' if using provider networks",
"PROMPT" : "Enter the bridge the Neutron L3 agent will use for external traffic, or 'provider' if using provider networks",
"OPTION_LIST" : [],
"VALIDATORS" : [validators.validate_not_empty],
"DEFAULT_VALUE" : "br-ex",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": True,
"CONF_NAME" : "CONFIG_NEUTRON_L3_EXT_BRIDGE",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-dhcp-hosts",
"USAGE" : "A comma separated list of IP addresses on which to install Neutron DHCP agent",
"PROMPT" : "Enter a comma separated list of IP addresses on which to install Neutron DHCP agent",
"OPTION_LIST" : [],
"VALIDATORS" : [validators.validate_multi_ssh],
"DEFAULT_VALUE" : utils.get_localhost_ip(),
"MASK_INPUT" : False,
"LOOSE_VALIDATION": True,
"CONF_NAME" : "CONFIG_NEUTRON_DHCP_HOSTS",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-lbaas-hosts",
"USAGE" : "A comma separated list of IP addresses on which to install Neutron LBaaS agent",
"PROMPT" : "Enter a comma separated list of IP addresses on which to install Neutron LBaaS agent",
"OPTION_LIST" : [],
"VALIDATORS" : [validators.validate_multi_ssh],
"DEFAULT_VALUE" : "",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": True,
"CONF_NAME" : "CONFIG_NEUTRON_LBAAS_HOSTS",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-l2-plugin",
"USAGE" : "The name of the L2 plugin to be used with Neutron",
"PROMPT" : "Enter the name of the L2 plugin to be used with Neutron",
"OPTION_LIST" : ["linuxbridge", "openvswitch", "ml2"],
"VALIDATORS" : [validators.validate_options],
"DEFAULT_VALUE" : "openvswitch",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": False,
"CONF_NAME" : "CONFIG_NEUTRON_L2_PLUGIN",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-metadata-hosts",
"USAGE" : "A comma separated list of IP addresses on which to install Neutron metadata agent",
"PROMPT" : "Enter a comma separated list of IP addresses on which to install the Neutron metadata agent",
"OPTION_LIST" : [],
"VALIDATORS" : [validators.validate_multi_ssh],
"DEFAULT_VALUE" : utils.get_localhost_ip(),
"MASK_INPUT" : False,
"LOOSE_VALIDATION": True,
"CONF_NAME" : "CONFIG_NEUTRON_METADATA_HOSTS",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-metadata-pw",
"USAGE" : "A comma separated list of IP addresses on which to install Neutron metadata agent",
"PROMPT" : "Enter a comma separated list of IP addresses on which to install the Neutron metadata agent",
"OPTION_LIST" : [],
"VALIDATORS" : [validators.validate_not_empty],
"DEFAULT_VALUE" : uuid.uuid4().hex[:16],
"MASK_INPUT" : True,
"LOOSE_VALIDATION": False,
"CONF_NAME" : "CONFIG_NEUTRON_METADATA_PW",
"USE_DEFAULT" : True,
"NEED_CONFIRM" : True,
"CONDITION" : False },
],
"NEUTRON_LB_PLUGIN" : [
{"CMD_OPTION" : "neutron-lb-tenant-network-type",
"USAGE" : "The type of network to allocate for tenant networks (eg. vlan, local)",
"PROMPT" : "Enter the type of network to allocate for tenant networks",
"OPTION_LIST" : ["local", "vlan"],
"VALIDATORS" : [validators.validate_options],
"DEFAULT_VALUE" : "local",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": False,
"CONF_NAME" : "CONFIG_NEUTRON_LB_TENANT_NETWORK_TYPE",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-lb-vlan-ranges",
"USAGE" : "A comma separated list of VLAN ranges for the Neutron linuxbridge plugin (eg. physnet1:1:4094,physnet2,physnet3:3000:3999)",
"PROMPT" : "Enter a comma separated list of VLAN ranges for the Neutron linuxbridge plugin",
"OPTION_LIST" : [],
"VALIDATORS" : [],
"DEFAULT_VALUE" : "",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": True,
"CONF_NAME" : "CONFIG_NEUTRON_LB_VLAN_RANGES",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
],
"NEUTRON_LB_PLUGIN_AND_AGENT" : [
{"CMD_OPTION" : "neutron-lb-interface-mappings",
"USAGE" : "A comma separated list of interface mappings for the Neutron linuxbridge plugin (eg. physnet1:br-eth1,physnet2:br-eth2,physnet3:br-eth3)",
"PROMPT" : "Enter a comma separated list of interface mappings for the Neutron linuxbridge plugin",
"OPTION_LIST" : [],
"VALIDATORS" : [],
"DEFAULT_VALUE" : "",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": True,
"CONF_NAME" : "CONFIG_NEUTRON_LB_INTERFACE_MAPPINGS",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
],
"NEUTRON_OVS_PLUGIN" : [
{"CMD_OPTION" : "neutron-ovs-tenant-network-type",
"USAGE" : "Type of network to allocate for tenant networks (eg. vlan, local, gre, vxlan)",
"PROMPT" : "Enter the type of network to allocate for tenant networks",
"OPTION_LIST" : ["local", "vlan", "gre", "vxlan"],
"VALIDATORS" : [validators.validate_options],
"DEFAULT_VALUE" : "local",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": False,
"CONF_NAME" : "CONFIG_NEUTRON_OVS_TENANT_NETWORK_TYPE",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-ovs-vlan-ranges",
"USAGE" : "A comma separated list of VLAN ranges for the Neutron openvswitch plugin (eg. physnet1:1:4094,physnet2,physnet3:3000:3999)",
"PROMPT" : "Enter a comma separated list of VLAN ranges for the Neutron openvswitch plugin",
"OPTION_LIST" : [],
"VALIDATORS" : [],
"DEFAULT_VALUE" : "",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": True,
"CONF_NAME" : "CONFIG_NEUTRON_OVS_VLAN_RANGES",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
],
"NEUTRON_OVS_PLUGIN_AND_AGENT" : [
{"CMD_OPTION" : "neutron-ovs-bridge-mappings",
"USAGE" : "A comma separated list of bridge mappings for the Neutron openvswitch plugin (eg. physnet1:br-eth1,physnet2:br-eth2,physnet3:br-eth3)",
"PROMPT" : "Enter a comma separated list of bridge mappings for the Neutron openvswitch plugin",
"OPTION_LIST" : [],
"VALIDATORS" : [],
"DEFAULT_VALUE" : "",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": True,
"CONF_NAME" : "CONFIG_NEUTRON_OVS_BRIDGE_MAPPINGS",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-ovs-bridge-interfaces",
"USAGE" : "A comma separated list of colon-separated OVS bridge:interface pairs. The interface will be added to the associated bridge.",
"PROMPT" : "Enter a comma separated list of OVS bridge:interface pairs for the Neutron openvswitch plugin",
"OPTION_LIST" : [],
"VALIDATORS" : [],
"DEFAULT_VALUE" : "",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": True,
"CONF_NAME" : "CONFIG_NEUTRON_OVS_BRIDGE_IFACES",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
],
"NEUTRON_OVS_PLUGIN_TUNNEL" : [
{"CMD_OPTION" : "neutron-ovs-tunnel-ranges",
"USAGE" : "A comma separated list of tunnel ranges for the Neutron openvswitch plugin (eg. 1:1000)",
"PROMPT" : "Enter a comma separated list of tunnel ranges for the Neutron openvswitch plugin",
"OPTION_LIST" : [],
"VALIDATORS" : [],
"DEFAULT_VALUE" : "",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": True,
"CONF_NAME" : "CONFIG_NEUTRON_OVS_TUNNEL_RANGES",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
],
"NEUTRON_OVS_PLUGIN_AND_AGENT_TUNNEL" : [
{"CMD_OPTION" : "neutron-ovs-tunnel-if",
"USAGE" : "The interface for the OVS tunnel. Packstack will override the IP address used for tunnels on this hypervisor to the IP found on the specified interface. (eg. eth1) ",
"PROMPT" : "Enter interface with IP to override the default tunnel local_ip",
"OPTION_LIST" : [],
"VALIDATORS" : [],
"DEFAULT_VALUE" : "",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": True,
"CONF_NAME" : "CONFIG_NEUTRON_OVS_TUNNEL_IF",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
],
"NEUTRON_OVS_PLUGIN_AND_AGENT_VXLAN" : [
{"CMD_OPTION" : "neutron-ovs-vxlan-udp-port",
"CONF_NAME" : "CONFIG_NEUTRON_OVS_VXLAN_UDP_PORT",
"USAGE" : "VXLAN UDP port",
"PROMPT" : "Enter VXLAN UDP port number",
"OPTION_LIST" : [],
"VALIDATORS" : [validators.validate_port],
"DEFAULT_VALUE" : 4789,
"MASK_INPUT" : False,
"LOOSE_VALIDATION": True,
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
],
"NEUTRON_ML2_PLUGIN" : [
{"CMD_OPTION" : "neutron-ml2-type-drivers",
"CONF_NAME" : "CONFIG_NEUTRON_ML2_TYPE_DRIVERS",
"USAGE" : ("A comma separated list of network type "
"driver entrypoints to be loaded from the "
"neutron.ml2.type_drivers namespace."),
"PROMPT" : ("Enter a comma separated list of network "
"type driver entrypoints"),
"OPTION_LIST" : ["local", "flat", "vlan", "gre", "vxlan"],
"VALIDATORS" : [validators.validate_multi_options],
"DEFAULT_VALUE" : "local",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": False,
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-ml2-tenant-network-types",
"CONF_NAME" : "CONFIG_NEUTRON_ML2_TENANT_NETWORK_TYPES",
"USAGE" : ("A comma separated ordered list of "
"network_types to allocate as tenant "
"networks. The value 'local' is only useful "
"for single-box testing but provides no "
"connectivity between hosts."),
"PROMPT" : ("Enter a comma separated ordered list of "
"network_types to allocate as tenant "
"networks"),
"OPTION_LIST" : ["local", "vlan", "gre", "vxlan"],
"VALIDATORS" : [validators.validate_multi_options],
"DEFAULT_VALUE" : "local",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": False,
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-ml2-mechanism-drivers",
"CONF_NAME" : "CONFIG_NEUTRON_ML2_MECHANISM_DRIVERS",
"USAGE" : ("A comma separated ordered list of "
"networking mechanism driver entrypoints "
"to be loaded from the "
"neutron.ml2.mechanism_drivers namespace."),
"PROMPT" : ("Enter a comma separated ordered list of "
"networking mechanism driver entrypoints"),
"OPTION_LIST" : ["logger", "test", "linuxbridge",
"openvswitch", "hyperv", "ncs", "arista",
"cisco_nexus", "l2population"],
"VALIDATORS" : [validators.validate_multi_options],
"DEFAULT_VALUE" : "openvswitch",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": False,
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-ml2-flat-networks",
"CONF_NAME" : "CONFIG_NEUTRON_ML2_FLAT_NETWORKS",
"USAGE" : ("A comma separated list of physical_network"
" names with which flat networks can be "
"created. Use * to allow flat networks with "
"arbitrary physical_network names."),
"PROMPT" : ("Enter a comma separated list of "
"physical_network names with which flat "
"networks can be created"),
"OPTION_LIST" : [],
"VALIDATORS" : [],
"DEFAULT_VALUE" : "*",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": False,
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-ml2-vlan-ranges",
"CONF_NAME" : "CONFIG_NEUTRON_ML2_VLAN_RANGES",
"USAGE" : ("A comma separated list of "
"<physical_network>:<vlan_min>:<vlan_max> "
"or <physical_network> specifying "
"physical_network names usable for VLAN "
"provider and tenant networks, as well as "
"ranges of VLAN tags on each available for "
"allocation to tenant networks."),
"PROMPT" : ("Enter a comma separated list of "
"physical_network names usable for VLAN"),
"OPTION_LIST" : [],
"VALIDATORS" : [],
"DEFAULT_VALUE" : "",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": False,
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-ml2-tunnel-id-ranges",
"CONF_NAME" : "CONFIG_NEUTRON_ML2_TUNNEL_ID_RANGES",
"USAGE" : ("A comma separated list of <tun_min>:"
"<tun_max> tuples enumerating ranges of GRE "
"tunnel IDs that are available for tenant "
"network allocation. Should be an array with"
" tun_max +1 - tun_min > 1000000"),
"PROMPT" : ("Enter a comma separated list of <tun_min>:"
"<tun_max> tuples enumerating ranges of GRE "
"tunnel IDs that are available for tenant "
"network allocation"),
"OPTION_LIST" : [],
"VALIDATORS" : [],
"DEFAULT_VALUE" : "",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": False,
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-ml2-vxlan-group",
"CONF_NAME" : "CONFIG_NEUTRON_ML2_VXLAN_GROUP",
"USAGE" : ("Multicast group for VXLAN. If unset, "
"disables VXLAN enable sending allocate "
"broadcast traffic to this multicast group. "
"When left unconfigured, will disable "
"multicast VXLAN mode. Should be an "
"Multicast IP (v4 or v6) address."),
"PROMPT" : "Enter a multicast group for VXLAN",
"OPTION_LIST" : [],
"VALIDATORS" : [],
"DEFAULT_VALUE" : "",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": False,
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-ml2-vni-ranges",
"CONF_NAME" : "CONFIG_NEUTRON_ML2_VNI_RANGES",
"USAGE" : ("A comma separated list of <vni_min>:"
"<vni_max> tuples enumerating ranges of "
"VXLAN VNI IDs that are available for tenant"
" network allocation. Min value is 0 and Max"
" value is 16777215."),
"PROMPT" : ("Enter a comma separated list of <vni_min>:"
"<vni_max> tuples enumerating ranges of "
"VXLAN VNI IDs that are available for tenant"
" network allocation"),
"OPTION_LIST" : [],
"VALIDATORS" : [],
"DEFAULT_VALUE" : "",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": False,
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-l2-agent", # We need to ask for this only in case of ML2 plugins
"USAGE" : "The name of the L2 agent to be used with Neutron",
"PROMPT" : "Enter the name of the L2 agent to be used with Neutron",
"OPTION_LIST" : ["linuxbridge", "openvswitch"],
"VALIDATORS" : [validators.validate_options],
"DEFAULT_VALUE" : "openvswitch",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": False,
"CONF_NAME" : "CONFIG_NEUTRON_L2_AGENT",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
],
}
conf_groups = [
{ "GROUP_NAME" : "NEUTRON",
"DESCRIPTION" : "Neutron config",
"PRE_CONDITION" : "CONFIG_NEUTRON_INSTALL",
"PRE_CONDITION_MATCH" : "y",
"POST_CONDITION" : False,
"POST_CONDITION_MATCH" : True },
{ "GROUP_NAME" : "NEUTRON_ML2_PLUGIN",
"DESCRIPTION" : "Neutron ML2 plugin config",
"PRE_CONDITION" : use_ml2_plugin,
"PRE_CONDITION_MATCH" : True,
"POST_CONDITION" : False,
"POST_CONDITION_MATCH" : True },
{ "GROUP_NAME" : "NEUTRON_LB_PLUGIN",
"DESCRIPTION" : "Neutron LB plugin config",
"PRE_CONDITION" : use_linuxbridge_plugin,
"PRE_CONDITION_MATCH" : True,
"POST_CONDITION" : False,
"POST_CONDITION_MATCH" : True },
{ "GROUP_NAME" : "NEUTRON_LB_PLUGIN_AND_AGENT",
"DESCRIPTION" : "Neutron LB agent config",
"PRE_CONDITION" : use_linuxbridge_agent,
"PRE_CONDITION_MATCH" : True,
"POST_CONDITION" : False,
"POST_CONDITION_MATCH" : True },
{ "GROUP_NAME" : "NEUTRON_OVS_PLUGIN",
"DESCRIPTION" : "Neutron OVS plugin config",
"PRE_CONDITION" : use_openvswitch_plugin,
"PRE_CONDITION_MATCH" : True,
"POST_CONDITION" : False,
"POST_CONDITION_MATCH" : True },
{ "GROUP_NAME" : "NEUTRON_OVS_PLUGIN_AND_AGENT",
"DESCRIPTION" : "Neutron OVS agent config",
"PRE_CONDITION" : use_openvswitch_agent,
"PRE_CONDITION_MATCH" : True,
"POST_CONDITION" : False,
"POST_CONDITION_MATCH" : True },
{ "GROUP_NAME" : "NEUTRON_OVS_PLUGIN_TUNNEL",
"DESCRIPTION" : "Neutron OVS plugin config for tunnels",
"PRE_CONDITION" : use_openvswitch_plugin_tunnel,
"PRE_CONDITION_MATCH" : True,
"POST_CONDITION" : False,
"POST_CONDITION_MATCH" : True },
{ "GROUP_NAME" : "NEUTRON_OVS_PLUGIN_AND_AGENT_TUNNEL",
"DESCRIPTION" : "Neutron OVS agent config for tunnels",
"PRE_CONDITION" : use_openvswitch_agent_tunnel,
"PRE_CONDITION_MATCH" : True,
"POST_CONDITION" : False,
"POST_CONDITION_MATCH" : True },
{ "GROUP_NAME" : "NEUTRON_OVS_PLUGIN_AND_AGENT_VXLAN",
"DESCRIPTION" : "Neutron OVS agent config for VXLAN",
"PRE_CONDITION" : use_openvswitch_vxlan,
"PRE_CONDITION_MATCH" : True,
"POST_CONDITION" : False,
"POST_CONDITION_MATCH" : True },
]
for group in conf_groups:
paramList = conf_params[group["GROUP_NAME"]]
controller.addGroup(group, paramList)
def use_ml2_plugin(config):
    """True when Neutron is being installed with the ML2 core plugin."""
    wanted = ('y', 'ml2')
    actual = (config['CONFIG_NEUTRON_INSTALL'],
              config['CONFIG_NEUTRON_L2_PLUGIN'])
    return actual == wanted
def use_linuxbridge_plugin(config):
    """True when the monolithic linuxbridge plugin is selected.

    Side effect: pins CONFIG_NEUTRON_L2_AGENT to 'linuxbridge' when selected.
    """
    selected = (config['CONFIG_NEUTRON_INSTALL'] == 'y' and
                config['CONFIG_NEUTRON_L2_PLUGIN'] == 'linuxbridge')
    if selected:
        # The monolithic plugin implies the matching L2 agent.
        config["CONFIG_NEUTRON_L2_AGENT"] = 'linuxbridge'
    return selected
def use_linuxbridge_agent(config):
    """True when a linuxbridge L2 agent is needed (LB plugin or ML2+LB)."""
    # Evaluate the ML2 case first to match the original evaluation order;
    # use_linuxbridge_plugin may set CONFIG_NEUTRON_L2_AGENT as a side effect.
    ml2_used = (use_ml2_plugin(config) and
                config["CONFIG_NEUTRON_L2_AGENT"] == 'linuxbridge')
    lb_used = use_linuxbridge_plugin(config)
    return lb_used or ml2_used
def use_openvswitch_plugin(config):
    """True when the monolithic openvswitch plugin is selected.

    Side effect: pins CONFIG_NEUTRON_L2_AGENT to 'openvswitch' when selected.
    """
    selected = (config['CONFIG_NEUTRON_INSTALL'] == 'y' and
                config['CONFIG_NEUTRON_L2_PLUGIN'] == 'openvswitch')
    if selected:
        # The monolithic plugin implies the matching L2 agent.
        config["CONFIG_NEUTRON_L2_AGENT"] = 'openvswitch'
    return selected
def use_openvswitch_plugin_tunnel(config):
    """True when the OVS plugin is used with a tunnelled tenant network."""
    if not use_openvswitch_plugin(config):
        return False
    return config['CONFIG_NEUTRON_OVS_TENANT_NETWORK_TYPE'] in ('gre', 'vxlan')
def use_ml2_with_ovs(config):
    """True when the ML2 plugin is paired with the openvswitch agent."""
    if not use_ml2_plugin(config):
        return False
    return config["CONFIG_NEUTRON_L2_AGENT"] == 'openvswitch'
def use_openvswitch_agent(config):
    """True when an OVS agent is needed (monolithic OVS plugin or ML2+OVS)."""
    if use_openvswitch_plugin(config):
        return True
    return use_ml2_with_ovs(config)
def use_openvswitch_agent_tunnel(config):
    """True when a tunnelling OVS agent is needed (OVS tunnel or ML2+OVS)."""
    if use_openvswitch_plugin_tunnel(config):
        return True
    return use_ml2_with_ovs(config)
def use_openvswitch_vxlan(config):
    """True when VXLAN is in play for either the OVS plugin or ML2+OVS."""
    ovs_case = (use_openvswitch_plugin_tunnel(config) and
                config['CONFIG_NEUTRON_OVS_TENANT_NETWORK_TYPE'] == 'vxlan')
    if ovs_case:
        return True
    return (use_ml2_with_ovs(config) and
            'vxlan' in config['CONFIG_NEUTRON_ML2_TYPE_DRIVERS'])
def use_openvswitch_gre(config):
    """True when GRE tunnelling is in play for either OVS or ML2+OVS setups."""
    # Fixed copy-paste naming: the locals were misleadingly called
    # ovs_vxlan / ml2_vxlan even though this predicate tests for GRE.
    ovs_gre = (
        use_openvswitch_plugin_tunnel(config) and
        config['CONFIG_NEUTRON_OVS_TENANT_NETWORK_TYPE'] == 'gre'
    )
    # NOTE(review): the vxlan twin checks CONFIG_NEUTRON_ML2_TYPE_DRIVERS
    # while this checks ..._ML2_TENANT_NETWORK_TYPES — confirm the
    # asymmetry is intentional.
    ml2_gre = (
        use_ml2_with_ovs(config) and
        'gre' in config['CONFIG_NEUTRON_ML2_TENANT_NETWORK_TYPES']
    )
    return ovs_gre or ml2_gre
def get_if_driver(config):
    """Map the configured L2 agent to its Neutron interface driver path.

    Returns None for agents other than openvswitch/linuxbridge.
    """
    drivers = {
        'openvswitch': 'neutron.agent.linux.interface.OVSInterfaceDriver',
        'linuxbridge': 'neutron.agent.linux.interface.BridgeInterfaceDriver',
    }
    return drivers.get(config['CONFIG_NEUTRON_L2_AGENT'])
def initSequences(controller):
    """Translate the Neutron answers into manifest-generation steps.

    No-op unless CONFIG_NEUTRON_INSTALL is 'y'. Derives the plugin DB
    name / core-plugin path from the chosen L2 plugin, normalizes the
    ML2 list options, publishes the per-role host sets as module
    globals, and registers the manifest-building sequence.
    """
    config = controller.CONF
    if config['CONFIG_NEUTRON_INSTALL'] != 'y':
        return
    # NOTE(review): if CONFIG_NEUTRON_L2_PLUGIN is none of the three
    # values below, plugin_db/plugin_path are never bound and the
    # assignments further down raise NameError — confirm the option
    # validator guarantees one of these values.
    if config['CONFIG_NEUTRON_L2_PLUGIN'] == 'openvswitch':
        plugin_db = 'ovs_neutron'
        plugin_path = ('neutron.plugins.openvswitch.ovs_neutron_plugin.'
                       'OVSNeutronPluginV2')
    elif config['CONFIG_NEUTRON_L2_PLUGIN'] == 'linuxbridge':
        plugin_db = 'neutron_linux_bridge'
        plugin_path = ('neutron.plugins.linuxbridge.lb_neutron_plugin.'
                       'LinuxBridgePluginV2')
    elif config['CONFIG_NEUTRON_L2_PLUGIN'] == 'ml2':
        plugin_db = 'neutron'
        plugin_path = 'neutron.plugins.ml2.plugin.Ml2Plugin'
        # values modification
        # Rewrite each comma-separated option as the str() of a Python
        # list, ready for direct substitution into the Puppet manifests.
        for key in ('CONFIG_NEUTRON_ML2_TYPE_DRIVERS',
                    'CONFIG_NEUTRON_ML2_TENANT_NETWORK_TYPES',
                    'CONFIG_NEUTRON_ML2_MECHANISM_DRIVERS',
                    'CONFIG_NEUTRON_ML2_FLAT_NETWORKS',
                    'CONFIG_NEUTRON_ML2_VLAN_RANGES',
                    'CONFIG_NEUTRON_ML2_TUNNEL_ID_RANGES',
                    'CONFIG_NEUTRON_ML2_VNI_RANGES'):
            config[key] = str([i.strip() for i in config[key].split(',') if i])
        # Empty VXLAN group becomes the Puppet literal 'undef'.
        key = 'CONFIG_NEUTRON_ML2_VXLAN_GROUP'
        config[key] = "'%s'" % config[key] if config[key] else 'undef'
    config['CONFIG_NEUTRON_L2_DBNAME'] = plugin_db
    config['CONFIG_NEUTRON_CORE_PLUGIN'] = plugin_path
    # Host sets are shared as module globals with the create_* steps below.
    global api_hosts, l3_hosts, dhcp_hosts, lbaas_hosts, compute_hosts, meta_hosts, q_hosts
    api_hosts = split_hosts(config['CONFIG_NEUTRON_SERVER_HOST'])
    l3_hosts = split_hosts(config['CONFIG_NEUTRON_L3_HOSTS'])
    dhcp_hosts = split_hosts(config['CONFIG_NEUTRON_DHCP_HOSTS'])
    lbaas_hosts = split_hosts(config['CONFIG_NEUTRON_LBAAS_HOSTS'])
    meta_hosts = split_hosts(config['CONFIG_NEUTRON_METADATA_HOSTS'])
    compute_hosts = set()
    if config['CONFIG_NOVA_INSTALL'] == 'y':
        compute_hosts = split_hosts(config['CONFIG_NOVA_COMPUTE_HOSTS'])
    # Union of every host that needs any Neutron component installed.
    q_hosts = api_hosts | l3_hosts | dhcp_hosts | lbaas_hosts | compute_hosts | meta_hosts
    neutron_steps = [
        {'title': 'Adding Neutron API manifest entries',
         'functions': [create_manifests]},
        {'title': 'Adding Neutron Keystone manifest entries',
         'functions': [create_keystone_manifest]},
        {'title': 'Adding Neutron L3 manifest entries',
         'functions': [create_l3_manifests]},
        {'title': 'Adding Neutron L2 Agent manifest entries',
         'functions': [create_l2_agent_manifests]},
        {'title': 'Adding Neutron DHCP Agent manifest entries',
         'functions': [create_dhcp_manifests]},
        {'title': 'Adding Neutron LBaaS Agent manifest entries',
         'functions': [create_lbaas_manifests]},
        {'title': 'Adding Neutron Metadata Agent manifest entries',
         'functions': [create_metadata_manifests]},
    ]
    controller.addSequence("Installing OpenStack Neutron", [], [],
                           neutron_steps)
def create_manifests(config):
    """Add Neutron server/API manifest entries for every Neutron host.

    Appends the base ``neutron.pp`` manifest on every host in ``q_hosts``,
    the API manifest plus per-source firewall rules on API hosts, the L2
    plugin manifest everywhere, and opens the VXLAN/GRE tunnel port when an
    Open vSwitch tunnel network type is in use.
    """
    global q_hosts

    # Optional Neutron service plugins enabled by the answer file.
    service_plugins = []
    if config['CONFIG_NEUTRON_LBAAS_HOSTS']:
        service_plugins.append(
            'neutron.services.loadbalancer.plugin.LoadBalancerPlugin'
        )

    if config['CONFIG_NEUTRON_L2_PLUGIN'] == 'ml2':
        # ML2 uses the L3 Router service plugin to implement l3 agent
        service_plugins.append(
            'neutron.services.l3_router.l3_router_plugin.L3RouterPlugin'
        )

    config['SERVICE_PLUGINS'] = (str(service_plugins) if service_plugins
                                 else 'undef')

    if config['CONFIG_NEUTRON_L2_PLUGIN'] == 'openvswitch':
        nettype = config.get("CONFIG_NEUTRON_OVS_TENANT_NETWORK_TYPE", "local")
        plugin_manifest = 'neutron_ovs_plugin_%s.pp' % nettype
    elif config['CONFIG_NEUTRON_L2_PLUGIN'] == 'linuxbridge':
        plugin_manifest = 'neutron_lb_plugin.pp'
    elif config['CONFIG_NEUTRON_L2_PLUGIN'] == 'ml2':
        plugin_manifest = 'neutron_ml2_plugin.pp'
    # NOTE(review): for any other L2 plugin value, plugin_manifest is unbound
    # and the getManifestTemplate(plugin_manifest) call below raises
    # NameError -- presumably upstream validation restricts the value to
    # these three; confirm.

    # host to which allow neutron server
    allowed_hosts = set(q_hosts)
    if config['CONFIG_CLIENT_INSTALL'] == 'y':
        allowed_hosts.add(config['CONFIG_OSCLIENT_HOST'])
    if config['CONFIG_HORIZON_INSTALL'] == 'y':
        allowed_hosts.add(config['CONFIG_HORIZON_HOST'])
    if config['CONFIG_NOVA_INSTALL'] == 'y':
        allowed_hosts.add(config['CONFIG_NOVA_API_HOST'])

    for host in q_hosts:
        manifest_file = "%s_neutron.pp" % (host,)
        manifest_data = getManifestTemplate("neutron.pp")
        appendManifestFile(manifest_file, manifest_data, 'neutron')
        if host in api_hosts:
            manifest_file = "%s_neutron.pp" % (host,)
            manifest_data = getManifestTemplate("neutron_api.pp")

            # Firewall Rules: open the Neutron API port (9696) to every
            # host that must reach the server.
            for f_host in allowed_hosts:
                config['FIREWALL_SERVICE_NAME'] = "neutron server"
                config['FIREWALL_PORTS'] = "'9696'"
                config['FIREWALL_CHAIN'] = "INPUT"
                config['FIREWALL_PROTOCOL'] = 'tcp'
                config['FIREWALL_ALLOWED'] = "'%s'" % f_host
                config['FIREWALL_SERVICE_ID'] = "neutron_server_%s_%s" % (host, f_host)
                manifest_data += getManifestTemplate("firewall.pp")

            appendManifestFile(manifest_file, manifest_data, 'neutron')

        # Set up any l2 plugin configs we need anywhere we install neutron
        # XXX I am not completely sure about this, but it seems necessary:
        manifest_data = getManifestTemplate(plugin_manifest)

        # We also need to open VXLAN/GRE port for agent
        if use_openvswitch_vxlan(config) or use_openvswitch_gre(config):
            if use_openvswitch_vxlan(config):
                config['FIREWALL_PROTOCOL'] = 'udp'
                tunnel_port = ("'%s'"
                               % config['CONFIG_NEUTRON_OVS_VXLAN_UDP_PORT'])
            else:
                # GRE is its own IP protocol, so no port number applies.
                config['FIREWALL_PROTOCOL'] = 'gre'
                tunnel_port = 'undef'
            config['FIREWALL_ALLOWED'] = "'ALL'"
            config['FIREWALL_SERVICE_NAME'] = "neutron tunnel port"
            config['FIREWALL_SERVICE_ID'] = ("neutron_tunnel")
            config['FIREWALL_PORTS'] = tunnel_port
            config['FIREWALL_CHAIN'] = "INPUT"
            manifest_data += getManifestTemplate('firewall.pp')

        appendManifestFile(manifest_file, manifest_data, 'neutron')
def create_keystone_manifest(config):
    """Append the Neutron/Keystone registration manifest on the Keystone host."""
    keystone_host = config['CONFIG_KEYSTONE_HOST']
    data = getManifestTemplate("keystone_neutron.pp")
    appendManifestFile("%s_keystone.pp" % keystone_host, data)
def find_mapping(haystack, needle):
    """Return True if *needle* equals the value part of any ``key:value``
    entry in the comma-separated *haystack* string."""
    return any(needle == entry.split(':')[1].strip()
               for entry in get_values(haystack))
def create_l3_manifests(config):
    """Add the L3 agent manifest on every L3 host and, for OVS, create the
    external bridge when it is not covered by a bridge mapping."""
    global l3_hosts

    plugin = config['CONFIG_NEUTRON_L2_PLUGIN']  # NOTE(review): unused here
    if config['CONFIG_NEUTRON_L3_EXT_BRIDGE'] == 'provider':
        # An empty bridge value signals the template to use provider networks.
        config['CONFIG_NEUTRON_L3_EXT_BRIDGE'] = ''

    for host in l3_hosts:
        config['CONFIG_NEUTRON_L3_HOST'] = host
        config['CONFIG_NEUTRON_L3_INTERFACE_DRIVER'] = get_if_driver(config)
        manifestdata = getManifestTemplate("neutron_l3.pp")
        manifestfile = "%s_neutron.pp" % (host,)
        appendManifestFile(manifestfile, manifestdata + '\n')
        # Create the external OVS bridge only when it is not already handled
        # by one of the configured bridge mappings.
        if (config['CONFIG_NEUTRON_L2_PLUGIN'] == 'openvswitch' and
                config['CONFIG_NEUTRON_L3_EXT_BRIDGE'] and
                not find_mapping(config['CONFIG_NEUTRON_OVS_BRIDGE_MAPPINGS'],
                                 config['CONFIG_NEUTRON_L3_EXT_BRIDGE'])):
            config['CONFIG_NEUTRON_OVS_BRIDGE'] = config['CONFIG_NEUTRON_L3_EXT_BRIDGE']
            manifestdata = getManifestTemplate('neutron_ovs_bridge.pp')
            appendManifestFile(manifestfile, manifestdata + '\n')
def create_dhcp_manifests(config):
    """Add the DHCP agent manifest and DHCP firewall rules on each DHCP host."""
    global dhcp_hosts

    plugin = config['CONFIG_NEUTRON_L2_PLUGIN']  # NOTE(review): unused here
    for host in dhcp_hosts:
        config["CONFIG_NEUTRON_DHCP_HOST"] = host
        config['CONFIG_NEUTRON_DHCP_INTERFACE_DRIVER'] = get_if_driver(config)
        manifest_data = getManifestTemplate("neutron_dhcp.pp")
        manifest_file = "%s_neutron.pp" % (host,)
        # Firewall Rules: allow DHCP traffic (port 67 inbound, 68 outbound)
        # from/to every Neutron host.
        # NOTE(review): DHCP normally uses UDP; 'tcp' here looks suspicious --
        # confirm against the firewall.pp template semantics.
        config['FIREWALL_PROTOCOL'] = 'tcp'
        for f_host in q_hosts:
            config['FIREWALL_ALLOWED'] = "'%s'" % f_host
            config['FIREWALL_SERVICE_NAME'] = "neutron dhcp in"
            config['FIREWALL_SERVICE_ID'] = "neutron_dhcp_in_%s_%s" % (host, f_host)
            config['FIREWALL_PORTS'] = "'67'"
            config['FIREWALL_CHAIN'] = "INPUT"
            manifest_data += getManifestTemplate("firewall.pp")
            config['FIREWALL_SERVICE_NAME'] = "neutron dhcp out"
            config['FIREWALL_SERVICE_ID'] = "neutron_dhcp_out_%s_%s" % (host, f_host)
            config['FIREWALL_PORTS'] = "'68'"
            config['FIREWALL_CHAIN'] = "OUTPUT"
            manifest_data += getManifestTemplate("firewall.pp")

        appendManifestFile(manifest_file, manifest_data, 'neutron')
def create_lbaas_manifests(config):
    """Add the LBaaS agent manifest on every LBaaS host."""
    global lbaas_hosts
    for host in lbaas_hosts:
        # NOTE(review): sibling functions write into the local ``config``
        # mapping and export the current host; this one writes
        # controller.CONF and never sets a per-host key -- confirm that
        # controller.CONF aliases ``config`` and that the template does not
        # need the host value.
        controller.CONF['CONFIG_NEUTRON_LBAAS_INTERFACE_DRIVER'] = get_if_driver(config)
        manifestdata = getManifestTemplate("neutron_lbaas.pp")
        manifestfile = "%s_neutron.pp" % (host,)
        appendManifestFile(manifestfile, manifestdata + "\n")
def get_values(val):
    """Split a comma-separated string into a list of stripped tokens.

    A falsy input (None or empty string) yields an empty list.
    """
    if not val:
        return []
    return [token.strip() for token in val.split(',')]
def get_agent_type(config):
    """Return the network type the L2 agent template should be built for.

    Tunnel types take precedence (gre, then vxlan, then vlan); if none of
    those is configured, the first configured type is returned.

    The only real use case I can think of for multiples right now is to list
    "vlan,gre" or "vlan,vxlan" so that both are configured, with the tunnel
    type selecting the agent template.
    """
    tenant_types = config.get('CONFIG_NEUTRON_ML2_TENANT_NETWORK_TYPES',
                              "['local']").strip('[]')
    # BUG FIX: values stored earlier via str(list) look like "'vlan', 'gre'",
    # so every item after the first carries a leading space. Stripping only
    # quotes left that space in place (" 'gre'" stayed quoted), which made
    # the membership tests below always miss those items. Strip whitespace
    # and quotes together.
    tenant_types = [i.strip(' "\'') for i in tenant_types.split(',')]
    for preferred in ('gre', 'vxlan', 'vlan'):
        if preferred in tenant_types:
            return preferred
    return tenant_types[0]
def create_l2_agent_manifests(config):
    """Add the L2 agent manifest on every host that runs a Neutron service.

    For the Open vSwitch agent this also formats the bridge mappings as a
    Puppet array and, on network hosts, wires physical interfaces into their
    OVS bridges.

    Raises RuntimeError for an inconsistent plugin/agent combination and
    KeyError for an unknown agent.
    """
    # BUG FIX: the global statement previously named "dhcp_host" (a typo for
    # "dhcp_hosts"); harmless for the read below, but it would silently bind
    # the wrong name on assignment and misleads readers.
    global api_hosts, compute_hosts, dhcp_hosts, l3_hosts

    plugin = config['CONFIG_NEUTRON_L2_PLUGIN']
    agent = config["CONFIG_NEUTRON_L2_AGENT"]

    if agent == "openvswitch":
        host_var = 'CONFIG_NEUTRON_OVS_HOST'
        if plugin == agent:
            # monolithic plugin installation
            ovs_type = 'CONFIG_NEUTRON_OVS_TENANT_NETWORK_TYPE'
            ovs_type = config.get(ovs_type, 'local')
        elif plugin == 'ml2':
            ovs_type = get_agent_type(config)
        else:
            raise RuntimeError('Invalid combination of plugin and agent.')
        template_name = "neutron_ovs_agent_%s.pp" % ovs_type
        bm_arr = get_values(config["CONFIG_NEUTRON_OVS_BRIDGE_MAPPINGS"])
        iface_arr = get_values(config["CONFIG_NEUTRON_OVS_BRIDGE_IFACES"])
        # The CONFIG_NEUTRON_OVS_BRIDGE_MAPPINGS parameter contains a
        # comma-separated list of bridge mappings. Since the puppet module
        # expects this parameter to be an array, this parameter must be properly
        # formatted by packstack, then consumed by the puppet module.
        # For example, the input string 'A, B, C' should formatted as '['A','B','C']'.
        config["CONFIG_NEUTRON_OVS_BRIDGE_MAPPINGS"] = str(bm_arr)
    elif agent == "linuxbridge":
        host_var = 'CONFIG_NEUTRON_LB_HOST'
        template_name = 'neutron_lb_agent.pp'
    else:
        raise KeyError("Unknown layer2 agent")

    # Install l2 agents on every compute host in addition to any hosts listed
    # specifically for the l2 agent
    for host in api_hosts | compute_hosts | dhcp_hosts | l3_hosts:
        config[host_var] = host
        manifestfile = "%s_neutron.pp" % (host,)
        manifestdata = getManifestTemplate(template_name)
        appendManifestFile(manifestfile, manifestdata + "\n")
        # neutron ovs port only on network hosts
        if (
            agent == "openvswitch" and (
                (host in l3_hosts and ovs_type in ['vxlan', 'gre'])
                or ovs_type == 'vlan')
        ):
            bridge_key = 'CONFIG_NEUTRON_OVS_BRIDGE'
            iface_key = 'CONFIG_NEUTRON_OVS_IFACE'
            for if_map in iface_arr:
                # Each entry is "bridge:interface".
                config[bridge_key], config[iface_key] = if_map.split(':')
                manifestdata = getManifestTemplate("neutron_ovs_port.pp")
                appendManifestFile(manifestfile, manifestdata + "\n")
        # Additional configurations required for compute hosts and
        # network hosts.
        manifestdata = getManifestTemplate('neutron_bridge_module.pp')
        appendManifestFile(manifestfile, manifestdata + '\n')
def create_metadata_manifests(config):
    """Add the metadata agent manifest on every metadata host.

    Does nothing when Nova is explicitly not being installed.
    """
    global meta_hosts
    if config.get('CONFIG_NOVA_INSTALL') == 'n':
        return
    for host in meta_hosts:
        controller.CONF['CONFIG_NEUTRON_METADATA_HOST'] = host
        filename = "%s_neutron.pp" % (host,)
        data = getManifestTemplate('neutron_metadata.pp')
        appendManifestFile(filename, data + "\n")
| 49.334104
| 200
| 0.552819
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 19,763
| 0.463116
|
0baeb09b96866048e3277bdd11b177c6f437a60e
| 1,217
|
py
|
Python
|
01-Exercicios/Aula001/Ex2.py
|
AmandaRH07/Python_Entra21
|
4084962508f1597c0498d8b329e0f45e2ac55302
|
[
"MIT"
] | null | null | null |
01-Exercicios/Aula001/Ex2.py
|
AmandaRH07/Python_Entra21
|
4084962508f1597c0498d8b329e0f45e2ac55302
|
[
"MIT"
] | null | null | null |
01-Exercicios/Aula001/Ex2.py
|
AmandaRH07/Python_Entra21
|
4084962508f1597c0498d8b329e0f45e2ac55302
|
[
"MIT"
] | null | null | null |
#--- Exercício 2 - Variáveis
#--- Crie um menu para um sistema de cadastro de funcionários
#--- O menu deve ser impresso com a função format()
#--- As opções devem ser variáveis do tipo inteiro
#--- As descrições das opções serão:
#--- Cadastrar funcionário
#--- Listar funcionários
#--- Editar funcionário
#--- Deletar funcionário
#--- Sair
#--- Além das opções o menu deve conter um cabeçalho e um rodapé
#--- Entre o cabeçalho e o menu e entre o menu e o rodapé deverá ter espaçamento de 3 linhas
#--- Deve ser utilizado os caracteres especiais de quebra de linha e de tabulação
# Print the employee-registry menu with str.format() and read the chosen
# option as an integer, then echo back which option was selected.
opcao = int(input("""
SISTEMA DE CADASTRO DE FUNCIONARIO\n\n\n
{} - Cadastrar Funcionário
{} - Listar Funcionários
{} - Editar Funcionário
{} - Deletar Funcionário
{} - Sair\n\n\n
Escolha uma opção: """.format(1,2,3,4,5)))
# BUG FIX above: menu said "Funcinários" (typo) -- corrected to "Funcionários".

if opcao == 1:
    print("A opção escolhida foi 'Cadastrar funcionário'")
elif opcao == 2:
    print("A opção escolhida foi 'Listar funcionários'")
elif opcao == 3:
    print("A opção escolhida foi 'Editar funcionário'")
elif opcao == 4:
    print("A opção escolhida foi 'Deletar funcionário'")
elif opcao == 5:
    print("A opção escolhida foi 'Sair'")
else:
    # Any other number is silently ignored.
    pass
| 32.891892
| 92
| 0.676253
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,052
| 0.831621
|
0baf09cfe2bff0d7d8fbadcb0dcd9e76b3f75e76
| 284
|
py
|
Python
|
venv/lib/python3.8/site-packages/crispy_forms/templates/uni_form/uni_form.html.py
|
Solurix/Flashcards-Django
|
03c863f6722936093927785a2b20b6b668bb743d
|
[
"MIT"
] | 1
|
2021-05-16T03:20:23.000Z
|
2021-05-16T03:20:23.000Z
|
venv/lib/python3.8/site-packages/crispy_forms/templates/uni_form/uni_form.html.py
|
Solurix/Flashcards-Django
|
03c863f6722936093927785a2b20b6b668bb743d
|
[
"MIT"
] | 4
|
2021-03-30T14:06:09.000Z
|
2021-09-22T19:26:31.000Z
|
venv/lib/python3.8/site-packages/crispy_forms/templates/uni_form/uni_form.html.py
|
Solurix/Flashcards-Django
|
03c863f6722936093927785a2b20b6b668bb743d
|
[
"MIT"
] | null | null | null |
BB BBBBBBBBBBBBBBBBBB
BB BBBBBBBBBBBBBBBB
BBBBBBB BBBBBBBBBBBBBBBBBBBBBB
BBBBB
BB BBBBBBBBBB BB BB BB BBBBBBBBBB
XXXXXXXXX XXXXXXXXX
XXXXXXXXXXXXXXXXX
BBBBB
BBB BBBBB BB BBBB
BBBBBBB BBBBBBBBBBBBBB
BBBBBB
BB BBBBBBBBBB BB BB BB BBBBBBBBBB
XXXXXXXXXXX
BBBBB
| 15.777778
| 34
| 0.792254
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0bb224e01ebd658b05fd1ae3164a24c7e6a95713
| 1,137
|
py
|
Python
|
baybars/timber.py
|
dkanarek12/baybars
|
72f4cff706c11d25ce537cf0fed61bc3ef89da30
|
[
"Apache-2.0"
] | 9
|
2018-10-16T19:20:35.000Z
|
2020-06-02T13:27:29.000Z
|
baybars/timber.py
|
dkanarek12/baybars
|
72f4cff706c11d25ce537cf0fed61bc3ef89da30
|
[
"Apache-2.0"
] | 10
|
2018-07-29T08:56:18.000Z
|
2019-03-21T18:31:15.000Z
|
baybars/timber.py
|
dkanarek12/baybars
|
72f4cff706c11d25ce537cf0fed61bc3ef89da30
|
[
"Apache-2.0"
] | 9
|
2018-07-29T08:59:53.000Z
|
2019-12-31T07:50:57.000Z
|
# Copyright 2018 Jet.com
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import sys
def get_logger(name):
    """Return a logger named *name* writing to stdout and stderr.

    INFO and above go to stdout; ERROR and above additionally go to stderr
    (the stdout handler's INFO threshold also passes ERROR records, so
    errors appear on both streams -- unchanged from the original behavior).

    BUG FIX: handlers are now attached only once. ``logging.getLogger``
    returns a cached instance per name, so the original version added two
    more handlers on every call, duplicating each log line.
    """
    logger = logging.getLogger(name)
    if not logger.handlers:
        formatter = logging.Formatter(
            '[baybars][%(name)s][%(asctime)s][%(levelname)s]:%(message)s')

        stream_stdout = logging.StreamHandler(sys.stdout)
        stream_stdout.setLevel(logging.INFO)
        stream_stdout.setFormatter(formatter)

        stream_stderr = logging.StreamHandler(sys.stderr)
        stream_stderr.setLevel(logging.ERROR)
        stream_stderr.setFormatter(formatter)

        logger.addHandler(stream_stdout)
        logger.addHandler(stream_stderr)
        logger.setLevel(logging.INFO)
    return logger
| 33.441176
| 94
| 0.769569
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 620
| 0.545295
|
0bb4673a2136b7bf006e51e515e0e3d35ea020dd
| 417
|
py
|
Python
|
nlu_hyperopt/space.py
|
JulianGerhard21/nlu-hyperopt
|
3d16fda97fa7cf1337b19395a57780e6e2dc9bd3
|
[
"Apache-2.0"
] | null | null | null |
nlu_hyperopt/space.py
|
JulianGerhard21/nlu-hyperopt
|
3d16fda97fa7cf1337b19395a57780e6e2dc9bd3
|
[
"Apache-2.0"
] | null | null | null |
nlu_hyperopt/space.py
|
JulianGerhard21/nlu-hyperopt
|
3d16fda97fa7cf1337b19395a57780e6e2dc9bd3
|
[
"Apache-2.0"
] | 1
|
2021-07-08T11:40:27.000Z
|
2021-07-08T11:40:27.000Z
|
from hyperopt import hp

# Define the hyperopt search space here, e.g.
# from hyperopt.pyll.base import scope
# search_space = {
#    'epochs': hp.qloguniform('epochs', 0, 4, 2),
#    'max_df': hp.uniform('max_df', 1, 2),
#    'max_ngrams': scope.int(hp.quniform('max_ngram', 3, 9, 1))
# }

# Default search space: try different numbers of training epochs, sampled
# log-uniformly over [e^0, e^4] and quantized to multiples of 2.
search_space = {"epochs": hp.qloguniform("epochs", 0, 4, 2)}
| 29.785714
| 65
| 0.654676
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 337
| 0.808153
|
0bb51dc78ddd2967ca706bd880e3869f1feac056
| 4,633
|
py
|
Python
|
lib/taurus/qt/qtgui/panel/report/basicreport.py
|
MikeFalowski/taurus
|
ef041bf35dd847caf08a7efbe072f4020d35522e
|
[
"CC-BY-3.0"
] | 1
|
2016-10-19T13:54:08.000Z
|
2016-10-19T13:54:08.000Z
|
lib/taurus/qt/qtgui/panel/report/basicreport.py
|
MikeFalowski/taurus
|
ef041bf35dd847caf08a7efbe072f4020d35522e
|
[
"CC-BY-3.0"
] | 27
|
2016-05-25T08:56:58.000Z
|
2019-01-21T09:18:08.000Z
|
lib/taurus/qt/qtgui/panel/report/basicreport.py
|
MikeFalowski/taurus
|
ef041bf35dd847caf08a7efbe072f4020d35522e
|
[
"CC-BY-3.0"
] | 8
|
2015-07-24T09:16:50.000Z
|
2018-06-12T12:33:59.000Z
|
#!/usr/bin/env python
#############################################################################
##
# This file is part of Taurus
##
# http://taurus-scada.org
##
# Copyright 2011 CELLS / ALBA Synchrotron, Bellaterra, Spain
##
# Taurus is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
##
# Taurus is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
##
# You should have received a copy of the GNU Lesser General Public License
# along with Taurus. If not, see <http://www.gnu.org/licenses/>.
##
#############################################################################
"""This module provides a panel to display taurus messages"""
__all__ = ["ClipboardReportHandler", "SMTPReportHandler"]
__docformat__ = 'restructuredtext'
from taurus.core.util.report import TaurusMessageReportHandler
from taurus.external.qt import Qt
from taurus.qt.qtgui.util.ui import UILoadable
class ClipboardReportHandler(TaurusMessageReportHandler):
    """Report a message by copying it to the clipboard"""

    Label = "Copy to Clipboard"

    def report(self, message):
        """Place *message* on the application clipboard and notify the user."""
        application = Qt.QApplication.instance()
        application.clipboard().setText(message)
        Qt.QMessageBox.information(None, "Done!",
                                   "Message Copied to clipboard")
@UILoadable(with_ui='ui')
class SendMailDialog(Qt.QDialog):
    """Dialog for composing an email (from, to, subject, body).

    Widgets are loaded from ``SendMailForm.ui`` via the :class:`UILoadable`
    decorator and exposed on ``self.ui``.
    """

    def __init__(self, parent=None):
        Qt.QDialog.__init__(self, parent)
        self.loadUi(filename="SendMailForm.ui")
        self.ui.buttonBox.accepted.connect(self.accept)
        self.ui.buttonBox.rejected.connect(self.reject)
        # Monospace keeps tracebacks / preformatted messages readable.
        self.ui.editMessage.setFont(Qt.QFont("Monospace"))

    def setFrom(self, efrom):
        # NOTE(review): this writes ``lineEditFrom`` while getFrom() reads
        # ``editFrom`` -- one of the two widget names is likely stale;
        # verify against SendMailForm.ui.
        self.ui.lineEditFrom.setText(efrom)

    def setTo(self, eto):
        self.ui.editTo.setText(eto)

    def setSubject(self, subject):
        self.ui.editSubject.setText(subject)

    def setMessage(self, message):
        self.ui.editMessage.setPlainText(message)

    def getFrom(self):
        return str(self.ui.editFrom.text())

    def getTo(self):
        return str(self.ui.editTo.text())

    def getSubject(self):
        return str(self.ui.editSubject.text())

    def getMessage(self):
        return str(self.ui.editMessage.toPlainText())

    def getMailInfo(self):
        # Returns (from, to, subject, message) -- the argument order
        # expected by SMTPReportHandler.sendMail.
        return self.getFrom(), self.getTo(), self.getSubject(), \
            self.getMessage()
class SMTPReportHandler(TaurusMessageReportHandler):
    """Report a message by sending an email"""

    Label = "Send email"

    def report(self, message):
        """Open a compose dialog prefilled with *message* and send it via
        the local SMTP server, reporting success or failure in a message
        box."""
        app = Qt.QApplication.instance()
        subject = "Error in " + app.applicationName()
        dialog = self.createDialog(subject=subject, message=message)

        if not dialog.exec_():
            return

        mail_info = dialog.getMailInfo()

        try:
            self.sendMail(*mail_info)
            Qt.QMessageBox.information(None, "Done!",
                                       "Email has been sent!")
        except Exception:
            # Narrowed from a bare ``except:`` so KeyboardInterrupt and
            # SystemExit are not swallowed; still best-effort by design.
            import sys
            import traceback
            einfo = sys.exc_info()[:2]
            msg = "".join(traceback.format_exception_only(*einfo))
            Qt.QMessageBox.warning(None, "Failed to send email",
                                   "Failed to send email. Reason:\n\n" + msg)

    def sendMail(self, efrom, eto, subject, message):
        """Send a plain-text email through the SMTP server on localhost."""
        import smtplib
        import email.mime.text

        msg = email.mime.text.MIMEText(message)
        msg['From'] = efrom
        msg['To'] = eto
        msg['Subject'] = subject
        s = smtplib.SMTP('localhost')
        try:
            # ``finally`` guarantees the connection is closed even when
            # sendmail raises (previously the socket leaked on failure).
            s.sendmail(efrom, eto, msg.as_string())
        finally:
            s.quit()

    def getDialogClass(self):
        """Return the dialog class to instantiate (hook for subclasses)."""
        return SendMailDialog

    def createDialog(self, efrom=None, eto=None, subject=None, message=None):
        """Build a compose dialog prefilled with any of the given fields."""
        dialog = self.getDialogClass()()
        dialog.setWindowTitle("Compose message")
        if efrom is not None:
            dialog.setFrom(efrom)
        if eto is not None:
            # BUG FIX: this previously called dialog.setFrom(eto), which
            # overwrote the sender with the recipient and left the To
            # field empty.
            dialog.setTo(eto)
        if subject is not None:
            dialog.setSubject(subject)
        if message is not None:
            dialog.setMessage(message)
        return dialog
def main():
    """Show the compose-mail dialog standalone (manual smoke test)."""
    application = Qt.QApplication([])
    dialog = SendMailDialog()
    dialog.exec_()


if __name__ == "__main__":
    main()
| 29.890323
| 77
| 0.618821
| 3,223
| 0.695662
| 0
| 0
| 1,074
| 0.231815
| 0
| 0
| 1,398
| 0.301748
|
0bb56b74527c4ab3380dff7d3851c648cd78de0c
| 347
|
py
|
Python
|
src/workflows/__init__.py
|
stufisher/python-workflows
|
f1f67bb56a0f8a6820762f68e2e59ade2da60a95
|
[
"BSD-3-Clause"
] | null | null | null |
src/workflows/__init__.py
|
stufisher/python-workflows
|
f1f67bb56a0f8a6820762f68e2e59ade2da60a95
|
[
"BSD-3-Clause"
] | null | null | null |
src/workflows/__init__.py
|
stufisher/python-workflows
|
f1f67bb56a0f8a6820762f68e2e59ade2da60a95
|
[
"BSD-3-Clause"
] | null | null | null |
# Package version string, also exposed through version().
__version__ = "2.18"


def version():
    """Returns the version number of the installed workflows package."""
    return __version__


class Error(Exception):
    """Common class for exceptions deliberately raised by workflows package."""


class Disconnected(Error):
    """Indicates the connection could not be established or has been lost."""
| 23.133333
| 79
| 0.723343
| 207
| 0.596542
| 0
| 0
| 0
| 0
| 0
| 0
| 222
| 0.639769
|
0bb5af4cb0e1469e03fc6ee0d14c4d8bfb035eff
| 18,592
|
py
|
Python
|
autoarray/structures/grids/grid_decorators.py
|
jonathanfrawley/PyAutoArray_copy
|
c21e8859bdb20737352147b9904797ac99985b73
|
[
"MIT"
] | null | null | null |
autoarray/structures/grids/grid_decorators.py
|
jonathanfrawley/PyAutoArray_copy
|
c21e8859bdb20737352147b9904797ac99985b73
|
[
"MIT"
] | null | null | null |
autoarray/structures/grids/grid_decorators.py
|
jonathanfrawley/PyAutoArray_copy
|
c21e8859bdb20737352147b9904797ac99985b73
|
[
"MIT"
] | null | null | null |
import numpy as np
from functools import wraps
from autoconf import conf
from autoarray.structures.grids.one_d import abstract_grid_1d
from autoarray.structures.grids.two_d import grid_2d
from autoarray.structures.grids.two_d import grid_2d_interpolate
from autoarray.structures.grids.two_d import grid_2d_iterate
from autoarray.structures.grids.two_d import grid_2d_irregular
from autoarray.structures.arrays.one_d import array_1d
from autoarray.structures.arrays import values
from autoarray import exc
from typing import Union
def grid_1d_to_structure(func):
    """
    Homogenize the inputs and outputs of functions that take 2D grids of (y,x) coordinates that return the results
    as a NumPy array.

    Parameters
    ----------
    func : (obj, grid, *args, **kwargs) -> Object
        A function which computes a set of values from a 2D grid of (y,x) coordinates.

    Returns
    -------
    A function that can except cartesian or transformed coordinates
    """

    @wraps(func)
    def wrapper(
        obj, grid, *args, **kwargs
    ) -> Union[array_1d.Array1D, values.ValuesIrregular]:
        """
        Evaluate `func` along a 1D radial projection of the input grid.

        For `Grid2D` / `Grid2DIterate` / `Grid2DInterpolate` inputs, the grid
        is first projected to a radial 2D grid anchored at ``obj.centre`` and
        rotated by ``obj.angle + 90`` degrees (when `obj` defines them), and
        the result is returned as an `Array1D`. A `Grid2DIrregular` is passed
        through unprojected and its own result conversion is used. An
        `AbstractGrid1D` is lifted to a radial 2D grid before evaluation.

        Raises a `GridException` for any other input (e.g. a plain ndarray).
        """
        # Fall back to the origin / no rotation when obj does not define a
        # centre or angle (or defines them as None).
        centre = (0.0, 0.0)

        if hasattr(obj, "centre"):
            if obj.centre is not None:
                centre = obj.centre

        angle = 0.0

        if hasattr(obj, "angle"):
            if obj.angle is not None:
                # The +90 offset rotates the projection to the profile's
                # major axis -- presumably matching the angle convention of
                # grid_2d_radial_projected_from; confirm.
                angle = obj.angle + 90.0

        if (
            isinstance(grid, grid_2d.Grid2D)
            or isinstance(grid, grid_2d_iterate.Grid2DIterate)
            or isinstance(grid, grid_2d_interpolate.Grid2DInterpolate)
        ):
            grid_2d_projected = grid.grid_2d_radial_projected_from(
                centre=centre, angle=angle
            )
            result = func(obj, grid_2d_projected, *args, **kwargs)
            return array_1d.Array1D.manual_slim(
                array=result, pixel_scales=grid.pixel_scale
            )

        elif isinstance(grid, grid_2d_irregular.Grid2DIrregular):
            result = func(obj, grid, *args, **kwargs)
            return grid.structure_2d_from_result(result=result)
        elif isinstance(grid, abstract_grid_1d.AbstractGrid1D):
            grid_2d_radial = grid.project_to_radial_grid_2d(angle=angle)
            result = func(obj, grid_2d_radial, *args, **kwargs)
            return array_1d.Array1D.manual_slim(
                array=result, pixel_scales=grid.pixel_scale
            )

        raise exc.GridException(
            "You cannot input a NumPy array to a `quantity_1d_from_grid` method."
        )

    return wrapper
def grid_1d_output_structure(func):
    """
    Homogenize the inputs and outputs of functions that take 2D grids of (y,x) coordinates that return the results
    as a NumPy array.

    Parameters
    ----------
    func : (obj, grid, *args, **kwargs) -> Object
        A function which computes a set of values from a 2D grid of (y,x) coordinates.

    Returns
    -------
    A function that can except cartesian or transformed coordinates
    """

    @wraps(func)
    def wrapper(
        obj, grid, *args, **kwargs
    ) -> Union[array_1d.Array1D, values.ValuesIrregular]:
        """
        Evaluate `func` on the grid unchanged and convert only the OUTPUT.

        Unlike `grid_1d_to_structure`, no radial projection is applied to
        the input: the grid is passed to `func` as-is, and the raw result is
        wrapped as an `Array1D` (for `Grid2D` / `Grid2DIterate` /
        `Grid2DInterpolate` / `AbstractGrid1D` inputs) or converted via
        `structure_2d_from_result` (for `Grid2DIrregular`).

        Raises a `GridException` for any other input (e.g. a plain ndarray).
        """
        result = func(obj, grid, *args, **kwargs)

        if (
            isinstance(grid, grid_2d.Grid2D)
            or isinstance(grid, grid_2d_iterate.Grid2DIterate)
            or isinstance(grid, grid_2d_interpolate.Grid2DInterpolate)
        ):
            return array_1d.Array1D.manual_slim(
                array=result, pixel_scales=grid.pixel_scale
            )
        elif isinstance(grid, grid_2d_irregular.Grid2DIrregular):
            return grid.structure_2d_from_result(result=result)
        elif isinstance(grid, abstract_grid_1d.AbstractGrid1D):
            return array_1d.Array1D.manual_slim(
                array=result, pixel_scales=grid.pixel_scale
            )

        raise exc.GridException(
            "You cannot input a NumPy array to a `quantity_1d_from_grid` method."
        )

    return wrapper
def grid_2d_to_structure(func):
    """
    Homogenize the inputs and outputs of functions that take 2D grids of (y,x) coordinates that return the results
    as a NumPy array.

    Parameters
    ----------
    func : (obj, grid, *args, **kwargs) -> Object
        A function which computes a set of values from a 2D grid of (y,x) coordinates.

    Returns
    -------
    A function that can except cartesian or transformed coordinates
    """

    @wraps(func)
    def wrapper(obj, grid, *args, **kwargs):
        """
        Dispatch the evaluation of `func` on the grid's own machinery.

        `Grid2DIterate` and `Grid2DInterpolate` delegate entirely to the
        grid (iterated / interpolated evaluation). `Grid2D` and
        `Grid2DIrregular` evaluate `func` directly and convert the result
        via `structure_2d_from_result`. An `AbstractGrid1D` is first lifted
        to a radial 2D grid. Any other input (e.g. a plain NumPy array) is
        passed straight through and the raw result returned.
        """
        if isinstance(grid, grid_2d_iterate.Grid2DIterate):
            return grid.iterated_result_from_func(func=func, cls=obj)
        elif isinstance(grid, grid_2d_interpolate.Grid2DInterpolate):
            return grid.result_from_func(func=func, cls=obj)
        elif isinstance(grid, grid_2d_irregular.Grid2DIrregular):
            result = func(obj, grid, *args, **kwargs)
            return grid.structure_2d_from_result(result=result)
        elif isinstance(grid, grid_2d.Grid2D):
            result = func(obj, grid, *args, **kwargs)
            return grid.structure_2d_from_result(result=result)
        elif isinstance(grid, abstract_grid_1d.AbstractGrid1D):
            grid_2d_radial = grid.project_to_radial_grid_2d()
            result = func(obj, grid_2d_radial, *args, **kwargs)
            return grid.structure_2d_from_result(result=result)

        # At this point grid is none of the structure types above (they all
        # returned), so this condition is always true when reached; kept
        # for explicitness.
        if not isinstance(grid, grid_2d_irregular.Grid2DIrregular) and not isinstance(
            grid, grid_2d.Grid2D
        ):
            return func(obj, grid, *args, **kwargs)

    return wrapper
def grid_2d_to_structure_list(func):
    """
    Homogenize the inputs and outputs of functions that take 2D grids of (y,x) coordinates and return the results as
    a list of NumPy arrays.

    Parameters
    ----------
    func : (obj, grid, *args, **kwargs) -> Object
        A function which computes a set of values from a 2D grid of (y,x) coordinates.

    Returns
    -------
    A function that can except cartesian or transformed coordinates
    """

    @wraps(func)
    def wrapper(obj, grid, *args, **kwargs):
        """
        This decorator serves the same purpose as the `grid_2d_to_structure` decorator, but it deals with functions whose
        output is a list of results as opposed to a single NumPy array. It simply iterates over these lists to perform
        the same conversions as `grid_2d_to_structure`.

        Parameters
        ----------
        obj : object
            An object whose function uses grid_like inputs to compute quantities at every coordinate on the grid.
        grid : Grid2D or Grid2DIrregular
            A grid_like object of (y,x) coordinates on which the function values are evaluated.

        Returns
        -------
        The function values evaluated on the grid with the same structure as the input grid_like object in a list
        of NumPy arrays.
        """
        if isinstance(grid, grid_2d_iterate.Grid2DIterate):
            # Iterated grids cannot delegate per-result here: evaluate once
            # on a grid rebuilt at the maximum sub-size, then bin each
            # result back down before converting to the caller's structure.
            mask = grid.mask.mask_new_sub_size_from(
                mask=grid.mask, sub_size=max(grid.sub_steps)
            )
            grid_compute = grid_2d.Grid2D.from_mask(mask=mask)
            result_list = func(obj, grid_compute, *args, **kwargs)
            result_list = [
                grid_compute.structure_2d_from_result(result=result)
                for result in result_list
            ]
            result_list = [result.binned for result in result_list]
            return grid.grid.structure_2d_list_from_result_list(result_list=result_list)
        elif isinstance(grid, grid_2d_interpolate.Grid2DInterpolate):
            # Interpolating grids return the raw list unconverted.
            return func(obj, grid, *args, **kwargs)
        elif isinstance(grid, grid_2d_irregular.Grid2DIrregular):
            result_list = func(obj, grid, *args, **kwargs)
            return grid.structure_2d_list_from_result_list(result_list=result_list)
        elif isinstance(grid, grid_2d.Grid2D):
            result_list = func(obj, grid, *args, **kwargs)
            return grid.structure_2d_list_from_result_list(result_list=result_list)
        elif isinstance(grid, abstract_grid_1d.AbstractGrid1D):
            grid_2d_radial = grid.project_to_radial_grid_2d()
            result_list = func(obj, grid_2d_radial, *args, **kwargs)
            return grid.structure_2d_list_from_result_list(result_list=result_list)

        # At this point grid is none of the structure types above (they all
        # returned), so this condition is always true when reached; kept
        # for explicitness.
        if not isinstance(grid, grid_2d_irregular.Grid2DIrregular) and not isinstance(
            grid, grid_2d.Grid2D
        ):
            return func(obj, grid, *args, **kwargs)

    return wrapper
def transform(func):
    """Decorator ensuring *func* always receives grid coordinates in the
    profile's reference frame.

    A grid that has already been transformed (any of the ``*Transformed``
    grid types) is forwarded untouched; any other grid is first passed
    through ``cls.transform_grid_to_reference_frame``.

    Parameters
    ----------
    func : (profile, grid, *args, **kwargs) -> Object
        A function whose input grid's coordinates may require transformation.

    Returns
    -------
    A function that can accept cartesian or transformed coordinates.
    """

    @wraps(func)
    def wrapper(cls, grid, *args, **kwargs):
        """
        Parameters
        ----------
        cls : Profile
            The class that owns the function.
        grid : grid_like
            The (y, x) coordinates in the original reference frame of the grid.

        Returns
        -------
        A grid_like object whose coordinates may be transformed.
        """
        already_transformed = (
            grid_2d.Grid2DTransformed,
            grid_2d.Grid2DTransformedNumpy,
            grid_2d_irregular.Grid2DIrregularTransformed,
        )
        if isinstance(grid, already_transformed):
            # Coordinates are already in the reference frame — pass through.
            return func(cls, grid, *args, **kwargs)
        return func(cls, cls.transform_grid_to_reference_frame(grid), *args, **kwargs)

    return wrapper
def relocate_to_radial_minimum(func):
    """ Checks whether any coordinates in the grid are radially near (0.0, 0.0), which can lead to numerical faults in \
    the evaluation of a function (e.g. numerical integration reaching a singularity at (0.0, 0.0)). If any coordinates
    are radially within the radial minimum threshold, their (y,x) coordinates are shifted to that value to ensure
    they are evaluated at that coordinate.

    The value the (y,x) coordinates are rounded to is set in the 'radial_min.ini' config.

    Parameters
    ----------
    func : (profile, *args, **kwargs) -> Object
        A function that takes a grid of coordinates which may have a singularity as (0.0, 0.0)

    Returns
    -------
    A function that can accept cartesian or transformed coordinates
    """

    @wraps(func)
    def wrapper(cls, grid, *args, **kwargs):
        """
        Parameters
        ----------
        cls : Profile
            The class that owns the function.
        grid : grid_like
            The (y, x) coordinates which are to be radially moved from (0.0, 0.0).

        Returns
        -------
        The grid_like object whose coordinates are radially moved from (0.0, 0.0).
        """
        # Per-profile minimum radius, looked up by the owning class's name.
        grid_radial_minimum = conf.instance["grids"]["radial_minimum"][
            "radial_minimum"
        ][cls.__class__.__name__]
        with np.errstate(all="ignore"):  # Division by zero fixed via isnan
            grid_radii = cls.grid_to_grid_radii(grid=grid)
            # Factor > 1 pushes too-central points out to the minimum radius;
            # points already beyond the threshold keep a factor of 1.0.
            grid_radial_scale = np.where(
                grid_radii < grid_radial_minimum, grid_radial_minimum / grid_radii, 1.0
            )
            grid = np.multiply(grid, grid_radial_scale[:, None])
        # A coordinate exactly at (0.0, 0.0) gives 0/0 -> NaN above; pin it to
        # the minimum radius value.
        # NOTE(review): indentation reconstructed from a whitespace-mangled
        # paste — the NaN fix-up and the call below are assumed to sit outside
        # the errstate context; confirm against the original file.
        grid[np.isnan(grid)] = grid_radial_minimum

        return func(cls, grid, *args, **kwargs)

    return wrapper
| 42.254545
| 122
| 0.64119
| 0
| 0
| 0
| 0
| 14,670
| 0.789049
| 0
| 0
| 11,199
| 0.602356
|
0bb89e9bc4b11618566c516b525db418c9d0a1b7
| 742
|
py
|
Python
|
079_039_189/ngram_2/get_10_summary.py
|
Aditya-AS/Question-Answering-System
|
22c3fe549c03a3b5ba1f86befef3c9f91278d3fc
|
[
"MIT"
] | null | null | null |
079_039_189/ngram_2/get_10_summary.py
|
Aditya-AS/Question-Answering-System
|
22c3fe549c03a3b5ba1f86befef3c9f91278d3fc
|
[
"MIT"
] | null | null | null |
079_039_189/ngram_2/get_10_summary.py
|
Aditya-AS/Question-Answering-System
|
22c3fe549c03a3b5ba1f86befef3c9f91278d3fc
|
[
"MIT"
] | null | null | null |
"""
Sanjay Reddy S-2013A7PS189P
Aditya Sarma -2013A7PS079P
Vamsi T -2013A7PS039P
Artificial Intelligence Term Project
"""
import pickle
import BeautifulSoup
import re
import boto
from boto.s3.connection import S3Connection
from boto.s3.key import Key
from google import search
def get_10_summary(query, source="google"):
    """
    Run *query* against a web search engine and return the raw results.

    Parameters
    ----------
    query : str
        The search query string to submit.
    source : str, optional
        Name of the search engine to use. Kept for backward compatibility /
        future extension, but currently ignored — every call goes through
        ``google.search`` regardless of this value.

    Returns
    -------
    Whatever ``google.search(query)`` produces (an iterable of result URLs
    per that package's API) — not a BeautifulSoup object, despite what the
    original docstring claimed.
    """
    # NOTE(review): `source` is never consulted; honoring it would require
    # adding other search backends and is out of scope for this fix.
    return search(query)
| 25.586207
| 144
| 0.669811
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 484
| 0.652291
|
0bb8e30ded6e839a96a8ac9f64f609621cb56e4a
| 2,055
|
py
|
Python
|
S4/S4 Library/simulation/careers/pick_career_by_agent_interaction.py
|
NeonOcean/Environment
|
ca658cf66e8fd6866c22a4a0136d415705b36d26
|
[
"CC-BY-4.0"
] | 1
|
2021-05-20T19:33:37.000Z
|
2021-05-20T19:33:37.000Z
|
S4/S4 Library/simulation/careers/pick_career_by_agent_interaction.py
|
NeonOcean/Environment
|
ca658cf66e8fd6866c22a4a0136d415705b36d26
|
[
"CC-BY-4.0"
] | null | null | null |
S4/S4 Library/simulation/careers/pick_career_by_agent_interaction.py
|
NeonOcean/Environment
|
ca658cf66e8fd6866c22a4a0136d415705b36d26
|
[
"CC-BY-4.0"
] | null | null | null |
from event_testing.resolver import SingleSimResolver
from sims4.resources import Types
from sims4.tuning.tunable import TunableList, TunableReference
from sims4.tuning.tunable_base import GroupNames
from traits.trait_tracker import TraitPickerSuperInteraction
import services
class PickCareerByAgentInteraction(TraitPickerSuperInteraction):
    # Picker interaction that lists the available agents of a tunable set of
    # careers; selecting an agent enrolls the target sim in that agent's career.
    # NOTE(review): indentation below reconstructed from a whitespace-mangled
    # paste of decompiled code — verify control flow against the original file.
    INSTANCE_TUNABLES = {'pickable_careers': TunableList(description='\n               A list of careers whose available agents will be used to populate\n               the picker. When an available agent is selected, the sim actor will\n               be placed in the associated career. A career may have multiple\n               agents, in which case each will appear and each will correspond to\n               that career.\n               ', tunable=TunableReference(manager=services.get_instance_manager(Types.CAREER), pack_safe=True), tuning_group=GroupNames.PICKERTUNING, unique_entries=True)}

    @classmethod
    def _get_agent_traits_for_career_gen(cls, sim_info, career):
        # Yield the agent traits tuned on the career level the sim would enter,
        # where the entry level is derived from the sim's career history.
        career_history = sim_info.career_tracker.career_history
        (entry_level, _, career_track) = career.get_career_entry_level(career_history, SingleSimResolver(sim_info))
        for agent_trait in career_track.career_levels[entry_level].agents_available:
            yield agent_trait

    @classmethod
    def _trait_selection_gen(cls, target):
        # Picker contents: agents from every pickable career the target sim is
        # not already enrolled in (matched by career guid64).
        for career in cls.pickable_careers:
            if target.sim_info.career_tracker.has_career_by_uid(career.guid64):
                continue
            yield from cls._get_agent_traits_for_career_gen(target.sim_info, career)

    def on_choice_selected(self, choice_tag, **kwargs):
        # choice_tag is the picked agent trait; None means the picker was cancelled.
        if choice_tag is None:
            return
        sim_info = self.target.sim_info
        for career in self.pickable_careers:
            # Enroll the sim in the career that owns the selected agent trait.
            if choice_tag in self._get_agent_traits_for_career_gen(sim_info, career):
                sim_info.career_tracker.add_career(career(sim_info), post_quit_msg=False)
        # NOTE(review): whether super().on_choice_selected and this return sit
        # inside the matching branch or (as reconstructed here) run once after
        # the loop cannot be determined from the mangled source — confirm.
        super().on_choice_selected(choice_tag, **kwargs)
        return
| 60.441176
| 583
| 0.734793
| 1,777
| 0.86472
| 627
| 0.305109
| 661
| 0.321655
| 0
| 0
| 376
| 0.182968
|
0bb9728183f6cd95e86f2c16d976742c14283f39
| 149
|
py
|
Python
|
api/urls.py
|
kirklennon/Clickbait
|
9ce97d38b3dce78ce151b285a0cc55ddbb7b58be
|
[
"MIT"
] | 1
|
2020-08-29T09:31:22.000Z
|
2020-08-29T09:31:22.000Z
|
api/urls.py
|
kirklennon/Clickbait
|
9ce97d38b3dce78ce151b285a0cc55ddbb7b58be
|
[
"MIT"
] | null | null | null |
api/urls.py
|
kirklennon/Clickbait
|
9ce97d38b3dce78ce151b285a0cc55ddbb7b58be
|
[
"MIT"
] | null | null | null |
from django.urls import path

from . import views

# URL routes for this app:
urlpatterns = [
    # App root -> views.index (reverse with name 'index').
    path('', views.index, name='index'),
    # 'json' -> views.api (reverse with name 'api').
    path('json', views.api, name='api'),
]
| 21.285714
| 40
| 0.644295
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 20
| 0.134228
|
0bba1e28f68dedeccae5371afea0ac4ab68e2473
| 68,549
|
py
|
Python
|
tests/examples/minlplib/waterno2_03.py
|
ouyang-w-19/decogo
|
52546480e49776251d4d27856e18a46f40c824a1
|
[
"MIT"
] | 2
|
2021-07-03T13:19:10.000Z
|
2022-02-06T10:48:13.000Z
|
tests/examples/minlplib/waterno2_03.py
|
ouyang-w-19/decogo
|
52546480e49776251d4d27856e18a46f40c824a1
|
[
"MIT"
] | 1
|
2021-07-04T14:52:14.000Z
|
2021-07-15T10:17:11.000Z
|
tests/examples/minlplib/waterno2_03.py
|
ouyang-w-19/decogo
|
52546480e49776251d4d27856e18a46f40c824a1
|
[
"MIT"
] | null | null | null |
# MINLP written by GAMS Convert at 04/21/18 13:55:18
#
# Equation counts
# Total E G L N X C B
# 617 367 103 147 0 0 0 0
#
# Variable counts
# x b i s1s s2s sc si
# Total cont binary integer sos1 sos2 scont sint
# 499 472 27 0 0 0 0 0
# FX 6 6 0 0 0 0 0 0
#
# Nonzero counts
# Total const NL DLL
# 1636 1333 303 0
#
# Reformulation has removed 1 variable and 1 equation
from pyomo.environ import *
model = m = ConcreteModel()
m.b2 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b3 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b4 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b5 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b6 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b7 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b8 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b9 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b10 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b11 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b12 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b13 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b14 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b15 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b16 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b17 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b18 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b19 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b20 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b21 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b22 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b23 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b24 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b25 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b26 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b27 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b28 = Var(within=Binary,bounds=(0,1),initialize=0)
m.x29 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x30 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x31 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x32 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x33 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x34 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x35 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x36 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x37 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x38 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x39 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x40 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x41 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x42 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x43 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x44 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x45 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x46 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x47 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x48 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x49 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x50 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x51 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x52 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x53 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x54 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x55 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x56 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x57 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x58 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x59 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x60 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x61 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x62 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x63 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x64 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x65 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x66 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x67 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x68 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x69 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x70 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x71 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x72 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x73 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x74 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x75 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x76 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x77 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x78 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x79 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x80 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x81 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x82 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x83 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x84 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x85 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x86 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x87 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x88 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x89 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x90 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x91 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x92 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x93 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x94 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x95 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x96 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x97 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x98 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x99 = Var(within=Reals,bounds=(0,2.4),initialize=0)
m.x100 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x101 = Var(within=Reals,bounds=(0,2.4),initialize=0)
m.x102 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x103 = Var(within=Reals,bounds=(0,2.4),initialize=0)
m.x104 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x105 = Var(within=Reals,bounds=(0,2.4),initialize=0)
m.x106 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x107 = Var(within=Reals,bounds=(0,2.4),initialize=0)
m.x108 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x109 = Var(within=Reals,bounds=(0,2.4),initialize=0)
m.x110 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x111 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x112 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x113 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x114 = Var(within=Reals,bounds=(0,1.16),initialize=0)
m.x115 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x116 = Var(within=Reals,bounds=(0,1.16),initialize=0)
m.x117 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x118 = Var(within=Reals,bounds=(0,1.16),initialize=0)
m.x119 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x120 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x121 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x122 = Var(within=Reals,bounds=(3.5,3.5),initialize=3.5)
m.x123 = Var(within=Reals,bounds=(2,5),initialize=2)
m.x124 = Var(within=Reals,bounds=(2,5),initialize=2)
m.x125 = Var(within=Reals,bounds=(2,5),initialize=2)
m.x126 = Var(within=Reals,bounds=(2,5),initialize=2)
m.x127 = Var(within=Reals,bounds=(2,5),initialize=2)
m.x128 = Var(within=Reals,bounds=(4.1,4.1),initialize=4.1)
m.x129 = Var(within=Reals,bounds=(2.5,5),initialize=2.5)
m.x130 = Var(within=Reals,bounds=(2.5,5),initialize=2.5)
m.x131 = Var(within=Reals,bounds=(2.5,5),initialize=2.5)
m.x132 = Var(within=Reals,bounds=(2.5,5),initialize=2.5)
m.x133 = Var(within=Reals,bounds=(2.5,5),initialize=2.5)
m.x134 = Var(within=Reals,bounds=(4,4),initialize=4)
m.x135 = Var(within=Reals,bounds=(2,6),initialize=2)
m.x136 = Var(within=Reals,bounds=(2,6),initialize=2)
m.x137 = Var(within=Reals,bounds=(2,6),initialize=2)
m.x138 = Var(within=Reals,bounds=(2,6),initialize=2)
m.x139 = Var(within=Reals,bounds=(2,6),initialize=2)
m.x140 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x141 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x142 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x143 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x144 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x145 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x146 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x147 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x148 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x149 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x150 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x151 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x152 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x153 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x154 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x155 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x156 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x157 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x158 = Var(within=Reals,bounds=(0,0.5),initialize=0)
m.x159 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x160 = Var(within=Reals,bounds=(0,0.5),initialize=0)
m.x161 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x162 = Var(within=Reals,bounds=(0,0.5),initialize=0)
m.x163 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x164 = Var(within=Reals,bounds=(0,0.5),initialize=0)
m.x165 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x166 = Var(within=Reals,bounds=(0,0.5),initialize=0)
m.x167 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x168 = Var(within=Reals,bounds=(0,0.5),initialize=0)
m.x169 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x170 = Var(within=Reals,bounds=(0,0.7),initialize=0)
m.x171 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x172 = Var(within=Reals,bounds=(0,0.7),initialize=0)
m.x173 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x174 = Var(within=Reals,bounds=(0,0.7),initialize=0)
m.x175 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x176 = Var(within=Reals,bounds=(0,0.7),initialize=0)
m.x177 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x178 = Var(within=Reals,bounds=(0,0.7),initialize=0)
m.x179 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x180 = Var(within=Reals,bounds=(0,0.7),initialize=0)
m.x181 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x182 = Var(within=Reals,bounds=(0,0.58),initialize=0)
m.x183 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x184 = Var(within=Reals,bounds=(0,0.58),initialize=0)
m.x185 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x186 = Var(within=Reals,bounds=(0,0.58),initialize=0)
m.x187 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x188 = Var(within=Reals,bounds=(0,0.58),initialize=0)
m.x189 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x190 = Var(within=Reals,bounds=(0,0.58),initialize=0)
m.x191 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x192 = Var(within=Reals,bounds=(0,0.58),initialize=0)
m.x193 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x194 = Var(within=Reals,bounds=(62,65),initialize=62)
m.x195 = Var(within=Reals,bounds=(62,65),initialize=62)
m.x196 = Var(within=Reals,bounds=(62,65),initialize=62)
m.x197 = Var(within=Reals,bounds=(92.5,95),initialize=92.5)
m.x198 = Var(within=Reals,bounds=(92.5,95),initialize=92.5)
m.x199 = Var(within=Reals,bounds=(92.5,95),initialize=92.5)
m.x200 = Var(within=Reals,bounds=(105,109),initialize=105)
m.x201 = Var(within=Reals,bounds=(105,109),initialize=105)
m.x202 = Var(within=Reals,bounds=(105,109),initialize=105)
m.x203 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x204 = Var(within=Reals,bounds=(-125,125),initialize=0)
m.x205 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x206 = Var(within=Reals,bounds=(-125,125),initialize=0)
m.x207 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x208 = Var(within=Reals,bounds=(-125,125),initialize=0)
m.x209 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x210 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x211 = Var(within=Reals,bounds=(-100,100),initialize=0)
m.x212 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x213 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x214 = Var(within=Reals,bounds=(-100,100),initialize=0)
m.x215 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x216 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x217 = Var(within=Reals,bounds=(-100,100),initialize=0)
m.x218 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x219 = Var(within=Reals,bounds=(-125,125),initialize=0)
m.x220 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x221 = Var(within=Reals,bounds=(-125,125),initialize=0)
m.x222 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x223 = Var(within=Reals,bounds=(-125,125),initialize=0)
m.x224 = Var(within=Reals,bounds=(49,49),initialize=49)
m.x225 = Var(within=Reals,bounds=(-49,1000),initialize=0)
m.x226 = Var(within=Reals,bounds=(49,49),initialize=49)
m.x227 = Var(within=Reals,bounds=(-49,1000),initialize=0)
m.x228 = Var(within=Reals,bounds=(49,49),initialize=49)
m.x229 = Var(within=Reals,bounds=(-49,1000),initialize=0)
m.x230 = Var(within=Reals,bounds=(-65,1000),initialize=0)
m.x231 = Var(within=Reals,bounds=(-65,1000),initialize=0)
m.x232 = Var(within=Reals,bounds=(-65,1000),initialize=0)
m.x233 = Var(within=Reals,bounds=(-95,1000),initialize=0)
m.x234 = Var(within=Reals,bounds=(-95,1000),initialize=0)
m.x235 = Var(within=Reals,bounds=(-95,1000),initialize=0)
m.x236 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x237 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x238 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x239 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x240 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x241 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x242 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x243 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x244 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x245 = Var(within=Reals,bounds=(0.25,0.5),initialize=0.25)
m.x246 = Var(within=Reals,bounds=(0.25,0.5),initialize=0.25)
m.x247 = Var(within=Reals,bounds=(0.25,0.5),initialize=0.25)
m.x248 = Var(within=Reals,bounds=(0.25,0.5),initialize=0.25)
m.x249 = Var(within=Reals,bounds=(0.25,0.5),initialize=0.25)
m.x250 = Var(within=Reals,bounds=(0.25,0.5),initialize=0.25)
m.x251 = Var(within=Reals,bounds=(0.4,0.7),initialize=0.4)
m.x252 = Var(within=Reals,bounds=(0.4,0.7),initialize=0.4)
m.x253 = Var(within=Reals,bounds=(0.4,0.7),initialize=0.4)
m.x254 = Var(within=Reals,bounds=(0.4,0.7),initialize=0.4)
m.x255 = Var(within=Reals,bounds=(0.4,0.7),initialize=0.4)
m.x256 = Var(within=Reals,bounds=(0.4,0.7),initialize=0.4)
m.x257 = Var(within=Reals,bounds=(0.24,0.58),initialize=0.24)
m.x258 = Var(within=Reals,bounds=(0.24,0.58),initialize=0.24)
m.x259 = Var(within=Reals,bounds=(0.24,0.58),initialize=0.24)
m.x260 = Var(within=Reals,bounds=(0.24,0.58),initialize=0.24)
m.x261 = Var(within=Reals,bounds=(0.24,0.58),initialize=0.24)
m.x262 = Var(within=Reals,bounds=(0.24,0.58),initialize=0.24)
m.x263 = Var(within=Reals,bounds=(0.6,1),initialize=0.6)
m.x264 = Var(within=Reals,bounds=(0.6,1),initialize=0.6)
m.x265 = Var(within=Reals,bounds=(0.6,1),initialize=0.6)
m.x266 = Var(within=Reals,bounds=(0.8,1),initialize=0.8)
m.x267 = Var(within=Reals,bounds=(0.8,1),initialize=0.8)
m.x268 = Var(within=Reals,bounds=(0.8,1),initialize=0.8)
m.x269 = Var(within=Reals,bounds=(0.85,1),initialize=0.85)
m.x270 = Var(within=Reals,bounds=(0.85,1),initialize=0.85)
m.x271 = Var(within=Reals,bounds=(0.85,1),initialize=0.85)
m.x272 = Var(within=Reals,bounds=(0.7,1),initialize=0.7)
m.x273 = Var(within=Reals,bounds=(0.7,1),initialize=0.7)
m.x274 = Var(within=Reals,bounds=(0.7,1),initialize=0.7)
m.x275 = Var(within=Reals,bounds=(100,1000),initialize=100)
m.x276 = Var(within=Reals,bounds=(100,1000),initialize=100)
m.x277 = Var(within=Reals,bounds=(100,1000),initialize=100)
m.x278 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0)
m.x279 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x280 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x281 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x282 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x283 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0)
m.x284 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x285 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x286 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x287 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x288 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0)
m.x289 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x290 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x291 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x292 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x293 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0)
m.x294 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x295 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x296 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x297 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x298 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0)
m.x299 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x300 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x301 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x302 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x303 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0)
m.x304 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x305 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x306 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x307 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x308 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0)
m.x309 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x310 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x311 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x312 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x313 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0)
m.x314 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x315 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x316 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x317 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x318 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0)
m.x319 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x320 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x321 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x322 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x323 = Var(within=Reals,bounds=(0,93.045051789432),initialize=0)
m.x324 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x325 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x326 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x327 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x328 = Var(within=Reals,bounds=(0,93.045051789432),initialize=0)
m.x329 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x330 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x331 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x332 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x333 = Var(within=Reals,bounds=(0,93.045051789432),initialize=0)
m.x334 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x335 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x336 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x337 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x338 = Var(within=Reals,bounds=(0,93.045051789432),initialize=0)
m.x339 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x340 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x341 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x342 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x343 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x344 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x345 = Var(within=Reals,bounds=(0,93.045051789432),initialize=0)
m.x346 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x347 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x348 = Var(within=Reals,bounds=(0,93.045051789432),initialize=0)
m.x349 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x350 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x351 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x352 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x353 = Var(within=Reals,bounds=(0,112.384987749469),initialize=0)
m.x354 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x355 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x356 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x357 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x358 = Var(within=Reals,bounds=(0,112.384987749469),initialize=0)
m.x359 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x360 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x361 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x362 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x363 = Var(within=Reals,bounds=(0,112.384987749469),initialize=0)
m.x364 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x365 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x366 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x367 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x368 = Var(within=Reals,bounds=(0,112.384987749469),initialize=0)
m.x369 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x370 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x371 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x372 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x373 = Var(within=Reals,bounds=(0,112.384987749469),initialize=0)
m.x374 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x375 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x376 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x377 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x378 = Var(within=Reals,bounds=(0,112.384987749469),initialize=0)
m.x379 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x380 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x381 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x382 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x383 = Var(within=Reals,bounds=(0,42.066542469172),initialize=0)
m.x384 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x385 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x386 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x387 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x388 = Var(within=Reals,bounds=(0,42.066542469172),initialize=0)
m.x389 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x390 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x391 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x392 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x393 = Var(within=Reals,bounds=(0,42.066542469172),initialize=0)
m.x394 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x395 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x396 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x397 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x398 = Var(within=Reals,bounds=(0,42.066542469172),initialize=0)
m.x399 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x400 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x401 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x402 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x403 = Var(within=Reals,bounds=(0,42.066542469172),initialize=0)
m.x404 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x405 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x406 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x407 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x408 = Var(within=Reals,bounds=(0,42.066542469172),initialize=0)
m.x409 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x410 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x411 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x412 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x413 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x414 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x415 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x416 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x417 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x418 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x419 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x420 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x421 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x422 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x423 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x424 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x425 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x426 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x427 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x428 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x429 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x430 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x431 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x432 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x433 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x434 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x435 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x436 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x437 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x438 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x439 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x440 = Var(within=Reals,bounds=(0,0.25),initialize=0)
m.x441 = Var(within=Reals,bounds=(0,0.125),initialize=0)
m.x442 = Var(within=Reals,bounds=(0,0.25),initialize=0)
m.x443 = Var(within=Reals,bounds=(0,0.125),initialize=0)
m.x444 = Var(within=Reals,bounds=(0,0.25),initialize=0)
m.x445 = Var(within=Reals,bounds=(0,0.125),initialize=0)
m.x446 = Var(within=Reals,bounds=(0,0.25),initialize=0)
m.x447 = Var(within=Reals,bounds=(0,0.125),initialize=0)
m.x448 = Var(within=Reals,bounds=(0,0.25),initialize=0)
m.x449 = Var(within=Reals,bounds=(0,0.125),initialize=0)
m.x450 = Var(within=Reals,bounds=(0,0.25),initialize=0)
m.x451 = Var(within=Reals,bounds=(0,0.125),initialize=0)
m.x452 = Var(within=Reals,bounds=(0,0.49),initialize=0)
m.x453 = Var(within=Reals,bounds=(0,0.343),initialize=0)
m.x454 = Var(within=Reals,bounds=(0,0.49),initialize=0)
m.x455 = Var(within=Reals,bounds=(0,0.343),initialize=0)
m.x456 = Var(within=Reals,bounds=(0,0.49),initialize=0)
m.x457 = Var(within=Reals,bounds=(0,0.343),initialize=0)
m.x458 = Var(within=Reals,bounds=(0,0.49),initialize=0)
m.x459 = Var(within=Reals,bounds=(0,0.343),initialize=0)
m.x460 = Var(within=Reals,bounds=(0,0.49),initialize=0)
m.x461 = Var(within=Reals,bounds=(0,0.343),initialize=0)
m.x462 = Var(within=Reals,bounds=(0,0.49),initialize=0)
m.x463 = Var(within=Reals,bounds=(0,0.343),initialize=0)
m.x464 = Var(within=Reals,bounds=(0,0.3364),initialize=0)
m.x465 = Var(within=Reals,bounds=(0,0.195112),initialize=0)
m.x466 = Var(within=Reals,bounds=(0,0.3364),initialize=0)
m.x467 = Var(within=Reals,bounds=(0,0.195112),initialize=0)
m.x468 = Var(within=Reals,bounds=(0,0.3364),initialize=0)
m.x469 = Var(within=Reals,bounds=(0,0.195112),initialize=0)
m.x470 = Var(within=Reals,bounds=(0,0.3364),initialize=0)
m.x471 = Var(within=Reals,bounds=(0,0.195112),initialize=0)
m.x472 = Var(within=Reals,bounds=(0,0.3364),initialize=0)
m.x473 = Var(within=Reals,bounds=(0,0.195112),initialize=0)
m.x474 = Var(within=Reals,bounds=(0,0.3364),initialize=0)
m.x475 = Var(within=Reals,bounds=(0,0.195112),initialize=0)
m.x476 = Var(within=Reals,bounds=(0.36,1),initialize=0.36)
m.x477 = Var(within=Reals,bounds=(0.216,1),initialize=0.216)
m.x478 = Var(within=Reals,bounds=(0.36,1),initialize=0.36)
m.x479 = Var(within=Reals,bounds=(0.216,1),initialize=0.216)
m.x480 = Var(within=Reals,bounds=(0.36,1),initialize=0.36)
m.x481 = Var(within=Reals,bounds=(0.216,1),initialize=0.216)
m.x482 = Var(within=Reals,bounds=(0.64,1),initialize=0.64)
m.x483 = Var(within=Reals,bounds=(0.512,1),initialize=0.512)
m.x484 = Var(within=Reals,bounds=(0.64,1),initialize=0.64)
m.x485 = Var(within=Reals,bounds=(0.512,1),initialize=0.512)
m.x486 = Var(within=Reals,bounds=(0.64,1),initialize=0.64)
m.x487 = Var(within=Reals,bounds=(0.512,1),initialize=0.512)
m.x488 = Var(within=Reals,bounds=(0.7225,1),initialize=0.7225)
m.x489 = Var(within=Reals,bounds=(0.614125,1),initialize=0.614125)
m.x490 = Var(within=Reals,bounds=(0.7225,1),initialize=0.7225)
m.x491 = Var(within=Reals,bounds=(0.614125,1),initialize=0.614125)
m.x492 = Var(within=Reals,bounds=(0.7225,1),initialize=0.7225)
m.x493 = Var(within=Reals,bounds=(0.614125,1),initialize=0.614125)
m.x494 = Var(within=Reals,bounds=(0.49,1),initialize=0.49)
m.x495 = Var(within=Reals,bounds=(0.343,1),initialize=0.343)
m.x496 = Var(within=Reals,bounds=(0.49,1),initialize=0.49)
m.x497 = Var(within=Reals,bounds=(0.343,1),initialize=0.343)
m.x498 = Var(within=Reals,bounds=(0.49,1),initialize=0.49)
m.x499 = Var(within=Reals,bounds=(0.343,1),initialize=0.343)
m.obj = Objective(expr= m.x278 + m.x283 + m.x288 + m.x293 + m.x298 + m.x303 + m.x308 + m.x313 + m.x318 + m.x323
+ m.x328 + m.x333 + m.x338 + m.x345 + m.x348 + m.x353 + m.x358 + m.x363 + m.x368 + m.x373
+ m.x378 + m.x383 + m.x388 + m.x393 + m.x398 + m.x403 + m.x408, sense=minimize)
m.c2 = Constraint(expr= m.x141 + 27.42831624*m.x143 + 37.5407324*m.x145 - 57.2814121*m.x147 == 0)
m.c3 = Constraint(expr= m.x149 + 27.42831624*m.x151 - 57.2814121*m.x153 + 37.5407324*m.x155 == 0)
m.c4 = Constraint(expr= m.x157 + 27.42831624*m.x159 - 57.2814121*m.x161 + 37.5407324*m.x163 == 0)
m.c5 = Constraint(expr= - 57.2814121*m.x147 + m.x165 + 27.42831624*m.x167 + 37.5407324*m.x169 == 0)
m.c6 = Constraint(expr= - 57.2814121*m.x153 + m.x171 + 37.5407324*m.x173 + 27.42831624*m.x175 == 0)
m.c7 = Constraint(expr= - 57.2814121*m.x161 + m.x177 + 37.5407324*m.x179 + 27.42831624*m.x181 == 0)
m.c8 = Constraint(expr= - 57.2814121*m.x147 + m.x183 + 37.5407324*m.x185 + 27.42831624*m.x187 == 0)
m.c9 = Constraint(expr= - 57.2814121*m.x153 + m.x189 + 27.42831624*m.x191 + 37.5407324*m.x193 == 0)
m.c10 = Constraint(expr= m.x29 + 27.42831624*m.x30 + 37.5407324*m.x31 - 57.2814121*m.x161 == 0)
m.c11 = Constraint(expr= m.x32 - 76.45219958*m.x33 + 43.14087708*m.x34 + 50.37356589*m.x35 == 0)
m.c12 = Constraint(expr= m.x36 + 50.37356589*m.x37 - 76.45219958*m.x38 + 43.14087708*m.x39 == 0)
m.c13 = Constraint(expr= m.x40 + 43.14087708*m.x41 + 50.37356589*m.x42 - 76.45219958*m.x43 == 0)
m.c14 = Constraint(expr= - 76.45219958*m.x33 + m.x44 + 43.14087708*m.x45 + 50.37356589*m.x46 == 0)
m.c15 = Constraint(expr= - 76.45219958*m.x38 + m.x47 + 50.37356589*m.x48 + 43.14087708*m.x49 == 0)
m.c16 = Constraint(expr= - 76.45219958*m.x43 + m.x50 + 43.14087708*m.x51 + 50.37356589*m.x52 == 0)
m.c17 = Constraint(expr= m.x53 + 58.31011875*m.x54 - 69.39622571*m.x55 - 25.39911174*m.x56 == 0)
m.c18 = Constraint(expr= m.x57 - 25.39911174*m.x58 + 58.31011875*m.x59 - 69.39622571*m.x60 == 0)
m.c19 = Constraint(expr= m.x61 - 69.39622571*m.x62 + 58.31011875*m.x63 - 25.39911174*m.x64 == 0)
m.c20 = Constraint(expr= - 69.39622571*m.x55 + m.x65 + 58.31011875*m.x66 - 25.39911174*m.x67 == 0)
m.c21 = Constraint(expr= - 69.39622571*m.x60 + m.x68 - 25.39911174*m.x69 + 58.31011875*m.x70 == 0)
m.c22 = Constraint(expr= - 69.39622571*m.x62 + m.x71 + 58.31011875*m.x72 - 25.39911174*m.x73 == 0)
m.c23 = Constraint(expr= m.x74 - 2.03724124*m.x75 + 63.61644904*m.x76 - 34.92732674*m.x77 == 0)
m.c24 = Constraint(expr= m.x78 - 2.03724124*m.x79 - 34.92732674*m.x80 + 63.61644904*m.x81 == 0)
m.c25 = Constraint(expr= m.x82 - 2.03724124*m.x83 - 34.92732674*m.x84 + 63.61644904*m.x85 == 0)
m.c26 = Constraint(expr= - 34.92732674*m.x77 + m.x86 + 63.61644904*m.x87 - 2.03724124*m.x88 == 0)
m.c27 = Constraint(expr= - 34.92732674*m.x80 + m.x89 + 63.61644904*m.x90 - 2.03724124*m.x91 == 0)
m.c28 = Constraint(expr= - 34.92732674*m.x84 + m.x92 - 2.03724124*m.x93 + 63.61644904*m.x94 == 0)
m.c29 = Constraint(expr= m.x95 + m.x96 + m.x97 >= 0.875)
m.c30 = Constraint(expr= - m.x98 + m.x99 == 0)
m.c31 = Constraint(expr= - m.x100 + m.x101 == 0)
m.c32 = Constraint(expr= - m.x102 + m.x103 == 0)
m.c33 = Constraint(expr= - m.x104 + m.x105 == 0)
m.c34 = Constraint(expr= - m.x106 + m.x107 == 0)
m.c35 = Constraint(expr= - m.x108 + m.x109 == 0)
m.c36 = Constraint(expr= m.x104 - m.x110 == 0)
m.c37 = Constraint(expr= m.x106 - m.x111 == 0)
m.c38 = Constraint(expr= m.x108 - m.x112 == 0)
m.c39 = Constraint(expr= - m.x113 + m.x114 == 0)
m.c40 = Constraint(expr= - m.x115 + m.x116 == 0)
m.c41 = Constraint(expr= - m.x117 + m.x118 == 0)
m.c42 = Constraint(expr= m.x119 == 0.296666667)
m.c43 = Constraint(expr= m.x120 == 0.294444444)
m.c44 = Constraint(expr= m.x121 == 0.283888889)
m.c45 = Constraint(expr= m.x95 - m.x99 == 0)
m.c46 = Constraint(expr= m.x96 - m.x101 == 0)
m.c47 = Constraint(expr= m.x97 - m.x103 == 0)
m.c48 = Constraint(expr= 3600*m.x98 - 3600*m.x105 + 1800*m.x122 - 1800*m.x123 == 0)
m.c49 = Constraint(expr= 3600*m.x100 - 3600*m.x107 + 1800*m.x124 - 1800*m.x125 == 0)
m.c50 = Constraint(expr= 3600*m.x102 - 3600*m.x109 + 1800*m.x126 - 1800*m.x127 == 0)
m.c51 = Constraint(expr= 3600*m.x110 - 3600*m.x114 + 720*m.x128 - 720*m.x129 == 0)
m.c52 = Constraint(expr= 3600*m.x111 - 3600*m.x116 + 720*m.x130 - 720*m.x131 == 0)
m.c53 = Constraint(expr= 3600*m.x112 - 3600*m.x118 + 720*m.x132 - 720*m.x133 == 0)
m.c54 = Constraint(expr= 3600*m.x113 - 3600*m.x119 + 1600*m.x134 - 1600*m.x135 == 0)
m.c55 = Constraint(expr= 3600*m.x115 - 3600*m.x120 + 1600*m.x136 - 1600*m.x137 == 0)
m.c56 = Constraint(expr= 3600*m.x117 - 3600*m.x121 + 1600*m.x138 - 1600*m.x139 == 0)
m.c57 = Constraint(expr= - m.x123 + m.x124 == 0)
m.c58 = Constraint(expr= - m.x125 + m.x126 == 0)
m.c59 = Constraint(expr= - m.x129 + m.x130 == 0)
m.c60 = Constraint(expr= - m.x131 + m.x132 == 0)
m.c61 = Constraint(expr= - m.x135 + m.x136 == 0)
m.c62 = Constraint(expr= - m.x137 + m.x138 == 0)
m.c63 = Constraint(expr= - 0.2*m.b2 + m.x140 >= 0)
m.c64 = Constraint(expr= - 0.2*m.b3 + m.x142 >= 0)
m.c65 = Constraint(expr= - 0.2*m.b4 + m.x144 >= 0)
m.c66 = Constraint(expr= - 0.2*m.b5 + m.x146 >= 0)
m.c67 = Constraint(expr= - 0.2*m.b6 + m.x148 >= 0)
m.c68 = Constraint(expr= - 0.2*m.b7 + m.x150 >= 0)
m.c69 = Constraint(expr= - 0.2*m.b8 + m.x152 >= 0)
m.c70 = Constraint(expr= - 0.2*m.b9 + m.x154 >= 0)
m.c71 = Constraint(expr= - 0.2*m.b10 + m.x156 >= 0)
m.c72 = Constraint(expr= - 0.25*m.b11 + m.x158 >= 0)
m.c73 = Constraint(expr= - 0.25*m.b12 + m.x160 >= 0)
m.c74 = Constraint(expr= - 0.25*m.b13 + m.x162 >= 0)
m.c75 = Constraint(expr= - 0.25*m.b14 + m.x164 >= 0)
m.c76 = Constraint(expr= - 0.25*m.b15 + m.x166 >= 0)
m.c77 = Constraint(expr= - 0.25*m.b16 + m.x168 >= 0)
m.c78 = Constraint(expr= - 0.4*m.b17 + m.x170 >= 0)
m.c79 = Constraint(expr= - 0.4*m.b18 + m.x172 >= 0)
m.c80 = Constraint(expr= - 0.4*m.b19 + m.x174 >= 0)
m.c81 = Constraint(expr= - 0.4*m.b20 + m.x176 >= 0)
m.c82 = Constraint(expr= - 0.4*m.b21 + m.x178 >= 0)
m.c83 = Constraint(expr= - 0.4*m.b22 + m.x180 >= 0)
m.c84 = Constraint(expr= - 0.24*m.b23 + m.x182 >= 0)
m.c85 = Constraint(expr= - 0.24*m.b24 + m.x184 >= 0)
m.c86 = Constraint(expr= - 0.24*m.b25 + m.x186 >= 0)
m.c87 = Constraint(expr= - 0.24*m.b26 + m.x188 >= 0)
m.c88 = Constraint(expr= - 0.24*m.b27 + m.x190 >= 0)
m.c89 = Constraint(expr= - 0.24*m.b28 + m.x192 >= 0)
m.c90 = Constraint(expr= - 0.8*m.b2 + m.x140 <= 0)
m.c91 = Constraint(expr= - 0.8*m.b3 + m.x142 <= 0)
m.c92 = Constraint(expr= - 0.8*m.b4 + m.x144 <= 0)
m.c93 = Constraint(expr= - 0.8*m.b5 + m.x146 <= 0)
m.c94 = Constraint(expr= - 0.8*m.b6 + m.x148 <= 0)
m.c95 = Constraint(expr= - 0.8*m.b7 + m.x150 <= 0)
m.c96 = Constraint(expr= - 0.8*m.b8 + m.x152 <= 0)
m.c97 = Constraint(expr= - 0.8*m.b9 + m.x154 <= 0)
m.c98 = Constraint(expr= - 0.8*m.b10 + m.x156 <= 0)
m.c99 = Constraint(expr= - 0.5*m.b11 + m.x158 <= 0)
m.c100 = Constraint(expr= - 0.5*m.b12 + m.x160 <= 0)
m.c101 = Constraint(expr= - 0.5*m.b13 + m.x162 <= 0)
m.c102 = Constraint(expr= - 0.5*m.b14 + m.x164 <= 0)
m.c103 = Constraint(expr= - 0.5*m.b15 + m.x166 <= 0)
m.c104 = Constraint(expr= - 0.5*m.b16 + m.x168 <= 0)
m.c105 = Constraint(expr= - 0.7*m.b17 + m.x170 <= 0)
m.c106 = Constraint(expr= - 0.7*m.b18 + m.x172 <= 0)
m.c107 = Constraint(expr= - 0.7*m.b19 + m.x174 <= 0)
m.c108 = Constraint(expr= - 0.7*m.b20 + m.x176 <= 0)
m.c109 = Constraint(expr= - 0.7*m.b21 + m.x178 <= 0)
m.c110 = Constraint(expr= - 0.7*m.b22 + m.x180 <= 0)
m.c111 = Constraint(expr= - 0.58*m.b23 + m.x182 <= 0)
m.c112 = Constraint(expr= - 0.58*m.b24 + m.x184 <= 0)
m.c113 = Constraint(expr= - 0.58*m.b25 + m.x186 <= 0)
m.c114 = Constraint(expr= - 0.58*m.b26 + m.x188 <= 0)
m.c115 = Constraint(expr= - 0.58*m.b27 + m.x190 <= 0)
m.c116 = Constraint(expr= - 0.58*m.b28 + m.x192 <= 0)
m.c117 = Constraint(expr= - m.x122 + m.x194 == 60)
m.c118 = Constraint(expr= - m.x124 + m.x195 == 60)
m.c119 = Constraint(expr= - m.x126 + m.x196 == 60)
m.c120 = Constraint(expr= - m.x128 + m.x197 == 90)
m.c121 = Constraint(expr= - m.x130 + m.x198 == 90)
m.c122 = Constraint(expr= - m.x132 + m.x199 == 90)
m.c123 = Constraint(expr= - m.x134 + m.x200 == 103)
m.c124 = Constraint(expr= - m.x136 + m.x201 == 103)
m.c125 = Constraint(expr= - m.x138 + m.x202 == 103)
m.c126 = Constraint(expr= - m.x194 + m.x203 - m.x204 == 0)
m.c127 = Constraint(expr= - m.x195 + m.x205 - m.x206 == 0)
m.c128 = Constraint(expr= - m.x196 + m.x207 - m.x208 == 0)
m.c129 = Constraint(expr= m.x209 - m.x210 - m.x211 == 0)
m.c130 = Constraint(expr= m.x212 - m.x213 - m.x214 == 0)
m.c131 = Constraint(expr= m.x215 - m.x216 - m.x217 == 0)
m.c132 = Constraint(expr= - m.x200 + m.x218 - m.x219 == 0)
m.c133 = Constraint(expr= - m.x201 + m.x220 - m.x221 == 0)
m.c134 = Constraint(expr= - m.x202 + m.x222 - m.x223 == 0)
m.c135 = Constraint(expr= m.x203 - m.x224 - m.x225 == 0)
m.c136 = Constraint(expr= m.x205 - m.x226 - m.x227 == 0)
m.c137 = Constraint(expr= m.x207 - m.x228 - m.x229 == 0)
m.c138 = Constraint(expr= - m.x194 + m.x209 - m.x230 == 0)
m.c139 = Constraint(expr= - m.x195 + m.x212 - m.x231 == 0)
m.c140 = Constraint(expr= - m.x196 + m.x215 - m.x232 == 0)
m.c141 = Constraint(expr= - m.x197 + m.x218 - m.x233 == 0)
m.c142 = Constraint(expr= - m.x198 + m.x220 - m.x234 == 0)
m.c143 = Constraint(expr= - m.x199 + m.x222 - m.x235 == 0)
m.c144 = Constraint(expr= 0.2*m.b2 - m.x140 + m.x236 <= 0.2)
m.c145 = Constraint(expr= 0.2*m.b3 - m.x142 + m.x237 <= 0.2)
m.c146 = Constraint(expr= 0.2*m.b4 - m.x144 + m.x238 <= 0.2)
m.c147 = Constraint(expr= 0.2*m.b5 - m.x146 + m.x239 <= 0.2)
m.c148 = Constraint(expr= 0.2*m.b6 - m.x148 + m.x240 <= 0.2)
m.c149 = Constraint(expr= 0.2*m.b7 - m.x150 + m.x241 <= 0.2)
m.c150 = Constraint(expr= 0.2*m.b8 - m.x152 + m.x242 <= 0.2)
m.c151 = Constraint(expr= 0.2*m.b9 - m.x154 + m.x243 <= 0.2)
m.c152 = Constraint(expr= 0.2*m.b10 - m.x156 + m.x244 <= 0.2)
m.c153 = Constraint(expr= 0.25*m.b11 - m.x158 + m.x245 <= 0.25)
m.c154 = Constraint(expr= 0.25*m.b12 - m.x160 + m.x246 <= 0.25)
m.c155 = Constraint(expr= 0.25*m.b13 - m.x162 + m.x247 <= 0.25)
m.c156 = Constraint(expr= 0.25*m.b14 - m.x164 + m.x248 <= 0.25)
m.c157 = Constraint(expr= 0.25*m.b15 - m.x166 + m.x249 <= 0.25)
m.c158 = Constraint(expr= 0.25*m.b16 - m.x168 + m.x250 <= 0.25)
m.c159 = Constraint(expr= 0.4*m.b17 - m.x170 + m.x251 <= 0.4)
m.c160 = Constraint(expr= 0.4*m.b18 - m.x172 + m.x252 <= 0.4)
m.c161 = Constraint(expr= 0.4*m.b19 - m.x174 + m.x253 <= 0.4)
m.c162 = Constraint(expr= 0.4*m.b20 - m.x176 + m.x254 <= 0.4)
m.c163 = Constraint(expr= 0.4*m.b21 - m.x178 + m.x255 <= 0.4)
m.c164 = Constraint(expr= 0.4*m.b22 - m.x180 + m.x256 <= 0.4)
m.c165 = Constraint(expr= 0.24*m.b23 - m.x182 + m.x257 <= 0.24)
m.c166 = Constraint(expr= 0.24*m.b24 - m.x184 + m.x258 <= 0.24)
m.c167 = Constraint(expr= 0.24*m.b25 - m.x186 + m.x259 <= 0.24)
m.c168 = Constraint(expr= 0.24*m.b26 - m.x188 + m.x260 <= 0.24)
m.c169 = Constraint(expr= 0.24*m.b27 - m.x190 + m.x261 <= 0.24)
m.c170 = Constraint(expr= 0.24*m.b28 - m.x192 + m.x262 <= 0.24)
m.c171 = Constraint(expr= - m.x140 + m.x236 >= 0)
m.c172 = Constraint(expr= - m.x142 + m.x237 >= 0)
m.c173 = Constraint(expr= - m.x144 + m.x238 >= 0)
m.c174 = Constraint(expr= - m.x146 + m.x239 >= 0)
m.c175 = Constraint(expr= - m.x148 + m.x240 >= 0)
m.c176 = Constraint(expr= - m.x150 + m.x241 >= 0)
m.c177 = Constraint(expr= - m.x152 + m.x242 >= 0)
m.c178 = Constraint(expr= - m.x154 + m.x243 >= 0)
m.c179 = Constraint(expr= - m.x156 + m.x244 >= 0)
m.c180 = Constraint(expr= - m.x158 + m.x245 >= 0)
m.c181 = Constraint(expr= - m.x160 + m.x246 >= 0)
m.c182 = Constraint(expr= - m.x162 + m.x247 >= 0)
m.c183 = Constraint(expr= - m.x164 + m.x248 >= 0)
m.c184 = Constraint(expr= - m.x166 + m.x249 >= 0)
m.c185 = Constraint(expr= - m.x168 + m.x250 >= 0)
m.c186 = Constraint(expr= - m.x170 + m.x251 >= 0)
m.c187 = Constraint(expr= - m.x172 + m.x252 >= 0)
m.c188 = Constraint(expr= - m.x174 + m.x253 >= 0)
m.c189 = Constraint(expr= - m.x176 + m.x254 >= 0)
m.c190 = Constraint(expr= - m.x178 + m.x255 >= 0)
m.c191 = Constraint(expr= - m.x180 + m.x256 >= 0)
m.c192 = Constraint(expr= - m.x182 + m.x257 >= 0)
m.c193 = Constraint(expr= - m.x184 + m.x258 >= 0)
m.c194 = Constraint(expr= - m.x186 + m.x259 >= 0)
m.c195 = Constraint(expr= - m.x188 + m.x260 >= 0)
m.c196 = Constraint(expr= - m.x190 + m.x261 >= 0)
m.c197 = Constraint(expr= - m.x192 + m.x262 >= 0)
m.c198 = Constraint(expr= - 0.6*m.b2 + m.x236 <= 0.2)
m.c199 = Constraint(expr= - 0.6*m.b3 + m.x237 <= 0.2)
m.c200 = Constraint(expr= - 0.6*m.b4 + m.x238 <= 0.2)
m.c201 = Constraint(expr= - 0.6*m.b5 + m.x239 <= 0.2)
m.c202 = Constraint(expr= - 0.6*m.b6 + m.x240 <= 0.2)
m.c203 = Constraint(expr= - 0.6*m.b7 + m.x241 <= 0.2)
m.c204 = Constraint(expr= - 0.6*m.b8 + m.x242 <= 0.2)
m.c205 = Constraint(expr= - 0.6*m.b9 + m.x243 <= 0.2)
m.c206 = Constraint(expr= - 0.6*m.b10 + m.x244 <= 0.2)
m.c207 = Constraint(expr= - 0.25*m.b11 + m.x245 <= 0.25)
m.c208 = Constraint(expr= - 0.25*m.b12 + m.x246 <= 0.25)
m.c209 = Constraint(expr= - 0.25*m.b13 + m.x247 <= 0.25)
m.c210 = Constraint(expr= - 0.25*m.b14 + m.x248 <= 0.25)
m.c211 = Constraint(expr= - 0.25*m.b15 + m.x249 <= 0.25)
m.c212 = Constraint(expr= - 0.25*m.b16 + m.x250 <= 0.25)
m.c213 = Constraint(expr= - 0.3*m.b17 + m.x251 <= 0.4)
m.c214 = Constraint(expr= - 0.3*m.b18 + m.x252 <= 0.4)
m.c215 = Constraint(expr= - 0.3*m.b19 + m.x253 <= 0.4)
m.c216 = Constraint(expr= - 0.3*m.b20 + m.x254 <= 0.4)
m.c217 = Constraint(expr= - 0.3*m.b21 + m.x255 <= 0.4)
m.c218 = Constraint(expr= - 0.3*m.b22 + m.x256 <= 0.4)
m.c219 = Constraint(expr= - 0.34*m.b23 + m.x257 <= 0.24)
m.c220 = Constraint(expr= - 0.34*m.b24 + m.x258 <= 0.24)
m.c221 = Constraint(expr= - 0.34*m.b25 + m.x259 <= 0.24)
m.c222 = Constraint(expr= - 0.34*m.b26 + m.x260 <= 0.24)
m.c223 = Constraint(expr= - 0.34*m.b27 + m.x261 <= 0.24)
m.c224 = Constraint(expr= - 0.34*m.b28 + m.x262 <= 0.24)
m.c225 = Constraint(expr= - 0.4*m.b2 + m.x263 <= 0.6)
m.c226 = Constraint(expr= - 0.4*m.b3 + m.x264 <= 0.6)
m.c227 = Constraint(expr= - 0.4*m.b4 + m.x265 <= 0.6)
m.c228 = Constraint(expr= - 0.2*m.b11 + m.x266 <= 0.8)
m.c229 = Constraint(expr= - 0.2*m.b12 + m.x267 <= 0.8)
m.c230 = Constraint(expr= - 0.2*m.b13 + m.x268 <= 0.8)
m.c231 = Constraint(expr= - 0.15*m.b17 + m.x269 <= 0.85)
m.c232 = Constraint(expr= - 0.15*m.b18 + m.x270 <= 0.85)
m.c233 = Constraint(expr= - 0.15*m.b19 + m.x271 <= 0.85)
m.c234 = Constraint(expr= - 0.3*m.b23 + m.x272 <= 0.7)
m.c235 = Constraint(expr= - 0.3*m.b24 + m.x273 <= 0.7)
m.c236 = Constraint(expr= - 0.3*m.b25 + m.x274 <= 0.7)
m.c237 = Constraint(expr= m.b2 - m.b5 >= 0)
m.c238 = Constraint(expr= m.b3 - m.b6 >= 0)
m.c239 = Constraint(expr= m.b4 - m.b7 >= 0)
m.c240 = Constraint(expr= m.b5 - m.b8 >= 0)
m.c241 = Constraint(expr= m.b6 - m.b9 >= 0)
m.c242 = Constraint(expr= m.b7 - m.b10 >= 0)
m.c243 = Constraint(expr= m.b11 - m.b14 >= 0)
m.c244 = Constraint(expr= m.b12 - m.b15 >= 0)
m.c245 = Constraint(expr= m.b13 - m.b16 >= 0)
m.c246 = Constraint(expr= m.b17 - m.b20 >= 0)
m.c247 = Constraint(expr= m.b18 - m.b21 >= 0)
m.c248 = Constraint(expr= m.b19 - m.b22 >= 0)
m.c249 = Constraint(expr= m.b23 - m.b26 >= 0)
m.c250 = Constraint(expr= m.b24 - m.b27 >= 0)
m.c251 = Constraint(expr= m.b25 - m.b28 >= 0)
m.c252 = Constraint(expr= m.x99 - m.x140 - m.x146 - m.x152 == 0)
m.c253 = Constraint(expr= m.x101 - m.x142 - m.x148 - m.x154 == 0)
m.c254 = Constraint(expr= m.x103 - m.x144 - m.x150 - m.x156 == 0)
m.c255 = Constraint(expr= m.x105 - m.x158 - m.x164 - m.x170 - m.x176 == 0)
m.c256 = Constraint(expr= m.x107 - m.x160 - m.x166 - m.x172 - m.x178 == 0)
m.c257 = Constraint(expr= m.x109 - m.x162 - m.x168 - m.x174 - m.x180 == 0)
m.c258 = Constraint(expr= m.x114 - m.x182 - m.x188 == 0)
m.c259 = Constraint(expr= m.x116 - m.x184 - m.x190 == 0)
m.c260 = Constraint(expr= m.x118 - m.x186 - m.x192 == 0)
m.c261 = Constraint(expr= - 2000*m.b2 + m.x141 - m.x225 >= -2000)
m.c262 = Constraint(expr= - 2000*m.b3 + m.x149 - m.x227 >= -2000)
m.c263 = Constraint(expr= - 2000*m.b4 + m.x157 - m.x229 >= -2000)
m.c264 = Constraint(expr= - 2000*m.b5 + m.x165 - m.x225 >= -2000)
m.c265 = Constraint(expr= - 2000*m.b6 + m.x171 - m.x227 >= -2000)
m.c266 = Constraint(expr= - 2000*m.b7 + m.x177 - m.x229 >= -2000)
m.c267 = Constraint(expr= - 2000*m.b8 + m.x183 - m.x225 >= -2000)
m.c268 = Constraint(expr= - 2000*m.b9 + m.x189 - m.x227 >= -2000)
m.c269 = Constraint(expr= - 2000*m.b10 + m.x29 - m.x229 >= -2000)
m.c270 = Constraint(expr= - 2000*m.b11 + m.x32 - m.x230 >= -2000)
m.c271 = Constraint(expr= - 2000*m.b12 + m.x36 - m.x231 >= -2000)
m.c272 = Constraint(expr= - 2000*m.b13 + m.x40 - m.x232 >= -2000)
m.c273 = Constraint(expr= - 2000*m.b14 + m.x44 - m.x230 >= -2000)
m.c274 = Constraint(expr= - 2000*m.b15 + m.x47 - m.x231 >= -2000)
m.c275 = Constraint(expr= - 2000*m.b16 + m.x50 - m.x232 >= -2000)
m.c276 = Constraint(expr= - 2000*m.b17 + m.x53 - m.x230 >= -2000)
m.c277 = Constraint(expr= - 2000*m.b18 + m.x57 - m.x231 >= -2000)
m.c278 = Constraint(expr= - 2000*m.b19 + m.x61 - m.x232 >= -2000)
m.c279 = Constraint(expr= - 2000*m.b20 + m.x65 - m.x230 >= -2000)
m.c280 = Constraint(expr= - 2000*m.b21 + m.x68 - m.x231 >= -2000)
m.c281 = Constraint(expr= - 2000*m.b22 + m.x71 - m.x232 >= -2000)
m.c282 = Constraint(expr= - 2000*m.b23 + m.x74 - m.x233 >= -2000)
m.c283 = Constraint(expr= - 2000*m.b24 + m.x78 - m.x234 >= -2000)
m.c284 = Constraint(expr= - 2000*m.b25 + m.x82 - m.x235 >= -2000)
m.c285 = Constraint(expr= - 2000*m.b26 + m.x86 - m.x233 >= -2000)
m.c286 = Constraint(expr= - 2000*m.b27 + m.x89 - m.x234 >= -2000)
m.c287 = Constraint(expr= - 2000*m.b28 + m.x92 - m.x235 >= -2000)
m.c288 = Constraint(expr= 1049*m.b2 + m.x141 - m.x225 <= 1049)
m.c289 = Constraint(expr= 1049*m.b3 + m.x149 - m.x227 <= 1049)
m.c290 = Constraint(expr= 1049*m.b4 + m.x157 - m.x229 <= 1049)
m.c291 = Constraint(expr= 1049*m.b5 + m.x165 - m.x225 <= 1049)
m.c292 = Constraint(expr= 1049*m.b6 + m.x171 - m.x227 <= 1049)
m.c293 = Constraint(expr= 1049*m.b7 + m.x177 - m.x229 <= 1049)
m.c294 = Constraint(expr= 1049*m.b8 + m.x183 - m.x225 <= 1049)
m.c295 = Constraint(expr= 1049*m.b9 + m.x189 - m.x227 <= 1049)
m.c296 = Constraint(expr= 1049*m.b10 + m.x29 - m.x229 <= 1049)
m.c297 = Constraint(expr= 1065*m.b11 + m.x32 - m.x230 <= 1065)
m.c298 = Constraint(expr= 1065*m.b12 + m.x36 - m.x231 <= 1065)
m.c299 = Constraint(expr= 1065*m.b13 + m.x40 - m.x232 <= 1065)
m.c300 = Constraint(expr= 1065*m.b14 + m.x44 - m.x230 <= 1065)
m.c301 = Constraint(expr= 1065*m.b15 + m.x47 - m.x231 <= 1065)
m.c302 = Constraint(expr= 1065*m.b16 + m.x50 - m.x232 <= 1065)
m.c303 = Constraint(expr= 1065*m.b17 + m.x53 - m.x230 <= 1065)
m.c304 = Constraint(expr= 1065*m.b18 + m.x57 - m.x231 <= 1065)
m.c305 = Constraint(expr= 1065*m.b19 + m.x61 - m.x232 <= 1065)
m.c306 = Constraint(expr= 1065*m.b20 + m.x65 - m.x230 <= 1065)
m.c307 = Constraint(expr= 1065*m.b21 + m.x68 - m.x231 <= 1065)
m.c308 = Constraint(expr= 1065*m.b22 + m.x71 - m.x232 <= 1065)
m.c309 = Constraint(expr= 1095*m.b23 + m.x74 - m.x233 <= 1095)
m.c310 = Constraint(expr= 1095*m.b24 + m.x78 - m.x234 <= 1095)
m.c311 = Constraint(expr= 1095*m.b25 + m.x82 - m.x235 <= 1095)
m.c312 = Constraint(expr= 1095*m.b26 + m.x86 - m.x233 <= 1095)
m.c313 = Constraint(expr= 1095*m.b27 + m.x89 - m.x234 <= 1095)
m.c314 = Constraint(expr= 1095*m.b28 + m.x92 - m.x235 <= 1095)
m.c315 = Constraint(expr= - m.x197 + m.x210 >= 0)
m.c316 = Constraint(expr= - m.x198 + m.x213 >= 0)
m.c317 = Constraint(expr= - m.x199 + m.x216 >= 0)
m.c318 = Constraint(expr= m.x200 - m.x275 >= 0)
m.c319 = Constraint(expr= m.x201 - m.x276 >= 0)
m.c320 = Constraint(expr= m.x202 - m.x277 >= 0)
m.c321 = Constraint(expr= - 0.309838295393634*m.x278 + 13.94696158*m.x279 + 24.46510819*m.x280 - 7.28623839*m.x281
- 23.57687014*m.x282 <= 0)
m.c322 = Constraint(expr= - 0.309838295393634*m.x283 + 13.94696158*m.x284 + 24.46510819*m.x285 - 7.28623839*m.x286
- 23.57687014*m.x287 <= 0)
m.c323 = Constraint(expr= - 0.309838295393634*m.x288 + 13.94696158*m.x289 + 24.46510819*m.x290 - 7.28623839*m.x291
- 23.57687014*m.x292 <= 0)
m.c324 = Constraint(expr= - 0.309838295393634*m.x293 + 13.94696158*m.x294 + 24.46510819*m.x295 - 7.28623839*m.x296
- 23.57687014*m.x297 <= 0)
m.c325 = Constraint(expr= - 0.309838295393634*m.x298 + 13.94696158*m.x299 + 24.46510819*m.x300 - 7.28623839*m.x301
- 23.57687014*m.x302 <= 0)
m.c326 = Constraint(expr= - 0.309838295393634*m.x303 + 13.94696158*m.x304 + 24.46510819*m.x305 - 7.28623839*m.x306
- 23.57687014*m.x307 <= 0)
m.c327 = Constraint(expr= - 0.309838295393634*m.x308 + 13.94696158*m.x309 + 24.46510819*m.x310 - 7.28623839*m.x311
- 23.57687014*m.x312 <= 0)
m.c328 = Constraint(expr= - 0.309838295393634*m.x313 + 13.94696158*m.x314 + 24.46510819*m.x315 - 7.28623839*m.x316
- 23.57687014*m.x317 <= 0)
m.c329 = Constraint(expr= - 0.309838295393634*m.x318 + 13.94696158*m.x319 + 24.46510819*m.x320 - 7.28623839*m.x321
- 23.57687014*m.x322 <= 0)
m.c330 = Constraint(expr= - 0.309838295393634*m.x323 + 29.29404529*m.x324 - 108.39408287*m.x325 + 442.21990639*m.x326
- 454.58448169*m.x327 <= 0)
m.c331 = Constraint(expr= - 0.309838295393634*m.x328 + 29.29404529*m.x329 - 108.39408287*m.x330 + 442.21990639*m.x331
- 454.58448169*m.x332 <= 0)
m.c332 = Constraint(expr= - 0.309838295393634*m.x333 + 29.29404529*m.x334 - 108.39408287*m.x335 + 442.21990639*m.x336
- 454.58448169*m.x337 <= 0)
m.c333 = Constraint(expr= - 0.309838295393634*m.x338 + 29.29404529*m.x339 - 108.39408287*m.x340 + 442.21990639*m.x341
- 454.58448169*m.x342 <= 0)
m.c334 = Constraint(expr= 442.21990639*m.x343 - 454.58448169*m.x344 - 0.309838295393634*m.x345 + 29.29404529*m.x346
- 108.39408287*m.x347 <= 0)
m.c335 = Constraint(expr= - 0.309838295393634*m.x348 + 29.29404529*m.x349 - 108.39408287*m.x350 + 442.21990639*m.x351
- 454.58448169*m.x352 <= 0)
m.c336 = Constraint(expr= - 0.309838295393634*m.x353 + 25.92674585*m.x354 + 18.13482123*m.x355 + 22.12766012*m.x356
- 42.68950769*m.x357 <= 0)
m.c337 = Constraint(expr= - 0.309838295393634*m.x358 + 25.92674585*m.x359 + 18.13482123*m.x360 + 22.12766012*m.x361
- 42.68950769*m.x362 <= 0)
m.c338 = Constraint(expr= - 0.309838295393634*m.x363 + 25.92674585*m.x364 + 18.13482123*m.x365 + 22.12766012*m.x366
- 42.68950769*m.x367 <= 0)
m.c339 = Constraint(expr= - 0.309838295393634*m.x368 + 25.92674585*m.x369 + 18.13482123*m.x370 + 22.12766012*m.x371
- 42.68950769*m.x372 <= 0)
m.c340 = Constraint(expr= - 0.309838295393634*m.x373 + 25.92674585*m.x374 + 18.13482123*m.x375 + 22.12766012*m.x376
- 42.68950769*m.x377 <= 0)
m.c341 = Constraint(expr= - 0.309838295393634*m.x378 + 25.92674585*m.x379 + 18.13482123*m.x380 + 22.12766012*m.x381
- 42.68950769*m.x382 <= 0)
m.c342 = Constraint(expr= - 0.309838295393634*m.x383 + 17.4714791*m.x384 - 39.98407808*m.x385 + 134.55943082*m.x386
- 135.88441782*m.x387 <= 0)
m.c343 = Constraint(expr= - 0.309838295393634*m.x388 + 17.4714791*m.x389 - 39.98407808*m.x390 + 134.55943082*m.x391
- 135.88441782*m.x392 <= 0)
m.c344 = Constraint(expr= - 0.309838295393634*m.x393 + 17.4714791*m.x394 - 39.98407808*m.x395 + 134.55943082*m.x396
- 135.88441782*m.x397 <= 0)
m.c345 = Constraint(expr= - 0.309838295393634*m.x398 + 17.4714791*m.x399 - 39.98407808*m.x400 + 134.55943082*m.x401
- 135.88441782*m.x402 <= 0)
m.c346 = Constraint(expr= - 0.309838295393634*m.x403 + 17.4714791*m.x404 - 39.98407808*m.x405 + 134.55943082*m.x406
- 135.88441782*m.x407 <= 0)
m.c347 = Constraint(expr= - 0.309838295393634*m.x408 + 17.4714791*m.x409 - 39.98407808*m.x410 + 134.55943082*m.x411
- 135.88441782*m.x412 <= 0)
m.c348 = Constraint(expr=m.x98**2 - m.x413 == 0)
m.c349 = Constraint(expr= m.x204 - 5*m.x413 == 0)
m.c350 = Constraint(expr=m.x100**2 - m.x414 == 0)
m.c351 = Constraint(expr= m.x206 - 5*m.x414 == 0)
m.c352 = Constraint(expr=m.x102**2 - m.x415 == 0)
m.c353 = Constraint(expr= m.x208 - 5*m.x415 == 0)
m.c354 = Constraint(expr=m.x104**2 - m.x416 == 0)
m.c355 = Constraint(expr= m.x211 - 4*m.x416 == 0)
m.c356 = Constraint(expr=m.x106**2 - m.x417 == 0)
m.c357 = Constraint(expr= m.x214 - 4*m.x417 == 0)
m.c358 = Constraint(expr=m.x108**2 - m.x418 == 0)
m.c359 = Constraint(expr= m.x217 - 4*m.x418 == 0)
m.c360 = Constraint(expr=m.x113**2 - m.x419 == 0)
m.c361 = Constraint(expr= m.x219 - 5*m.x419 == 0)
m.c362 = Constraint(expr=m.x115**2 - m.x420 == 0)
m.c363 = Constraint(expr= m.x221 - 5*m.x420 == 0)
m.c364 = Constraint(expr=m.x117**2 - m.x421 == 0)
m.c365 = Constraint(expr= m.x223 - 5*m.x421 == 0)
m.c366 = Constraint(expr=m.x140**2 - m.x422 == 0)
m.c367 = Constraint(expr= m.x143 - m.x422 == 0)
m.c368 = Constraint(expr=m.x140**3 - m.x423 == 0)
m.c369 = Constraint(expr= m.x282 - m.x423 == 0)
m.c370 = Constraint(expr=m.x142**2 - m.x424 == 0)
m.c371 = Constraint(expr= m.x151 - m.x424 == 0)
m.c372 = Constraint(expr=m.x142**3 - m.x425 == 0)
m.c373 = Constraint(expr= m.x287 - m.x425 == 0)
m.c374 = Constraint(expr=m.x144**2 - m.x426 == 0)
m.c375 = Constraint(expr= m.x159 - m.x426 == 0)
m.c376 = Constraint(expr=m.x144**3 - m.x427 == 0)
m.c377 = Constraint(expr= m.x292 - m.x427 == 0)
m.c378 = Constraint(expr=m.x146**2 - m.x428 == 0)
m.c379 = Constraint(expr= m.x167 - m.x428 == 0)
m.c380 = Constraint(expr=m.x146**3 - m.x429 == 0)
m.c381 = Constraint(expr= m.x297 - m.x429 == 0)
m.c382 = Constraint(expr=m.x148**2 - m.x430 == 0)
m.c383 = Constraint(expr= m.x175 - m.x430 == 0)
m.c384 = Constraint(expr=m.x148**3 - m.x431 == 0)
m.c385 = Constraint(expr= m.x302 - m.x431 == 0)
m.c386 = Constraint(expr=m.x150**2 - m.x432 == 0)
m.c387 = Constraint(expr= m.x181 - m.x432 == 0)
m.c388 = Constraint(expr=m.x150**3 - m.x433 == 0)
m.c389 = Constraint(expr= m.x307 - m.x433 == 0)
m.c390 = Constraint(expr=m.x152**2 - m.x434 == 0)
m.c391 = Constraint(expr= m.x187 - m.x434 == 0)
m.c392 = Constraint(expr=m.x152**3 - m.x435 == 0)
m.c393 = Constraint(expr= m.x312 - m.x435 == 0)
m.c394 = Constraint(expr=m.x154**2 - m.x436 == 0)
m.c395 = Constraint(expr= m.x191 - m.x436 == 0)
m.c396 = Constraint(expr=m.x154**3 - m.x437 == 0)
m.c397 = Constraint(expr= m.x317 - m.x437 == 0)
m.c398 = Constraint(expr=m.x156**2 - m.x438 == 0)
m.c399 = Constraint(expr= m.x30 - m.x438 == 0)
m.c400 = Constraint(expr=m.x156**3 - m.x439 == 0)
m.c401 = Constraint(expr= m.x322 - m.x439 == 0)
m.c402 = Constraint(expr=m.x158**2 - m.x440 == 0)
m.c403 = Constraint(expr= m.x35 - m.x440 == 0)
m.c404 = Constraint(expr=m.x158**3 - m.x441 == 0)
m.c405 = Constraint(expr= m.x327 - m.x441 == 0)
m.c406 = Constraint(expr=m.x160**2 - m.x442 == 0)
m.c407 = Constraint(expr= m.x37 - m.x442 == 0)
m.c408 = Constraint(expr=m.x160**3 - m.x443 == 0)
m.c409 = Constraint(expr= m.x332 - m.x443 == 0)
m.c410 = Constraint(expr=m.x162**2 - m.x444 == 0)
m.c411 = Constraint(expr= m.x42 - m.x444 == 0)
m.c412 = Constraint(expr=m.x162**3 - m.x445 == 0)
m.c413 = Constraint(expr= m.x337 - m.x445 == 0)
m.c414 = Constraint(expr=m.x164**2 - m.x446 == 0)
m.c415 = Constraint(expr= m.x46 - m.x446 == 0)
m.c416 = Constraint(expr=m.x164**3 - m.x447 == 0)
m.c417 = Constraint(expr= m.x342 - m.x447 == 0)
m.c418 = Constraint(expr=m.x166**2 - m.x448 == 0)
m.c419 = Constraint(expr= m.x48 - m.x448 == 0)
m.c420 = Constraint(expr=m.x166**3 - m.x449 == 0)
m.c421 = Constraint(expr= m.x344 - m.x449 == 0)
m.c422 = Constraint(expr=m.x168**2 - m.x450 == 0)
m.c423 = Constraint(expr= m.x52 - m.x450 == 0)
m.c424 = Constraint(expr=m.x168**3 - m.x451 == 0)
m.c425 = Constraint(expr= m.x352 - m.x451 == 0)
m.c426 = Constraint(expr=m.x170**2 - m.x452 == 0)
m.c427 = Constraint(expr= m.x56 - m.x452 == 0)
m.c428 = Constraint(expr=m.x170**3 - m.x453 == 0)
m.c429 = Constraint(expr= m.x357 - m.x453 == 0)
m.c430 = Constraint(expr=m.x172**2 - m.x454 == 0)
m.c431 = Constraint(expr= m.x58 - m.x454 == 0)
m.c432 = Constraint(expr=m.x172**3 - m.x455 == 0)
m.c433 = Constraint(expr= m.x362 - m.x455 == 0)
m.c434 = Constraint(expr=m.x174**2 - m.x456 == 0)
m.c435 = Constraint(expr= m.x64 - m.x456 == 0)
m.c436 = Constraint(expr=m.x174**3 - m.x457 == 0)
m.c437 = Constraint(expr= m.x367 - m.x457 == 0)
m.c438 = Constraint(expr=m.x176**2 - m.x458 == 0)
m.c439 = Constraint(expr= m.x67 - m.x458 == 0)
m.c440 = Constraint(expr=m.x176**3 - m.x459 == 0)
m.c441 = Constraint(expr= m.x372 - m.x459 == 0)
m.c442 = Constraint(expr=m.x178**2 - m.x460 == 0)
m.c443 = Constraint(expr= m.x69 - m.x460 == 0)
m.c444 = Constraint(expr=m.x178**3 - m.x461 == 0)
m.c445 = Constraint(expr= m.x377 - m.x461 == 0)
m.c446 = Constraint(expr=m.x180**2 - m.x462 == 0)
m.c447 = Constraint(expr= m.x73 - m.x462 == 0)
m.c448 = Constraint(expr=m.x180**3 - m.x463 == 0)
m.c449 = Constraint(expr= m.x382 - m.x463 == 0)
m.c450 = Constraint(expr=m.x182**2 - m.x464 == 0)
m.c451 = Constraint(expr= m.x76 - m.x464 == 0)
m.c452 = Constraint(expr=m.x182**3 - m.x465 == 0)
m.c453 = Constraint(expr= m.x387 - m.x465 == 0)
m.c454 = Constraint(expr=m.x184**2 - m.x466 == 0)
m.c455 = Constraint(expr= m.x81 - m.x466 == 0)
m.c456 = Constraint(expr=m.x184**3 - m.x467 == 0)
m.c457 = Constraint(expr= m.x392 - m.x467 == 0)
m.c458 = Constraint(expr=m.x186**2 - m.x468 == 0)
m.c459 = Constraint(expr= m.x85 - m.x468 == 0)
m.c460 = Constraint(expr=m.x186**3 - m.x469 == 0)
m.c461 = Constraint(expr= m.x397 - m.x469 == 0)
m.c462 = Constraint(expr=m.x188**2 - m.x470 == 0)
m.c463 = Constraint(expr= m.x87 - m.x470 == 0)
m.c464 = Constraint(expr=m.x188**3 - m.x471 == 0)
m.c465 = Constraint(expr= m.x402 - m.x471 == 0)
m.c466 = Constraint(expr=m.x190**2 - m.x472 == 0)
m.c467 = Constraint(expr= m.x90 - m.x472 == 0)
m.c468 = Constraint(expr=m.x190**3 - m.x473 == 0)
m.c469 = Constraint(expr= m.x407 - m.x473 == 0)
m.c470 = Constraint(expr=m.x192**2 - m.x474 == 0)
m.c471 = Constraint(expr= m.x94 - m.x474 == 0)
m.c472 = Constraint(expr=m.x192**3 - m.x475 == 0)
m.c473 = Constraint(expr= m.x412 - m.x475 == 0)
m.c474 = Constraint(expr=m.x140*m.x263 - m.x145 == 0)
m.c475 = Constraint(expr=m.x263*m.x422 - m.x281 == 0)
m.c476 = Constraint(expr=m.x146*m.x263 - m.x169 == 0)
m.c477 = Constraint(expr=m.x263*m.x428 - m.x296 == 0)
m.c478 = Constraint(expr=m.x152*m.x263 - m.x185 == 0)
m.c479 = Constraint(expr=m.x263*m.x434 - m.x311 == 0)
m.c480 = Constraint(expr=m.x263**2 - m.x476 == 0)
m.c481 = Constraint(expr= m.x147 - m.x476 == 0)
m.c482 = Constraint(expr=m.x140*m.x476 - m.x280 == 0)
m.c483 = Constraint(expr=m.x146*m.x476 - m.x295 == 0)
m.c484 = Constraint(expr=m.x152*m.x476 - m.x310 == 0)
m.c485 = Constraint(expr=m.x263**3 - m.x477 == 0)
m.c486 = Constraint(expr=m.b2*m.x477 - m.x279 == 0)
m.c487 = Constraint(expr=m.b5*m.x477 - m.x294 == 0)
m.c488 = Constraint(expr=m.b8*m.x477 - m.x309 == 0)
m.c489 = Constraint(expr=m.x142*m.x264 - m.x155 == 0)
m.c490 = Constraint(expr=m.x264*m.x424 - m.x286 == 0)
m.c491 = Constraint(expr=m.x148*m.x264 - m.x173 == 0)
m.c492 = Constraint(expr=m.x264*m.x430 - m.x301 == 0)
m.c493 = Constraint(expr=m.x154*m.x264 - m.x193 == 0)
m.c494 = Constraint(expr=m.x264*m.x436 - m.x316 == 0)
m.c495 = Constraint(expr=m.x264**2 - m.x478 == 0)
m.c496 = Constraint(expr= m.x153 - m.x478 == 0)
m.c497 = Constraint(expr=m.x142*m.x478 - m.x285 == 0)
m.c498 = Constraint(expr=m.x148*m.x478 - m.x300 == 0)
m.c499 = Constraint(expr=m.x154*m.x478 - m.x315 == 0)
m.c500 = Constraint(expr=m.x264**3 - m.x479 == 0)
m.c501 = Constraint(expr=m.b3*m.x479 - m.x284 == 0)
m.c502 = Constraint(expr=m.b6*m.x479 - m.x299 == 0)
m.c503 = Constraint(expr=m.b9*m.x479 - m.x314 == 0)
m.c504 = Constraint(expr=m.x144*m.x265 - m.x163 == 0)
m.c505 = Constraint(expr=m.x265*m.x426 - m.x291 == 0)
m.c506 = Constraint(expr=m.x150*m.x265 - m.x179 == 0)
m.c507 = Constraint(expr=m.x265*m.x432 - m.x306 == 0)
m.c508 = Constraint(expr=m.x156*m.x265 - m.x31 == 0)
m.c509 = Constraint(expr=m.x265*m.x438 - m.x321 == 0)
m.c510 = Constraint(expr=m.x265**2 - m.x480 == 0)
m.c511 = Constraint(expr= m.x161 - m.x480 == 0)
m.c512 = Constraint(expr=m.x144*m.x480 - m.x290 == 0)
m.c513 = Constraint(expr=m.x150*m.x480 - m.x305 == 0)
m.c514 = Constraint(expr=m.x156*m.x480 - m.x320 == 0)
m.c515 = Constraint(expr=m.x265**3 - m.x481 == 0)
m.c516 = Constraint(expr=m.b4*m.x481 - m.x289 == 0)
m.c517 = Constraint(expr=m.b7*m.x481 - m.x304 == 0)
m.c518 = Constraint(expr=m.b10*m.x481 - m.x319 == 0)
m.c519 = Constraint(expr=m.x158*m.x266 - m.x34 == 0)
m.c520 = Constraint(expr=m.x266*m.x440 - m.x326 == 0)
m.c521 = Constraint(expr=m.x164*m.x266 - m.x45 == 0)
m.c522 = Constraint(expr=m.x266*m.x446 - m.x341 == 0)
m.c523 = Constraint(expr=m.x266**2 - m.x482 == 0)
m.c524 = Constraint(expr= m.x33 - m.x482 == 0)
m.c525 = Constraint(expr=m.x158*m.x482 - m.x325 == 0)
m.c526 = Constraint(expr=m.x164*m.x482 - m.x340 == 0)
m.c527 = Constraint(expr=m.x266**3 - m.x483 == 0)
m.c528 = Constraint(expr=m.b11*m.x483 - m.x324 == 0)
m.c529 = Constraint(expr=m.b14*m.x483 - m.x339 == 0)
m.c530 = Constraint(expr=m.x160*m.x267 - m.x39 == 0)
m.c531 = Constraint(expr=m.x267*m.x442 - m.x331 == 0)
m.c532 = Constraint(expr=m.x166*m.x267 - m.x49 == 0)
m.c533 = Constraint(expr=m.x267*m.x448 - m.x343 == 0)
m.c534 = Constraint(expr=m.x267**2 - m.x484 == 0)
m.c535 = Constraint(expr= m.x38 - m.x484 == 0)
m.c536 = Constraint(expr=m.x160*m.x484 - m.x330 == 0)
m.c537 = Constraint(expr=m.x166*m.x484 - m.x347 == 0)
m.c538 = Constraint(expr=m.x267**3 - m.x485 == 0)
m.c539 = Constraint(expr=m.b12*m.x485 - m.x329 == 0)
m.c540 = Constraint(expr=m.b15*m.x485 - m.x346 == 0)
m.c541 = Constraint(expr=m.x162*m.x268 - m.x41 == 0)
m.c542 = Constraint(expr=m.x268*m.x444 - m.x336 == 0)
m.c543 = Constraint(expr=m.x168*m.x268 - m.x51 == 0)
m.c544 = Constraint(expr=m.x268*m.x450 - m.x351 == 0)
m.c545 = Constraint(expr=m.x268**2 - m.x486 == 0)
m.c546 = Constraint(expr= m.x43 - m.x486 == 0)
m.c547 = Constraint(expr=m.x162*m.x486 - m.x335 == 0)
m.c548 = Constraint(expr=m.x168*m.x486 - m.x350 == 0)
m.c549 = Constraint(expr=m.x268**3 - m.x487 == 0)
m.c550 = Constraint(expr=m.b13*m.x487 - m.x334 == 0)
m.c551 = Constraint(expr=m.b16*m.x487 - m.x349 == 0)
m.c552 = Constraint(expr=m.x170*m.x269 - m.x54 == 0)
m.c553 = Constraint(expr=m.x269*m.x452 - m.x356 == 0)
m.c554 = Constraint(expr=m.x176*m.x269 - m.x66 == 0)
m.c555 = Constraint(expr=m.x269*m.x458 - m.x371 == 0)
m.c556 = Constraint(expr=m.x269**2 - m.x488 == 0)
m.c557 = Constraint(expr= m.x55 - m.x488 == 0)
m.c558 = Constraint(expr=m.x170*m.x488 - m.x355 == 0)
m.c559 = Constraint(expr=m.x176*m.x488 - m.x370 == 0)
m.c560 = Constraint(expr=m.x269**3 - m.x489 == 0)
m.c561 = Constraint(expr=m.b17*m.x489 - m.x354 == 0)
m.c562 = Constraint(expr=m.b20*m.x489 - m.x369 == 0)
m.c563 = Constraint(expr=m.x172*m.x270 - m.x59 == 0)
m.c564 = Constraint(expr=m.x270*m.x454 - m.x361 == 0)
m.c565 = Constraint(expr=m.x178*m.x270 - m.x70 == 0)
m.c566 = Constraint(expr=m.x270*m.x460 - m.x376 == 0)
m.c567 = Constraint(expr=m.x270**2 - m.x490 == 0)
m.c568 = Constraint(expr= m.x60 - m.x490 == 0)
m.c569 = Constraint(expr=m.x172*m.x490 - m.x360 == 0)
m.c570 = Constraint(expr=m.x178*m.x490 - m.x375 == 0)
m.c571 = Constraint(expr=m.x270**3 - m.x491 == 0)
m.c572 = Constraint(expr=m.b18*m.x491 - m.x359 == 0)
m.c573 = Constraint(expr=m.b21*m.x491 - m.x374 == 0)
m.c574 = Constraint(expr=m.x174*m.x271 - m.x63 == 0)
m.c575 = Constraint(expr=m.x271*m.x456 - m.x366 == 0)
m.c576 = Constraint(expr=m.x180*m.x271 - m.x72 == 0)
m.c577 = Constraint(expr=m.x271*m.x462 - m.x381 == 0)
m.c578 = Constraint(expr=m.x271**2 - m.x492 == 0)
m.c579 = Constraint(expr= m.x62 - m.x492 == 0)
m.c580 = Constraint(expr=m.x174*m.x492 - m.x365 == 0)
m.c581 = Constraint(expr=m.x180*m.x492 - m.x380 == 0)
m.c582 = Constraint(expr=m.x271**3 - m.x493 == 0)
m.c583 = Constraint(expr=m.b19*m.x493 - m.x364 == 0)
m.c584 = Constraint(expr=m.b22*m.x493 - m.x379 == 0)
m.c585 = Constraint(expr=m.x182*m.x272 - m.x75 == 0)
m.c586 = Constraint(expr=m.x272*m.x464 - m.x386 == 0)
m.c587 = Constraint(expr=m.x188*m.x272 - m.x88 == 0)
m.c588 = Constraint(expr=m.x272*m.x470 - m.x401 == 0)
m.c589 = Constraint(expr=m.x272**2 - m.x494 == 0)
m.c590 = Constraint(expr= m.x77 - m.x494 == 0)
m.c591 = Constraint(expr=m.x182*m.x494 - m.x385 == 0)
m.c592 = Constraint(expr=m.x188*m.x494 - m.x400 == 0)
m.c593 = Constraint(expr=m.x272**3 - m.x495 == 0)
m.c594 = Constraint(expr=m.b23*m.x495 - m.x384 == 0)
m.c595 = Constraint(expr=m.b26*m.x495 - m.x399 == 0)
m.c596 = Constraint(expr=m.x184*m.x273 - m.x79 == 0)
m.c597 = Constraint(expr=m.x273*m.x466 - m.x391 == 0)
m.c598 = Constraint(expr=m.x190*m.x273 - m.x91 == 0)
m.c599 = Constraint(expr=m.x273*m.x472 - m.x406 == 0)
m.c600 = Constraint(expr=m.x273**2 - m.x496 == 0)
m.c601 = Constraint(expr= m.x80 - m.x496 == 0)
m.c602 = Constraint(expr=m.x184*m.x496 - m.x390 == 0)
m.c603 = Constraint(expr=m.x190*m.x496 - m.x405 == 0)
m.c604 = Constraint(expr=m.x273**3 - m.x497 == 0)
m.c605 = Constraint(expr=m.b24*m.x497 - m.x389 == 0)
m.c606 = Constraint(expr=m.b27*m.x497 - m.x404 == 0)
m.c607 = Constraint(expr=m.x186*m.x274 - m.x83 == 0)
m.c608 = Constraint(expr=m.x274*m.x468 - m.x396 == 0)
m.c609 = Constraint(expr=m.x192*m.x274 - m.x93 == 0)
m.c610 = Constraint(expr=m.x274*m.x474 - m.x411 == 0)
m.c611 = Constraint(expr=m.x274**2 - m.x498 == 0)
m.c612 = Constraint(expr= m.x84 - m.x498 == 0)
m.c613 = Constraint(expr=m.x186*m.x498 - m.x395 == 0)
m.c614 = Constraint(expr=m.x192*m.x498 - m.x410 == 0)
m.c615 = Constraint(expr=m.x274**3 - m.x499 == 0)
m.c616 = Constraint(expr=m.b25*m.x499 - m.x394 == 0)
m.c617 = Constraint(expr=m.b28*m.x499 - m.x409 == 0)
| 38.381299
| 117
| 0.65222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 699
| 0.010197
|
0bbab57a58980cab77be4152c0853746383805da
| 3,265
|
py
|
Python
|
examples/pincell_depletion/restart_depletion.py
|
norberto-schmidt/openmc
|
ff4844303154a68027b9c746300f5704f73e0875
|
[
"MIT"
] | 262
|
2018-08-09T21:27:03.000Z
|
2022-03-24T05:02:10.000Z
|
examples/pincell_depletion/restart_depletion.py
|
norberto-schmidt/openmc
|
ff4844303154a68027b9c746300f5704f73e0875
|
[
"MIT"
] | 753
|
2018-08-03T15:26:57.000Z
|
2022-03-29T23:54:48.000Z
|
examples/pincell_depletion/restart_depletion.py
|
norberto-schmidt/openmc
|
ff4844303154a68027b9c746300f5704f73e0875
|
[
"MIT"
] | 196
|
2018-08-06T13:41:14.000Z
|
2022-03-29T20:47:12.000Z
|
import openmc
import openmc.deplete
import matplotlib.pyplot as plt

###############################################################################
# Load previous simulation results
###############################################################################

# Load geometry from the statepoint of the previous transport run.
statepoint = 'statepoint.100.h5'
with openmc.StatePoint(statepoint) as sp:
    geometry = sp.summary.geometry

# Load previous depletion results so the new run restarts from them.
previous_results = openmc.deplete.ResultsList.from_hdf5("depletion_results.h5")

###############################################################################
# Transport calculation settings
###############################################################################

# Instantiate a Settings object, set all runtime parameters.
settings = openmc.Settings()
settings.batches = 100
settings.inactive = 10
settings.particles = 10000

# Create an initial uniform spatial source distribution over fissionable zones.
bounds = [-0.62992, -0.62992, -1, 0.62992, 0.62992, 1]
uniform_dist = openmc.stats.Box(bounds[:3], bounds[3:], only_fissionable=True)
settings.source = openmc.source.Source(space=uniform_dist)

# Shannon-entropy mesh to monitor fission-source convergence.
entropy_mesh = openmc.RegularMesh()
entropy_mesh.lower_left = [-0.39218, -0.39218, -1.e50]
entropy_mesh.upper_right = [0.39218, 0.39218, 1.e50]
entropy_mesh.dimension = [10, 10, 1]
settings.entropy_mesh = entropy_mesh

###############################################################################
# Initialize and run depletion calculation
###############################################################################

# Create depletion "operator", chaining onto the previous results.
chain_file = './chain_simple.xml'
op = openmc.deplete.Operator(geometry, settings, chain_file, previous_results)

# Perform simulation using the predictor algorithm.
time_steps = [1.0, 1.0, 1.0, 1.0, 1.0]  # days
power = 174  # W/cm, for 2D simulations only (use W for 3D)
integrator = openmc.deplete.PredictorIntegrator(op, time_steps, power, timestep_units='d')
integrator.integrate()

###############################################################################
# Read depletion calculation results
###############################################################################

# Open results file.
results = openmc.deplete.ResultsList.from_hdf5("depletion_results.h5")

# Obtain K_eff as a function of time.
time, keff = results.get_eigenvalue()

# Obtain U235 concentration as a function of time (material id '1').
time, n_U235 = results.get_atoms('1', 'U235')

# Obtain Xe135 capture reaction rate as a function of time.
time, Xe_capture = results.get_reaction_rate('1', 'Xe135', '(n,gamma)')

###############################################################################
# Generate plots
###############################################################################

# Conversion factor from seconds (results are in s) to days.
seconds_per_day = 24*60*60

plt.figure()
plt.plot(time/seconds_per_day, keff, label="K-effective")
plt.xlabel("Time (days)")
plt.ylabel("Keff")
# Fix: labels were passed to plot() but never shown — add the legend.
plt.legend()
plt.show()

plt.figure()
plt.plot(time/seconds_per_day, n_U235, label="U 235")
plt.xlabel("Time (days)")
plt.ylabel("n U5 (-)")
plt.legend()
plt.show()

plt.figure()
plt.plot(time/seconds_per_day, Xe_capture, label="Xe135 capture")
plt.xlabel("Time (days)")
plt.ylabel("RR (-)")
plt.legend()
plt.show()

plt.close('all')
| 35.879121
| 90
| 0.543951
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,769
| 0.541807
|
0bbb896c1f766d40e02d03530e5012bd42f6b56e
| 660
|
py
|
Python
|
app/schemas/treatment_type.py
|
DzhonPetrus/Treatment-Management
|
6b08c59d2d4e79181bbae4e951b7a5fd2e3162f1
|
[
"MIT"
] | null | null | null |
app/schemas/treatment_type.py
|
DzhonPetrus/Treatment-Management
|
6b08c59d2d4e79181bbae4e951b7a5fd2e3162f1
|
[
"MIT"
] | null | null | null |
app/schemas/treatment_type.py
|
DzhonPetrus/Treatment-Management
|
6b08c59d2d4e79181bbae4e951b7a5fd2e3162f1
|
[
"MIT"
] | null | null | null |
from datetime import datetime as dt
from typing import Optional, List
from pydantic import BaseModel
from ..utils.schemaHelper import Base, as_form
class TreatmentTypeBase(Base):
    """Fields shared by all treatment-type schemas."""
    name: str
    room: str
    description: str
    fee: float
    # Presumably an active/inactive status flag stored as a string —
    # TODO confirm against the DB model why this is not a bool.
    is_active: Optional[str] = None
@as_form
class CreateTreatmentType(TreatmentTypeBase):
    """Input schema for creating a treatment type.

    NOTE(review): ``@as_form`` comes from utils.schemaHelper; presumably it
    exposes the model fields as form parameters — verify there.
    """
    pass
class TreatmentType(TreatmentTypeBase):
    """Treatment type as read back from storage: base fields plus id and timestamps."""
    id: str
    created_at: Optional[dt] = None
    updated_at: Optional[dt] = None
class OutTreatmentTypes(Base):
    """Response envelope for a list of treatment types."""
    data: List[TreatmentType]
    # error/message mirror the API's common response envelope shape.
    error: bool
    message: str
class OutTreatmentType(Base):
    """Response envelope for a single treatment type."""
    data: TreatmentType
    error: bool
    message: str
| 19.411765
| 46
| 0.721212
| 487
| 0.737879
| 0
| 0
| 63
| 0.095455
| 0
| 0
| 0
| 0
|
0bbbc45ba4c350c8c90d7bb728eaa10783237f8b
| 2,211
|
py
|
Python
|
app/daemon.py
|
mika-koivusaari/mqtt_db_gateway
|
c2e6a0f97d340f5a9d8a2f530f3ae0145064fd2b
|
[
"MIT"
] | 1
|
2017-12-02T17:38:23.000Z
|
2017-12-02T17:38:23.000Z
|
app/daemon.py
|
mika-koivusaari/mqtt_db_gateway
|
c2e6a0f97d340f5a9d8a2f530f3ae0145064fd2b
|
[
"MIT"
] | null | null | null |
app/daemon.py
|
mika-koivusaari/mqtt_db_gateway
|
c2e6a0f97d340f5a9d8a2f530f3ae0145064fd2b
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from pep3143daemon import DaemonContext, PidFile
import signal
import os
import sys
import time
class Daemon:
    """Start/stop/reload lifecycle control for a pep3143daemon-based daemon.

    NOTE(review): subclasses are expected to provide ``pid``, ``config_file``,
    ``nodaemon``, ``readConfig``, ``createLogger``, ``run`` and the signal
    handlers referenced in ``start`` — none of them are defined here; confirm
    against the subclass.
    """

    def stop(self, pidfile):
        """Send SIGTERM to the daemon whose pid is stored in ``pidfile`` and
        wait until its pidfile disappears.

        NOTE(review): the pid is read from the ``pidfile`` argument but the
        wait loop polls ``self.pid`` — presumably these are the same path;
        confirm, otherwise stop() can hang or return early.
        """
        try:
            pid = open(pidfile).readline()
        except IOError:
            print("Daemon already gone, or pidfile was deleted manually")
            sys.exit(1)

        print("terminating Daemon with Pid: {0}".format(pid))
        os.kill(int(pid), signal.SIGTERM)
        sys.stdout.write("Waiting...")
        # Poll until the daemon removes its pidfile on shutdown.
        while os.path.isfile(self.pid):
            sys.stdout.write(".")
            sys.stdout.flush()
            time.sleep(0.5)
        print("Gone")

    def reload(self, pidfile):
        """Ask the running daemon to reload its configuration via SIGUSR1."""
        try:
            pid = open(pidfile).readline()
        except IOError:
            print("Daemon not running, or pidfile was deleted manually")
            sys.exit(1)

        print("Sending SIGUSR1 to Daemon with Pid: {0}".format(pid))
        os.kill(int(pid), signal.SIGUSR1)
        sys.stdout.write("Ok")

    def start(app):
        """Read config, build the DaemonContext, daemonize (unless
        ``nodaemon``) and enter ``run()``.

        NOTE(review): the first parameter is named ``app`` instead of the
        conventional ``self`` — it is still the instance.
        """
        app.config = app.readConfig(app.config_file)
        # Map termination/reload signals to the instance's handler methods;
        # stdout/stderr are redirected to files because the detached process
        # loses the controlling terminal.
        app.daemon = DaemonContext(pidfile=PidFile(app.pid)
                                   , signal_map={signal.SIGTERM: app.program_cleanup,
                                   signal.SIGHUP: app.terminate,
                                   signal.SIGUSR1: app.reload_program_config}
#                                  ,files_preserve=(sys.stdout)
                                   , stdout=open("/tmp/daemon_stdout.log", 'w')
                                   , stderr=open("/tmp/daemon_stderr.log", 'w')
                                   , gid=app.config["daemon"]["groupid"])
        print("daemon created")
        # In --nodaemon mode stay attached to the terminal (useful for debugging).
        if app.nodaemon:
            print("no daemon")
            app.daemon.detach_process = False
        else:
            app.daemon.detach_process = True
        try:
            print("before daemon")
            # open() performs the actual fork/detach; code after it runs in
            # the daemonized process.
            app.daemon.open()
            print("after daemon")
            app.createLogger()
            app.logger.debug('After open')
            app.run()
        except:
            # Log and re-raise; the bare except is deliberate here so even
            # BaseException gets reported before propagating.
            print("Unexpected error:", sys.exc_info()[0])
            raise
| 35.66129
| 99
| 0.502035
| 2,089
| 0.944821
| 0
| 0
| 0
| 0
| 0
| 0
| 452
| 0.204432
|
0bc0a0c5b56516ed3c7366dbc0aa3ccecc32fda3
| 623
|
py
|
Python
|
src/posts/forms.py
|
trivvet/djangoAdvance
|
28891893869c1c0c3cf67d7f496dda96322de18c
|
[
"MIT"
] | null | null | null |
src/posts/forms.py
|
trivvet/djangoAdvance
|
28891893869c1c0c3cf67d7f496dda96322de18c
|
[
"MIT"
] | null | null | null |
src/posts/forms.py
|
trivvet/djangoAdvance
|
28891893869c1c0c3cf67d7f496dda96322de18c
|
[
"MIT"
] | null | null | null |
from django import forms
from crispy_forms.helper import FormHelper
from pagedown.widgets import PagedownWidget
from .models import Post
class PostForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(PostForm, self).__init__(*args, **kwargs)
self.helper = FormHelper(self)
content = forms.CharField(widget=PagedownWidget(show_preview=False))
publish = forms.DateField(widget=forms.SelectDateWidget())
class Meta:
model = Post
fields = [
"title",
"content",
"image",
"draft",
"publish",
]
| 23.961538
| 72
| 0.622793
| 482
| 0.773676
| 0
| 0
| 0
| 0
| 0
| 0
| 39
| 0.0626
|
0bc0b1a713ee07a7da22300f41d7eef91e9cf3f3
| 1,621
|
py
|
Python
|
games/migrations/0004_auto_20150726_1430.py
|
rnelson/library
|
5f327c188f2847151dcfc92de0dc4f43f24096bf
|
[
"MIT"
] | null | null | null |
games/migrations/0004_auto_20150726_1430.py
|
rnelson/library
|
5f327c188f2847151dcfc92de0dc4f43f24096bf
|
[
"MIT"
] | null | null | null |
games/migrations/0004_auto_20150726_1430.py
|
rnelson/library
|
5f327c188f2847151dcfc92de0dc4f43f24096bf
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Auto-generated Django migration (do not hand-edit logic).

    Relaxes field constraints on the ``games`` app: makes most Game fields
    and two Publisher fields nullable, and pins name/country lengths.
    """

    dependencies = [
        ('games', '0003_auto_20150725_1737'),
    ]

    operations = [
        migrations.AlterField(
            model_name='game',
            name='description',
            field=models.TextField(null=True),
        ),
        migrations.AlterField(
            model_name='game',
            name='max_players',
            field=models.IntegerField(null=True),
        ),
        migrations.AlterField(
            model_name='game',
            name='max_time',
            field=models.IntegerField(null=True),
        ),
        migrations.AlterField(
            model_name='game',
            name='min_players',
            field=models.IntegerField(null=True),
        ),
        migrations.AlterField(
            model_name='game',
            name='min_time',
            field=models.IntegerField(null=True),
        ),
        migrations.AlterField(
            model_name='game',
            name='name',
            field=models.CharField(max_length=255),
        ),
        migrations.AlterField(
            model_name='game',
            name='url',
            field=models.URLField(null=True),
        ),
        migrations.AlterField(
            model_name='publisher',
            name='country',
            # Two-letter (presumably ISO 3166-1 alpha-2) country code — TODO confirm.
            field=models.CharField(max_length=2, null=True),
        ),
        migrations.AlterField(
            model_name='publisher',
            name='url',
            field=models.URLField(null=True),
        ),
    ]
| 27.016667
| 60
| 0.52992
| 1,512
| 0.932758
| 0
| 0
| 0
| 0
| 0
| 0
| 203
| 0.125231
|
0bc0f8ad9a5e857c61031c1ca0a45f2bb10b8808
| 783
|
py
|
Python
|
Exareme-Docker/src/mip-algorithms/HEALTH_CHECK/global.py
|
tchamabe1979/exareme
|
462983e4feec7808e1fd447d02901502588a8879
|
[
"MIT"
] | null | null | null |
Exareme-Docker/src/mip-algorithms/HEALTH_CHECK/global.py
|
tchamabe1979/exareme
|
462983e4feec7808e1fd447d02901502588a8879
|
[
"MIT"
] | null | null | null |
Exareme-Docker/src/mip-algorithms/HEALTH_CHECK/global.py
|
tchamabe1979/exareme
|
462983e4feec7808e1fd447d02901502588a8879
|
[
"MIT"
] | null | null | null |
import sys
import json
from os import path
from argparse import ArgumentParser
sys.path.append(path.dirname(path.dirname(path.abspath(__file__))) + '/utils/')
from algorithm_utils import set_algorithms_output_data
from health_check_lib import HealthCheckLocalDT
def main():
    """Global step of the HEALTH_CHECK algorithm.

    Loads the merged local-step transfer object from the path given by
    ``-local_step_dbs``, wraps the node list under the ``active_nodes``
    key and emits it as the algorithm's JSON output.
    """
    arg_parser = ArgumentParser()
    arg_parser.add_argument('-local_step_dbs', required=True, help='Path to local db.')
    parsed, _unknown = arg_parser.parse_known_args()

    db_path = path.abspath(parsed.local_step_dbs)
    transfer = HealthCheckLocalDT.load(db_path)

    payload = {"active_nodes": transfer.get_data()}
    set_algorithms_output_data(json.dumps(payload))


if __name__ == '__main__':
    main()
| 27.964286
| 84
| 0.715198
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 119
| 0.15198
|
0bc10ee1d8cb8fa794fa00533f0e4782089ee855
| 107
|
py
|
Python
|
app/search/urlmap.py
|
Hanaasagi/Ushio
|
007f8e50e68bf71a1822b09291b1236a1a37c515
|
[
"MIT"
] | 5
|
2016-10-24T14:01:48.000Z
|
2017-09-26T07:33:20.000Z
|
app/search/urlmap.py
|
Hanaasagi/Ushio
|
007f8e50e68bf71a1822b09291b1236a1a37c515
|
[
"MIT"
] | null | null | null |
app/search/urlmap.py
|
Hanaasagi/Ushio
|
007f8e50e68bf71a1822b09291b1236a1a37c515
|
[
"MIT"
] | null | null | null |
# -*-coding:UTF-8-*-
from handler import SearchHandler

# URL routing table for the search sub-application:
# a tuple of (url-regex, RequestHandler) pairs consumed by the app factory.
urlpattern = (
    (r'/search', SearchHandler),
)
| 13.375
| 33
| 0.654206
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 30
| 0.280374
|
0bc1b5133ac6d7c68f1be37cb9acd664f71acc62
| 1,601
|
py
|
Python
|
collect_data/utils/immerseuk/gtr/gtr_extrainfo_awsreduce.py
|
jaklinger/nesta_dataflow
|
5d5647dd8d900a40b460bae0841f7d917e53ae08
|
[
"MIT"
] | null | null | null |
collect_data/utils/immerseuk/gtr/gtr_extrainfo_awsreduce.py
|
jaklinger/nesta_dataflow
|
5d5647dd8d900a40b460bae0841f7d917e53ae08
|
[
"MIT"
] | null | null | null |
collect_data/utils/immerseuk/gtr/gtr_extrainfo_awsreduce.py
|
jaklinger/nesta_dataflow
|
5d5647dd8d900a40b460bae0841f7d917e53ae08
|
[
"MIT"
] | null | null | null |
import logging
from utils.common.datapipeline import DataPipeline
import boto3
import json
from copy import deepcopy
s3 = boto3.resource('s3')
bucket = s3.Bucket('tier-0')
def run(config=None):
    """Reduce step: merge per-chunk GtR organisation blobs from S3 and load
    the flattened rows into the data pipeline.

    Reads every S3 object whose key has the chunk shape ``a_b_c``, flattens
    each organisation into one row per entry of its ``results`` list
    (dropping the nested list), de-duplicates the rows and inserts them.

    :param config: pipeline configuration forwarded to DataPipeline.
    """
    orgs = []
    for obj in bucket.objects.all():
        key = str(obj.key)
        # Only chunk objects have keys of the form "<a>_<b>_<c>".
        if len(key.split("_")) != 3:
            continue
        data = obj.get()['Body'].read().decode("utf-8")
        orgs += json.loads(data)
        # if len(orgs) >= 1000:
        #     break
    logging.info("\tGot %s organisations.", len(orgs))

    # Flatten one row per (organisation, result). Fix: the original
    # `row not in output` membership test was O(n^2); rows originate from
    # json.loads so a canonical JSON string is a valid O(1) dedup key.
    output = []
    seen = set()
    for org in orgs:
        for r in org["results"]:
            row = deepcopy(org)
            row.pop("results")
            row = dict(**row, **r)
            fingerprint = json.dumps(row, sort_keys=True)
            if fingerprint not in seen:
                seen.add(fingerprint)
                output.append(row)

    # Write data
    logging.info("\tWriting to table")
    with DataPipeline(config) as dp:
        for row in output:
            dp.insert(row)
# NOTE(review): ad-hoc debug scratchpad, not the normal entry point (run()
# is commented out). It prints the count of keys in the local "not_done"
# file and then the number of chunk-shaped keys in the bucket.
if __name__ == "__main__":
    #run()
    #import numpy as np
    #all_numbers = list(np.arange(0,37242,6))
    #all_numbers.append(37242)
    print(len(open("not_done").read().split()))
    n = 0
    # Count S3 objects whose key matches the "<a>_<b>_<c>" chunk shape.
    for obj in bucket.objects.all():
        n += int(len(obj.key.split("_")) == 3)
        #if key not in all_numbers:
        #    continue
        #print(key,"!!")
        #else:
        #    all_numbers.remove(key)
    print(n)
#    with open("not_done","w") as f:
#        for n in all_numbers:
#            print("-->",n,"<--")
#            f.write(str(n)+" ")
    #data = obj.get()['Body'].read().decode("utf-8")
    #orgs += json.loads(data)
| 25.822581
| 56
| 0.519675
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 548
| 0.342286
|
0bc1b87155af7211f7ef4f7bb261c76723b7c1da
| 3,595
|
py
|
Python
|
src/features/helpers/processing_v4.py
|
askoki/nfl_dpi_prediction
|
dc3256f24ddc0b6725eace2081d1fb1a7e5ce805
|
[
"MIT"
] | null | null | null |
src/features/helpers/processing_v4.py
|
askoki/nfl_dpi_prediction
|
dc3256f24ddc0b6725eace2081d1fb1a7e5ce805
|
[
"MIT"
] | null | null | null |
src/features/helpers/processing_v4.py
|
askoki/nfl_dpi_prediction
|
dc3256f24ddc0b6725eace2081d1fb1a7e5ce805
|
[
"MIT"
] | null | null | null |
import math
import numpy as np
from matplotlib.patches import FancyArrowPatch
def home_has_possession(row):
    """Return True when the home team is the possession team on this row."""
    return row.possessionTeam == row.homeTeamAbbr
def calculate_team_sitation(row):
    """Classify a tracking row as 'football', 'attacking' or 'defending'.

    The ball itself keeps the label 'football'; a player's side is
    'attacking' when their team (home/away) matches the possession flag,
    'defending' otherwise.
    """
    if row.team == 'football':
        return 'football'
    home_attacks = row.team == 'home' and row.homeHasPossession
    away_attacks = row.team == 'away' and not row.homeHasPossession
    return 'attacking' if home_attacks or away_attacks else 'defending'
def convert_speed_to_marker_size(speed: float) -> int:
    """Map a speed (yards/s) to an arrow mutation scale for plotting.

    1.5 yd/s buckets map to sizes 10, 15, 20, 25; anything above 6 yd/s
    gets 30. Fix: non-positive speeds previously fell through every
    ``0 < speed`` guard and returned the LARGEST size (30); a stationary
    player now gets the smallest marker (10).
    """
    if speed <= 1.5:
        return 10
    elif speed <= 3:
        return 15
    elif speed <= 4.5:
        return 20
    elif speed <= 6:
        return 25
    return 30
def arrow(x, y, s, ax, color):
    """Draw movement arrows along a trajectory.

    :param x: positions on the x-axis
    :param y: positions on the y-axis
    :param s: speeds in yards/s (drives the arrow-head size)
    :param ax: axes to draw onto
    :param color: color of the arrows
    :return: arrows on the specific positions
    """
    step = 5  # distance (in samples) between consecutive arrows
    for i in range(1, len(x), step):
        # Each arrow points from the previous sample to the current one.
        patch = FancyArrowPatch(
            (x[i - 1], y[i - 1]), (x[i], y[i]),
            arrowstyle='->',
            mutation_scale=convert_speed_to_marker_size(s[i]),
            color=color,
        )
        ax.add_patch(patch)
def calculate_arrow_xy(x, y, o):
    """Return the arrow-tip point a fixed distance from (x, y) along
    orientation ``o``.

    ``o`` is a compass-style bearing in degrees (0 = +y, 90 = +x, clockwise)
    and is normalised into [0, 360). The original eight quadrant branches
    reduce exactly to one polar formula:
        (x + delta*sin(o), y + delta*cos(o))
    Fix: a leftover debug ``print`` in the o == 180 branch was removed.
    Cardinal angles now carry a ~1e-17 float epsilon instead of exact
    zeros — negligible for plotting.
    """
    delta = 0.1  # arrow length in plot units
    theta = math.radians(o % 360)
    return x + delta * math.sin(theta), y + delta * math.cos(theta)
def arrow_o(x, y, o, s, ax, color):
    """Draw orientation arrows along a trajectory.

    :param x: positions on the x-axis
    :param y: positions on the y-axis
    :param o: orientations in degrees 0-360
    :param s: speeds in yards/s (drives the arrow-head size)
    :param ax: axes to draw onto
    :param color: color of the arrows
    :return: arrows on the specific positions
    """
    step = 3  # distance (in samples) between consecutive arrows
    for i in range(5, len(x), step):
        # Tip point is offset from the sample along its orientation.
        tip_x, tip_y = calculate_arrow_xy(x[i], y[i], o[i])
        patch = FancyArrowPatch(
            (x[i], y[i]), (tip_x, tip_y),
            arrowstyle='-|>',
            mutation_scale=convert_speed_to_marker_size(s[i]),
            alpha=0.6,
            color=color,
        )
        ax.add_patch(patch)
def calculate_distance_v4(x1: np.array, y1: np.array, x2: np.array, y2: np.array) -> np.array:
    """Elementwise Euclidean distance between (x1, y1) and (x2, y2), rounded to 2 decimals."""
    dx = x1 - x2
    dy = y1 - y2
    return np.round(np.sqrt(dx * dx + dy * dy), 2)
| 27.868217
| 94
| 0.569958
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 779
| 0.21669
|
0bc25237116d36d1b3724261d878f108f7fb3326
| 1,103
|
py
|
Python
|
abc199/d/main.py
|
KeiNishikawa218/atcoder
|
0af5e091f8b1fd64d5ca7b46b06b9356eacfe601
|
[
"MIT"
] | null | null | null |
abc199/d/main.py
|
KeiNishikawa218/atcoder
|
0af5e091f8b1fd64d5ca7b46b06b9356eacfe601
|
[
"MIT"
] | null | null | null |
abc199/d/main.py
|
KeiNishikawa218/atcoder
|
0af5e091f8b1fd64d5ca7b46b06b9356eacfe601
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
class UnionFind():
    """Disjoint-set forest with union by size and path compression.

    ``parent[i]`` stores the parent index for a non-root node; for a root
    it stores the negated size of its component.
    """

    def __init__(self, n):
        # Every vertex starts as its own root with component size 1.
        self.parent = [-1] * n

    def find(self, x):
        """Return the root of x's component, compressing the path."""
        root = x
        while self.parent[root] >= 0:
            root = self.parent[root]
        # Second pass: point every node on the path directly at the root.
        while self.parent[x] >= 0:
            self.parent[x], x = root, self.parent[x]
        return root

    def unite(self, x, y):
        """Merge the components of x and y.

        Returns False when they are already in the same component;
        returns None on a successful merge (original behaviour).
        """
        root_x, root_y = self.find(x), self.find(y)
        if root_x == root_y:
            return False
        # Attach the smaller tree under the larger one.
        if self.size(root_x) < self.size(root_y):
            root_x, root_y = root_y, root_x
        self.parent[root_x] += self.parent[root_y]
        self.parent[root_y] = root_x

    def same(self, x, y):
        """True when x and y share a root."""
        return self.find(x) == self.find(y)

    def size(self, x):
        """Size of the component containing x."""
        return -self.parent[self.find(x)]

    def is_root(self, x):
        """True when x is the root of its component."""
        return self.parent[x] < 0
def main():
    # Reads "n m" then m 1-based pairs from stdin.
    # NOTE(review): ``pair_list`` is filled but never united into ``uf``,
    # and ``count`` is never used — this looks like an unfinished solution;
    # confirm intent before relying on its output.
    n, m = map(int, input().split())
    count = 0
    pair_list = []
    uf = UnionFind(n)
    for i in range(m):
        # Convert each pair to 0-based vertex indices.
        array = list(map(int,input().split()))
        array[0] -=1 ; array[1] -= 1
        pair_list.append(array)
    # Unites vertices n-1 and m-1 and prints the result
    # (None on merge, False if already connected).
    print(uf.unite(n-1,m-1))

main()
| 23.978261
| 54
| 0.481414
| 824
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 80
| 0.070609
|
0bc25628bdeee646aae0cedd3efc79f8829fa812
| 4,963
|
py
|
Python
|
scripts/corpinfo.py
|
HiroshiOhta/GetCorporationInfo
|
3c64ba44a15d481c652da70d62f7127372ac6d1e
|
[
"Apache-2.0"
] | 1
|
2020-05-24T02:41:24.000Z
|
2020-05-24T02:41:24.000Z
|
scripts/corpinfo.py
|
HiroshiOhta/GetCorporationInfo
|
3c64ba44a15d481c652da70d62f7127372ac6d1e
|
[
"Apache-2.0"
] | null | null | null |
scripts/corpinfo.py
|
HiroshiOhta/GetCorporationInfo
|
3c64ba44a15d481c652da70d62f7127372ac6d1e
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# 標準ライブラリ
from pathlib import Path
from re import search, sub
from sys import exit, argv
from xml.etree import ElementTree as ET
import csv
# サードパーティライブラリ
from requests import get
from requests.exceptions import Timeout, RequestException
# ローカルなライブラリ
from constants import ENC_API_KEY, NTA_API_URL
from crypt_string import decrypt_strings
def validate_number(corp_number: str) -> bool:
    """Validate a 13-digit Japanese corporate number via its check digit.

    The first (most significant) digit is a check digit defined as
    ``9 - ((2 * sum(even positions) + sum(odd positions)) % 9)``, where
    positions are counted from the least significant digit of the
    remaining 12 digits.

    Parameters
    ----------
    corp_number : str
        Candidate corporate number.

    Returns
    -------
    bool
        True only for a string of exactly 13 ASCII digits whose check
        digit is correct.

    Notes
    -----
    Fix: the original raised ``ValueError`` on non-digit input and silently
    computed a meaningless answer for strings that were not 13 characters
    long; both now simply return False.
    """
    if len(corp_number) != 13 or not (corp_number.isascii() and corp_number.isdigit()):
        return False

    digits = [int(c) for c in corp_number]
    check_digit, body = digits[0], digits[1:]

    # STEP 1: 2 * (sum of even positions from the right) + (sum of odd positions).
    step1 = sum(body[-2::-2]) * 2 + sum(body[-1::-2])
    # STEP 2/3: the check digit equals 9 minus (step1 mod 9).
    return check_digit == 9 - (step1 % 9)
def get_corp_info(api_key: str, corp_number: str) -> None:
    """
    Query the National Tax Agency corporate-number Web API, print the
    matching corporation records, and save them to ../log/corp_info.csv.

    Parameters
    ----------
    api_key : str
        Application ID for the NTA Web API.
    corp_number : str
        Corporate number(s); multiple numbers may be comma-separated.

    Returns
    -------
    None
        Results are printed and written to CSV rather than returned.
    """
    # Build the query parameters
    # ------------------------------------------------------------------------------
    params = {
        'id': api_key,
        'number': corp_number,
        'type': '12',
        'history': '0',
    }
    # Fetch the corporate information
    # ------------------------------------------------------------------------------
    try:
        response = get(NTA_API_URL, params=params, timeout=3.0)
        response.raise_for_status()
    except Timeout as err:
        # TODO: switch to logging output.
        print(err)
        print("タイムアウトしました。")
        exit(11)
    except RequestException as err:
        # TODO: switch to logging output.
        print(err)
        exit(12)
    # Parse the XML and emit the results
    # ------------------------------------------------------------------------------
    root = ET.fromstring(response.text)
    # The first 4 children of the response root are presumably metadata
    # preceding the corporation elements — TODO confirm against the schema.
    num = 4
    corp_info_list = [["法人番号", "最終更新年月日", "商号又は名称",
                       "本店又は主たる事務所の所在地", "郵便番号", "商号又は名称(フリガナ)"]]
    if num >= len(root):
        # TODO: switch to logging output.
        print("指定された法人番号(" + corp_number + ")のデータが存在しません。")
    else:
        # Child indices (1, 4, 6, 9-11, 15, 28) are assumed to match the
        # header row above — NOTE(review): verify against the NTA schema.
        while num < len(root):
            corp_info_list.append([root[num][1].text,
                                   root[num][4].text,
                                   root[num][6].text,
                                   root[num][9].text +
                                   root[num][10].text +
                                   root[num][11].text,
                                   sub(r'([0-9]{3})([0-9]{4})',
                                       r'\1-\2', root[num][15].text),
                                   root[num][28].text])
            num += 1
        # Print each record as "label : value" pairs (fixed display order).
        for corp_info in corp_info_list[1:]:
            print("{0: <14} : {1}".format(corp_info_list[0][0], corp_info[0]))
            print("{0: <14} : {1}".format(corp_info_list[0][2], corp_info[2]))
            print("{0: <14} : {1}".format(corp_info_list[0][5], corp_info[5]))
            print("{0: <14} : {1}".format(corp_info_list[0][4], corp_info[4]))
            print("{0: <14} : {1}".format(corp_info_list[0][3], corp_info[3]))
            print("{0: <14} : {1}".format(corp_info_list[0][1], corp_info[1]))
            print("")
        # Persist header + records to CSV (path is relative to the CWD).
        try:
            with open('../log/corp_info.csv', 'w', encoding='utf-8') as csv_out:
                writer = csv.writer(csv_out, lineterminator='\n')
                writer.writerows(corp_info_list)
        except FileNotFoundError as err:
            # TODO: switch to logging output.
            print(err)
        except PermissionError as err:
            # TODO: switch to logging output.
            print(err)
        except csv.Error as err:
            # TODO: switch to logging output.
            print(err)
if __name__ == "__main__":
    # Decrypt the Web-API application ID, optionally with a key file given
    # as the last argument.
    # NOTE(review): with no arguments, argv[-1] is the script path itself,
    # which is a file — the key-file branch is then taken by mistake.
    if Path(argv[-1]).is_file():
        api_key = decrypt_strings(ENC_API_KEY, argv[-1])
        del argv[-1]
    else:
        api_key = decrypt_strings(ENC_API_KEY)
    # Validate the corporate numbers given on the command line.
    if not argv[1:]:
        # TODO: switch to logging output.
        print("法人番号が指定されてません。")
        exit(1)
    else:
        for corp_number in argv[1:]:
            if not search("^[1-9][0-9]{12}$", corp_number):
                # TODO: switch to logging output.
                print("法人番号は13桁で指定して下さい。")
                exit(2)
            elif not validate_number(corp_number):
                # TODO: switch to logging output.
                print("指定された法人番号(" + corp_number + ")は正しくありません。")
                exit(3)
    # Fetch all validated numbers in one comma-separated request.
    corp_numbers = ",".join(map(str, argv[1:]))
    get_corp_info(api_key, corp_numbers)
    exit(0)
| 25.715026
| 84
| 0.518638
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 2,624
| 0.437479
|
e7e46d31c42a93c03c2df71128dd11ecc6e4322c
| 3,289
|
py
|
Python
|
lib/misc.py
|
cripplet/langmuir-hash
|
5b4aa8e705b237704dbb99fbaa89af8cc2e7a8b5
|
[
"MIT"
] | null | null | null |
lib/misc.py
|
cripplet/langmuir-hash
|
5b4aa8e705b237704dbb99fbaa89af8cc2e7a8b5
|
[
"MIT"
] | null | null | null |
lib/misc.py
|
cripplet/langmuir-hash
|
5b4aa8e705b237704dbb99fbaa89af8cc2e7a8b5
|
[
"MIT"
] | null | null | null |
# custom libs
from lib.args import getConf
# Python libs
from re import sub
from os import mkdir
from os.path import exists
from getpass import getuser
from socket import gethostname
def genFrame(file):
    """Build a Frame object from the grid stored in the given file."""
    from classes.frame import Frame
    from lib.array import getGrid
    cells = getGrid(file)
    width = len(cells[0])
    height = len(cells)
    return Frame(width, height, 0, cells)
# given an int (treated as binary list), generate all unique rotational permutations of int (circular shifts)
# http://bit.ly/GLdKmI
def genPermutations(i, width):
    """Return the distinct circular right-shifts of i viewed as a
    width-bit binary value (order unspecified, as in a set)."""
    shifts = set()
    value = i
    for _ in range(width):
        shifts.add(value)
        # Rotate right by one: the low bit wraps to the top position.
        value = (value >> 1) | ((value & 1) << (width - 1))
    return list(shifts)
# given a string representation of a neighbor configuration, return the number of neighbors in the configuration
def getConfigNum(config):
    """Return the number of occupied neighbors ('1' chars) in a
    neighbor-configuration string.

    Fix: the original used len(filter(...)), which raises TypeError on
    Python 3 (filter returns an iterator); str.count gives the same
    result on both Python 2 and 3.
    """
    return config.count("1")
# makes a unique directory
def initDir(dir):
    """Create a uniquely named directory based on dir.

    If dir exists, try dir.1, dir.2, ... until a free name is found.
    Returns the name of the directory actually created.
    """
    candidate = dir
    suffix = 0
    while exists(candidate):
        suffix += 1
        candidate = dir + "." + str(suffix)
    mkdir(candidate)
    return candidate
def pad(i, max):
    """Zero-pad i to the digit width of max, e.g. pad(5, 100) -> "005"."""
    width = len(str(max))
    return str(i).zfill(width)
def resolveBoundary(bound, coord):
    """Wrap coord back into [0, bound) torus-style.

    Handles at most one period of overflow in either direction.
    """
    if coord < 0:
        return coord + bound
    if coord > bound - 1:
        return coord - bound
    return coord
# given an array of lines:
# stripping lines that begin with "#"
# stripping the rest of a line with "#" in the middle
# stripping lines that end with ":"
# remove whitespace
def prep(file):
    """Return cleaned config lines.

    Strips '#' comments and all whitespace, then drops lines that are
    empty or end with ':' (section headers).
    """
    cleaned = []
    for raw in file:
        stripped = sub(r'\s', '', raw.split("#")[0])
        if stripped and not stripped.endswith(":"):
            cleaned.append(stripped)
    return cleaned
# bin() format is "0bxxxxxx"
# [2:] strips "0b"
# [-width:] selects last < width > chars
def toBin(i, width):
    """Render i in binary, truncated/zero-padded to exactly width chars."""
    bits = bin(i)[2:]
    return bits[-width:].zfill(width)
# renders the configuration file
# def renderConfig(folder):
# if(folder[-1] != "/"):
# folder += "/"
# fp = open(folder + "config.conf", "r")
# s = "config file for " + folder[:-1] + ":\n\n"
# for line in fp:
# s += line
# return(s)
def renderConfig(name):
    """Return a printable dump of the config file at path `name`.

    Fix: the original opened the file without ever closing it; the
    `with` block releases the handle deterministically.
    """
    header = "config file for " + name + ":\n\n"
    with open(name, "r") as fp:
        # Concatenating the full content equals the original line loop.
        return header + fp.read()
# given a config file, output a CSV line
def renderCSV(simulation):
    """Render one CSV line summarizing a simulation directory's config.

    Returns an empty tuple when the directory's conf file is missing
    (preserved from the original; callers must treat () as "no data").
    """
    # Probe for the conf file; bail out quietly when absent.
    # NOTE(review): the probe checks "conf.conf" but parameters are read
    # from "config.conf" below — confirm which filename is intended.
    try:
        fp = open(simulation + "/conf.conf", "r")
    except IOError:
        return ()
    else:
        # Fix: the original leaked this probe handle.
        fp.close()
    params = getConf(simulation + "/config.conf")
    # Identify the run as user@host:directory so merged CSVs stay unique.
    s = getuser() + "@" + gethostname() + ":" + simulation + ","
    s += str(params["steps"]) + ","
    s += str(params["dens"]) + ","
    s += str(params["hori"]) + ","
    s += str(params["diag"]) + ","
    s += str(params["beta"]) + ","
    # One column per neighbor-configuration energy, grouped by occupancy.
    s += str(params["energies"][0]["000000"]) + ","
    s += str(params["energies"][1]["000001"]) + ","
    s += str(params["energies"][2]["000011"]) + ","
    s += str(params["energies"][2]["000101"]) + ","
    s += str(params["energies"][2]["001001"]) + ","
    s += str(params["energies"][3]["000111"]) + ","
    s += str(params["energies"][3]["001011"]) + ","
    s += str(params["energies"][3]["010011"]) + ","
    s += str(params["energies"][3]["010101"]) + ","
    s += str(params["energies"][4]["001111"]) + ","
    s += str(params["energies"][4]["010111"]) + ","
    s += str(params["energies"][4]["011011"]) + ","
    s += str(params["energies"][5]["011111"]) + ","
    s += str(params["energies"][6]["111111"])
    return s
| 28.353448
| 112
| 0.617817
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,325
| 0.402858
|
e7e6f4d9ac01c5dc81ed803d1582d06a2e43feb7
| 5,538
|
py
|
Python
|
actions/geoip.py
|
cognifloyd/stackstorm-networking_utils
|
56bbb6fc55f7662c2e7e7cccd79f1ebbfcb1df38
|
[
"Apache-2.0"
] | null | null | null |
actions/geoip.py
|
cognifloyd/stackstorm-networking_utils
|
56bbb6fc55f7662c2e7e7cccd79f1ebbfcb1df38
|
[
"Apache-2.0"
] | null | null | null |
actions/geoip.py
|
cognifloyd/stackstorm-networking_utils
|
56bbb6fc55f7662c2e7e7cccd79f1ebbfcb1df38
|
[
"Apache-2.0"
] | null | null | null |
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import six
import ipaddress
import geoip2.database
from st2common.runners.base_action import Action
class GeoIpAction(Action):
    """StackStorm action that looks up GeoIP2 details for IP addresses."""

    def _get_databases(self):
        """
        Try to open all the GeoIP2 databases we need.

        Returns a tuple (reader_isp, reader_asn, reader_city); each entry
        is None when its configured database file cannot be opened.
        """
        try:
            reader_isp = geoip2.database.Reader(self.config['isp_db'])
        except IOError:
            reader_isp = None
        try:
            reader_asn = geoip2.database.Reader(self.config['asn_db'])
        except IOError:
            reader_asn = None
        try:
            reader_city = geoip2.database.Reader(self.config['city_db'])
        except IOError:
            reader_city = None
        return (reader_isp, reader_asn, reader_city)

    def run(self, ip_addresses):
        """
        Return GeoIP information about a list of IP addresses.

        Args:
        - ip_addresses: iterable of IP address strings to look up.

        Returns:
        (status, results) tuple: status is False with an 'error' entry
        when no GeoIP2 database could be opened; otherwise results['geoip']
        maps each input IP to a dict of name/value detail entries (or an
        'error' entry for invalid/private IPs).
        """
        results = {"geoip": {}}
        status = False
        (reader_isp, reader_asn, reader_city) = self._get_databases()
        if reader_city is None and reader_isp is None and reader_asn is None:
            results['error'] = "No GeoIP2 databases"
            return (status, results)
        else:
            status = True
        try:
            for ip_address in ip_addresses:
                details = {}
                # As ipaddress is a backport from Python 3.3+ it errors if the
                # ip address is a string and not unicode.
                try:
                    ip_obj = ipaddress.ip_address(six.text_type(ip_address))
                except ValueError as e:
                    # Record the parse failure for this IP and keep going.
                    results['geoip'][ip_address] = {
                        'error': {'name': "Error",
                                  'value': "Invalid IP: {}".format(e)}
                    }
                    continue
                if ip_obj.is_private:
                    # Private ranges are not present in the GeoIP databases.
                    details['error'] = {'name': "Error",
                                        'value': "Private IP"}
                    results['geoip'][ip_address] = details
                    continue
                # Prefer the ISP database (superset of ASN data); fall back
                # to the ASN database when only that one is available.
                if reader_isp:
                    response = reader_isp.isp(ip_address)
                    details['as_num'] = {
                        'name': "AS Number",
                        'value': response.autonomous_system_number}
                    details['as_org'] = {
                        'name': "AS Org",
                        'value': response.autonomous_system_organization}
                    details['isp'] = {'name': "ISP",
                                      'value': response.isp}
                    details['org'] = {'name': "Org",
                                      'value': response.organization}
                elif reader_asn:
                    response = reader_asn.asn(ip_address)
                    details['as_num'] = {
                        'name': "AS Number",
                        'value': response.autonomous_system_number}
                    details['as_org'] = {
                        'name': "AS Org",
                        'value': response.autonomous_system_organization}
                if reader_city:
                    response = reader_city.city(ip_address)
                    details['city'] = {'name': "City",
                                       'value': response.city.name}
                    details['country'] = {'name': "Country",
                                          'value': response.country.name}
                    details['lat'] = {'name': "Lat",
                                      'value': response.location.latitude}  # NOQA pylint: disable=no-member
                    details['lon'] = {'name': "Lon",
                                      'value': response.location.longitude}  # NOQA pylint: disable=no-member
                    url = "maps.google.com"
                    details['link'] = {
                        'name': "Google Map",
                        'value': "https://{url}/maps/place//@{lat},{lon},{z}z".format(
                            url=url,
                            z=10,
                            lat=details['lat']['value'],
                            lon=details['lon']['value'])}
                results['geoip'][ip_address] = details
        except Exception:
            self.logger.error("Something went really wrong!")
            raise
        finally:
            # Always release the database readers, even on failure.
            if reader_city:
                reader_city.close()
            if reader_isp:
                reader_isp.close()
            if reader_asn:
                reader_asn.close()
        return (status, results)
| 36.434211
| 109
| 0.501083
| 4,654
| 0.840376
| 0
| 0
| 0
| 0
| 0
| 0
| 1,882
| 0.339834
|
e7e747c17639e0dcf83dd1ce0bf4d49fb48d32c9
| 6,372
|
py
|
Python
|
backend/src/dealer/helpers/result.py
|
codepals-org/poker
|
8b58df2ff4d3d9799c42652a9d6942d8ec6b3707
|
[
"MIT"
] | 2
|
2020-11-07T16:37:14.000Z
|
2020-11-07T17:11:24.000Z
|
backend/src/dealer/helpers/result.py
|
codepals-org/poker
|
8b58df2ff4d3d9799c42652a9d6942d8ec6b3707
|
[
"MIT"
] | 7
|
2020-11-07T14:04:06.000Z
|
2020-11-11T11:49:13.000Z
|
backend/src/dealer/helpers/result.py
|
codepals-org/poker
|
8b58df2ff4d3d9799c42652a9d6942d8ec6b3707
|
[
"MIT"
] | 1
|
2020-11-08T13:00:27.000Z
|
2020-11-08T13:00:27.000Z
|
""" This module comes with functions to decide which poker player out
of all players has the best cards.
"""
import itertools
# full_list in [('A','A'),('B','B')...,('F','F')]
def results(full_list, public_card):
    """ The results function takes a list of player cards and
    the community cards (in the middle of the table) and calculates
    who of the players has the winning hand.

    Cards are strings like '9C' or '10S': last char is the suit, the
    char before it encodes the rank ('0' stands for 10, see below).
    Returns (winner, winner_card_type): winner is a tuple of indices of
    tying best players, winner_card_type the matching hand-type names.
    """
    #public_card = ['6H', '6D', '5S', '4S', '8S']
    #full_list = [['9C', 'AS'], ['9H', '5C'], ['4D', '2S'], ['KC', '2D'], ['9D', '10C']]
    high_comb_rank = []
    high_type_rank = []
    high_point_rank = []
    public_card_temp = []
    winner_card_type = []
    public_card_temp.extend(list(public_card))
    total_players = len(full_list)
    for player_card_check in full_list:
        # NOTE(review): += mutates the caller's hand lists in place.
        player_card_check += public_card
        # Evaluate every 5-card combination of the 7 available cards.
        card_combinations = list(itertools.combinations(player_card_check, 5))
        color_all = []
        size_all = []
        for card_combination in card_combinations:
            # Suit of each card (last character).
            color_current = []
            for card in card_combination:
                color_current.append(str(card[-1]))
            color_all.append(color_current)
            # Numeric rank of each card; card[-2] of '10X' is '0' -> 10.
            size_current = []
            for card in card_combination:
                if card[-2].isdigit():
                    size5 = int(card[-2])
                    if size5 == 0:
                        size5 = 10
                else:
                    if card[-2] == "J":
                        size5 = 11
                    elif card[-2] == "Q":
                        size5 = 12
                    elif card[-2] == "K":
                        size5 = 13
                    elif card[-2] == "A":
                        size5 = 14
                size_current.append(size5)
            size_all.append(size_current)
        card_type_all = []
        type_score_all = []
        high_card_all = []
        win_point = []
        for i, card_combination in enumerate(card_combinations):
            color = color_all[i]
            size = size_all[i]
            high_card = []
            card_type = []
            size_set = list(set(size))
            # while/else idiom: the loop body runs (once, then breaks) only
            # when all five suits match; otherwise the else branch runs.
            while len(set(color)) == 1:
                if max(size) - min(size) == 4:
                    card_type = 'Straight flush'
                    high_card = max(size)
                    break
                else:
                    # NOTE(review): flush tiebreak uses the pip SUM, not
                    # the top card — quirk preserved from the original.
                    card_type = 'Flush'
                    high_card = sum(size)
                    break
            else:
                if len(set(size)) == 5:
                    if max(size) - min(size) == 4:
                        # Five distinct ranks spanning 4 => straight.
                        if sorted(size)[2] == sum(size) / len(size):
                            card_type = 'Straight'
                            high_card = max(size)
                    elif max(size) - min(size) == 12:
                        # A-2-3-4-5 wheel: ranks sum to 28 with ace high.
                        if sum(size) == 28:
                            card_type = 'Straight'
                            high_card = 5
                        else:
                            card_type = 'High card'
                            high_card = sum(size)
                    else:
                        card_type = 'High card'
                        high_card = sum(size)
                elif len(size) - 1 == len(set(size)):
                    # Exactly one duplicated rank.
                    card_type = 'One pair'
                    high_card = max([x for n, x in enumerate(size) if x in size[:n]])
                elif len(size) - 2 == len(set(size)):
                    # Three distinct ranks: trips or two pairs; cancel one
                    # occurrence of each distinct rank to find the leftovers.
                    size_temp = []
                    size_temp.extend(size)
                    for a in range(0, 5):
                        for b in range(0, 3):
                            if size[a] == size_set[b]:
                                size[a] = 0
                                size_set[b] = 0
                    last = [x for x in size if x != 0]
                    size = []
                    size.extend(size_temp)
                    if last[0] == last[1]:
                        card_type = 'Three of a kind'
                        high_card = max([x for n, x in enumerate(size) if x in size[:n]])
                    else:
                        card_type = 'Two pairs'
                        high_card = sum([x for n, x in enumerate(size) if x in size[:n]])
                elif len(size) - 3 == len(set(size)):
                    # Two distinct ranks: quads or full house.
                    for a in range(0, 5):
                        for b in range(0, 2):
                            if size[a] == size[b]:
                                size[a] = 0
                                size_set[b] = 0
                    last = [x for x in size if x != 0]
                    if last[0] == last[1] == last[2]:
                        card_type = 'Four of a kind'
                        high_card = max([x for n, x in enumerate(size) if x in size[:n]])
                    else:
                        card_type = 'Full house'
                        high_card = max([x for n, x in enumerate(size) if x in size[:n]])
            # Map the hand type to a base score; combined score is
            # type * 100 + tiebreak value.
            type_score = []
            if card_type == 'Straight flush':
                type_score = 9
            elif card_type == 'Four of a kind':
                type_score = 8
            elif card_type == 'Full house':
                type_score = 7
            elif card_type == 'Flush':
                type_score = 6
            elif card_type == 'Straight':
                type_score = 5
            elif card_type == 'Three of a kind':
                type_score = 4
            elif card_type == 'Two pairs':
                type_score = 3
            elif card_type == 'One pair':
                type_score = 2
            elif card_type == 'High card':
                type_score = 1
            card_type_all.append(card_type)
            high_card_all.append(high_card)
            win_point.append(type_score * int(100) + high_card)
        # Keep only this player's best 5-card combination.
        high_point = max(win_point)
        locate = win_point.index(max(win_point))
        high_comb = card_combinations[locate]
        high_type = card_type_all[locate]
        high_point_rank.append(high_point)
        high_comb_rank.append(high_comb)
        high_type_rank.append(high_type)
    # All players tied at the maximum score share the win.
    winner = ()
    for i in range(len(high_point_rank)):
        if high_point_rank[i] == max(high_point_rank):
            winner += (i,)
    for i in winner:
        a = int(i)
        b = high_type_rank[a]
        winner_card_type.append(b)
    return (winner, winner_card_type)
| 38.155689
| 91
| 0.44005
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 719
| 0.112837
|
e7e78d1aba44146a11b4493e469f13a8468f2449
| 420
|
py
|
Python
|
nimble-newts/askgrieves/chatbot/models.py
|
Vthechamp22/summer-code-jam-2021
|
0a8bf1f22f6c73300891fd779da36efd8e1304c1
|
[
"MIT"
] | 40
|
2020-08-02T07:38:22.000Z
|
2021-07-26T01:46:50.000Z
|
nimble-newts/askgrieves/chatbot/models.py
|
Vthechamp22/summer-code-jam-2021
|
0a8bf1f22f6c73300891fd779da36efd8e1304c1
|
[
"MIT"
] | 134
|
2020-07-31T12:15:45.000Z
|
2020-12-13T04:42:19.000Z
|
nimble-newts/askgrieves/chatbot/models.py
|
AvianAnalyst/summer-code-jam-2020
|
c5e2aeb4ce399c438a1b8aad393d9c2e9ef98a75
|
[
"MIT"
] | 101
|
2020-07-31T12:00:47.000Z
|
2021-11-01T09:06:58.000Z
|
from django.db import models
class Chatbot(models.Model):
    """A named chatbot instance."""
    # Display name of the chatbot.
    name = models.CharField(max_length=500)
    def __str__(self):
        return f"name={self.name}"
class WikiArticle(models.Model):
    """A stored wiki article with its summary and full text."""
    # Article title.
    name = models.CharField(max_length=500)
    # NOTE(review): Django does not enforce max_length on TextField at the
    # database level — confirm the intended size limits for these fields.
    summary = models.TextField(max_length=100)
    full_page = models.TextField(max_length=500)
    def __str__(self):
        return f"name={self.name} summary={self.summary}"
| 23.333333
| 57
| 0.7
| 385
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 61
| 0.145238
|
e7e9d221f1fcec4aa818bff540aa8cfe75c86d5f
| 1,026
|
py
|
Python
|
examples/example_wait_for.py
|
plun1331/discord.py-components-1
|
a31b1a0cfbd31b98d01e910ed905c9c70afe0c3e
|
[
"MIT"
] | 1
|
2021-08-07T18:40:36.000Z
|
2021-08-07T18:40:36.000Z
|
examples/example_wait_for.py
|
plun1331/discord.py-components-1
|
a31b1a0cfbd31b98d01e910ed905c9c70afe0c3e
|
[
"MIT"
] | null | null | null |
examples/example_wait_for.py
|
plun1331/discord.py-components-1
|
a31b1a0cfbd31b98d01e910ed905c9c70afe0c3e
|
[
"MIT"
] | null | null | null |
from discord.ext.commands import Bot
from discord_components import DiscordComponents, Button, ButtonStyle, InteractionType
from asyncio import TimeoutError
# Command prefix "!" for all bot commands.
bot = Bot("!")
@bot.event
async def on_ready():
    """Attach the components extension once the bot has connected."""
    DiscordComponents(bot)
    print(f"Logged in as {bot.user}!")
@bot.command()
async def waitforclick(ctx):
    """Send a button and answer the first click by the invoking user in
    this channel; disable the button if no click arrives within 15 s."""
    m = await ctx.send(
        "Buttons waiting for a click",
        components=[
            Button(style=ButtonStyle.red, label="Click Me!"),
        ],
    )
    def check(res):
        # Accept only clicks from the command author in the same channel.
        return ctx.author == res.user and res.channel == ctx.channel
    try:
        res = await bot.wait_for("button_click", check=check, timeout=15)
        await res.respond(
            type=InteractionType.ChannelMessageWithSource, content=f"{res.component.label} pressed"
        )
    except TimeoutError:
        await m.edit(
            "Prompt timed out!",
            components=[
                Button(style=ButtonStyle.red, label="Timed out!", disabled=True),
            ],
        )
bot.run("TOKEN")  # NOTE: placeholder — supply a real bot token.
| 24.428571
| 99
| 0.621832
| 0
| 0
| 0
| 0
| 828
| 0.807018
| 802
| 0.781676
| 154
| 0.150097
|
e7ea14302b331a9466a14df8ced10e7042b53923
| 7,081
|
py
|
Python
|
core/data/dataloader/upb_kitti.py
|
nemodrive/awesome-semantic-segmentation-pytorch
|
fa0e4174004822ace0560cc046c2fbdb81f1e1b9
|
[
"Apache-2.0"
] | null | null | null |
core/data/dataloader/upb_kitti.py
|
nemodrive/awesome-semantic-segmentation-pytorch
|
fa0e4174004822ace0560cc046c2fbdb81f1e1b9
|
[
"Apache-2.0"
] | null | null | null |
core/data/dataloader/upb_kitti.py
|
nemodrive/awesome-semantic-segmentation-pytorch
|
fa0e4174004822ace0560cc046c2fbdb81f1e1b9
|
[
"Apache-2.0"
] | null | null | null |
"""Pascal VOC Semantic Segmentation Dataset."""
import os
import torch
import numpy as np
from PIL import Image
from .segbase import SegmentationDataset
class VOCSegmentation(SegmentationDataset):
    """KITTI self-supervised road segmentation dataset.

    Adapted from a Pascal VOC dataset class; internal variable names still
    follow the VOC directory-layout template.

    Parameters
    ----------
    root : string
        Path to the dataset root folder.
    split: string
        'train', 'val' or 'test'
    transform : callable, optional
        A function that transforms the image
    """
    BASE_DIR = 'labels'
    NUM_CLASS = 1  # 1 for soft labels

    def __init__(self, root='/HDD1_2TB/storage/kitti_self_supervised_labels', split='train', mode=None, transform=None,
                 **kwargs):
        # Fix: the original called super(KITTISegmentation, self), but no
        # class of that name exists in this module (NameError at runtime).
        super(VOCSegmentation, self).__init__(root, split, mode, transform, **kwargs)
        _voc_root = os.path.join(root, self.BASE_DIR)
        _mask_dir = os.path.join(_voc_root, 'SegmentationClass')
        _image_dir = os.path.join(_voc_root, 'JPEGImages')
        _path_mask_dir = os.path.join(_voc_root, 'SoftRoadGaussianLabels')
        # train/val/test splits are pre-cut
        _splits_dir = os.path.join(_voc_root, 'ImageSets/Segmentation')
        if split == 'train':
            _split_f = os.path.join(_splits_dir, 'train')
        elif split == 'val':
            _split_f = os.path.join(_splits_dir, 'val')
        elif split == 'test':
            _split_f = os.path.join(_splits_dir, 'test')
        else:
            raise RuntimeError('Unknown dataset split.')
        self.images = []
        self.masks = []
        self.path_masks = []
        self.cmds = []
        # Each split-file line is "<relative image path>,<command id>".
        with open(os.path.join(_split_f), "r") as lines:
            for line in lines:
                file_name = line.split(',')[0]
                cmd = line.split(',')[1]
                _image = os.path.join(_image_dir, file_name)
                assert os.path.isfile(_image)
                # Label files use backslash-flattened names on disk.
                _path_mask = os.path.join(_path_mask_dir, file_name.replace('/', '\\'))
                assert os.path.isfile(_path_mask)
                self.images.append(_image)
                self.path_masks.append(_path_mask)
                self.cmds.append(cmd)
                if split != 'test':
                    _mask = os.path.join(_mask_dir, file_name.replace('/', '\\'))
                    assert os.path.isfile(_mask)
                    self.masks.append(_mask)
        if split != 'test':
            assert (len(self.images) == len(self.masks))
        print('Found {} images in the folder {}'.format(len(self.images), _voc_root))

    def __getitem__(self, index):
        """Return (img, filename) in test mode; otherwise
        (img, mask, path_mask, image path)."""
        img = Image.open(self.images[index]).convert('RGB')
        if self.mode == 'test':
            img = self._img_transform(img)
            if self.transform is not None:
                img = self.transform(img)
            return img, os.path.basename(self.images[index])
        # Quantize the hard mask to num_class + 1 palette entries.
        mask = Image.open(self.masks[index]).quantize(self.num_class + 1)
        path_mask = Image.open(self.path_masks[index]).convert('RGB')
        # synchronized transform
        if self.mode == 'train':
            img, mask, path_mask = self._sync_transform(img, mask, path_mask)
        elif self.mode == 'val':
            img, mask, path_mask = self._val_sync_transform(img, mask, path_mask)
        else:
            assert self.mode == 'testval'
            img, mask = self._img_transform(img), self._mask_transform(mask)
        # general resize, normalize and toTensor
        if self.transform is not None:
            img = self.transform(img)
        # NOTE(review): `transforms` is not imported in this module — this
        # line raises NameError; presumably torchvision.transforms was
        # intended. Confirm and add the import at file level.
        path_mask = transforms.ToTensor()(path_mask)
        # Keep the green channel as a single-channel soft road label.
        path_mask = path_mask[1].unsqueeze(0)
        if path_mask.max() != 0:
            path_mask = path_mask / path_mask.max()
        return img, mask, path_mask, self.images[index]

    def __len__(self):
        return len(self.images)

    def _mask_transform(self, mask):
        """Convert a PIL mask to a long tensor; 255 (ignore) becomes -1."""
        target = np.array(mask).astype('int32')
        target[target == 255] = -1
        return torch.from_numpy(target).long()

    @property
    def classes(self):
        """Category names."""
        return ('path', 'rest')
class KITTIImageSampler(Sampler):
    """Sampler that draws dataset indices with per-command probabilities.

    NOTE(review): `Sampler` is not imported in this module — presumably
    torch.utils.data.Sampler; confirm and add the import.
    """

    def __init__(self, image_data, prob_weights):
        # image_data: per-index command ids (expected as ints 0..5; the
        # dataset above stores them as strings — TODO confirm the caller
        # converts them).
        self.image_data = image_data
        # Get dataset length in terms of video frames and start frame for each video
        self.start_frames = []
        self.len = len(image_data)
        self.seen = 0
        # One bucket of dataset indices per command id, plus a cursor each.
        self.samples_cmd = {0: [], 1: [], 2: [], 3: [], 4: [], 5: []}
        self.samples_idx = {0: 0, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0}
        self._population = [0, 1, 2, 3, 4, 5]
        self._weights = prob_weights
        self._split_samples()
        for key in self.samples_cmd.keys():
            np.random.shuffle(self.samples_cmd[key])

    def __len__(self):
        return self.len

    def __iter__(self):
        return self

    def next(self):
        # Python 2 compatibility alias.
        return self.__next__()

    def __next__(self):
        """Draw a command id by weight, then the next index from its bucket."""
        # added this while because samples_cmd[sample_type] could be empty
        while True:
            sample_type = np.random.choice(self._population, p=self._weights)
            if self.samples_cmd[sample_type]:
                break
        idx = self.samples_cmd[sample_type][self.samples_idx[sample_type]]
        self.samples_idx[sample_type] += 1
        # Wrap the bucket cursor, reshuffling when it runs out.
        if self.samples_idx[sample_type] >= len(self.samples_cmd[sample_type]):
            self.samples_idx[sample_type] = 0
            np.random.shuffle(self.samples_cmd[sample_type])
        self.seen += 1
        # One epoch = self.len draws; reset all buckets and stop iteration.
        if self.seen >= self.len:
            for key in self.samples_cmd.keys():
                np.random.shuffle(self.samples_cmd[key])
                self.samples_idx[key] = 0
            self.seen = 0
            raise StopIteration
        return idx

    def _split_samples(self):
        """Partition dataset indices into per-command buckets."""
        index = 0
        for j in range(len(self.image_data)):
            cmd = self.image_data[j]
            self.samples_cmd[cmd].append(index)
            index += 1
if __name__ == '__main__':
    # NOTE(review): KITTISegmentation is not defined in this module (the
    # dataset class above is named VOCSegmentation) — this raises NameError.
    dataset = KITTISegmentation()
| 36.880208
| 141
| 0.591018
| 6,858
| 0.968507
| 0
| 0
| 94
| 0.013275
| 0
| 0
| 1,975
| 0.278915
|
e7ea5fbf2a5ea893fa5d02bc075a60e6e8983358
| 4,580
|
py
|
Python
|
app/request.py
|
angelakarenzi5/News-Highlight
|
3eae6f743f9e5d9eb4ea80b29ae0e2c57dd0aa62
|
[
"Unlicense"
] | null | null | null |
app/request.py
|
angelakarenzi5/News-Highlight
|
3eae6f743f9e5d9eb4ea80b29ae0e2c57dd0aa62
|
[
"Unlicense"
] | null | null | null |
app/request.py
|
angelakarenzi5/News-Highlight
|
3eae6f743f9e5d9eb4ea80b29ae0e2c57dd0aa62
|
[
"Unlicense"
] | null | null | null |
from app import app
import urllib.request,json
from .models import source
from .models import article
# Shortcuts to the model classes used by the processors below.
Source = source.Source
Article = article.Article
# NewsAPI key from the Flask app config.
api_key = app.config['NEWS_API_KEY']
# Endpoint URL templates for the sources and articles requests.
base_url = app.config["SOURCE_API_BASE_URL"]
article_url = app.config["ARTICLE_API_BASE_URL"]
def process_results(source_list):
    '''
    Function that processes the source result and transforms it into a
    list of objects

    Args:
        source_list: A list of dictionaries that contain source details

    Returns :
        source_results: A list of source objects (entries without a url
        are skipped)
    '''
    source_results = []
    for item in source_list:
        url = item.get('url')
        if url:
            source_results.append(Source(
                item.get('id'),
                item.get('name'),
                item.get('description'),
                url,
                item.get('category'),
                item.get('language'),
                item.get('country'),
            ))
    return source_results
def get_sources(category):
    '''
    Fetch news sources for the given category from the API and return
    them as a list of Source objects (None when the response has none).
    '''
    get_sources_url = base_url.format(category,api_key)
    with urllib.request.urlopen(get_sources_url) as url:
        get_sources_data = url.read()
        get_sources_response = json.loads(get_sources_data)
        source_results = None
        if get_sources_response['sources']:
            source_results_list = get_sources_response['sources']
            source_results = process_results(source_results_list)
    return source_results
def get_articles(category):
    '''
    Fetch articles for the given category from the API.

    NOTE(review): this definition is shadowed by the later
    get_articles(source) in this module, so it is dead code; it also runs
    article dicts through process_results (the *source* converter) rather
    than process_articles — fix before reviving it.
    '''
    get_articles_url = article_url.format(category,api_key)
    with urllib.request.urlopen(get_articles_url) as url:
        get_articles_data = url.read()
        get_articles_response = json.loads(get_articles_data)
        article_results = None
        if get_articles_response['articles']:
            article_results_list = get_articles_response['articles']
            article_results = process_results(article_results_list)
    return article_results
def get_source(id):
    '''
    Fetch details for one source and return a Source object (None when
    the response is empty).

    NOTE(review): the request URL is built from article_url rather than a
    source-detail endpoint, and the response is read as a flat dict —
    verify against the actual API before relying on this.
    '''
    get_sources_details_url = article_url.format(id,api_key)
    with urllib.request.urlopen(get_sources_details_url) as url:
        source_details_data = url.read()
        source_details_response = json.loads(source_details_data)
        source_object = None
        if source_details_response:
            id = source_details_response.get('id')
            name = source_details_response.get('name')
            description = source_details_response.get('description')
            url = source_details_response.get('url')
            category = source_details_response.get('category')
            language = source_details_response.get('language')
            country = source_details_response.get('country')
            source_object = Source(id,name,description,url,category,language,country)
    return source_object
def process_articles(article_list):
    '''
    Function that processes the article result and transforms it into a
    list of objects

    Args:
        article_list: A list of dictionaries that contain article details

    Returns :
        article_results: A list of article objects (entries without a url
        are skipped)
    '''
    article_results = []
    for item in article_list:
        url = item.get('url')
        if url:
            article_results.append(Article(
                item.get('author'),
                item.get('title'),
                item.get('description'),
                url,
                item.get('urlToImage'),
                item.get('publishedAt'),
                item.get('content'),
            ))
    return article_results
def get_articles(source):
    '''
    Fetch articles published by the given source and return them as a
    list of Article objects (None when the response has none).
    '''
    get_articles_url = article_url.format(source,api_key)
    with urllib.request.urlopen(get_articles_url) as url:
        get_articles_data = url.read()
        get_articles_response = json.loads(get_articles_data)
        article_results = None
        if get_articles_response['articles']:
            article_results_list = get_articles_response['articles']
            article_results = process_articles(article_results_list)
    return article_results
| 31.156463
| 98
| 0.691921
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,050
| 0.229258
|
e7ea9b418ef09dc2361de5d9ada98bfd38198af3
| 19
|
py
|
Python
|
login.py
|
XM001-creater/test_one
|
1cf96a45c8dfbf988125e3d250d86fb06fe65c34
|
[
"MIT"
] | null | null | null |
login.py
|
XM001-creater/test_one
|
1cf96a45c8dfbf988125e3d250d86fb06fe65c34
|
[
"MIT"
] | null | null | null |
login.py
|
XM001-creater/test_one
|
1cf96a45c8dfbf988125e3d250d86fb06fe65c34
|
[
"MIT"
] | null | null | null |
# Module-level test constants; nothing in this chunk reads them.
num1 =1
num2 = 222
| 6.333333
| 10
| 0.631579
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
e7ecc557e33faf2b68bd5445272a43c0e0419ea1
| 445
|
py
|
Python
|
change_file_name.py
|
Guzhongren/picuture2thumbnail
|
15d58c2e53652e5c5af9ff1bf89883b9038bfa03
|
[
"MIT"
] | 1
|
2019-07-07T17:51:37.000Z
|
2019-07-07T17:51:37.000Z
|
change_file_name.py
|
Guzhongren/picuture2thumbnail
|
15d58c2e53652e5c5af9ff1bf89883b9038bfa03
|
[
"MIT"
] | null | null | null |
change_file_name.py
|
Guzhongren/picuture2thumbnail
|
15d58c2e53652e5c5af9ff1bf89883b9038bfa03
|
[
"MIT"
] | 1
|
2020-01-19T08:27:10.000Z
|
2020-01-19T08:27:10.000Z
|
# -*- coding: utf-8 -*-
# Author:Guzhongren
# created: 2017-05-08
import os
# Hard-coded Windows folder whose files are renamed in place.
path = 'C:\\geoconFailed\\willfix\\'
for file in os.listdir(path):
    if os.path.isfile(os.path.join(path,file))==True:
        # Drop the last character of the base name, keep the extension.
        # NOTE(review): split(".") breaks for names with no dot
        # (IndexError) or multiple dots (extension mis-detected).
        _file= file.split(".")
        _file_name=_file[0]
        _file_type=_file[1]
        new_file_name=_file_name[:-1]+"."+_file_type
        os.rename(os.path.join(path,file), os.path.join(path, new_file_name))
        print(file+u"更名成功")
| 27.8125
| 77
| 0.624719
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 113
| 0.249448
|
e7ed80b597ccfb79e5e0d84b01e14970f4384658
| 434
|
py
|
Python
|
day22/day22.py
|
norbert-e-horn/adventofcode-2017
|
81a6a8eb6f23f2191786d1ea8b2aad1f54d9c12a
|
[
"Apache-2.0"
] | null | null | null |
day22/day22.py
|
norbert-e-horn/adventofcode-2017
|
81a6a8eb6f23f2191786d1ea8b2aad1f54d9c12a
|
[
"Apache-2.0"
] | null | null | null |
day22/day22.py
|
norbert-e-horn/adventofcode-2017
|
81a6a8eb6f23f2191786d1ea8b2aad1f54d9c12a
|
[
"Apache-2.0"
] | null | null | null |
import sys
# Golfed Advent of Code 2017 day 22 ("Sporifica Virus") solution.
# Cell states are ints mod 4: 0 clean, 1 weakened, 2 infected, 3 flagged.
# '#' cells start infected (2).
c=[[2if a=="#"else 0for a in i]for i in sys.argv[1].split("\n")]
n=len(c)
# m(x): one virus burst on the flattened grid `a`.
# d = [direction, flat position, infection counter]; turning depends on
# the current cell state, the cell advances by x states (mod 4), and the
# move offset is +/-s or +/-1 depending on direction.
def m(x):
	d[0]=(d[0]+a[d[1]]+3)%4
	a[d[1]]=(x+a[d[1]])%4
	if a[d[1]]==2:d[2]+=1
	d[1]+=(s+(1-s)*(d[0]&1))*(-1+2*(d[0]&1^(d[0]&2)>>1))
# Embed the input grid centered in an s-by-s field, flattened row-major.
s=1001
a=[]
k=(s-n)//2
for i in range(s):a+=[0]*k+c[i-k]+k*[0]if k<=i<(s+n)/2else[0]*s
b=list(a)
# Part 1: 10,000 bursts stepping cells by 2 (clean <-> infected).
d=[0,s**2//2,0]
for i in range(10000):m(2)
print(d[2])
# Part 2: fresh grid, 10,000,000 bursts stepping by 1 (all four states).
a=b
d=[0,s**2//2,0]
for i in range(10000000):m(1)
print(d[2])
| 20.666667
| 64
| 0.495392
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 0.016129
|
e7edbdfed8164b295e564361932bcbdae312f33f
| 10,178
|
py
|
Python
|
armory/scenarios/audio_asr.py
|
GuillaumeLeclerc/armory
|
c24928701b4ff6fc37cdb994ea784f9733a8e8da
|
[
"MIT"
] | 1
|
2021-06-17T23:05:58.000Z
|
2021-06-17T23:05:58.000Z
|
armory/scenarios/audio_asr.py
|
GuillaumeLeclerc/armory
|
c24928701b4ff6fc37cdb994ea784f9733a8e8da
|
[
"MIT"
] | null | null | null |
armory/scenarios/audio_asr.py
|
GuillaumeLeclerc/armory
|
c24928701b4ff6fc37cdb994ea784f9733a8e8da
|
[
"MIT"
] | null | null | null |
"""
Automatic speech recognition scenario
"""
import logging
from typing import Optional
from tqdm import tqdm
import numpy as np
from art.preprocessing.audio import LFilter, LFilterPyTorch
from armory.utils.config_loading import (
load_dataset,
load_model,
load_attack,
load_adversarial_dataset,
load_defense_wrapper,
load_defense_internal,
load_label_targeter,
)
from armory.utils import metrics
from armory.scenarios.base import Scenario
from armory.utils.export import SampleExporter
logger = logging.getLogger(__name__)
def load_audio_channel(delay, attenuation, pytorch=True):
    """
    Return an art LFilter object for a simple delay (multipath) channel

    If attenuation == 0 or delay == 0, return an identity channel
    Otherwise, return a channel with length equal to delay + 1

    NOTE: lfilter truncates the end of the echo, so output length equals input length
    """
    delay, attenuation = int(delay), float(attenuation)
    if delay < 0:
        raise ValueError(f"delay {delay} must be a nonnegative number (of samples)")

    if delay and attenuation:
        if not -1 <= attenuation <= 1:
            logger.warning(f"filter attenuation {attenuation} not in [-1, 1]")
        # FIR filter: a unit impulse plus one attenuated echo `delay` samples later.
        numerator_coef = np.zeros(delay + 1)
        numerator_coef[0] = 1.0
        numerator_coef[delay] = attenuation
        denominator_coef = np.zeros_like(numerator_coef)
        denominator_coef[0] = 1.0
    else:
        # Zero delay or zero attenuation degenerates to a pass-through filter.
        logger.warning("Using an identity channel")
        numerator_coef = np.array([1.0])
        denominator_coef = np.array([1.0])

    if pytorch:
        try:
            return LFilterPyTorch(
                numerator_coef=numerator_coef, denominator_coef=denominator_coef
            )
        except ImportError:
            logger.exception("PyTorch not available. Resorting to scipy filter")

    logger.warning("Scipy LFilter does not currently implement proper gradients")
    return LFilter(numerator_coef=numerator_coef, denominator_coef=denominator_coef)
class AutomaticSpeechRecognition(Scenario):
    """Armory scenario that evaluates an automatic-speech-recognition
    estimator on benign and (optionally) adversarial audio examples."""

    def _evaluate(
        self,
        config: dict,
        num_eval_batches: Optional[int],
        skip_benign: Optional[bool],
        skip_attack: Optional[bool],
        skip_misclassified: Optional[bool],
    ) -> dict:
        """
        Evaluate the config and return a results dict
        """
        # skip_misclassified is a classification-scenario option; reject it here.
        if skip_misclassified:
            raise ValueError("skip_misclassified shouldn't be set for ASR scenario")
        model_config = config["model"]
        estimator, fit_preprocessing_fn = load_model(model_config)

        # Optionally prepend a simulated multipath audio channel to the
        # estimator's preprocessing defences.
        audio_channel_config = config.get("adhoc", {}).get("audio_channel")
        if audio_channel_config is not None:
            logger.info("loading audio channel")
            for k in "delay", "attenuation":
                if k not in audio_channel_config:
                    raise ValueError(f"audio_channel must have key {k}")
            audio_channel = load_audio_channel(**audio_channel_config)
            if estimator.preprocessing_defences:
                estimator.preprocessing_defences.insert(0, audio_channel)
            else:
                estimator.preprocessing_defences = [audio_channel]
            # NOTE(review): private ART API -- confirm it still exists in the
            # pinned ART version before upgrading.
            estimator._update_preprocessing_operations()

        # Preprocessor/Postprocessor defenses attach directly to the estimator.
        defense_config = config.get("defense") or {}
        defense_type = defense_config.get("type")
        if defense_type in ["Preprocessor", "Postprocessor"]:
            logger.info(f"Applying internal {defense_type} defense to estimator")
            estimator = load_defense_internal(config["defense"], estimator)

        if model_config["fit"]:
            logger.info(
                f"Fitting model {model_config['module']}.{model_config['name']}..."
            )
            fit_kwargs = model_config["fit_kwargs"]
            logger.info(f"Loading train dataset {config['dataset']['name']}...")
            # Temporarily swap in fit_batch_size (if given) for training only;
            # the original batch_size is restored after loading train_data.
            batch_size = config["dataset"].pop("batch_size")
            config["dataset"]["batch_size"] = fit_kwargs.get(
                "fit_batch_size", batch_size
            )
            train_data = load_dataset(
                config["dataset"],
                epochs=fit_kwargs["nb_epochs"],
                split=config["dataset"].get("train_split", "train_clean100"),
                preprocessing_fn=fit_preprocessing_fn,
                shuffle_files=True,
            )
            config["dataset"]["batch_size"] = batch_size
            if defense_type == "Trainer":
                # A Trainer defense wraps the estimator and owns the fit loop.
                logger.info(f"Training with {defense_type} defense...")
                defense = load_defense_wrapper(config["defense"], estimator)
                defense.fit_generator(train_data, **fit_kwargs)
            else:
                logger.info("Fitting estimator on clean train dataset...")
                estimator.fit_generator(train_data, **fit_kwargs)

        if defense_type == "Transform":
            # NOTE: Transform currently not supported
            logger.info(f"Transforming estimator with {defense_type} defense...")
            defense = load_defense_wrapper(config["defense"], estimator)
            estimator = defense()

        attack_config = config["attack"]
        attack_type = attack_config.get("type")
        targeted = bool(attack_config.get("targeted"))
        metrics_logger = metrics.MetricsLogger.from_config(
            config["metric"],
            skip_benign=skip_benign,
            skip_attack=skip_attack,
            targeted=targeted,
        )

        if config["dataset"]["batch_size"] != 1:
            logger.warning("Evaluation batch_size != 1 may not be supported.")

        predict_kwargs = config["model"].get("predict_kwargs", {})
        eval_split = config["dataset"].get("eval_split", "test_clean")
        if skip_benign:
            logger.info("Skipping benign classification...")
        else:
            # Evaluate the ART estimator on benign test examples
            logger.info(f"Loading test dataset {config['dataset']['name']}...")
            test_data = load_dataset(
                config["dataset"],
                epochs=1,
                split=eval_split,
                num_batches=num_eval_batches,
                shuffle_files=False,
            )
            logger.info("Running inference on benign examples...")
            for x, y in tqdm(test_data, desc="Benign"):
                # Ensure that input sample isn't overwritten by estimator
                x.flags.writeable = False
                with metrics.resource_context(
                    name="Inference",
                    profiler=config["metric"].get("profiler_type"),
                    computational_resource_dict=metrics_logger.computational_resource_dict,
                ):
                    y_pred = estimator.predict(x, **predict_kwargs)
                metrics_logger.update_task(y, y_pred)
            metrics_logger.log_task()

        if skip_attack:
            logger.info("Skipping attack generation...")
            return metrics_logger.results()

        # Imperceptible attack still WIP
        if (config.get("adhoc") or {}).get("skip_adversarial"):
            logger.info("Skipping adversarial classification...")
            return metrics_logger.results()

        # Evaluate the ART estimator on adversarial test examples
        logger.info("Generating or loading / testing adversarial examples...")
        if attack_type == "preloaded":
            # Pre-generated adversarial examples shipped as a dataset split.
            test_data = load_adversarial_dataset(
                attack_config,
                epochs=1,
                split="adversarial",
                num_batches=num_eval_batches,
                shuffle_files=False,
            )
        else:
            attack = load_attack(attack_config, estimator)
            if targeted != attack.targeted:
                logger.warning(
                    f"targeted config {targeted} != attack field {attack.targeted}"
                )
            test_data = load_dataset(
                config["dataset"],
                epochs=1,
                split=eval_split,
                num_batches=num_eval_batches,
                shuffle_files=False,
            )
            if targeted:
                label_targeter = load_label_targeter(attack_config["targeted_labels"])

        # Optionally export sample (benign, adversarial) pairs to disk.
        export_samples = config["scenario"].get("export_samples")
        if export_samples is not None and export_samples > 0:
            sample_exporter = SampleExporter(
                self.scenario_output_dir, test_data.context, export_samples
            )
        else:
            sample_exporter = None

        for x, y in tqdm(test_data, desc="Attack"):
            with metrics.resource_context(
                name="Attack",
                profiler=config["metric"].get("profiler_type"),
                computational_resource_dict=metrics_logger.computational_resource_dict,
            ):
                if attack_type == "preloaded":
                    # Preloaded batches carry (benign, adversarial) pairs.
                    x, x_adv = x
                    if targeted:
                        y, y_target = y
                elif attack_config.get("use_label"):
                    # Attack with the ground-truth label.
                    x_adv = attack.generate(x=x, y=y)
                elif targeted:
                    # Attack towards a generated target label.
                    y_target = label_targeter.generate(y)
                    x_adv = attack.generate(x=x, y=y_target)
                else:
                    x_adv = attack.generate(x=x)

            # Ensure that input sample isn't overwritten by estimator
            x_adv.flags.writeable = False
            y_pred_adv = estimator.predict(x_adv, **predict_kwargs)
            metrics_logger.update_task(y, y_pred_adv, adversarial=True)
            if targeted:
                metrics_logger.update_task(
                    y_target, y_pred_adv, adversarial=True, targeted=True,
                )
            metrics_logger.update_perturbation(x, x_adv)
            if sample_exporter is not None:
                sample_exporter.export(x, x_adv, y, y_pred_adv)
        metrics_logger.log_task(adversarial=True)
        if targeted:
            metrics_logger.log_task(adversarial=True, targeted=True)
        return metrics_logger.results()
| 39.449612
| 91
| 0.599921
| 8,071
| 0.792985
| 0
| 0
| 0
| 0
| 0
| 0
| 2,428
| 0.238554
|
e7ee6d842483ab8133f076264eb1658607e7ec98
| 5,558
|
py
|
Python
|
FMWKubernetesMAA/OracleEnterpriseDeploymentAutomation/OracleWebCenterSites/charts/wc-sites/unicast.py
|
rishiagarwal-oracle/fmw-kubernetes
|
cf53d0aac782cacaa26cb1f8f1cdb7130f69d64f
|
[
"UPL-1.0",
"MIT"
] | null | null | null |
FMWKubernetesMAA/OracleEnterpriseDeploymentAutomation/OracleWebCenterSites/charts/wc-sites/unicast.py
|
rishiagarwal-oracle/fmw-kubernetes
|
cf53d0aac782cacaa26cb1f8f1cdb7130f69d64f
|
[
"UPL-1.0",
"MIT"
] | null | null | null |
FMWKubernetesMAA/OracleEnterpriseDeploymentAutomation/OracleWebCenterSites/charts/wc-sites/unicast.py
|
rishiagarwal-oracle/fmw-kubernetes
|
cf53d0aac782cacaa26cb1f8f1cdb7130f69d64f
|
[
"UPL-1.0",
"MIT"
] | null | null | null |
# Copyright (c) 2022, Oracle and/or its affiliates.
#
# Licensed under the Universal Permissive License v 1.0 as shown at
# https://oss.oracle.com/licenses/upl
import xml.dom.minidom
import re
import sys
def getManagedServerCount(domainHome):
    """Return the number of managed servers listed in the domain's config.xml.

    NOTE: this is a Python 2 script (print statement here, '<>' elsewhere).
    """
    # use the parse() function to load and parse an XML file
    doc = xml.dom.minidom.parse(domainHome + "/config/config.xml")
    servers = doc.getElementsByTagName("server")
    print "Total Configured Managed Servers: %d " % (servers.length - 1)
    # One <server> entry is excluded from the count -- presumably the
    # AdminServer; confirm against the domain's config.xml layout.
    return servers.length - 1;
# Method to uncomment and comment the required tag and save back
def replaceXml(domainHome, ms_server):
    """Flip ticket-cache.xml so the commented-out cacheManagerPeerProviderFactory
    block becomes active and the previously active one is commented out."""
    # NOTE(review): mode "r+w" is nonstandard -- "r+" is presumably what is
    # intended; confirm on the target Python 2 runtime.
    f = open(domainHome + "/config/fmwconfig/servers/" + ms_server + "/config/ticket-cache.xml","r+w")
    filecontent = f.read()
    #Uncomment the one to be used
    filecontent = re.sub ( r'<!--<cacheManagerPeerProviderFactory','<cacheManagerPeerProviderFactory', filecontent,1)
    filecontent = re.sub ( r'cas_tgt" />-->','cas_tgt" />', filecontent,1)
    #Comment the one not used
    filecontent = re.sub ( r'<cacheManagerPeerProviderFactory','<!--cacheManagerPeerProviderFactory', filecontent,1)
    filecontent = re.sub ( r'propertySeparator="," />','propertySeparator="," -->', filecontent,1)
    # NOTE(review): file is rewritten in place without truncate(); if the new
    # content is shorter, stale bytes may remain beyond the trailing newlines.
    f.seek(0)
    f.write(filecontent)
    f.write("\n\n\n")
    f.close()
# Method to replace the properties
def replaceRmiUrlsInCache(domainHome, prefix, n, ms_server, excludedServerNumber, filename, port):
    """Rewrite the RMI peer URLs in one ehcache config file (`filename`) so that
    this managed server lists all n peers except itself, each on `port`.

    NOTE(review): loop nesting below reconstructed from a whitespace-mangled
    source dump -- verify against the original file before relying on it.
    """
    doc = xml.dom.minidom.parse(domainHome + "/config/fmwconfig/servers/" + ms_server + "/config/" + filename)
    abc = doc.getElementsByTagName("cacheManagerPeerProviderFactory")
    # Template URL; `localhost` and `<port>` are substituted per peer below.
    processString = "peerDiscovery=manual,rmiUrls=//localhost:<port>/notifier"
    for element in abc:
        element.setAttribute("properties", processString)
        # Append one placeholder URL per additional peer (n-1 peers total).
        for x in range (1,n-1):
            processString = processString + "|//localhost:<port>/notifier"
        # We should have got the properties attribute now tokenized with localhost and 41001. Exclude 1 add the rest
        for i in range (1,n+1):
            # '<>' is the Python 2 inequality operator; skip this server itself.
            if i <> int(excludedServerNumber):
                processString = re.sub ( r'localhost',prefix + str(i), processString,1)
                processString = re.sub ( r'<port>',str(port), processString,1)
        element.setAttribute("properties", processString)
        print(processString)
    ghi = doc.getElementsByTagName("cacheManagerPeerListenerFactory")
    for element in ghi:
        processString = element.getAttribute("properties")
        # Listener binds to this host; remoteObjectPort is always port + 1.
        processString = "hostName="+prefix+ str(excludedServerNumber) +",port=" + str(port) +",remoteObjectPort=" + str(int(port)+1) + ",socketTimeoutMillis=12000"
        element.setAttribute("properties", processString)
    # Write the modified DOM back over the same file.
    myfile = open(domainHome + "/config/fmwconfig/servers/" + ms_server + "/config/" + filename , "w")
    myfile.write(doc.toxml())
    myfile.close()
    print("Updated " + filename)
# Method to replace the properties
def replaceRmiUrls(domainHome, prefix, n, ms_server, excludedServerNumber, port):
    """Rewrite the RMI peer URLs in ticket-cache.xml (cas_st / cas_tgt caches)
    so this managed server lists all n peers except itself on `port`.

    NOTE(review): loop nesting below reconstructed from a whitespace-mangled
    source dump -- verify against the original file before relying on it.
    """
    doc = xml.dom.minidom.parse(domainHome + "/config/fmwconfig/servers/" + ms_server + "/config/ticket-cache.xml")
    abc = doc.getElementsByTagName("cacheManagerPeerProviderFactory")
    processString = ""
    for element in abc:
        # Start from the template activated by replaceXml(), then append one
        # cas_st/cas_tgt URL pair per additional peer (n-1 peers total).
        processString = element.getAttribute("properties")
        for x in range (1,n-1):
            processString = processString + "|//localhost:41001/cas_st|//localhost:41001/cas_tgt"
        # We should have got the properties attribute now tokenized with localhost and 41001. Exclude 1 add the rest
        for i in range (1,n+1):
            # '<>' is the Python 2 inequality operator; skip this server itself.
            if i <> int(excludedServerNumber):
                # Two substitutions per iteration: one for cas_st, one for cas_tgt.
                processString = re.sub ( r'localhost',prefix + str(i), processString,1)
                processString = re.sub ( r'41001',str(port), processString,1)
                processString = re.sub ( r'localhost',prefix + str(i), processString,1)
                processString = re.sub ( r'41001',str(port), processString,1)
        element.setAttribute("properties", processString)
        print(processString)
    ghi = doc.getElementsByTagName("cacheManagerPeerListenerFactory")
    for element in ghi:
        processString = element.getAttribute("properties")
        # Listener binds to this host; remoteObjectPort is always port + 1.
        processString = "hostName=" + prefix + str(excludedServerNumber) + ",port=" + str(port) + ",remoteObjectPort=" + str(int(port)+1) + ",socketTimeoutMillis=12000"
        element.setAttribute("properties", processString)
    # Write the modified DOM back over the same file.
    myfile = open(domainHome + "/config/fmwconfig/servers/" + ms_server + "/config/ticket-cache.xml", "w")
    myfile.write(doc.toxml())
    myfile.close()
    print("Updated " + "ticket-cache.xml")
def main():
    """Entry point: parse CLI args and rewrite all cache XML files for one
    managed server (unicast peer discovery setup)."""
    # count the arguments
    arguments = len(sys.argv) - 1
    print ("The script is called with %i arguments" % (arguments))
    # Expected args: <domainHome> <serverPrefix> <managedServerName> <basePort>
    domainHome = sys.argv[1]
    serverPrefix = sys.argv[2]
    ms_server = sys.argv[3]
    port = sys.argv[4]
    # NOTE(review): assumes the server number is the final character of the
    # managed server name -- breaks for numbers >= 10; confirm naming scheme.
    excludedServerNumber = ms_server[-1]
    print("Host prefix set to " + serverPrefix)
    print("Managed Server set to - " + ms_server)
    print("Excluded Server Number set to - " + excludedServerNumber)
    print("Starting port set to - " + port)
    replaceXml(domainHome, ms_server)
    servercount = getManagedServerCount(domainHome)
    replaceRmiUrls(domainHome, serverPrefix, servercount, ms_server, excludedServerNumber, port)
    # Each additional cache file gets its own even port offset from the base.
    replaceRmiUrlsInCache(domainHome, serverPrefix, servercount, ms_server, excludedServerNumber, "linked-cache.xml", int(port) + 2)
    replaceRmiUrlsInCache(domainHome, serverPrefix, servercount, ms_server, excludedServerNumber, "cs-cache.xml", int(port) + 4)
    replaceRmiUrlsInCache(domainHome, serverPrefix, servercount, ms_server, excludedServerNumber, "cas-cache.xml", int(port) + 6 )
    replaceRmiUrlsInCache(domainHome, serverPrefix, servercount, ms_server, excludedServerNumber, "ss-cache.xml", int(port) + 8 )

if __name__ == "__main__":
    # calling main function
    main()
| 45.933884
| 161
| 0.737496
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 2,033
| 0.365779
|
e7ee8f88cffe1a482d5fa7391195738c0119a53d
| 2,228
|
py
|
Python
|
SQLFileGenerator/sqlqueries.py
|
DataMadeEasy/PySQLFileGenerator
|
3efc54fa7b8741f48d00dc199675081b0fc4e04d
|
[
"BSD-2-Clause"
] | null | null | null |
SQLFileGenerator/sqlqueries.py
|
DataMadeEasy/PySQLFileGenerator
|
3efc54fa7b8741f48d00dc199675081b0fc4e04d
|
[
"BSD-2-Clause"
] | null | null | null |
SQLFileGenerator/sqlqueries.py
|
DataMadeEasy/PySQLFileGenerator
|
3efc54fa7b8741f48d00dc199675081b0fc4e04d
|
[
"BSD-2-Clause"
] | null | null | null |
# Mapping of report name -> Oracle SQL text used to generate the .sql files.
# NOTE(review): the column alias "descripion" (sic) appears in two queries;
# it is part of the emitted SQL and deliberately left untouched -- downstream
# consumers may depend on the misspelled alias.
sqlqueries = {
    'WeatherForecast':"select concat ('FY', to_char(f.forecasted_timestamp, 'YY')) Fiscal_yr, to_char(f.forecasted_timestamp, 'MON') Fiscal_mth, concat ('Day_', to_char(f.forecasted_timestamp, 'DD')) Fiscal_day, f.zipcode zip, min(f.temp_avg) low, max(f.temp_avg) high, max(f.wind_speed) wind, max(f.humidity) humidity from forecast f where to_char(forecast_timestamp, 'YYYY-MM-DD HH24') = (select max(to_char(forecast_timestamp, 'YYYY-MM-DD HH24')) from forecast) group by to_char(f.forecasted_timestamp, 'YY'), to_char(f.forecasted_timestamp, 'MON'), to_char(f.forecasted_timestamp, 'DD'), f.zipcode;",
    'WeatherActDesc':"select concat ('FY', to_char(o.observation_timestamp, 'YY')) Fiscal_yr, to_char(o.observation_timestamp, 'MON') Fiscal_mth, concat ('Day_', to_char(o.observation_timestamp, 'DD')) Fiscal_day, o.zipcode zip, o.weather_description descripion from observations o group by to_char(o.observation_timestamp, 'YY'), to_char(o.observation_timestamp, 'MON'), to_char(o.observation_timestamp, 'DD'), o.zipcode, o.weather_description order by fiscal_yr, fiscal_mth, fiscal_day, zip;",
    'WeatherActual':"select concat ('FY', to_char(o.observation_timestamp, 'YY')) Fiscal_yr, to_char(o.observation_timestamp, 'MON') Fiscal_mth, concat ('Day_', to_char(o.observation_timestamp, 'DD')) Fiscal_day, o.zipcode zip, min(o.temp_avg) low, max(o.temp_avg) high, max(o.wind_speed) wind, max(o.humidity) humidity from observations o group by to_char(o.observation_timestamp, 'YY'), to_char(o.observation_timestamp, 'MON') , to_char(o.observation_timestamp, 'DD') , o.zipcode order by fiscal_yr, fiscal_mth, fiscal_day, zip;",
    'WeatherDescription':"select concat ('FY', to_char(f.forecasted_timestamp, 'YY')) Fiscal_yr , to_char(f.forecasted_timestamp, 'MON') Fiscal_mth , concat ('Day_', to_char(f.forecasted_timestamp, 'DD')) Fiscal_day , f.zipcode zip , f.weather_description descripion from forecast f where to_char(forecast_timestamp, 'YYYY-MM-DD HH24') = (select max(to_char(forecast_timestamp, 'YYYY-MM-DD HH24')) from forecast) group by to_char(forecasted_timestamp, 'YY') , to_char(f.forecasted_timestamp, 'MON') , to_char(f.forecasted_timestamp, 'DD') , f.zipcode , f.weather_description;"
}
| 371.333333
| 604
| 0.763465
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 2,185
| 0.9807
|
e7f06cecae55d479e6604b53a295b76a9bdf0276
| 5,005
|
py
|
Python
|
backend/tests/unit/protocols/application/test_lists.py
|
pez-globo/pufferfish-software
|
b42fecd652731dd80fbe366e95983503fced37a4
|
[
"Apache-2.0"
] | 1
|
2020-10-20T23:47:23.000Z
|
2020-10-20T23:47:23.000Z
|
backend/tests/unit/protocols/application/test_lists.py
|
pez-globo/pufferfish-software
|
b42fecd652731dd80fbe366e95983503fced37a4
|
[
"Apache-2.0"
] | 242
|
2020-10-23T06:44:01.000Z
|
2022-01-28T05:50:45.000Z
|
backend/tests/unit/protocols/application/test_lists.py
|
pez-globo/pufferfish-vent-software
|
f1e5e47acf1941e7c729adb750b85bf26c38b274
|
[
"Apache-2.0"
] | 1
|
2021-04-12T02:10:18.000Z
|
2021-04-12T02:10:18.000Z
|
"""Test the functionality of protocols.application.states classes."""
from ventserver.protocols.application import lists
from ventserver.protocols.protobuf import mcu_pb as pb
def test_send_new_elements() -> None:
    """Test adding new elements to a list for sending."""
    # 20 update events, each carrying one new log event with id 0..19.
    example_sequence = [
        lists.UpdateEvent(new_elements=[pb.LogEvent(id=i)])
        for i in range(20)
    ]
    # Buffer keeps at most 10 elements; each output segment carries at most 5.
    synchronizer = lists.SendSynchronizer(
        segment_type=pb.NextLogEvents,
        max_len=10, max_segment_len=5
    )
    assert synchronizer.output() is None
    for update_event in example_sequence:
        synchronizer.input(update_event)
    # No segment is emitted until the peer reports its next expected element.
    assert synchronizer.output() is None

    # The first 10 events should've been discarded
    for next_expected in range(10):
        synchronizer.input(lists.UpdateEvent(next_expected=next_expected))
        output = synchronizer.output()
        assert isinstance(output, pb.NextLogEvents)
        assert output.next_expected == next_expected
        assert output.total == 10
        assert output.remaining == 10
        # Oldest retained element is id 10, so segments start there.
        for (i, event) in enumerate(output.elements):
            assert event.id == 10 + i

    # Segments should be returned as requested
    for next_expected in range(10, 20):
        synchronizer.input(lists.UpdateEvent(next_expected=next_expected))
        output = synchronizer.output()
        assert isinstance(output, pb.NextLogEvents)
        assert output.next_expected == next_expected
        assert output.total == 10
        assert output.remaining == 10 - (next_expected - 10)
        for (i, event) in enumerate(output.elements):
            assert event.id == next_expected + i
        # Full 5-element segments until fewer than 5 elements remain.
        if next_expected <= 15:
            assert len(output.elements) == 5
        else:
            assert len(output.elements) == 5 - (next_expected - 15)

    # New elements should be in the segment resulting from a repeated request
    assert synchronizer.output() is None
    synchronizer.input(lists.UpdateEvent(
        new_elements=[pb.LogEvent(id=20)], next_expected=19
    ))
    output = synchronizer.output()
    assert isinstance(output, pb.NextLogEvents)
    assert output.next_expected == 19
    assert output.total == 10
    assert output.remaining == 2
    for (i, event) in enumerate(output.elements):
        assert event.id == 19 + i
    assert len(output.elements) == 2
    # TODO: add a test where we send all events, then reset expected event to 0.
    # All events should be sent again.
def test_receive_new_elements() -> None:
    """Test adding new elements to a list from receiving."""
    # Incoming segments: two clean batches, an overlapping batch, a stale
    # repeat, then a new session (empty, followed by its first batch).
    segments = [
        pb.NextLogEvents(
            session_id=0, elements=[pb.LogEvent(id=i) for i in range(0, 5)]
        ),
        pb.NextLogEvents(
            session_id=0, elements=[pb.LogEvent(id=i) for i in range(5, 10)]
        ),
        pb.NextLogEvents(
            session_id=0, elements=[pb.LogEvent(id=i) for i in range(7, 11)]
        ),
        pb.NextLogEvents(
            session_id=0, elements=[pb.LogEvent(id=i) for i in range(0, 4)]
        ),
        pb.NextLogEvents(session_id=1),
        pb.NextLogEvents(
            session_id=1, elements=[pb.LogEvent(id=i) for i in range(0, 4)]
        ),
    ]
    synchronizer: lists.ReceiveSynchronizer[pb.LogEvent] = (
        lists.ReceiveSynchronizer()
    )
    assert synchronizer.output() is None
    for segment in segments:
        synchronizer.input(segment)

    # Expected update events: (session_id, next_expected, new element ids).
    expected_updates = [
        (0, 5, list(range(0, 5))),    # first batch accepted in full
        (0, 10, list(range(5, 10))),  # second batch accepted in full
        (0, 11, [10]),                # overlap deduplicated; only id 10 is new
        (0, 11, []),                  # stale repeat yields no new elements
        (1, 0, []),                   # new session announced, nothing yet
        (1, 4, list(range(0, 4))),    # first batch of the new session
    ]
    for session_id, next_expected, expected_ids in expected_updates:
        update_event = synchronizer.output()
        assert update_event is not None
        assert update_event.session_id == session_id
        assert update_event.next_expected == next_expected
        assert [element.id for element in update_event.new_elements] == expected_ids
| 36.532847
| 77
| 0.675524
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 449
| 0.08971
|
e7f2a75349f080e6ef9556951fc033879ae1e187
| 1,969
|
py
|
Python
|
application/api.py
|
DonBlaine/OpenDoorData
|
74740c6ff6dca893f0389963f2ef12de42a36829
|
[
"MIT"
] | null | null | null |
application/api.py
|
DonBlaine/OpenDoorData
|
74740c6ff6dca893f0389963f2ef12de42a36829
|
[
"MIT"
] | null | null | null |
application/api.py
|
DonBlaine/OpenDoorData
|
74740c6ff6dca893f0389963f2ef12de42a36829
|
[
"MIT"
] | null | null | null |
# file that contains db models to be exposed via a REST API
from models import room, survey, wifi_log, timetable, module # import db models
from app import app # import Flask app
from auth import auth # import Auth app to provide user authentificaiton
from flask import request # import request object to parse json request data
from flask_peewee.rest import RestAPI,UserAuthentication, RestrictOwnerResource, AdminAuthentication
# create RestrictOwnerResource subclass which prevents users modifying another user's content
# RestrictOwnerResource subclass which prevents users modifying another user's content
class SurveyResource(RestrictOwnerResource):
    # Surveys are owned by the user recorded in the 'reporter' field.
    owner_field = 'reporter'

    def check_post(self):
        '''Return True only if the posting user is the instructor of the module they are submitting a POST request to.'''
        payload = request.get_json()  # parse and return incoming json request data
        reporter = payload["reporter"]
        requested_module = payload["module_code"]
        # Look up the module rows matching the posted module_code and allow
        # the POST when any of them lists this user as instructor.
        matching = module.select().where(module.module_code == requested_module)
        return any(str(row.instructor) == reporter for row in matching)
# instantiate UserAuthentication (default auth for all registered resources)
user_auth = UserAuthentication(auth)
# instantiate admin-only auth for read-only reference data
admin_auth = AdminAuthentication(auth)
# instantiate our api wrapper, specifying user_auth as the default
api = RestAPI(app, default_auth=user_auth)
# register models so they are exposed via /api/<model>/
# Only `survey` accepts POSTs (ownership enforced by SurveyResource);
# every other model is read-only and admin-only.
api.register(room, auth=admin_auth, allowed_methods=['GET'])
api.register(survey,SurveyResource,allowed_methods=['GET','POST'])
api.register(wifi_log, auth=admin_auth,allowed_methods=['GET'])
api.register(timetable, auth=admin_auth, allowed_methods=['GET'])
api.register(module, auth=admin_auth, allowed_methods=['GET'])
| 39.38
| 145
| 0.739462
| 805
| 0.408837
| 0
| 0
| 0
| 0
| 0
| 0
| 840
| 0.426612
|