hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 11 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 251 | max_stars_repo_name stringlengths 4 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 251 | max_issues_repo_name stringlengths 4 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 251 | max_forks_repo_name stringlengths 4 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.05M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.04M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
c7b66acfc0f1fc9f0407ccd4877bc57ccf79afa1 | 4,691 | py | Python | pycardcast/net/aiohttp.py | Elizafox/pycardcast | 36fb8009f32f733fd18a7f3263a61362fdb75ec3 | [
"WTFPL"
] | null | null | null | pycardcast/net/aiohttp.py | Elizafox/pycardcast | 36fb8009f32f733fd18a7f3263a61362fdb75ec3 | [
"WTFPL"
] | null | null | null | pycardcast/net/aiohttp.py | Elizafox/pycardcast | 36fb8009f32f733fd18a7f3263a61362fdb75ec3 | [
"WTFPL"
] | 1 | 2020-04-09T10:12:46.000Z | 2020-04-09T10:12:46.000Z | # Copyright 2015 Elizabeth Myers.
# All rights reserved.
# This file is part of the pycardcast project. See LICENSE in the root
# directory for licensing information.
import asyncio
import aiohttp
from pycardcast.net import CardcastAPIBase
from pycardcast.deck import (DeckInfo, DeckInfoNotFoundError,
DeckInfoRetrievalError)
from pycardcast.card import (BlackCard, WhiteCard, CardNotFoundError,
CardRetrievalError)
from pycardcast.search import (SearchReturn, SearchNotFoundError,
SearchRetrievalError)
| 37.830645 | 78 | 0.563206 |
c7b71c7227264e168736696fa5f4ef910e4d9c22 | 2,345 | py | Python | libtiepie/triggeroutput.py | TiePie/python-libtiepie | d2a9875855298a58d6a16be5b61aaa89a558e7d8 | [
"MIT"
] | 6 | 2020-01-04T02:00:35.000Z | 2022-03-22T00:32:26.000Z | libtiepie/triggeroutput.py | TiePie/python-libtiepie | d2a9875855298a58d6a16be5b61aaa89a558e7d8 | [
"MIT"
] | 3 | 2020-08-05T15:16:29.000Z | 2022-03-21T07:00:27.000Z | libtiepie/triggeroutput.py | TiePie/python-libtiepie | d2a9875855298a58d6a16be5b61aaa89a558e7d8 | [
"MIT"
] | null | null | null | from ctypes import *
from .api import api
from .const import *
from .library import library
| 33.028169 | 72 | 0.665245 |
c7b7578b3382d7cf2565fe8fe7621c5d451e663b | 1,374 | py | Python | conduit_rest/radish/conduit_rest_steps.py | dduleba/tw2019-ui-tests | 5f149c6c2bdb9f2d69a02c038248374f6b0b5903 | [
"MIT"
] | 1 | 2019-09-27T23:12:07.000Z | 2019-09-27T23:12:07.000Z | conduit_rest/radish/conduit_rest_steps.py | dduleba/conduit-tests | 5f149c6c2bdb9f2d69a02c038248374f6b0b5903 | [
"MIT"
] | null | null | null | conduit_rest/radish/conduit_rest_steps.py | dduleba/conduit-tests | 5f149c6c2bdb9f2d69a02c038248374f6b0b5903 | [
"MIT"
] | null | null | null | import time
from faker import Faker
from radish_ext.radish.step_config import StepConfig
from conduit.client import ConduitClient, ConduitConfig
| 31.227273 | 114 | 0.61936 |
c7b88fe5b2537ef40175e1a577b998fdb2d3a5c9 | 1,233 | py | Python | SummaryExternalClient.py | Hackillinois2k18/Main-Repo | e998cc3283e0469b98a842220a30a72c5b105dad | [
"MIT"
] | 5 | 2020-03-10T03:23:18.000Z | 2021-11-12T17:06:51.000Z | SummaryExternalClient.py | Hackillinois2k18/FyveBot | e998cc3283e0469b98a842220a30a72c5b105dad | [
"MIT"
] | 3 | 2018-02-24T05:25:28.000Z | 2018-02-24T05:43:49.000Z | SummaryExternalClient.py | Hackillinois2k18/Main-Repo | e998cc3283e0469b98a842220a30a72c5b105dad | [
"MIT"
] | 3 | 2019-01-20T14:50:11.000Z | 2021-11-12T17:06:55.000Z | import requests
import credentials
| 35.228571 | 82 | 0.586375 |
c7b8b9fdf2de5fb240b87971d0e7f35941af2c81 | 1,485 | py | Python | tests/test_render.py | isuruf/conda-build | 9f163925f5d03a46e921162892bf4c6bc86b1072 | [
"BSD-3-Clause"
] | null | null | null | tests/test_render.py | isuruf/conda-build | 9f163925f5d03a46e921162892bf4c6bc86b1072 | [
"BSD-3-Clause"
] | 1 | 2019-10-08T15:03:56.000Z | 2019-10-08T15:03:56.000Z | tests/test_render.py | awwad/conda-build | b0be80283ec2e3ef7e49b5da923b1438e74e27b5 | [
"BSD-3-Clause"
] | null | null | null | import os
import sys
from conda_build import api
from conda_build import render
import pytest
| 33 | 65 | 0.690909 |
c7b8e20d5ed5e23189a112d56d8a749537d1ecec | 173 | py | Python | ABC/007/b.py | fumiyanll23/AtCoder | 362ca9fcacb5415c1458bc8dee5326ba2cc70b65 | [
"MIT"
] | null | null | null | ABC/007/b.py | fumiyanll23/AtCoder | 362ca9fcacb5415c1458bc8dee5326ba2cc70b65 | [
"MIT"
] | null | null | null | ABC/007/b.py | fumiyanll23/AtCoder | 362ca9fcacb5415c1458bc8dee5326ba2cc70b65 | [
"MIT"
] | null | null | null |
if __name__ == '__main__':
main()
| 10.8125 | 26 | 0.421965 |
c7b94b2b66d38c20024028b233b4eaed057202d2 | 5,057 | py | Python | SPAE/read_write.py | simon-schuler/SPAE | 2b970e30838da258b969b316488e7963d66119be | [
"MIT"
] | null | null | null | SPAE/read_write.py | simon-schuler/SPAE | 2b970e30838da258b969b316488e7963d66119be | [
"MIT"
] | 1 | 2021-04-12T20:28:55.000Z | 2021-04-12T20:28:55.000Z | SPAE/read_write.py | simon-schuler/SPAE | 2b970e30838da258b969b316488e7963d66119be | [
"MIT"
] | null | null | null | #Writing MOOG parameter file for the parameter, abundance, and error calculations.
#The parameter file only needs to be written once, at beginning of the routine, because the output
#files are overwritten with each itereation of the routine, only minimal output data are needed.
#
#The user can choose to have the parameter file written to screen by choosing verbose=True
#The user can choose to have more detailed MOOG output by chooseing the appropriate values for the
#MOOG input parameters.
import numpy as np
#Function for creating the solar and stellar linelists
#Reads Moog output files, parsing elements and colums
| 41.45082 | 168 | 0.489816 |
c7ba2b5a0bc557fae2df973eed4ab42b40580f6e | 1,862 | py | Python | lectures/optimization/optimization_plots.py | carolinalvarez/ose-course-scientific-computing | 4b816fa81320c88fc5f35b203f0541e0a1a00939 | [
"MIT"
] | null | null | null | lectures/optimization/optimization_plots.py | carolinalvarez/ose-course-scientific-computing | 4b816fa81320c88fc5f35b203f0541e0a1a00939 | [
"MIT"
] | null | null | null | lectures/optimization/optimization_plots.py | carolinalvarez/ose-course-scientific-computing | 4b816fa81320c88fc5f35b203f0541e0a1a00939 | [
"MIT"
] | null | null | null | """Plots for optimization lecture."""
import matplotlib.pyplot as plt
import numpy as np
from matplotlib import cm
def plot_contour(f, allvecs, legend_path):
"""Plot contour graph for function f."""
# Create array from values with at least two dimensions.
allvecs = np.atleast_2d(allvecs)
X, Y, Z = _get_grid(f)
CS = plt.contour(X, Y, Z)
plt.clabel(CS, inline=1, fontsize=10)
plt.title("objective function")
plt.xlabel("variable $x_1$")
plt.ylabel("variable $x_2$")
plt.rc("text", usetex=False)
plt.rc("font", family="serif")
plt.plot(1, 1, "r*", markersize=10, label="minimum")
plt.plot(4.5, -1.5, "bx", markersize=10, label="initial guess")
plt.plot(
np.array(allvecs)[:, 0], np.array(allvecs)[:, 1], "go", markersize=4, label=legend_path,
)
plt.legend()
return plt
def _get_grid(f):
"""Create a grid for function f."""
# create data to visualize objective function
n = 50 # number of discretization points along the x-axis
m = 50 # number of discretization points along the x-axis
a = -2.0
b = 5.0 # extreme points in the x-axis
c = -2
d = 5.0 # extreme points in the y-axis
X, Y = np.meshgrid(np.linspace(a, b, n), np.linspace(c, d, m))
Z = np.zeros(X.shape)
argument = np.zeros(2)
for i in range(X.shape[0]):
for j in range(X.shape[1]):
argument[0] = X[i, j]
argument[1] = Y[i, j]
Z[i][j] = f(argument)
return X, Y, Z
def plot_surf(f):
"""Plot surface graph of function f."""
X, Y, Z = _get_grid(f)
fig = plt.figure()
ax = fig.gca(projection="3d")
# Plot the surface.
surf = ax.plot_surface(X, Y, Z, cmap=cm.coolwarm)
plt.xlabel("variable $x_1$")
plt.ylabel("variable $x_2$")
fig.colorbar(surf)
plt.title("objective function")
| 27.791045 | 96 | 0.605263 |
c7ba60efd06c8906b83387592b8347e6da526db9 | 7,141 | py | Python | gdsfactory/functions.py | simbilod/gdsfactory | 4d76db32674c3edb4d16260e3177ee29ef9ce11d | [
"MIT"
] | null | null | null | gdsfactory/functions.py | simbilod/gdsfactory | 4d76db32674c3edb4d16260e3177ee29ef9ce11d | [
"MIT"
] | null | null | null | gdsfactory/functions.py | simbilod/gdsfactory | 4d76db32674c3edb4d16260e3177ee29ef9ce11d | [
"MIT"
] | null | null | null | """All functions return a Component so you can easily pipe or compose them.
There are two types of functions:
- decorators: return the original component
- containers: return a new component
"""
from functools import lru_cache, partial
import numpy as np
from omegaconf import OmegaConf
from pydantic import validate_arguments
from gdsfactory.cell import cell
from gdsfactory.component import Component
from gdsfactory.components.text_rectangular import text_rectangular_multi_layer
from gdsfactory.port import auto_rename_ports
from gdsfactory.types import (
Anchor,
Axis,
ComponentSpec,
Float2,
Layer,
List,
Optional,
Strs,
)
cache = lru_cache(maxsize=None)
def add_port(component: Component, **kwargs) -> Component:
"""Return Component with a new port."""
component.add_port(**kwargs)
return component
def add_texts(
components: List[ComponentSpec],
prefix: str = "",
index0: int = 0,
**kwargs,
) -> List[Component]:
"""Return a list of Component with text labels.
Args:
components: list of component specs.
prefix: Optional prefix for the labels.
index0: defaults to 0 (0, for first component, 1 for second ...).
keyword Args:
text_offset: relative to component size info anchor. Defaults to center.
text_anchor: relative to component (ce cw nc ne nw sc se sw center cc).
text_factory: function to add text labels.
"""
return [
add_text(component, text=f"{prefix}{i+index0}", **kwargs)
for i, component in enumerate(components)
]
rotate90 = partial(rotate, angle=90)
rotate90n = partial(rotate, angle=-90)
rotate180 = partial(rotate, angle=180)
def move_port_to_zero(component: Component, port_name: str = "o1"):
"""Return a container that contains a reference to the original component.
The new component has port_name in (0, 0).
"""
if port_name not in component.ports:
raise ValueError(
f"port_name = {port_name!r} not in {list(component.ports.keys())}"
)
return move(component, -component.ports[port_name].midpoint)
def update_info(component: Component, **kwargs) -> Component:
"""Return Component with updated info."""
component.info.update(**kwargs)
return component
__all__ = (
"add_port",
"add_text",
"add_settings_label",
"auto_rename_ports",
"cache",
"mirror",
"move",
"move_port_to_zero",
"rotate",
"update_info",
)
if __name__ == "__main__":
import gdsfactory as gf
c = gf.components.mmi1x2(
length_mmi=10,
decorator=partial(add_settings_label, settings=["name", "length_mmi"]),
)
# c.show()
cr = rotate(component=c)
cr.show()
# cr = c.rotate()
# cr.pprint()
# cr.show()
# cm = move(c, destination=(20, 20))
# cm.show()
# cm = mirror(c)
# cm.show()
# cm = c.mirror()
# cm.show()
# cm2 = move_port_to_zero(cm)
# cm2.show()
# cm3 = add_text(c, "hi")
# cm3.show()
# cr = rotate(component=c)
# cr.show()
# print(component_rotated)
# component_rotated.pprint
# component_netlist = component.get_netlist()
# component.pprint_netlist()
| 25.967273 | 87 | 0.669654 |
c7ba7f82e01986b93c50e54b040c99061ee59d08 | 26,640 | py | Python | OverlayUFOs/Overlay UFOs.roboFontExt/lib/OverlayUFOs.py | connordavenport/fbOpenTools | 794c71d504cea1248c256bea11d5249b0a4144a1 | [
"Unlicense"
] | null | null | null | OverlayUFOs/Overlay UFOs.roboFontExt/lib/OverlayUFOs.py | connordavenport/fbOpenTools | 794c71d504cea1248c256bea11d5249b0a4144a1 | [
"Unlicense"
] | null | null | null | OverlayUFOs/Overlay UFOs.roboFontExt/lib/OverlayUFOs.py | connordavenport/fbOpenTools | 794c71d504cea1248c256bea11d5249b0a4144a1 | [
"Unlicense"
] | null | null | null | #coding=utf-8
from __future__ import division
"""
# OVERLAY UFOS
For anyone looking in here, sorry the code is so messy. This is a standalone version of a script with a lot of dependencies.
"""
import os
from AppKit import * #@PydevCodeAnalysisIgnore
from vanilla import * #@PydevCodeAnalysisIgnore
from mojo.drawingTools import *
from mojo.events import addObserver, removeObserver
from mojo.extensions import getExtensionDefault, setExtensionDefault, getExtensionDefaultColor, setExtensionDefaultColor
from mojo.UI import UpdateCurrentGlyphView
from fontTools.pens.transformPen import TransformPen
from defconAppKit.windows.baseWindow import BaseWindowController
import unicodedata
#from lib.tools.defaults import getDefaultColor
from lib.tools.drawing import strokePixelPath
from lib.UI.spaceCenter.glyphSequenceEditText import splitText
from builtins import chr
selectedSymbol = u''
if __name__ == "__main__":
OverlayUFOs() | 39.118943 | 182 | 0.575526 |
c7ba815c300287faa117210ec887325390625523 | 114 | py | Python | nautapy/__init__.py | armandofcom/nautapy | 6907e350021752b54998f6b0b5674dccc8ca9ddd | [
"MIT"
] | 25 | 2020-03-20T05:02:09.000Z | 2022-03-29T13:24:36.000Z | nautapy/__init__.py | armandofcom/nautapy | 6907e350021752b54998f6b0b5674dccc8ca9ddd | [
"MIT"
] | 7 | 2020-01-22T23:10:25.000Z | 2021-06-02T21:41:27.000Z | nautapy/__init__.py | armandofcom/nautapy | 6907e350021752b54998f6b0b5674dccc8ca9ddd | [
"MIT"
] | 14 | 2020-03-20T05:02:18.000Z | 2022-03-29T13:24:39.000Z | import os
appdata_path = os.path.expanduser("~/.local/share/nautapy")
os.makedirs(appdata_path, exist_ok=True)
| 16.285714 | 59 | 0.763158 |
c7bb3480194f9fe2fbc061710221cb965aa24166 | 9,368 | py | Python | pyteamup/Calendar.py | LogicallyUnfit/pyTeamUp | a398fe6808d506ca4e05090b58e0a697aa1f46e5 | [
"MIT"
] | 5 | 2019-04-11T14:52:19.000Z | 2022-03-13T10:39:22.000Z | pyteamup/Calendar.py | LogicallyUnfit/pyTeamUp | a398fe6808d506ca4e05090b58e0a697aa1f46e5 | [
"MIT"
] | 9 | 2019-04-11T14:49:59.000Z | 2021-11-30T08:34:31.000Z | pyteamup/Calendar.py | LogicallyUnfit/pyTeamUp | a398fe6808d506ca4e05090b58e0a697aa1f46e5 | [
"MIT"
] | 3 | 2019-04-11T14:17:00.000Z | 2021-07-15T06:59:13.000Z | import requests
import json
import datetime
import sys
from dateutil.parser import parse as to_datetime
try:
import pandas as pd
except:
pass
from pyteamup.utils.utilities import *
from pyteamup.utils.constants import *
from pyteamup.Event import Event
def get_event_collection(self, start_dt=None, end_dt=None, subcal_id=None, returnas='events', markdown=False):
"""
Method allows bulk fetching of events that fall between the provided time frame. If None is provided then
the current date -30 and +180 days is used.
:param start_dt: if set as None then set as today minus 30 days
:param end_dt: if left as None then set as today plus 180 days
:param subcal_id: optional str or list-like if a different calendar should be queried
:return: json of events
"""
if returnas not in ('events', 'dataframe', 'dict'):
raise TypeError('Returnas not recognized. Recognized values: event, series, dict')
if start_dt is None:
start_dt = datetime.date.today() - datetime.timedelta(30)
if end_dt is None:
end_dt = datetime.date.today() + datetime.timedelta(180)
subcal_par = ''
if subcal_id:
if isinstance(subcal_id, (list, tuple)):
for id in subcal_id:
subcal_par += f'&subcalendarId[]={id}'
else:
subcal_par = f'&subcalendarId[]={subcal_id}'
if markdown == True:
para_markdown = '&format[]=markdown'
else:
para_markdown = ''
parameters = f'&startDate={start_dt.strftime("%Y-%m-%d")}&endDate={end_dt.strftime("%Y-%m-%d")}' + subcal_par + para_markdown
req = requests.get(self._event_collection_url + parameters)
check_status_code(req.status_code)
self.events_json = json.loads(req.text)['events']
if returnas == 'events':
return [Event(self, **event_dict) for event_dict in self.events_json]
elif returnas == 'dataframe' and 'pandas' in sys.modules:
return pd.DataFrame.from_records(self.events_json)
else:
return self.events_json
def _create_event_from_json(self, payload):
""" Lazy Creation of Event by passing a formatted payload"""
resp = requests.post(self._event_collection_url, data=payload, headers=POST_HEADERS)
try:
check_status_code(resp.status_code)
except:
print(payload)
print(resp.text)
raise
return resp.text
def get_changed_events(self, modified_since, returnas='event'):
"""
Get changed events since given unix time
:param modified_since: <int> Unix timestamp, must be less than 30 days old
:param returnas: <str> `event` `series` `dict` are valid options
:return: Tuple of event list and returned timestamp
"""
if returnas not in ('event', 'series', 'dict'):
raise TypeError('Returnas not recognized. Recognized values: event, series, dict')
url = self._base_url + EVENTS_BASE + self.__token_str + '&modifiedSince=' + str(modified_since)
resp = requests.get(url)
check_status_code(resp.status_code)
events_json = json.loads(resp.text)['events']
timestamp = json.loads(resp.text)['timestamp']
if returnas == 'events':
return [Event(self, **event_dict) for event_dict in events_json], timestamp
elif returnas == 'dataframe' and 'pandas' in sys.modules:
return pd.DataFrame.from_records(events_json), timestamp
else:
return events_json, timestamp
def new_event(self, title, start_dt, end_dt, subcalendar_ids, all_day=False,
notes=None, location=None, who=None, remote_id=None, returnas='event'):
"""
Create a new event within a provided subcalendar. Can return as Event object, Series object, or Dictionary.
Undo_id not included with return unless returnas='event' in which case it is included with the returned Event Object
:param subcalendar_id: <str, int, or list-like> Required - the ID of the subcalendar within the calendar the event should be created in.
:param title: <str> Title of the event, must be
:param start_dt: <datetime> Start Datetime
:param end_dt: <datetime> End Datetime
:param all_day: <Bool> Allday or Not
:param notes: <str> HTML or Markdown formatted string detailing the Description
:param location: <str> Location of the event
:param who: <str>
:param remote_id: <str> Remote ID of the event, used to link the TeamUp event record to its source information
:param returnas: <str> `event` `series` `dict` are valid options
:return:
"""
if returnas not in ('event','dict','series'):
raise ValueError(f'Unrecognized returnas paramter: {returnas}')
if not isinstance(start_dt, datetime.datetime) or not isinstance(end_dt, datetime.datetime):
try:
start_dt = to_datetime(start_dt)
end_dt = to_datetime(end_dt)
except:
raise ValueError('Parse failed, please pass all dates as a datetime object')
if isinstance(subcalendar_ids, (str, int)):
subcalendar_ids = [subcalendar_ids]
if not isinstance(subcalendar_ids, (tuple, list)):
raise ValueError(f'Unrecognized Type: Subcalendar_ids type: {type(subcalendar_ids)}')
dict = {'remote_id': remote_id,
'title': title,
'subcalendar_ids': subcalendar_ids,
'start_dt': format_date(start_dt),
'end_dt': format_date(end_dt),
'all_day': all_day,
'notes': notes,
'location': location,
'who': who
}
resp_text = self._create_event_from_json(json.dumps(dict))
resp_dict = json.loads(resp_text)
event_dict = resp_dict['event']
undo_id = resp_dict['undo_id']
if returnas == 'event':
return Event(self, undo_id = undo_id, **event_dict)
elif returnas == 'series' and 'pandas' in sys.modules:
return pd.Series(event_dict)
else:
return event_dict
| 39.694915 | 144 | 0.627242 |
c7bd4060064aa4ccc776c07aa7678497ec65e795 | 8,232 | py | Python | configs/regnet.py | roatienza/agmax | 2a7299cc506605aeaaf64b6155b5c826c71d5786 | [
"Apache-2.0"
] | 2 | 2021-11-05T13:09:12.000Z | 2022-03-04T05:07:33.000Z | configs/regnet.py | roatienza/agmax | 2a7299cc506605aeaaf64b6155b5c826c71d5786 | [
"Apache-2.0"
] | 1 | 2021-11-04T10:06:57.000Z | 2021-11-07T08:35:39.000Z | configs/regnet.py | roatienza/agmax | 2a7299cc506605aeaaf64b6155b5c826c71d5786 | [
"Apache-2.0"
] | null | null | null |
from . import constant
parameters = {
'RegNet' : { "lr": 0.1, "epochs": 100, "weight_decay": 5e-5, "batch_size": 128, "nesterov": True, "init_backbone":True, "init_extractor":True,},
}
backbone_config = {
"RegNetX002" : {"channels": 3, "dropout": 0.2,},
"RegNetY004" : {"channels": 3, "dropout": 0.2,},
}
train = {
# RegNetX002
'RegNetX002-standard': { "backbone": 'RegNetX002',
"backbone_config": backbone_config['RegNetX002'],
"weights_std": constant.standard_weights_std,
"agmax" : False,
"parameters" : parameters['RegNet'],
"cutout": False, "auto_augment": False, "no_basic_augment": False, "cutmix": False, "mixup" : False,
},
'RegNetX002-cutmix': { "backbone": 'RegNetX002',
"backbone_config": backbone_config['RegNetX002'],
"weights_std": constant.standard_weights_std,
"agmax" : False,
"parameters" : parameters['RegNet'],
"cutout": False, "auto_augment": False, "no_basic_augment": False, "cutmix": True, "mixup" : False,
},
'RegNetX002-standard-agmax': { "backbone": 'RegNetX002',
"backbone_config": backbone_config['RegNetX002'],
"weights_std": constant.agmax_weights_std,
"agmax" : True,
"parameters" : parameters['RegNet'],
"cutout": False, "auto_augment": False, "no_basic_augment": False, "cutmix": False, "mixup" : False,
},
'RegNetX002-auto_augment-cutmix-agmax': { "backbone": 'RegNetX002',
"backbone_config": backbone_config['RegNetX002'],
"weights_std": constant.agmax_weights_std,
"agmax" : True,
"parameters" : parameters['RegNet'],
"cutout": False, "auto_augment": True, "no_basic_augment": False, "cutmix": True, "mixup" : False,
},
'RegNetX002-auto_augment-mixup-agmax': { "backbone": 'RegNetX002',
"backbone_config": backbone_config['RegNetX002'],
"weights_std": constant.agmax_weights_std,
"agmax" : True,
"parameters" : parameters['RegNet'],
"cutout": False, "auto_augment": True, "no_basic_augment": False, "cutmix": False, "mixup" : True,
},
'RegNetX002-auto_augment-cutmix': { "backbone": 'RegNetX002',
"backbone_config": backbone_config['RegNetX002'],
"weights_std": constant.standard_weights_std,
"agmax" : False,
"parameters" : parameters['RegNet'],
"cutout": False, "auto_augment": True, "no_basic_augment": False, "cutmix": True, "mixup" : False,
},
'RegNetX002-auto_augment-mixup': { "backbone": 'RegNetX002',
"backbone_config": backbone_config['RegNetX002'],
"weights_std": constant.standard_weights_std,
"agmax" : False,
"parameters" : parameters['RegNet'],
"cutout": False, "auto_augment": True, "no_basic_augment": False, "cutmix": False, "mixup" : True,
},
# RegNetY004
'RegNetY004-standard': { "backbone": 'RegNetY004',
"backbone_config": backbone_config['RegNetY004'],
"weights_std": constant.standard_weights_std,
"agmax" : False,
"parameters" : parameters['RegNet'],
"cutout": False, "auto_augment": False, "no_basic_augment": False, "cutmix": False, "mixup" : False,
},
'RegNetY004-cutmix': { "backbone": 'RegNetY004',
"backbone_config": backbone_config['RegNetY004'],
"weights_std": constant.standard_weights_std,
"agmax" : False,
"parameters" : parameters['RegNet'],
"cutout": False, "auto_augment": False, "no_basic_augment": False, "cutmix": True, "mixup" : False,
},
'RegNetY004-standard-agmax': { "backbone": 'RegNetY004',
"backbone_config": backbone_config['RegNetY004'],
"weights_std": constant.agmax_weights_std,
"agmax" : True,
"parameters" : parameters['RegNet'],
"cutout": False, "auto_augment": False, "no_basic_augment": False, "cutmix": False, "mixup" : False,
},
'RegNetY004-auto_augment-cutmix-agmax': { "backbone": 'RegNetY004',
"backbone_config": backbone_config['RegNetY004'],
"weights_std": constant.agmax_weights_std,
"agmax" : True,
"parameters" : parameters['RegNet'],
"cutout": False, "auto_augment": True, "no_basic_augment": False, "cutmix": True, "mixup" : False,
},
'RegNetY004-auto_augment-mixup-agmax': { "backbone": 'RegNetY004',
"backbone_config": backbone_config['RegNetY004'],
"weights_std": constant.agmax_weights_std,
"agmax" : True,
"parameters" : parameters['RegNet'],
"cutout": False, "auto_augment": True, "no_basic_augment": False, "cutmix": False, "mixup" : True,
},
'RegNetY004-auto_augment-cutmix': { "backbone": 'RegNetY004',
"backbone_config": backbone_config['RegNetY004'],
"weights_std": constant.standard_weights_std,
"agmax" : False,
"parameters" : parameters['RegNet'],
"cutout": False, "auto_augment": True, "no_basic_augment": False, "cutmix": True, "mixup" : False,
},
'RegNetY004-auto_augment-mixup': { "backbone": 'RegNetY004',
"backbone_config": backbone_config['RegNetY004'],
"weights_std": constant.standard_weights_std,
"agmax" : False,
"parameters" : parameters['RegNet'],
"cutout": False, "auto_augment": True, "no_basic_augment": False, "cutmix": False, "mixup" : True,
},
}
| 68.6 | 153 | 0.409621 |
c7bde259829ba295ad5078b7f30b72f3fddb4e13 | 1,608 | py | Python | examples/ws2812/main.py | ivankravets/pumbaa | 2a1869cc204e3128516ed6fa9f89529aedec1702 | [
"MIT"
] | 69 | 2016-09-04T18:36:18.000Z | 2021-07-04T21:51:54.000Z | examples/ws2812/main.py | ivankravets/pumbaa | 2a1869cc204e3128516ed6fa9f89529aedec1702 | [
"MIT"
] | 42 | 2016-09-02T20:10:19.000Z | 2020-07-01T05:54:01.000Z | examples/ws2812/main.py | ivankravets/pumbaa | 2a1869cc204e3128516ed6fa9f89529aedec1702 | [
"MIT"
] | 11 | 2016-09-29T14:33:23.000Z | 2021-02-28T19:30:49.000Z | #
# @section License
#
# The MIT License (MIT)
#
# Copyright (c) 2016-2017, Erik Moqvist
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use, copy,
# modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# This file is part of the Pumbaa project.
#
import board
from drivers import Ws2812
import time
PIXEL_MAX = 81
RED = PIXEL_MAX * b'\x00\xff\x00'
GREEN = PIXEL_MAX * b'\xff\x00\x00'
BLUE = PIXEL_MAX * b'\x00\x00\xff'
WS2812 = Ws2812(board.PIN_GPIO18)
while True:
print('Red.')
WS2812.write(RED)
time.sleep(0.5)
print('Green.')
WS2812.write(GREEN)
time.sleep(0.5)
print('Blue.')
WS2812.write(BLUE)
time.sleep(0.5)
| 29.236364 | 69 | 0.735075 |
c7be4754a949474c9764e2ad170025656a516b5f | 740 | py | Python | reports/urls.py | aysiu/manana | 8af8b57c72f6154affdb5f3a9a3469a49e5818fe | [
"Apache-2.0"
] | 9 | 2016-02-16T23:53:40.000Z | 2020-07-13T16:04:18.000Z | reports/urls.py | aysiu/manana | 8af8b57c72f6154affdb5f3a9a3469a49e5818fe | [
"Apache-2.0"
] | null | null | null | reports/urls.py | aysiu/manana | 8af8b57c72f6154affdb5f3a9a3469a49e5818fe | [
"Apache-2.0"
] | 4 | 2016-02-16T23:56:13.000Z | 2019-05-20T15:12:14.000Z | from django.conf.urls import patterns, include, url
urlpatterns = patterns('reports.views',
url(r'^index/*$', 'index'),
url(r'^dashboard/*$', 'dashboard'),
url(r'^$', 'index'),
url(r'^detail/(?P<serial>[^/]+)$', 'detail'),
url(r'^detailpkg/(?P<serial>[^/]+)/(?P<manifest_name>[^/]+)$', 'detail_pkg'),
url(r'^detailmachine/(?P<serial>[^/]+)$', 'machine_detail'),
url(r'^appleupdate/(?P<serial>[^/]+)$', 'appleupdate'),
url(r'^raw/(?P<serial>[^/]+)$', 'raw'),
url(r'^submit/(?P<submission_type>[^/]+)$', 'submit'),
url(r'^warranty/(?P<serial>[^/]+)$', 'warranty'),
# for compatibilty with MunkiReport scripts
url(r'^ip$', 'lookup_ip'),
url(r'^(?P<submission_type>[^/]+)$', 'submit'),
) | 41.111111 | 81 | 0.554054 |
c7be660a1e99ce3791843752d3993ac9fa123bdb | 5,812 | py | Python | BackEnd/venv/lib/python3.8/site-packages/pytest_flask/fixtures.py | MatheusBrodt/App_LabCarolVS | 9552149ceaa9bee15ef9a45fab2983c6651031c4 | [
"MIT"
] | null | null | null | BackEnd/venv/lib/python3.8/site-packages/pytest_flask/fixtures.py | MatheusBrodt/App_LabCarolVS | 9552149ceaa9bee15ef9a45fab2983c6651031c4 | [
"MIT"
] | 1 | 2019-08-20T18:42:14.000Z | 2019-08-20T18:42:14.000Z | BackEnd/venv/lib/python3.8/site-packages/pytest_flask/fixtures.py | MatheusBrodt/App_LabCarolVS | 9552149ceaa9bee15ef9a45fab2983c6651031c4 | [
"MIT"
] | 1 | 2019-08-20T18:11:48.000Z | 2019-08-20T18:11:48.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import time
import multiprocessing
import pytest
import socket
import signal
import os
import logging
try:
from urllib2 import URLError, urlopen
except ImportError:
from urllib.error import URLError
from urllib.request import urlopen
from flask import _request_ctx_stack
class LiveServer(object):
"""The helper class uses to manage live server. Handles creation and
stopping application in a separate process.
:param app: The application to run.
:param host: The host where to listen (default localhost).
:param port: The port to run application.
"""
def start(self):
"""Start application in a separate process."""
self._process = multiprocessing.Process(
target=worker,
args=(self.app, self.host, self.port)
)
self._process.start()
# We must wait for the server to start listening with a maximum
# timeout of 5 seconds.
timeout = 5
while timeout > 0:
time.sleep(1)
try:
urlopen(self.url())
timeout = 0
except URLError:
timeout -= 1
def url(self, url=''):
"""Returns the complete url based on server options."""
return 'http://%s:%d%s' % (self.host, self.port, url)
def stop(self):
"""Stop application process."""
if self._process:
if self.clean_stop and self._stop_cleanly():
return
if self._process.is_alive():
# If it's still alive, kill it
self._process.terminate()
def _stop_cleanly(self, timeout=5):
"""Attempts to stop the server cleanly by sending a SIGINT signal and waiting for
``timeout`` seconds.
:return: True if the server was cleanly stopped, False otherwise.
"""
try:
os.kill(self._process.pid, signal.SIGINT)
self._process.join(timeout)
return True
except Exception as ex:
logging.error('Failed to join the live server process: %r', ex)
return False
def _rewrite_server_name(server_name, new_port):
"""Rewrite server port in ``server_name`` with ``new_port`` value."""
sep = ':'
if sep in server_name:
server_name, port = server_name.split(sep, 1)
return sep.join((server_name, new_port))
def _make_accept_header(mimetype):
return [('Accept', mimetype)]
| 28.213592 | 89 | 0.635754 |
c7be8fc77e58c39c645eb0be54b3d89d725dc934 | 7,700 | py | Python | tableauserverclient/server/endpoint/endpoint.py | jorwoods/server-client-python | fefd6f18d8a6617829c6323879d2c3ed77a4cda6 | [
"CC0-1.0",
"MIT"
] | 1 | 2021-12-22T21:34:17.000Z | 2021-12-22T21:34:17.000Z | tableauserverclient/server/endpoint/endpoint.py | jorwoods/server-client-python | fefd6f18d8a6617829c6323879d2c3ed77a4cda6 | [
"CC0-1.0",
"MIT"
] | null | null | null | tableauserverclient/server/endpoint/endpoint.py | jorwoods/server-client-python | fefd6f18d8a6617829c6323879d2c3ed77a4cda6 | [
"CC0-1.0",
"MIT"
] | null | null | null | from .exceptions import (
ServerResponseError,
InternalServerError,
NonXMLResponseError,
EndpointUnavailableError,
)
from functools import wraps
from xml.etree.ElementTree import ParseError
from ..query import QuerySet
import logging
try:
from distutils2.version import NormalizedVersion as Version
except ImportError:
from distutils.version import LooseVersion as Version
logger = logging.getLogger("tableau.endpoint")
Success_codes = [200, 201, 202, 204]
def api(version):
    """Annotate the minimum supported version for an endpoint.
    Checks the version on the server object and compares normalized versions.
    It will raise an exception if the server version is > the version specified.
    Args:
        `version` minimum version that supports the endpoint. String.
    Raises:
        EndpointUnavailableError
    Returns:
        None
    Example:
        >>> @api(version="2.3")
        >>> def get(self, req_options=None):
        >>> ...
    """
    # NOTE(review): ``_decorator`` is referenced here but not defined in this
    # fragment -- its nested definition appears to have been lost in
    # extraction. Verify against the upstream tableauserverclient source.
    return _decorator
def parameter_added_in(**params):
    """Annotate minimum versions for new parameters or request options on an endpoint.
    The api decorator documents when an endpoint was added, this decorator annotates
    keyword arguments on endpoints that may control functionality added after an endpoint was introduced.
    The REST API will ignore invalid parameters in most cases, so this raises a warning instead of throwing
    an exception.
    Args:
        Key/value pairs of the form `parameter`=`version`. Kwargs.
    Raises:
        UserWarning
    Returns:
        None
    Example:
        >>> @api(version="2.0")
        >>> @parameter_added_in(no_extract='2.5')
        >>> def download(self, workbook_id, filepath=None, extract_only=False):
        >>> ...
    """
    # NOTE(review): ``_decorator`` is referenced here but not defined in this
    # fragment -- its nested definition appears to have been lost in
    # extraction. Verify against the upstream tableauserverclient source.
    return _decorator
class QuerysetEndpoint(Endpoint):
| 33.189655 | 118 | 0.632597 |
c7c0ec1f2d22d969372f765fb0d7aef4a98be04f | 4,617 | py | Python | spec/test_importer.py | lajohnston/anki-freeplane | 746e3dd714653df428f0541609b9c51e29cd2726 | [
"MIT"
] | 15 | 2016-10-06T00:27:26.000Z | 2022-03-04T04:24:50.000Z | spec/test_importer.py | eljay26/anki-freeplane | 746e3dd714653df428f0541609b9c51e29cd2726 | [
"MIT"
] | null | null | null | spec/test_importer.py | eljay26/anki-freeplane | 746e3dd714653df428f0541609b9c51e29cd2726 | [
"MIT"
] | 6 | 2016-11-08T06:55:47.000Z | 2021-03-24T22:15:14.000Z | import unittest
from freeplane_importer.importer import Importer
from mock import Mock
from mock import MagicMock
from mock import call
from freeplane_importer.model_not_found_exception import ModelNotFoundException
| 38.157025 | 90 | 0.719731 |
c7c11d6e36451e4175726cdb9543215d1fb0fff9 | 1,089 | py | Python | analysis/fitexp.py | mfkasim91/idcovid19 | 3e51b16354581a4e0defc635f837f93faff26afc | [
"BSD-3-Clause"
] | null | null | null | analysis/fitexp.py | mfkasim91/idcovid19 | 3e51b16354581a4e0defc635f837f93faff26afc | [
"BSD-3-Clause"
] | null | null | null | analysis/fitexp.py | mfkasim91/idcovid19 | 3e51b16354581a4e0defc635f837f93faff26afc | [
"BSD-3-Clause"
] | null | null | null | import argparse
import numpy as np
from scipy.stats import linregress
import matplotlib.pyplot as plt
# Fit an exponential growth model to the death counts via linear regression
# on log(deaths): log(deaths) ~ slope * xdays + offset.
parser = argparse.ArgumentParser()
parser.add_argument("--plot", action="store_const", default=False, const=True)
args = parser.parse_args()

# Columns 1..7 of the CSV, skipping the header and the first 33 data rows.
data = np.loadtxt("../data/data.csv", skiprows=1, usecols=list(range(1,8)), delimiter=",")[33:,:]
xdays = data[:,0] - np.mean(data[:,0])  # center the time axis
deaths = data[:,-1]
print(xdays, deaths)

logdeaths = np.log(deaths)
slope, offset, rval, pval, stderr = linregress(xdays, logdeaths)
# Recompute the standard error of the slope explicitly:
#   sqrt(SSE / (n - 2)) / sqrt(sum((x - mean(x))**2))
# BUGFIX: the residuals must use the model prediction slope*xdays + offset;
# the original erroneously used slope*logdeaths + offset.
residuals = logdeaths - (slope * xdays + offset)
stderr = np.sqrt(np.sum(residuals**2) / (len(logdeaths) - 2.)) / np.sqrt(np.sum((xdays - np.mean(xdays))**2))

if args.plot:
    # Best fit plus a one-standard-error envelope on the growth rate.
    plt.plot(xdays, np.exp(offset + slope*xdays), 'C0-')
    plt.plot(xdays, np.exp(offset + (slope+stderr)*xdays), 'C0--')
    plt.plot(xdays, np.exp(offset + (slope-stderr)*xdays), 'C0--')
    plt.plot(xdays, deaths, 'C0o')
    plt.gca().set_yscale("log")
    plt.show()

print("Slope: %.3e" % slope)
print("Doubling every: %.2f" % (np.log(2)/slope))  # doubling time in days
print("R-squared: %.3f" % (rval*rval))
print("Stderr: %.3e" % stderr)
c7c22a9174889ccacec698f1b477ffd20a7822b0 | 1,716 | py | Python | .venv/lib/python3.7/site-packages/jedi/inference/lazy_value.py | ITCRStevenLPZ/Proyecto2-Analisis-de-Algoritmos | 4acdbc423428fb2e0068720add69e7870c87929a | [
"Apache-2.0"
] | 76 | 2020-07-06T14:44:05.000Z | 2022-02-14T15:30:21.000Z | .venv/lib/python3.7/site-packages/jedi/inference/lazy_value.py | ITCRStevenLPZ/Proyecto2-Analisis-de-Algoritmos | 4acdbc423428fb2e0068720add69e7870c87929a | [
"Apache-2.0"
] | 20 | 2021-05-03T18:02:23.000Z | 2022-03-12T12:01:04.000Z | .venv/lib/python3.7/site-packages/jedi/inference/lazy_value.py | ITCRStevenLPZ/Proyecto2-Analisis-de-Algoritmos | 4acdbc423428fb2e0068720add69e7870c87929a | [
"Apache-2.0"
] | 11 | 2020-07-12T16:18:07.000Z | 2022-02-05T16:48:35.000Z | from jedi.inference.base_value import ValueSet, NO_VALUES
from jedi.common import monkeypatch
def get_merged_lazy_value(lazy_values):
if len(lazy_values) > 1:
return MergedLazyValues(lazy_values)
else:
return lazy_values[0]
| 27.677419 | 83 | 0.674825 |
c7c399f4aa408e4541e327b125cd44ba175da7ef | 1,901 | py | Python | percept/plot.py | joshleeb/PerceptronVis | 2d0e2f1969e11498533f190f5598c174b7584513 | [
"MIT"
] | null | null | null | percept/plot.py | joshleeb/PerceptronVis | 2d0e2f1969e11498533f190f5598c174b7584513 | [
"MIT"
] | null | null | null | percept/plot.py | joshleeb/PerceptronVis | 2d0e2f1969e11498533f190f5598c174b7584513 | [
"MIT"
] | null | null | null | import matplotlib.lines as lines
import matplotlib.pyplot as plt
COLOR_CLASSIFICATIONS = [
'black', # Unclassified
'blue', # Classified True (1)
'red' # Classified False (0)
]
def generate_line(ax, p0, p1, color='black', style='-'):
'''
Generates a line between points p0 and p1 which extends to be the width of
the plot.
'''
x0, y0 = p0
x1, y1 = p1
gradient = (y0 - y1) / (x0 - x1)
intercept = y1 - gradient * x1
x = ax.get_xlim()
data_y = [x[0] * gradient + intercept, x[1] * gradient + intercept]
return lines.Line2D(x, data_y, color=color, linestyle=style)
def get_boundary_plot_fn(weights):
'''
Gets the function used to represent and plot the line representative by the
perceptron's weights. The equation is: f(x) = -(w1/w2)x - w0/w2.
'''
return fn
def get_point_color(point, colors):
'''
Get's the color of the point to be displayed.
'''
if point.classification is None:
return colors[0]
return colors[1] if point.classification else colors[2]
def generate(title, class_boundary, weights, points, bounds):
'''
Generates a scatter plot of points with the actualy classification boundary
and the perceptron's classification boundary drawn in.
'''
boundary_fn = get_boundary_plot_fn(weights)
fig, ax = plt.subplots(figsize=(8, 8))
ax.set_xlim(bounds[0])
ax.set_ylim(bounds[1])
ax.set_title(title)
ax.add_line(generate_line(
ax, class_boundary[0], class_boundary[1], 'cyan', '--'
))
ax.add_line(generate_line(ax, (0, boundary_fn(0)), (1, boundary_fn(1))))
ax.scatter(
[pt.x for pt in points], [pt.y for pt in points],
c=[get_point_color(pt, COLOR_CLASSIFICATIONS) for pt in points], s=30
)
return fig
| 29.246154 | 79 | 0.637559 |
c7c444c1fb4481f333fa9c3252930b474ff296c2 | 27,392 | py | Python | openpype/hosts/flame/api/lib.py | j-cube/OpenPype | f0849cbd08070a320d19bb55b7e368189a57e3ab | [
"MIT"
] | 1 | 2022-02-08T15:40:41.000Z | 2022-02-08T15:40:41.000Z | openpype/hosts/flame/api/lib.py | zafrs/OpenPype | 4b8e7e1ed002fc55b31307efdea70b0feaed474f | [
"MIT"
] | 2 | 2022-03-18T01:46:03.000Z | 2022-03-18T01:46:16.000Z | openpype/hosts/flame/api/lib.py | zafrs/OpenPype | 4b8e7e1ed002fc55b31307efdea70b0feaed474f | [
"MIT"
] | null | null | null | import sys
import os
import re
import json
import pickle
import tempfile
import itertools
import contextlib
import xml.etree.cElementTree as cET
from copy import deepcopy
from xml.etree import ElementTree as ET
from pprint import pformat
from .constants import (
MARKER_COLOR,
MARKER_DURATION,
MARKER_NAME,
COLOR_MAP,
MARKER_PUBLISH_DEFAULT
)
import openpype.api as openpype
log = openpype.Logger.get_logger(__name__)
FRAME_PATTERN = re.compile(r"[\._](\d+)[\.]")
def get_current_project():
import flame
return flame.project.current_project
def get_current_sequence(selection):
import flame
process_timeline = None
if len(selection) == 1:
if isinstance(selection[0], flame.PySequence):
process_timeline = selection[0]
if isinstance(selection[0], flame.PySegment):
process_timeline = segment_to_sequence(selection[0])
else:
for segment in selection:
if isinstance(segment, flame.PySegment):
process_timeline = segment_to_sequence(segment)
break
return process_timeline
def rescan_hooks():
import flame
try:
flame.execute_shortcut('Rescan Python Hooks')
except Exception:
pass
def get_metadata(project_name, _log=None):
# TODO: can be replaced by MediaInfoFile class method
from adsk.libwiretapPythonClientAPI import (
WireTapClient,
WireTapServerHandle,
WireTapNodeHandle,
WireTapStr
)
policy_wiretap = GetProjectColorPolicy(_log=_log)
return policy_wiretap.process(project_name)
def get_segment_data_marker(segment, with_marker=None):
"""
Get openpype track item tag created by creator or loader plugin.
Attributes:
segment (flame.PySegment): flame api object
with_marker (bool)[optional]: if true it will return also marker object
Returns:
dict: openpype tag data
Returns(with_marker=True):
flame.PyMarker, dict
"""
for marker in segment.markers:
comment = marker.comment.get_value()
color = marker.colour.get_value()
name = marker.name.get_value()
if (name == MARKER_NAME) and (
color == COLOR_MAP[MARKER_COLOR]):
if not with_marker:
return json.loads(comment)
else:
return marker, json.loads(comment)
def set_segment_data_marker(segment, data=None):
"""
Set openpype track item tag to input segment.
Attributes:
segment (flame.PySegment): flame api object
Returns:
dict: json loaded data
"""
data = data or dict()
marker_data = get_segment_data_marker(segment, True)
if marker_data:
# get available openpype tag if any
marker, tag_data = marker_data
# update tag data with new data
tag_data.update(data)
# update marker with tag data
marker.comment = json.dumps(tag_data)
else:
# update tag data with new data
marker = create_segment_data_marker(segment)
# add tag data to marker's comment
marker.comment = json.dumps(data)
def set_publish_attribute(segment, value):
""" Set Publish attribute in input Tag object
Attribute:
segment (flame.PySegment)): flame api object
value (bool): True or False
"""
tag_data = get_segment_data_marker(segment)
tag_data["publish"] = value
# set data to the publish attribute
set_segment_data_marker(segment, tag_data)
def get_publish_attribute(segment):
""" Get Publish attribute from input Tag object
Attribute:
segment (flame.PySegment)): flame api object
Returns:
bool: True or False
"""
tag_data = get_segment_data_marker(segment)
if not tag_data:
set_publish_attribute(segment, MARKER_PUBLISH_DEFAULT)
return MARKER_PUBLISH_DEFAULT
return tag_data["publish"]
def create_segment_data_marker(segment):
""" Create openpype marker on a segment.
Attributes:
segment (flame.PySegment): flame api object
Returns:
flame.PyMarker: flame api object
"""
# get duration of segment
duration = segment.record_duration.relative_frame
# calculate start frame of the new marker
start_frame = int(segment.record_in.relative_frame) + int(duration / 2)
# create marker
marker = segment.create_marker(start_frame)
# set marker name
marker.name = MARKER_NAME
# set duration
marker.duration = MARKER_DURATION
# set colour
marker.colour = COLOR_MAP[MARKER_COLOR] # Red
return marker
def reset_segment_selection(sequence):
"""Deselect all selected nodes
"""
for ver in sequence.versions:
for track in ver.tracks:
if len(track.segments) == 0 and track.hidden:
continue
for segment in track.segments:
segment.selected = False
def get_reformated_filename(filename, padded=True):
"""
Return fixed python expression path
Args:
filename (str): file name
Returns:
type: string with reformated path
Example:
get_reformated_filename("plate.1001.exr") > plate.%04d.exr
"""
found = FRAME_PATTERN.search(filename)
if not found:
log.info("File name is not sequence: {}".format(filename))
return filename
padding = get_padding_from_filename(filename)
replacement = "%0{}d".format(padding) if padded else "%d"
start_idx, end_idx = found.span(1)
return replacement.join(
[filename[:start_idx], filename[end_idx:]]
)
def get_padding_from_filename(filename):
"""
Return padding number from Flame path style
Args:
filename (str): file name
Returns:
int: padding number
Example:
get_padding_from_filename("plate.0001.exr") > 4
"""
found = get_frame_from_filename(filename)
return len(found) if found else None
def get_frame_from_filename(filename):
"""
Return sequence number from Flame path style
Args:
filename (str): file name
Returns:
int: sequence frame number
Example:
def get_frame_from_filename(path):
("plate.0001.exr") > 0001
"""
found = re.findall(FRAME_PATTERN, filename)
return found.pop() if found else None
def get_clip_segment(flame_clip):
name = flame_clip.name.get_value()
version = flame_clip.versions[0]
track = version.tracks[0]
segments = track.segments
if len(segments) < 1:
raise ValueError("Clip `{}` has no segments!".format(name))
if len(segments) > 1:
raise ValueError("Clip `{}` has too many segments!".format(name))
return segments[0]
| 29.109458 | 79 | 0.593531 |
c7c5220186916c25d94c94c265afef27d8cdfced | 1,287 | py | Python | newanalysis/plot_performances.py | nriesterer/cogsci-individualization | da97bf0a6b53f440670e22ff591348f3d3fab230 | [
"MIT"
] | null | null | null | newanalysis/plot_performances.py | nriesterer/cogsci-individualization | da97bf0a6b53f440670e22ff591348f3d3fab230 | [
"MIT"
] | null | null | null | newanalysis/plot_performances.py | nriesterer/cogsci-individualization | da97bf0a6b53f440670e22ff591348f3d3fab230 | [
"MIT"
] | null | null | null | import sys
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
if len(sys.argv) != 3:
print('usage: python plot_performances.py <group_csv> <indiv_csv>')
exit()
group_file = sys.argv[1]
indiv_file = sys.argv[2]
# Load the data
df_group = pd.read_csv(group_file)
df_indiv = pd.read_csv(indiv_file)
df = pd.concat([df_group, df_indiv], sort=True)
# Prepare the data for plotting
plot_df = df.groupby(['model', 'id'], as_index=False)['hit'].agg('mean')
mfa_df = plot_df.loc[plot_df['model'] == 'MFA']
mfa_median = mfa_df['hit'].median()
plot_df = plot_df.loc[plot_df['model'] != 'MFA']
# Plot the data
sns.set(style='whitegrid', palette='colorblind')
plt.figure(figsize=(7, 3))
order = plot_df.groupby('model', as_index=False)['hit'].agg('median').sort_values('hit')['model']
colors = [('C0' if 'mReasoner' in x else 'C2') for x in order]
sns.boxplot(x='model', y='hit', data=plot_df, order=order, palette=colors)
plt.axhline(y=mfa_median, ls='--', color='C7', zorder=10)
plt.text(0.002, mfa_median + 0.015, 'MFA', color='C7', fontsize=10, transform=plt.gca().transAxes)
plt.xlabel('')
plt.yticks(np.arange(0, 1.1, 0.1))
plt.ylabel('Coverage Accuracy')
plt.tight_layout()
plt.savefig('visualizations/performances.pdf')
plt.show()
| 28.6 | 98 | 0.700855 |
c7c52b0c2a58b302536c4281e3d875f7998a6140 | 611 | py | Python | src/helpers.py | demirdagemir/thesis | 4a48bddf815c91729e27484548bb7bbf7ddeda64 | [
"MIT"
] | null | null | null | src/helpers.py | demirdagemir/thesis | 4a48bddf815c91729e27484548bb7bbf7ddeda64 | [
"MIT"
] | null | null | null | src/helpers.py | demirdagemir/thesis | 4a48bddf815c91729e27484548bb7bbf7ddeda64 | [
"MIT"
] | null | null | null | from Aion.utils.data import getADBPath
import subprocess
| 33.944444 | 97 | 0.680851 |
c7c5b3d53e6ad031199ab57c86f15523078de6cc | 1,969 | py | Python | tests/test_show.py | domi007/pigskin | c379284ebbbdb3a9df42de70227041e3c137b6dc | [
"MIT"
] | 6 | 2018-08-15T13:29:22.000Z | 2020-09-12T14:39:20.000Z | tests/test_show.py | domi007/pigskin | c379284ebbbdb3a9df42de70227041e3c137b6dc | [
"MIT"
] | 26 | 2018-08-15T13:08:49.000Z | 2020-01-12T22:27:38.000Z | tests/test_show.py | domi007/pigskin | c379284ebbbdb3a9df42de70227041e3c137b6dc | [
"MIT"
] | 4 | 2018-08-15T13:52:26.000Z | 2019-04-28T17:09:04.000Z | from collections import OrderedDict
import pytest
import vcr
try: # Python 2.7
# requests's ``json()`` function returns strings as unicode (as per the
# JSON spec). In 2.7, those are of type unicode rather than str. basestring
# was created to help with that.
# https://docs.python.org/2/library/functions.html#basestring
basestring = basestring
except NameError:
basestring = str
| 24.6125 | 79 | 0.584053 |
c7c66a8f8b52a73b0ced73b9208760d1628d3b03 | 3,165 | py | Python | integration_test/basic_op_capi.py | cl9200/nbase-arc | 47c124b11b0bb2e8a8428c6d628ce82dc24c1ade | [
"Apache-2.0"
] | null | null | null | integration_test/basic_op_capi.py | cl9200/nbase-arc | 47c124b11b0bb2e8a8428c6d628ce82dc24c1ade | [
"Apache-2.0"
] | null | null | null | integration_test/basic_op_capi.py | cl9200/nbase-arc | 47c124b11b0bb2e8a8428c6d628ce82dc24c1ade | [
"Apache-2.0"
] | null | null | null | #
# Copyright 2015 Naver Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import subprocess
import unittest
import testbase
import default_cluster
import util
import os
import constant
import config
import time
import telnetlib
import signal
| 28.00885 | 113 | 0.653081 |
c7c6a85099fcd6a3265a36a9b36bdf7fa4e9b9a7 | 5,509 | py | Python | examples/scripts/flopy_lake_example.py | andrewcalderwood/flopy | 0432ce96a0a5eec4d20adb4d384505632a2db3dc | [
"CC0-1.0",
"BSD-3-Clause"
] | 351 | 2015-01-03T15:18:48.000Z | 2022-03-31T09:46:43.000Z | examples/scripts/flopy_lake_example.py | andrewcalderwood/flopy | 0432ce96a0a5eec4d20adb4d384505632a2db3dc | [
"CC0-1.0",
"BSD-3-Clause"
] | 1,256 | 2015-01-15T21:10:42.000Z | 2022-03-31T22:43:06.000Z | examples/scripts/flopy_lake_example.py | andrewcalderwood/flopy | 0432ce96a0a5eec4d20adb4d384505632a2db3dc | [
"CC0-1.0",
"BSD-3-Clause"
] | 553 | 2015-01-31T22:46:48.000Z | 2022-03-31T17:43:35.000Z | import os
import sys
import numpy as np
import matplotlib.pyplot as plt
import flopy
if __name__ == "__main__":
success = run()
| 35.089172 | 103 | 0.626429 |
c7c6afa7ba07a568b76988ebc296a4b468c42738 | 11,428 | py | Python | P2/Caso2/clustering.py | Ocete/Inteligenica-de-Negocio | 0c3bb3914893c608790002743530aba535be7249 | [
"MIT"
] | null | null | null | P2/Caso2/clustering.py | Ocete/Inteligenica-de-Negocio | 0c3bb3914893c608790002743530aba535be7249 | [
"MIT"
] | null | null | null | P2/Caso2/clustering.py | Ocete/Inteligenica-de-Negocio | 0c3bb3914893c608790002743530aba535be7249 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
'''
Documentacin sobre clustering en Python:
http://scikit-learn.org/stable/modules/clustering.html
http://www.learndatasci.com/k-means-clustering-algorithms-python-intro/
http://hdbscan.readthedocs.io/en/latest/comparing_clustering_algorithms.html
https://joernhees.de/blog/2015/08/26/scipy-hierarchical-clustering-and-dendrogram-tutorial/
http://www.learndatasci.com/k-means-clustering-algorithms-python-intro/
'''
import time
import csv
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
from sklearn import preprocessing
from sklearn import metrics
from sklearn import cluster
from math import floor
import seaborn as sns
# Cosas bonitas por defecto
sns.set()
censo = pd.read_csv('../mujeres_fecundidad_INE_2018.csv')
'''
for col in censo:
missing_count = sum(pd.isnull(censo[col]))
if missing_count > 0:
print(col,missing_count)
#'''
#Se pueden reemplazar los valores desconocidos por un nmero
#censo = censo.replace(np.NaN,0)
# Sustituimos valores perdidos con la media
for col in censo:
censo[col].fillna(censo[col].mean(), inplace=True)
# select cases: rows with TRAREPRO == 1 and at most 6 NEMBTRAREPRO
subset = censo.loc[(censo['TRAREPRO']==1) & (censo['NEMBTRAREPRO']<=6)]
# select the variables used for clustering
usadas = ['NHIJOS', 'TIPOTRAREPRO', 'NMESESTRAREPRO', 'NEMBTRAREPRO']
X = subset[usadas]
# NOTE(review): ``norm_to_zero_one`` is not defined anywhere in this fragment --
# presumably a per-column min-max [0, 1] scaler lost in extraction; verify upstream.
X_normal = X.apply(norm_to_zero_one)
print('Tamao de la poblacin tras filtrado: ',len(X_normal.index))
for col in X:
missing_count = sum(pd.isnull(censo[col]))
if missing_count > 0:
print(col,missing_count, ' AFTER')
algoritmos = (('KMeans', cluster.KMeans(init='k-means++', n_clusters=5, n_init=5)),
('MeanShift', cluster.MeanShift(cluster_all=False, min_bin_freq=3)),
('Ward', cluster.AgglomerativeClustering(n_clusters=4, linkage='ward')),
('DBScan', cluster.DBSCAN(eps=0.35, min_samples=5)),
('Birch', cluster.Birch(threshold=0.1,n_clusters=5)))
cluster_predict = {}
calinski = {}
silh = {}
times = {}
n_clusters = {}
clusters_fig, clusters_axis = plt.subplots(3, 2, figsize=(10,10))
clusters_colors = ['gold', 'yellowgreen', 'lightcoral', 'lightskyblue', '#ffb347']
ijs = [(0,0), (0,1), (1,0), (1,1), (2,0), (2,1)]
for i_alg, par in enumerate(algoritmos):
name, alg = par
print('----- Ejecutando ' + name,)
t = time.time()
cluster_predict[name] = alg.fit_predict(X_normal)
tiempo = time.time() - t
times[name] = tiempo
metric_CH = metrics.calinski_harabasz_score(X_normal, cluster_predict[name])
calinski[name] = metric_CH
metric_SC = metrics.silhouette_score(X_normal, cluster_predict[name], metric='euclidean', sample_size=floor(len(X)), random_state=123456)
silh[name] = metric_SC
# Asignamos de clusters a DataFrame
clusters = pd.DataFrame(cluster_predict[name],index=X.index,columns=['cluster'])
if (name == 'KMeans'):
clusters_kmeans = clusters
alg_kmeans = alg
elif (name == 'Ward'):
clusters_ward = clusters
print("Tamao de cada cluster:")
size = clusters['cluster'].value_counts()
cluster_fractions = []
for num,i in size.iteritems():
print('%s: %5d (%5.2f%%)' % (num,i,100*i/len(clusters)))
cluster_fractions.append( 100*i/len(clusters) )
n_clusters[name] = len(size)
# Bar charts
if ( len(cluster_fractions) > 7 ):
cluster_fractions = cluster_fractions[0:6]
i, j = ijs[i_alg]
y_pos = np.arange(len(cluster_fractions))
labels = [ "Cluster " + str(i) for i in range(len(cluster_fractions)) ]
clusters_axis[i, j].bar(y_pos, cluster_fractions, tick_label=labels, color=clusters_colors)
clusters_axis[i, j].set_ylim(0, 100)
clusters_axis[i, j].set_title(name)
if (j == 0):
clusters_axis[i, j].set_ylabel("Cluster size (%)")
clusters_axis[2,1].remove()
#clusters_fig.savefig("clusters.png")
plt.show()
from prettytable import PrettyTable
header = ['Algoritmo', 'CH', 'Silh', 'Tiempo', 'Nmero de clusters']
tabla = PrettyTable(header)
for name, alg in algoritmos:
tabla.add_row([name,
"{0:.2f}".format(calinski[name]),
"{0:.2f}".format(silh[name]),
"{0:.2f}".format(times[name]),
n_clusters[name]])
print(tabla)
# Escribir los datos en un general.csv
'''
with open('general.csv', mode='w+', newline='') as file:
writer = csv.DictWriter(file, fieldnames=header)
writer.writeheader()
for name, _ in algoritmos:
writer.writerow({'Algoritmo': name,
'CH': "{0:.2f}".format(calinski[name]),
'Silh': "{0:.2f}".format(silh[name]),
'Tiempo': "{0:.2f}".format(times[name]),
'Nmero de clusters': n_clusters[name]})
#'''
# ----------------------- FUNCIONES DE DISTRIBUCIN ---------
print("---------- Preparando funciones de distribucin...")
n_clusters_ward = n_clusters['Ward']
n_var = len(usadas)
X_ward = pd.concat([X, clusters_ward], axis=1)
fig, axes = plt.subplots(n_clusters_ward, n_var, sharey=True, figsize=(15,15))
fig.subplots_adjust(wspace=0, hspace=0)
colors = sns.color_palette(palette=None, n_colors=n_clusters_ward, desat=None)
rango = []
for j in range(n_var):
rango.append([X_ward[usadas[j]].min(), X_ward[usadas[j]].max()])
for i in range(n_clusters_ward):
dat_filt = X_ward.loc[X_ward['cluster']==i]
for j in range(n_var):
#ax = sns.kdeplot(dat_filt[usadas[j]], label="", shade=True, color=colors[i], ax=axes[i,j])
ax = sns.boxplot(dat_filt[usadas[j]], color=colors[i], flierprops={'marker':'o','markersize':4}, ax=axes[i,j])
if (i==n_clusters_ward-1):
axes[i,j].set_xlabel(usadas[j])
else:
axes[i,j].set_xlabel("")
if (j==0):
axes[i,j].set_ylabel("Cluster "+str(i))
else:
axes[i,j].set_ylabel("")
axes[i,j].set_yticks([])
axes[i,j].grid(axis='x', linestyle='-', linewidth='0.2', color='gray')
axes[i,j].grid(axis='y', b=False)
ax.set_xlim(rango[j][0]-0.05*(rango[j][1]-rango[j][0]),rango[j][1]+0.05*(rango[j][1]-rango[j][0]))
plt.show()
#fig.savefig("boxes.png")
# ---------------- SCATTER MATRIX -----------------------
'''
plt.clf()
print("---------- Preparando el scatter matrix...")
# Se aade la asignacin de clusters como columna a X
variables = list(X_ward)
variables.remove('cluster')
sns_plot = sns.pairplot(X_ward, vars=variables, hue="cluster", palette='Paired', plot_kws={"s": 25}, diag_kind="hist")
sns_plot.fig.subplots_adjust(wspace=.03, hspace=.03);
# sns_plot.savefig("scatter_matrix.png")
plt.show()
#'''
# ----------------------- DENDOGRAMAS -----------------------
#En clustering hay que normalizar para las mtricas de distancia
# X_normal = preprocessing.normalize(X, norm='l2')
X_normal = (X - X.min() ) / (X.max() - X.min())
#Vamos a usar este jerrquico y nos quedamos con 100 clusters, es decir, cien ramificaciones del dendrograma
ward = cluster.AgglomerativeClustering(n_clusters=20, linkage='ward')
name, algorithm = ('Ward', ward)
cluster_predict = {}
k = {}
t = time.time()
cluster_predict[name] = algorithm.fit_predict(X_normal)
tiempo = time.time() - t
k[name] = len(set(cluster_predict[name]))
# Se convierte la asignacin de clusters a DataFrame
clusters = pd.DataFrame(cluster_predict['Ward'],index=X.index,columns=['cluster'])
# Y se aade como columna a X
X_cluster = pd.concat([X, clusters], axis=1)
# Filtro quitando los elementos (outliers) que caen en clusters muy pequeos en el jerrquico
min_size = 3
X_filtrado = X
'''
X_cluster[X_cluster.groupby('cluster').cluster.transform(len) > min_size]
k_filtrado = len(set(X_filtrado['cluster']))
print("De los {:.0f} clusters hay {:.0f} con ms de {:.0f} elementos. Del total de {:.0f} elementos, se seleccionan {:.0f}".format(k['Ward'],k_filtrado,min_size,len(X),len(X_filtrado)))
X_filtrado = X_filtrado.drop('cluster', 1)
X_filtrado = X
#'''
#Normalizo el conjunto filtrado
X_filtrado_normal = preprocessing.normalize(X_filtrado, norm='l2')
# Obtengo el dendrograma usando scipy, que realmente vuelve a ejecutar el clustering jerrquico
from scipy.cluster import hierarchy
linkage_array = hierarchy.ward(X_filtrado_normal)
plt.clf()
dendro = hierarchy.dendrogram(linkage_array,orientation='left', p=10, truncate_mode='lastp') #lo pongo en horizontal para compararlo con el generado por seaborn
# puedo usar "p=10,truncate_mode='lastp'" para cortar el dendrograma en 10 hojas
# Dendograma usando seaborn (que a su vez usa scipy) para incluir un heatmap
X_filtrado_normal_DF = pd.DataFrame(X_filtrado_normal, index=X_filtrado.index, columns=usadas)
# Aadimos una columna de label para indicar el cluster al que pertenece cada objeto
labels = X_ward['cluster']
lut = dict(zip(set(labels), sns.color_palette(palette="Blues_d", n_colors=n_clusters_ward)))
row_colors = pd.DataFrame(labels)['cluster'].map(lut)
clustergrid = sns.clustermap(X_filtrado_normal_DF, method='ward', row_colors=row_colors, col_cluster=False, figsize=(20,10), cmap="YlGnBu", yticklabels=False)
# Para aadir los labels reordenados. Ahora mismo no salen los colores en la
# columna donde deberian. Intuyo que esto se debe a que los ids no encajan.
#'''
ordering = clustergrid.dendrogram_row.reordered_ind
labels_list = [x for _, x in sorted(zip(ordering,labels), key=lambda pair: pair[0])]
labels = pd.Series(labels_list, index=X_filtrado_normal_DF.index, name='cluster')
lut = dict(zip(set(labels), sns.color_palette(palette="Blues_d", n_colors=n_clusters_ward)))
row_colors = pd.DataFrame(labels)['cluster'].map(lut)
clustergrid = sns.clustermap(X_filtrado_normal_DF, method='ward', row_colors=row_colors, col_cluster=False, figsize=(20,10), cmap="YlGnBu", yticklabels=False)
#'''
#plt.savefig("dendograma.png")
# ----------------------- HEATMAPS -----------------------
#'''
plt.figure(1)
centers = pd.DataFrame(alg_kmeans.cluster_centers_, columns=list(X))
centers_desnormal = centers.copy()
centers_desnormal = centers.drop([4])
# Calculamos los centroides
X = pd.concat([X, clusters_ward], axis=1)
for variable in list(centers):
for k_cluster in range(n_clusters_ward):
centroide = X.loc[(clusters_ward['cluster']==k_cluster)][variable].mean()
centers_desnormal.loc[k_cluster, variable] = centroide
# Normalizamos
centers_normal2 = centers_desnormal.copy()
centers_normal2 = (centers_normal2 - centers_normal2.min() ) / (centers_normal2.max() - centers_normal2.min())
import matplotlib.pyplot as plt
heatmap_fig, ax = plt.subplots(figsize=(10,10))
heatmap = sns.heatmap(centers_normal2, cmap="YlGnBu", annot=centers_desnormal, fmt='.3f')
# Para evitar que los bloques de arriba y abajo se corten por la mitad
bottom, top = ax.get_ylim()
ax.set_ylim(bottom + 0.5, top - 0.5)
#heatmap_fig.savefig("heatmap.png")
#'''
| 37.468852 | 187 | 0.651995 |
c7c71735421912226dadf924d3330fb19e4f6af5 | 9,029 | py | Python | signal_processing/ecg_preproc.py | DeepPSP/cpsc2020 | 47acb884ea1f2f819e564d8a17ad37001ed0df27 | [
"BSD-3-Clause"
] | 1 | 2021-12-07T11:44:48.000Z | 2021-12-07T11:44:48.000Z | signal_processing/ecg_preproc.py | wenh06/cpsc2020 | 47acb884ea1f2f819e564d8a17ad37001ed0df27 | [
"BSD-3-Clause"
] | null | null | null | signal_processing/ecg_preproc.py | wenh06/cpsc2020 | 47acb884ea1f2f819e564d8a17ad37001ed0df27 | [
"BSD-3-Clause"
] | 1 | 2021-05-25T14:56:02.000Z | 2021-05-25T14:56:02.000Z | """
preprocess of (single lead) ecg signal:
band pass --> remove baseline --> find rpeaks --> denoise (mainly deal with motion artefact)
TODO:
1. motion artefact detection,
and slice the signal into continuous (no motion artefact within) segments
2. to add
References:
-----------
[1] https://github.com/PIA-Group/BioSPPy
[2] to add
"""
import os, time
import multiprocessing as mp
from copy import deepcopy
from numbers import Real
from typing import Union, Optional, Any, List, Dict
import numpy as np
from easydict import EasyDict as ED
from scipy.ndimage.filters import median_filter
from scipy.signal.signaltools import resample
from scipy.io import savemat
# from scipy.signal import medfilt
# https://github.com/scipy/scipy/issues/9680
try:
from biosppy.signals.tools import filter_signal
except:
from references.biosppy.biosppy.signals.tools import filter_signal
from cfg import PreprocCfg
from .ecg_rpeaks import (
xqrs_detect, gqrs_detect, pantompkins,
hamilton_detect, ssf_detect, christov_detect, engzee_detect, gamboa_detect,
)
from .ecg_rpeaks_dl import seq_lab_net_detect
__all__ = [
"preprocess_signal",
"parallel_preprocess_signal",
"denoise_signal",
]
QRS_DETECTORS = {
"xqrs": xqrs_detect,
"gqrs": gqrs_detect,
"pantompkins": pantompkins,
"hamilton": hamilton_detect,
"ssf": ssf_detect,
"christov": christov_detect,
"engzee": engzee_detect,
"gamboa": gamboa_detect,
"seq_lab": seq_lab_net_detect,
}
DL_QRS_DETECTORS = [
"seq_lab",
]
def preprocess_signal(raw_sig:np.ndarray, fs:Real, config:Optional[ED]=None) -> Dict[str, np.ndarray]:
    """ finished, checked,

    Preprocess a single-lead ECG: optional resample to `cfg.fs`, baseline
    removal (two-pass median filtering), FIR band-pass filtering, and
    (optionally) R-peak detection.

    Parameters:
    -----------
    raw_sig: ndarray,
        the raw ecg signal
    fs: real number,
        sampling frequency of `raw_sig`
    config: dict, optional,
        extra process configuration,
        `PreprocCfg` will be updated by this `config`

    Returns:
    --------
    retval: dict,
        with items
        - 'filtered_ecg': the array of the processed ecg signal
        - 'rpeaks': the array of indices of rpeaks; empty if 'rpeaks' in `config` is not set

    NOTE:
    -----
    output (`retval`) are resampled to have sampling frequency
    equal to `config.fs` (if `config` has item `fs`) or `PreprocCfg.fs`
    """
    filtered_ecg = raw_sig.copy()

    cfg = deepcopy(PreprocCfg)
    cfg.update(config or {})

    if fs != cfg.fs:
        filtered_ecg = resample(filtered_ecg, int(round(len(filtered_ecg)*cfg.fs/fs)))
        # BUGFIX: after resampling, the signal lives at cfg.fs; all subsequent
        # filter orders, cutoff frequencies and R-peak detection must use
        # cfg.fs instead of the stale original sampling frequency.
        fs = cfg.fs

    # remove baseline via two cascaded median filters
    if 'baseline' in cfg.preproc:
        window1 = 2 * (cfg.baseline_window1 // 2) + 1  # window size must be odd
        window2 = 2 * (cfg.baseline_window2 // 2) + 1
        baseline = median_filter(filtered_ecg, size=window1, mode='nearest')
        baseline = median_filter(baseline, size=window2, mode='nearest')
        filtered_ecg = filtered_ecg - baseline

    # band-pass filter the signal (FIR, order ~0.3 s worth of taps)
    if 'bandpass' in cfg.preproc:
        filtered_ecg = filter_signal(
            signal=filtered_ecg,
            ftype='FIR',
            band='bandpass',
            order=int(0.3 * fs),
            sampling_rate=fs,
            frequency=cfg.filter_band,
        )['signal']

    if cfg.rpeaks and cfg.rpeaks.lower() not in DL_QRS_DETECTORS:
        # dl detectors not for parallel computing using `mp`
        detector = QRS_DETECTORS[cfg.rpeaks.lower()]
        rpeaks = detector(sig=filtered_ecg, fs=fs).astype(int)
    else:
        rpeaks = np.array([], dtype=int)

    retval = ED({
        "filtered_ecg": filtered_ecg,
        "rpeaks": rpeaks,
    })

    return retval
def parallel_preprocess_signal(raw_sig:np.ndarray, fs:Real, config:Optional[ED]=None, save_dir:Optional[str]=None, save_fmt:str='npy', verbose:int=0) -> Dict[str, np.ndarray]:
    """ finished, checked,
    preprocess a (long) ECG signal by cutting it into overlapping epochs,
    processing each epoch with `preprocess_signal` in parallel worker
    processes, then stitching the results back into one contiguous signal
    Parameters:
    -----------
    raw_sig: ndarray,
        the raw ecg signal
    fs: real number,
        sampling frequency of `raw_sig`
    config: dict, optional,
        extra process configuration,
        `PreprocCfg` will `update` this `config`
    save_dir: str, optional,
        directory for saving the outcome ('filtered_ecg' and 'rpeaks')
    save_fmt: str, default 'npy',
        format of the save files, 'npy' or 'mat'
    verbose: int, default 0,
        verbosity of printed timing information
    Returns:
    --------
    retval: dict,
        with items
        - 'filtered_ecg': the array of the processed ecg signal
        - 'rpeaks': the array of indices of rpeaks; empty if 'rpeaks' in `config` is not set
    NOTE:
    -----
    output (`retval`) are resampled to have sampling frequency
    equal to `config.fs` (if `config` has item `fs`) or `PreprocCfg.fs`
    """
    start_time = time.time()
    cfg = deepcopy(PreprocCfg)
    cfg.update(config or {})
    # epoch geometry (in samples): consecutive epochs start `epoch_forward`
    # apart and overlap their neighbours by `epoch_overlap`
    epoch_len = int(cfg.parallel_epoch_len * fs)
    epoch_overlap_half = int(cfg.parallel_epoch_overlap * fs) // 2
    epoch_overlap = 2 * epoch_overlap_half
    epoch_forward = epoch_len - epoch_overlap
    if len(raw_sig) <= 3 * epoch_len: # too short, no need for parallel computing
        retval = preprocess_signal(raw_sig, fs, cfg)
        if cfg.rpeaks and cfg.rpeaks.lower() in DL_QRS_DETECTORS:
            # deep-learning detectors are skipped inside `preprocess_signal`;
            # run them here on the whole signal
            rpeaks = QRS_DETECTORS[cfg.rpeaks.lower()](sig=raw_sig, fs=fs, verbose=verbose).astype(int)
            retval.rpeaks = rpeaks
        return retval
    # cut the signal into overlapping epochs
    l_epoch = [
        raw_sig[idx*epoch_forward: idx*epoch_forward + epoch_len] \
            for idx in range((len(raw_sig)-epoch_overlap)//epoch_forward)
    ]
    if cfg.parallel_keep_tail:
        tail_start_idx = epoch_forward * len(l_epoch) + epoch_overlap
        if len(raw_sig) - tail_start_idx < 30 * fs: # less than 30s, make configurable?
            # append to the last epoch
            l_epoch[-1] = np.append(l_epoch[-1], raw_sig[tail_start_idx:])
        else: # long enough
            tail_epoch = raw_sig[tail_start_idx-epoch_overlap:]
            l_epoch.append(tail_epoch)
    # process epochs in parallel, leaving a few cores free for the system
    cpu_num = max(1, mp.cpu_count()-3)
    with mp.Pool(processes=cpu_num) as pool:
        result = pool.starmap(
            func=preprocess_signal,
            iterable=[(e, fs, cfg) for e in l_epoch],
        )
    if cfg.parallel_keep_tail:
        tail_result = result[-1]
        result = result[:-1]
    # stitch the processed epochs back together, keeping only the central
    # (non-overlapping) part of each epoch; rpeak indices are shifted from
    # epoch-local offsets to whole-signal offsets
    filtered_ecg = result[0]['filtered_ecg'][:epoch_len-epoch_overlap_half]
    rpeaks = result[0]['rpeaks'][np.where(result[0]['rpeaks']<epoch_len-epoch_overlap_half)[0]]
    for idx, e in enumerate(result[1:]):
        filtered_ecg = np.append(
            filtered_ecg, e['filtered_ecg'][epoch_overlap_half: -epoch_overlap_half]
        )
        epoch_rpeaks = e['rpeaks'][np.where( (e['rpeaks'] >= epoch_overlap_half) & (e['rpeaks'] < epoch_len-epoch_overlap_half) )[0]]
        rpeaks = np.append(rpeaks, (idx+1)*epoch_forward + epoch_rpeaks)
    if cfg.parallel_keep_tail:
        filtered_ecg = np.append(filtered_ecg, tail_result['filtered_ecg'][epoch_overlap_half:])
        tail_rpeaks = tail_result['rpeaks'][np.where(tail_result['rpeaks'] >= epoch_overlap_half)[0]]
        rpeaks = np.append(rpeaks, len(result)*epoch_forward + tail_rpeaks)
    if verbose >= 1:
        # NOTE(review): `cfg.rpeaks.lower()` raises if `cfg.rpeaks` is empty/None;
        # other call sites guard with `cfg.rpeaks and ...` -- confirm intended
        if cfg.rpeaks.lower() in DL_QRS_DETECTORS:
            print(f"signal processing took {round(time.time()-start_time, 3)} seconds")
        else:
            print(f"signal processing and R peaks detection took {round(time.time()-start_time, 3)} seconds")
        start_time = time.time()
    if cfg.rpeaks and cfg.rpeaks.lower() in DL_QRS_DETECTORS:
        # deep-learning detectors run once on the full raw signal
        rpeaks = QRS_DETECTORS[cfg.rpeaks.lower()](sig=raw_sig, fs=fs, verbose=verbose).astype(int)
        if verbose >= 1:
            print(f"R peaks detection using {cfg.rpeaks} took {round(time.time()-start_time, 3)} seconds")
    if save_dir:
        # NOTE: this part is not tested
        os.makedirs(save_dir, exist_ok=True)
        if save_fmt.lower() == 'npy':
            np.save(os.path.join(save_dir, "filtered_ecg.npy"), filtered_ecg)
            np.save(os.path.join(save_dir, "rpeaks.npy"), rpeaks)
        elif save_fmt.lower() == 'mat':
            # save into 2 files, keep in accordance
            savemat(os.path.join(save_dir, "filtered_ecg.mat"), {"filtered_ecg": filtered_ecg}, format='5')
            savemat(os.path.join(save_dir, "rpeaks.mat"), {"rpeaks": rpeaks}, format='5')
    retval = ED({
        "filtered_ecg": filtered_ecg,
        "rpeaks": rpeaks,
    })
    return retval
"""
to check correctness of the function `parallel_preprocess_signal`,
say for record A01, one can call
>>> raw_sig = loadmat("./data/A01.mat")['ecg'].flatten()
>>> processed = parallel_preprocess_signal(raw_sig, 400)
>>> print(len(processed['filtered_ecg']) - len(raw_sig))
>>> start_t = int(3600*24.7811)
>>> len_t = 10
>>> fig, ax = plt.subplots(figsize=(20,6))
>>> ax.plot(processed['filtered_ecg'][start_t*400:(start_t+len_t)*400])
>>> for r in [p for p in processed['rpeaks'] if start_t*400 <= p < (start_t+len_t)*400]:
>>>    ax.axvline(r-start_t*400,c='red',linestyle='dashed')
>>> plt.show()
or one can use the 'dataset.py'
"""
| 34.59387 | 175 | 0.646694 |
c7c75c3cc68eb1ff8bc4c52efd3bee52faa60a5f | 761 | bzl | Python | ocaml/bootstrap.bzl | mobileink/obazl | eb9d10d1aac040dbc05a038265276e3ab3a52233 | [
"Apache-2.0"
] | null | null | null | ocaml/bootstrap.bzl | mobileink/obazl | eb9d10d1aac040dbc05a038265276e3ab3a52233 | [
"Apache-2.0"
] | null | null | null | ocaml/bootstrap.bzl | mobileink/obazl | eb9d10d1aac040dbc05a038265276e3ab3a52233 | [
"Apache-2.0"
] | null | null | null | ## mv to //:WORKSPACE.bzl ocaml_configure
load("//ocaml/_bootstrap:ocaml.bzl", _ocaml_configure = "ocaml_configure")
# load("//ocaml/_bootstrap:obazl.bzl", _obazl_configure = "obazl_configure")
load("//ocaml/_rules:ocaml_repository.bzl" , _ocaml_repository = "ocaml_repository")
# load("//ocaml/_rules:opam_configuration.bzl" , _opam_configuration = "opam_configuration")
# load("//ocaml/_toolchains:ocaml_toolchains.bzl",
# _ocaml_toolchain = "ocaml_toolchain",
# _ocaml_register_toolchains = "ocaml_register_toolchains")
# obazl_configure = _obazl_configure
ocaml_configure = _ocaml_configure
ocaml_repository = _ocaml_repository
# ocaml_toolchain = _ocaml_toolchain
# ocaml_register_toolchains = _ocaml_register_toolchains
| 38.05 | 96 | 0.768725 |
c7c963a523b032b23261574567ab5a4c018c9176 | 44 | py | Python | tsts.py | tedtroxell/metrician | d4164dbff8db5645ee8beca11dc55ba6c26c4cb6 | [
"MIT"
] | null | null | null | tsts.py | tedtroxell/metrician | d4164dbff8db5645ee8beca11dc55ba6c26c4cb6 | [
"MIT"
] | null | null | null | tsts.py | tedtroxell/metrician | d4164dbff8db5645ee8beca11dc55ba6c26c4cb6 | [
"MIT"
] | null | null | null | from metrician.explainations.tests import *
| 22 | 43 | 0.840909 |
c7c9b4be102dc7ada3fac5b424f329fc54878619 | 3,021 | py | Python | simple/facenet.py | taflahi/facenet | 64e74744437e18978782b497b42300b8d4a2342b | [
"MIT"
] | 5 | 2018-09-25T21:04:39.000Z | 2020-09-03T20:07:37.000Z | simple/facenet.py | SoloSynth1/facenet | 64e74744437e18978782b497b42300b8d4a2342b | [
"MIT"
] | null | null | null | simple/facenet.py | SoloSynth1/facenet | 64e74744437e18978782b497b42300b8d4a2342b | [
"MIT"
] | 14 | 2018-10-15T00:03:24.000Z | 2020-08-11T05:04:24.000Z | import tensorflow as tf
from .. src.align import detect_face
from .. src import facenet
from .. simple import download_model
import sys
import os
from os.path import expanduser
import copy
import cv2
import numpy as np
from scipy import spatial
minsize = 20 # minimum size of face
threshold = [0.6, 0.7, 0.7] # three steps's threshold
factor = 0.709 # scale factor
| 35.127907 | 96 | 0.620655 |
c7cb2a8553964cb9e86d2c3de96decefdde5eb6c | 89 | py | Python | tf2stats/__init__.py | TheAntecedent/Quintessence | f32dc1b11ded212121ebc0f925d15c845cb6ea4b | [
"MIT"
] | 1 | 2019-10-08T04:38:08.000Z | 2019-10-08T04:38:08.000Z | tf2stats/__init__.py | TheAntecedent/Quintessence | f32dc1b11ded212121ebc0f925d15c845cb6ea4b | [
"MIT"
] | 1 | 2021-04-30T20:51:05.000Z | 2021-04-30T20:51:05.000Z | tf2stats/__init__.py | TheAntecedent/Quintessence | f32dc1b11ded212121ebc0f925d15c845cb6ea4b | [
"MIT"
] | null | null | null | from .aggregated_stats import *
from .game_stats import *
from .stat_definitions import * | 29.666667 | 31 | 0.808989 |
c7cb514f4b628937e89d11a214a0267002c52972 | 1,515 | py | Python | tests/test_messages/test_inbound/test_manage_all_link_record.py | michaeldavie/pyinsteon | e5b2e2910f4eff1474f158051fa71f75c2077dd6 | [
"MIT"
] | 15 | 2020-07-08T05:29:14.000Z | 2022-03-24T18:56:26.000Z | tests/test_messages/test_inbound/test_manage_all_link_record.py | michaeldavie/pyinsteon | e5b2e2910f4eff1474f158051fa71f75c2077dd6 | [
"MIT"
] | 107 | 2019-06-03T09:23:02.000Z | 2022-03-31T23:12:38.000Z | tests/test_messages/test_inbound/test_manage_all_link_record.py | michaeldavie/pyinsteon | e5b2e2910f4eff1474f158051fa71f75c2077dd6 | [
"MIT"
] | 16 | 2019-01-24T01:09:49.000Z | 2022-02-24T03:48:42.000Z | """Test Manage All-Link Record."""
import unittest
from binascii import unhexlify
from pyinsteon.address import Address
from pyinsteon.constants import AckNak, ManageAllLinkRecordAction, MessageId
from pyinsteon.protocol.messages.all_link_record_flags import \
AllLinkRecordFlags
from tests import set_log_levels
from tests.utils import hex_to_inbound_message
# pylint: disable=no-member
| 30.3 | 76 | 0.654785 |
c7cbc44076f7cb93b253c24fadcf22b9899a01e8 | 5,054 | py | Python | Clock/Clock_Fig3F.py | chAwater/OpenFig | d37d59c6a77d76c7d8a9e8623ce94a95406f1843 | [
"MIT"
] | null | null | null | Clock/Clock_Fig3F.py | chAwater/OpenFig | d37d59c6a77d76c7d8a9e8623ce94a95406f1843 | [
"MIT"
] | null | null | null | Clock/Clock_Fig3F.py | chAwater/OpenFig | d37d59c6a77d76c7d8a9e8623ce94a95406f1843 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# coding: utf-8
# # Figure Info.
#
# | Title | Journal | Authors | Article Date | Code Date | Figure | Links |
# |:------|:-------:|:-------:|:------------:|:---------:|:------:|:-----:|
# |A microfluidic approach for experimentally modelling <br> the intercellular coupling system of a mammalian <br> circadian clock at single-cell level|Lab on a Chip|Kui Han|2020.03.02|2020.03.11| Fig3F | [DOI](https://doi.org/10.1039/D0LC00140F) |
#
# In[1]:
# data_file = 'SinPeaksDOWN.xls'
# new_inputs = pd.read_excel(data_file,header=None)
# new_inputs.to_csv('data.csv',index=False)
# In[2]:
import os, sys, warnings
import numpy as np
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
import matplotlib as mpl
mpl.rcParams['svg.fonttype'] = 'none'
sns.set_context(context='poster')
bigsize = 20
midsize = 18
smallsize = 14
hugesize = 24
# In[ ]:
# Load data: a grid of event times (hours); flatten and drop NaN padding cells
new_inputs = pd.read_csv('data.csv')
new_inputs = new_inputs.values.flatten()
new_inputs = new_inputs[~np.isnan(new_inputs)]
new_inputs = pd.Series(new_inputs)
# number of cells observed at each (integer) hour
dict_time = new_inputs.astype(int).value_counts()
# Set start and end days (a "day" runs from hour 12 to hour 36)
# NOTE(review): `np.float` is a deprecated alias of builtin `float`
# (removed in NumPy 1.24) -- consider replacing with `float`
d_min = np.floor( ((new_inputs-12)/24).astype(np.float).min() )
d_min = max(0, d_min)
d_max = np.ceil( ((new_inputs-12)/24).astype(np.float).max() )
# drug application time: hour 22 of every day (drawn as a bar below)
drug_time = 22 + np.arange(0,d_max+1)*24
# Set plot: one polar subplot per day, four per row
n_plot = int( d_max - d_min + 1 )
n_rows = int( np.ceil(n_plot/4) )
# per-day accumulator for (hour-within-day, count) records, filled below
ratio_dfs_dict = dict(zip(np.arange(n_plot), [pd.DataFrame()]*n_plot))
fig, axs = plt.subplots(
    ncols=4,nrows=n_rows,
    figsize=(18,n_rows*4),
    subplot_kw={'polar':True},
    gridspec_kw={'hspace':0.5},
)
axs = axs.flatten()
# Plot data for each 24h: route every hour bucket to its day's subplot
for i_time in dict_time.keys():
    if i_time<12:
        continue
    # index of the day (and subplot) this hour belongs to
    d_time = int( np.floor((i_time-12)/24)-d_min )
    # In one day: record (hour-within-day, count) for the quartile stats later
    # NOTE(review): `DataFrame.append` is deprecated (removed in pandas 2.0);
    # consider `pd.concat` when upgrading pandas
    ratio_df = ratio_dfs_dict[d_time]
    ratio_df = ratio_df.append(
        {
            'ref_time' : ((i_time-12) % 24),
            'n' : dict_time[i_time]
        }, ignore_index=True)
    ratio_dfs_dict[d_time] = ratio_df
    # Date to r: convert hours to angles (radians) on the 24h clock face
    t_time = (((i_time-12) % 24)/24)*2*np.pi
    t_drug = ((1+drug_time[d_time]-12)%24)/24*2*np.pi
    # drug window drawn as a bar, cell counts as size-scaled dots
    axs[d_time].bar(t_drug, 1, width=2/24*2*np.pi, bottom=0.0, color='bisque', edgecolor='k', alpha=0.7, zorder=10)
    axs[d_time].scatter(t_time, 0.5, color='dodgerblue', s=dict_time[i_time]*30, alpha=0.7, zorder=20)
# Plot info for each 24h: tick labels, Q3 percentage annotation, and (on the
# first unused subplot) a size legend
for i,ax in enumerate(axs):
    # angular tick labels: absolute hours of this day, every 6 h
    labels = (12+np.arange(24*(d_min+i),24*(d_min+i+1),6)).astype(int).astype(str)
    labels[0] = str( int(labels[0])+24 ) + ' / ' + labels[0]
    labels[2] = labels[2] + ' h'
    ax.set_xticklabels( labels, fontsize=midsize )
    ax.set_yticklabels([])
    ax.tick_params(axis='x', pad=0)
    ratio_df = ratio_dfs_dict[i]
    if ratio_df.shape[0]!=0:
        # bin hours-within-day into four quartile windows and sum the counts
        r_df = pd.concat(
            [
                ratio_df['n'],
                pd.cut(
                    ratio_df['ref_time'],
                    bins =[0, 3, 10, 14, 24 ],
                    labels=[ 'Q1','Q2','Q3','Q4'],
                    include_lowest=True,
                )
            ], axis=1
        ).groupby('ref_time').sum()
        # percentage of cells falling in the Q3 window (hours 10-14)
        r = np.round( 100*(r_df.loc['Q3']/r_df.sum())['n'], 1 )
        ax.text( 12/24*2*np.pi, -0.5, str(r)+'%', fontsize=smallsize, ha='center', va='center', color='tomato' )
        # highlight the Q3 window as a red arc near the centre
        ax.plot(
            np.linspace(10, 14, 20)/24*2*np.pi,
            [0.05]*20,
            lw=5, color='tomato',alpha=0.7,
            zorder=20,
        )
        # clock-face orientation: 0 at top, clockwise
        ax.set_thetagrids([0,90,180,270])
        ax.set_theta_zero_location('N')
        ax.set_theta_direction(-1)
        ax.set_rgrids([])
        ax.set_rlim(0,1)
        ax.set_rorigin(-1.0)
        # curved arrow indicating the direction of time
        ax.annotate(
            s='',
            xytext=(np.pi/8,1),
            xy=(np.pi*3/8,1),
            size=40,
            arrowprops={
                'facecolor':'black',
                'arrowstyle':'->',
                'connectionstyle':"arc3,rad=-0.17",
            },
        )
        ax.text(np.pi/4,1,'Time',fontsize=smallsize, rotation=-40, ha='center',va='bottom')
    else:
        # empty day slot: repurpose the axes as a marker-size legend
        lgs = []
        for s in np.arange(5,30,5):
            lg = ax.scatter(s, 0.5, color='dodgerblue', s=s*30, alpha=0.7, zorder=1, label=s)
            lgs.append(lg)
        lg = ax.scatter(1,1,marker='s',s=300, color='bisque', edgecolor='k', alpha=0.7, label='Drug')
        lgs.append(lg)
        ax.set_rlim(0,0.1)
        ax.axis('off')
        ax.legend(
            handles=lgs,
            ncol=2,
            title='# of cells',
            title_fontsize=midsize,
            fontsize=smallsize,
            frameon=False,
            labelspacing=1.5,
            handletextpad=0.2,
            columnspacing=0.4,
        )
# Finalize the layout and export the figure in vector (SVG) and raster (PNG) form
fig.subplots_adjust(hspace=0.3)
fig.suptitle('Cells distribution under drug treatment', y=1, fontsize=hugesize)
fig.savefig('Clock_Fig3F.svg', transparent=True, bbox_inches='tight')
fig.savefig('Clock_Fig3F.png', transparent=True, bbox_inches='tight')
plt.show()
# In[ ]:
| 28.234637 | 248 | 0.564108 |
c7cbd8f6da109df8e878fcc548912f6a3815a1c2 | 10,733 | py | Python | rameniaapp/views/report.py | awlane/ramenia | 6bf8e75a1f279ac584daa4ee19927ffccaa67551 | [
"MIT"
] | null | null | null | rameniaapp/views/report.py | awlane/ramenia | 6bf8e75a1f279ac584daa4ee19927ffccaa67551 | [
"MIT"
] | null | null | null | rameniaapp/views/report.py | awlane/ramenia | 6bf8e75a1f279ac584daa4ee19927ffccaa67551 | [
"MIT"
] | null | null | null | from django.shortcuts import render, HttpResponse, HttpResponseRedirect
from django.template import loader
from django.conf import settings
from django.contrib.auth.models import User
from rameniaapp.models import ReviewReport, ProfileReport, NoodleReport, Report, Review, Profile, Noodle
from django.views.generic import ListView, FormView, CreateView
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.auth.decorators import login_required
from rameniaapp.decorators import user_is_moderator
from rameniaapp.actionhookutils import dispatch_hook
from rameniaapp.utils import UserIsModeratorMixin
from django.forms.widgets import Select
from django.contrib import messages
def get_return_path(report):
'''Util method to return a correct redirect path'''
if report.type == "RV":
return "review"
elif report.type == "ND":
return "noodle"
elif report.type == "PF":
return "profile" | 39.171533 | 104 | 0.644461 |
c7cce7b123c5282102e29d889ac9141ac4ccb76e | 10,135 | py | Python | pyparser.py | ddurvaux/PyUnpacker | 13c90379c26c4a9ae8c2c4d94e26f2de9709ae1d | [
"MIT"
] | null | null | null | pyparser.py | ddurvaux/PyUnpacker | 13c90379c26c4a9ae8c2c4d94e26f2de9709ae1d | [
"MIT"
] | 1 | 2017-02-06T11:06:11.000Z | 2017-02-06T11:07:29.000Z | pyparser.py | ddurvaux/PyUnpacker | 13c90379c26c4a9ae8c2c4d94e26f2de9709ae1d | [
"MIT"
] | null | null | null | #!/usr/bin/python
#
# This tool is an attempt to automate some taks related
# to malware unpacking.
#
# Most (if not all) of the tricks used in this tool
# directly comes from an excellent course given
# by Nicolas Brulez (@nicolasbrulez)
#
# Tool developped by David DURVAUX for Autopsit
# (commercial brand of N-Labs sprl)
#
# TODO
# - everything
# - VirusTotal Support
# - dynamic analysis (GDB? Valgring?)
# - static code analysis with Radare2
# - add argument for PEID
# - save status / restore (config/analysis)
# - extract fucnction without offset for comparison of samples
# - ..
#
#
__author__ = 'David DURVAUX'
__contact__ = 'david@autopsit.org'
__version__ = '0.01'
# Imports required by this tool
import os
import sys
import json
import pefile
import peutils
import argparse
from distorm3 import Decode, Decode16Bits, Decode32Bits, Decode64Bits, Decompose, DecomposeGenerator, DF_STOP_ON_FLOW_CONTROL
# Imports part of this tool
import static.vivframework
# --------------------------------------------------------------------------- #
# REPRESENTATION OF THE CONFIGURATION
# --------------------------------------------------------------------------- #
# --------------------------------------------------------------------------- #
# REPRESENTATION OF THE INFO RETRIEVED
# --------------------------------------------------------------------------- #
# --------------------------------------------------------------------------- #
# STATIC ANALYSIS OF BINARY
# --------------------------------------------------------------------------- #
# --------------------------------------------------------------------------- #
# MAIN SECTION OF CODE
# --------------------------------------------------------------------------- #
def start_analysis(binary, configuration):
    """Run the static-analysis pipeline on `binary` with the given configuration."""
    analysis = StaticAnalysis(binary, configuration)
    # run every analysis pass in order
    steps = (
        analysis.analyzeSections,
        analysis.callPEiD,
        analysis.graphSearch,
        analysis.isAntiDebug,
        analysis.searchVirtualAlloc,
        analysis.getPerFunctionHash,  # TEST
    )
    for step in steps:
        step()
    #analysis.decompile() # TEST
    return
def main():
    """Command-line entry point: parse arguments, build the configuration
    holder, and launch the analysis of the requested binary.
    NOTE: this file uses Python 2 syntax (``print`` statements).
    """
    # Argument definition
    parser = argparse.ArgumentParser(description='Analyse binaries and try to help with deobfuscation')
    parser.add_argument('-b', '--binary', help='Binary to analyze')
    parser.add_argument('-f', '--force', help='Force a fresh analysis, no restoration of previous work', action="store_true")
    parser.add_argument('-y', '--yara', help='Path to YARA DB to use to scan binary')
    parser.add_argument('-viv', '--vivisect', help='Path to vivisect installation')
    # create a configuration holder
    configuration = Configuration()
    # Start the fun part :)
    args = parser.parse_args()
    # if force flag is defined, change behaviour
    if args.force:
        configuration.force = True
    # set YARA DB signature
    if args.yara:
        if os.path.isfile(args.yara):
            configuration.signatures = args.yara
        else:
            print "ERROR: %s not found!" % args.yara
            exit()
    # TEST - save configuration for re-use
    # NOTE(review): `load()` runs unconditionally, even when --force asked for
    # a fresh analysis -- confirm intended
    #configuration.save()
    configuration.load()
    # set Vivisect path and Initialize
    # currently only vivisect is supported
    # this code need to be changed if other libraries get supported later
    if args.vivisect:
        if os.path.isdir(args.vivisect):
            sys.path.append(args.vivisect)
        else:
            print "ERROR: %s not found!" % args.vivisect
            exit()
    # Check if an output directory is set
    binary = None
    if args.binary:
        if os.path.isfile(args.binary):
            binary = args.binary
            start_analysis(binary, configuration)
        # NOTE(review): a -b path that does not exist is silently ignored
        # (no message, no exit) -- confirm intended
    else:
        print "You need to specify a file to analyze"
        exit()
# Run the CLI only when executed as a script (not on import)
if __name__ == "__main__":
    main()
# --------------------------------------------------------------------------- #
# That's all folk ;)
# --------------------------------------------------------------------------- # | 29.207493 | 152 | 0.651998 |
c7cf1b7d56bb02ccf14d9d4fb7fbc22544c1690f | 512 | py | Python | mjml/elements/head/mj_style.py | ESA-CCI-ODP/mjml-stub | ffd824923de85f3c02fca7f83ef6b540be048414 | [
"MIT"
] | 23 | 2020-10-02T14:52:21.000Z | 2022-03-24T16:05:21.000Z | mjml/elements/head/mj_style.py | ESA-CCI-ODP/mjml-stub | ffd824923de85f3c02fca7f83ef6b540be048414 | [
"MIT"
] | 17 | 2020-10-07T14:48:06.000Z | 2022-03-18T13:56:11.000Z | mjml/elements/head/mj_style.py | ESA-CCI-ODP/mjml-stub | ffd824923de85f3c02fca7f83ef6b540be048414 | [
"MIT"
] | 8 | 2021-01-13T11:54:41.000Z | 2022-03-10T15:50:55.000Z |
from ._head_base import HeadComponent
__all__ = ['MjStyle']
| 24.380952 | 89 | 0.597656 |
c7cf29c510e55652c76da9423af99e7754022e49 | 3,399 | py | Python | model_zoo/official/nlp/bert/src/sample_process.py | i4oolish/mindspore | dac3be31d0f2c0a3516200f47af30980e566601b | [
"Apache-2.0"
] | 2 | 2020-08-12T16:14:40.000Z | 2020-12-04T03:05:57.000Z | model_zoo/official/nlp/bert/src/sample_process.py | dilingsong/mindspore | 4276050f2494cfbf8682560a1647576f859991e8 | [
"Apache-2.0"
] | null | null | null | model_zoo/official/nlp/bert/src/sample_process.py | dilingsong/mindspore | 4276050f2494cfbf8682560a1647576f859991e8 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""process txt"""
import re
import json
def process_one_example_p(tokenizer, text, max_seq_len=128):
    """Convert one raw text line into BERT-style prediction features.

    Each character of `text` is tokenized individually, wrapped with the
    special `[CLS]` / `[SEP]` tokens, converted to ids and zero-padded up
    to `max_seq_len`. (The original version also built a `label_ids` list
    and `**NULL**` pad tokens that were never used or returned; that dead
    code has been removed.)

    Args:
        tokenizer: object exposing `tokenize(word)` and
            `convert_tokens_to_ids(tokens)`.
        text: the input sentence, treated as a sequence of characters.
        max_seq_len: fixed output length, including the special tokens.

    Returns:
        tuple `(input_ids, input_mask, segment_ids)`, each a list of
        length `max_seq_len`.
    """
    tokens = []
    for word in list(text):
        tokens.extend(tokenizer.tokenize(word))
    # reserve room for [CLS] and [SEP]
    if len(tokens) >= max_seq_len - 1:
        tokens = tokens[0:(max_seq_len - 2)]
    ntokens = ["[CLS]"] + tokens + ["[SEP]"]
    input_ids = tokenizer.convert_tokens_to_ids(ntokens)
    input_mask = [1] * len(input_ids)
    # single-sentence input: every segment id is 0
    segment_ids = [0] * len(ntokens)
    # zero-pad all three sequences to the fixed length
    pad_len = max_seq_len - len(input_ids)
    input_ids.extend([0] * pad_len)
    input_mask.extend([0] * pad_len)
    segment_ids.extend([0] * pad_len)
    assert len(input_ids) == max_seq_len
    assert len(input_mask) == max_seq_len
    assert len(segment_ids) == max_seq_len
    feature = (input_ids, input_mask, segment_ids)
    return feature
def label_generation(text="", probs=None, label2id_file=""):
    """Decode per-character tag-id probabilities into labeled entity spans.

    Args:
        text: the original sentence (one character per tag position).
        probs: sequence of tag ids for `text`; position 0 corresponds to
            the `[CLS]` slot, so `probs[1:len(text)+1]` are consumed.
        label2id_file: path to a JSON file mapping tag names
            (e.g. "B-PER") to integer ids.

    Returns:
        dict mapping entity label -> {entity text: [[start, end]]} with
        inclusive character offsets into `text`.
    """
    data = [text]
    probs = [probs]
    result = []
    # BUGFIX: read the mapping with a context manager so the file handle is
    # closed deterministically (was `json.loads(open(label2id_file).read())`)
    with open(label2id_file) as f:
        label2id = json.load(f)
    id2label = [k for k, v in label2id.items()]
    # map each tag id back to its tag name, skipping the [CLS] slot
    for index, prob in enumerate(probs):
        for v in prob[1:len(data[index]) + 1]:
            result.append(id2label[int(v)])
    labels = {}
    start = None
    index = 0
    # walk characters with their decoded tags: a tag starting with B/S opens
    # a new span (closing any open one), a tag starting with O closes it
    for _, t in zip("".join(data), result):
        if re.search("^[BS]", t):
            if start is not None:
                label = result[index - 1][2:]
                if labels.get(label):
                    te_ = text[start:index]
                    # NOTE(review): entities with identical text under the same
                    # label overwrite each other -- confirm intended
                    labels[label][te_] = [[start, index - 1]]
                else:
                    te_ = text[start:index]
                    labels[label] = {te_: [[start, index - 1]]}
            start = index
        if re.search("^O", t):
            if start is not None:
                label = result[index - 1][2:]
                if labels.get(label):
                    te_ = text[start:index]
                    labels[label][te_] = [[start, index - 1]]
                else:
                    te_ = text[start:index]
                    labels[label] = {te_: [[start, index - 1]]}
            start = None
        index += 1
    # flush a span that is still open at the end of the text
    if start is not None:
        label = result[start][2:]
        if labels.get(label):
            te_ = text[start:index]
            labels[label][te_] = [[start, index - 1]]
        else:
            te_ = text[start:index]
            labels[label] = {te_: [[start, index - 1]]}
    return labels
c7d08a1b7fd50820c50ef7603b8e08a3f497a3ac | 2,273 | py | Python | lang_model/data_loader.py | alex44jzy/FancyALMLDLNLP | c55a67a51de72339f4ab13bd46008eb418d293a3 | [
"MIT"
] | null | null | null | lang_model/data_loader.py | alex44jzy/FancyALMLDLNLP | c55a67a51de72339f4ab13bd46008eb418d293a3 | [
"MIT"
] | null | null | null | lang_model/data_loader.py | alex44jzy/FancyALMLDLNLP | c55a67a51de72339f4ab13bd46008eb418d293a3 | [
"MIT"
] | null | null | null | import torch
from torch.nn import functional as F
from torch.utils.data import Dataset
from gensim.corpora.dictionary import Dictionary
| 37.262295 | 114 | 0.635724 |
c7d12defacc5fa8896212434511fb502a03f0a3b | 74,691 | py | Python | models_nonconvex_simple2/ndcc13persp.py | grossmann-group/pyomo-MINLP-benchmarking | 714f0a0dffd61675649a805683c0627af6b4929e | [
"MIT"
] | null | null | null | models_nonconvex_simple2/ndcc13persp.py | grossmann-group/pyomo-MINLP-benchmarking | 714f0a0dffd61675649a805683c0627af6b4929e | [
"MIT"
] | null | null | null | models_nonconvex_simple2/ndcc13persp.py | grossmann-group/pyomo-MINLP-benchmarking | 714f0a0dffd61675649a805683c0627af6b4929e | [
"MIT"
] | null | null | null | # MINLP written by GAMS Convert at 08/20/20 01:30:45
#
# Equation counts
# Total E G L N X C B
# 297 170 42 85 0 0 0 0
#
# Variable counts
# x b i s1s s2s sc si
# Total cont binary integer sos1 sos2 scont sint
# 673 631 42 0 0 0 0 0
# FX 0 0 0 0 0 0 0 0
#
# Nonzero counts
# Total const NL DLL
# 2479 2353 126 0
#
# Reformulation has removed 1 variable and 1 equation
from pyomo.environ import *
model = m = ConcreteModel()
m.x1 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x2 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x3 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x4 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x5 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x6 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x7 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x8 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x9 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x10 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x11 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x12 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x13 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x14 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x15 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x16 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x17 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x18 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x19 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x20 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x21 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x22 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x23 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x24 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x25 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x26 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x27 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x28 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x29 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x30 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x31 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x32 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x33 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x34 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x35 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x36 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x37 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x38 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x39 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x40 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x41 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x42 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x43 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x44 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x45 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x46 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x47 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x48 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x49 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x50 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x51 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x52 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x53 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x54 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x55 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x56 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x57 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x58 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x59 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x60 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x61 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x62 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x63 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x64 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x65 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x66 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x67 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x68 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x69 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x70 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x71 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x72 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x73 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x74 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x75 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x76 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x77 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x78 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x79 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x80 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x81 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x82 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x83 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x84 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x85 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x86 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x87 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x88 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x89 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x90 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x91 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x92 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x93 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x94 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x95 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x96 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x97 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x98 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x99 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x100 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x101 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x102 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x103 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x104 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x105 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x106 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x107 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x108 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x109 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x110 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x111 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x112 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x113 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x114 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x115 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x116 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x117 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x118 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x119 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x120 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x121 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x122 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x123 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x124 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x125 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x126 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x127 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x128 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x129 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x130 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x131 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x132 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x133 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x134 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x135 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x136 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x137 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x138 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x139 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x140 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x141 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x142 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x143 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x144 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x145 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x146 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x147 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x148 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x149 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x150 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x151 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x152 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x153 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x154 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x155 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x156 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x157 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x158 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x159 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x160 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x161 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x162 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x163 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x164 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x165 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x166 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x167 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x168 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x169 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x170 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x171 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x172 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x173 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x174 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x175 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x176 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x177 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x178 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x179 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x180 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x181 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x182 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x183 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x184 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x185 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x186 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x187 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x188 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x189 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x190 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x191 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x192 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x193 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x194 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x195 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x196 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x197 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x198 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x199 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x200 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x201 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x202 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x203 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x204 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x205 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x206 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x207 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x208 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x209 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x210 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x211 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x212 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x213 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x214 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x215 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x216 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x217 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x218 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x219 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x220 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x221 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x222 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x223 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x224 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x225 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x226 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x227 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x228 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x229 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x230 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x231 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x232 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x233 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x234 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x235 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x236 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x237 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x238 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x239 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x240 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x241 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x242 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x243 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x244 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x245 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x246 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x247 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x248 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x249 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x250 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x251 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x252 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x253 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x254 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x255 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x256 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x257 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x258 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x259 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x260 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x261 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x262 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x263 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x264 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x265 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x266 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x267 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x268 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x269 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x270 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x271 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x272 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x273 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x274 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x275 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x276 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x277 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x278 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x279 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x280 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x281 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x282 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x283 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x284 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x285 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x286 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x287 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x288 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x289 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x290 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x291 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x292 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x293 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x294 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x295 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x296 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x297 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x298 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x299 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x300 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x301 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x302 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x303 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x304 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x305 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x306 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x307 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x308 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x309 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x310 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x311 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x312 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x313 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x314 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x315 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x316 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x317 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x318 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x319 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x320 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x321 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x322 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x323 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x324 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x325 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x326 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x327 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x328 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x329 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x330 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x331 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x332 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x333 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x334 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x335 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x336 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x337 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x338 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x339 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x340 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x341 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x342 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x343 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x344 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x345 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x346 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x347 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x348 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x349 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x350 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x351 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x352 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x353 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x354 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x355 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x356 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x357 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x358 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x359 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x360 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x361 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x362 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x363 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x364 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x365 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x366 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x367 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x368 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x369 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x370 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x371 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x372 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x373 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x374 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x375 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x376 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x377 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x378 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x379 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x380 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x381 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x382 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x383 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x384 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x385 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x386 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x387 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x388 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x389 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x390 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x391 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x392 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x393 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x394 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x395 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x396 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x397 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x398 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x399 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x400 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x401 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x402 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x403 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x404 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x405 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x406 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x407 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x408 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x409 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x410 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x411 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x412 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x413 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x414 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x415 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x416 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x417 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x418 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x419 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x420 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x421 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x422 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x423 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x424 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x425 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x426 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x427 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x428 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x429 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x430 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x431 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x432 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x433 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x434 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x435 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x436 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x437 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x438 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x439 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x440 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x441 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x442 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x443 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x444 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x445 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x446 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x447 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x448 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x449 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x450 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x451 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x452 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x453 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x454 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x455 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x456 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x457 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x458 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x459 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x460 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x461 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x462 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x463 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x464 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x465 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x466 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x467 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x468 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x469 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x470 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x471 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x472 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x473 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x474 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x475 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x476 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x477 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x478 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x479 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x480 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x481 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x482 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x483 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x484 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x485 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x486 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x487 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x488 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x489 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x490 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x491 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x492 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x493 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x494 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x495 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x496 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x497 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x498 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x499 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x500 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x501 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x502 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x503 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x504 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x505 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x506 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x507 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x508 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x509 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x510 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x511 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x512 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x513 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x514 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x515 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x516 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x517 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x518 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x519 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x520 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x521 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x522 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x523 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x524 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x525 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x526 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x527 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x528 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x529 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x530 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x531 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x532 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x533 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x534 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x535 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x536 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x537 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x538 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x539 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x540 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x541 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x542 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x543 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x544 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x545 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x546 = Var(within=Reals,bounds=(0,None),initialize=0)
m.b547 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b548 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b549 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b550 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b551 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b552 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b553 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b554 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b555 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b556 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b557 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b558 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b559 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b560 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b561 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b562 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b563 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b564 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b565 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b566 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b567 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b568 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b569 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b570 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b571 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b572 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b573 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b574 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b575 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b576 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b577 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b578 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b579 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b580 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b581 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b582 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b583 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b584 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b585 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b586 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b587 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b588 = Var(within=Binary,bounds=(0,1),initialize=0)
m.x589 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x590 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x591 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x592 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x593 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x594 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x595 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x596 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x597 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x598 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x599 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x600 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x601 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x602 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x603 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x604 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x605 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x606 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x607 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x608 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x609 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x610 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x611 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x612 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x613 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x614 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x615 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x616 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x617 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x618 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x619 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x620 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x621 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x622 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x623 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x624 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x625 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x626 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x627 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x628 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x629 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x630 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x632 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x633 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x634 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x635 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x636 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x637 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x638 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x639 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x640 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x641 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x642 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x643 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x644 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x645 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x646 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x647 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x648 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x649 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x650 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x651 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x652 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x653 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x654 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x655 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x656 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x657 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x658 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x659 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x660 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x661 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x662 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x663 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x664 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x665 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x666 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x667 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x668 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x669 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x670 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x671 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x672 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x673 = Var(within=Reals,bounds=(0,None),initialize=0)
# Objective: minimize the total fixed cost of the activated binary decisions
# b547-b588. Coefficients are generator-emitted constants; note they come in
# equal pairs (e.g. b547/b551, b548/b553), suggesting symmetric arc pairs
# in the underlying network — presumably i->j and j->i; verify against the
# original model if this matters.
m.obj = Objective(expr= 1.090016011*m.b547 + 3.10674202*m.b548 + 2.475702586*m.b549 + 1.966733944*m.b550
                      + 1.090016011*m.b551 + 2.019536713*m.b552 + 3.10674202*m.b553 + 1.383540955*m.b554
                      + 2.087059045*m.b555 + 3.720443668*m.b556 + 1.383540955*m.b557 + 1.794144217*m.b558
                      + 3.50653318*m.b559 + 1.71812596*m.b560 + 3.834780538*m.b561 + 2.087059045*m.b562
                      + 1.794144217*m.b563 + 2.239621249*m.b564 + 2.475702586*m.b565 + 2.019536713*m.b566
                      + 3.720443668*m.b567 + 3.50653318*m.b568 + 2.239621249*m.b569 + 1.098732406*m.b570
                      + 1.742557876*m.b571 + 1.098732406*m.b572 + 3.606882982*m.b573 + 1.71812596*m.b574
                      + 2.074958698*m.b575 + 1.966733944*m.b576 + 2.074958698*m.b577 + 3.859970515*m.b578
                      + 1.742557876*m.b579 + 3.859970515*m.b580 + 3.951460459*m.b581 + 3.834780538*m.b582
                      + 3.606882982*m.b583 + 2.524064089*m.b584 + 2.524064089*m.b585 + 3.982701487*m.b586
                      + 3.951460459*m.b587 + 3.982701487*m.b588, sense=minimize)
# Constraints c2-c170: linear balance equalities (inflow minus outflow equals
# a fixed right-hand side at each node/commodity pair). Each run of 13
# consecutive constraints shares one structural pattern and steps the flow
# indices by one — presumably 13 commodities per node; the single large
# negative RHS in each run (e.g. -148 in c2, -175 in c16) marks that
# commodity's source node. TODO confirm against the originating model.
m.c2 = Constraint(expr= - m.x1 - m.x14 - m.x27 - m.x40 + m.x53 + m.x79 + m.x235 + m.x378 == -148)
m.c3 = Constraint(expr= - m.x2 - m.x15 - m.x28 - m.x41 + m.x54 + m.x80 + m.x236 + m.x379 == 12)
m.c4 = Constraint(expr= - m.x3 - m.x16 - m.x29 - m.x42 + m.x55 + m.x81 + m.x237 + m.x380 == 16)
m.c5 = Constraint(expr= - m.x4 - m.x17 - m.x30 - m.x43 + m.x56 + m.x82 + m.x238 + m.x381 == 21)
m.c6 = Constraint(expr= - m.x5 - m.x18 - m.x31 - m.x44 + m.x57 + m.x83 + m.x239 + m.x382 == 11)
m.c7 = Constraint(expr= - m.x6 - m.x19 - m.x32 - m.x45 + m.x58 + m.x84 + m.x240 + m.x383 == 24)
m.c8 = Constraint(expr= - m.x7 - m.x20 - m.x33 - m.x46 + m.x59 + m.x85 + m.x241 + m.x384 == 24)
m.c9 = Constraint(expr= - m.x8 - m.x21 - m.x34 - m.x47 + m.x60 + m.x86 + m.x242 + m.x385 == 8)
m.c10 = Constraint(expr= - m.x9 - m.x22 - m.x35 - m.x48 + m.x61 + m.x87 + m.x243 + m.x386 == 10)
m.c11 = Constraint(expr= - m.x10 - m.x23 - m.x36 - m.x49 + m.x62 + m.x88 + m.x244 + m.x387 == 18)
m.c12 = Constraint(expr= - m.x11 - m.x24 - m.x37 - m.x50 + m.x63 + m.x89 + m.x245 + m.x388 == 11)
m.c13 = Constraint(expr= - m.x12 - m.x25 - m.x38 - m.x51 + m.x64 + m.x90 + m.x246 + m.x389 == 20)
m.c14 = Constraint(expr= - m.x13 - m.x26 - m.x39 - m.x52 + m.x65 + m.x91 + m.x247 + m.x390 == 7)
m.c15 = Constraint(expr= m.x1 - m.x53 - m.x66 + m.x248 == 7)
m.c16 = Constraint(expr= m.x2 - m.x54 - m.x67 + m.x249 == -175)
m.c17 = Constraint(expr= m.x3 - m.x55 - m.x68 + m.x250 == 15)
m.c18 = Constraint(expr= m.x4 - m.x56 - m.x69 + m.x251 == 17)
m.c19 = Constraint(expr= m.x5 - m.x57 - m.x70 + m.x252 == 20)
m.c20 = Constraint(expr= m.x6 - m.x58 - m.x71 + m.x253 == 24)
m.c21 = Constraint(expr= m.x7 - m.x59 - m.x72 + m.x254 == 6)
m.c22 = Constraint(expr= m.x8 - m.x60 - m.x73 + m.x255 == 19)
m.c23 = Constraint(expr= m.x9 - m.x61 - m.x74 + m.x256 == 24)
m.c24 = Constraint(expr= m.x10 - m.x62 - m.x75 + m.x257 == 11)
m.c25 = Constraint(expr= m.x11 - m.x63 - m.x76 + m.x258 == 15)
m.c26 = Constraint(expr= m.x12 - m.x64 - m.x77 + m.x259 == 9)
m.c27 = Constraint(expr= m.x13 - m.x65 - m.x78 + m.x260 == 19)
m.c28 = Constraint(expr= m.x14 - m.x79 - m.x92 - m.x105 - m.x118 + m.x131 + m.x196 + m.x261 == 15)
m.c29 = Constraint(expr= m.x15 - m.x80 - m.x93 - m.x106 - m.x119 + m.x132 + m.x197 + m.x262 == 13)
m.c30 = Constraint(expr= m.x16 - m.x81 - m.x94 - m.x107 - m.x120 + m.x133 + m.x198 + m.x263 == -231)
m.c31 = Constraint(expr= m.x17 - m.x82 - m.x95 - m.x108 - m.x121 + m.x134 + m.x199 + m.x264 == 23)
m.c32 = Constraint(expr= m.x18 - m.x83 - m.x96 - m.x109 - m.x122 + m.x135 + m.x200 + m.x265 == 18)
m.c33 = Constraint(expr= m.x19 - m.x84 - m.x97 - m.x110 - m.x123 + m.x136 + m.x201 + m.x266 == 19)
m.c34 = Constraint(expr= m.x20 - m.x85 - m.x98 - m.x111 - m.x124 + m.x137 + m.x202 + m.x267 == 9)
m.c35 = Constraint(expr= m.x21 - m.x86 - m.x99 - m.x112 - m.x125 + m.x138 + m.x203 + m.x268 == 8)
m.c36 = Constraint(expr= m.x22 - m.x87 - m.x100 - m.x113 - m.x126 + m.x139 + m.x204 + m.x269 == 16)
m.c37 = Constraint(expr= m.x23 - m.x88 - m.x101 - m.x114 - m.x127 + m.x140 + m.x205 + m.x270 == 19)
m.c38 = Constraint(expr= m.x24 - m.x89 - m.x102 - m.x115 - m.x128 + m.x141 + m.x206 + m.x271 == 19)
m.c39 = Constraint(expr= m.x25 - m.x90 - m.x103 - m.x116 - m.x129 + m.x142 + m.x207 + m.x272 == 21)
m.c40 = Constraint(expr= m.x26 - m.x91 - m.x104 - m.x117 - m.x130 + m.x143 + m.x208 + m.x273 == 8)
m.c41 = Constraint(expr= m.x92 - m.x131 - m.x144 - m.x157 - m.x170 - m.x183 + m.x209 + m.x274 + m.x352 + m.x456 == 12)
m.c42 = Constraint(expr= m.x93 - m.x132 - m.x145 - m.x158 - m.x171 - m.x184 + m.x210 + m.x275 + m.x353 + m.x457 == 20)
m.c43 = Constraint(expr= m.x94 - m.x133 - m.x146 - m.x159 - m.x172 - m.x185 + m.x211 + m.x276 + m.x354 + m.x458 == 23)
m.c44 = Constraint(expr= m.x95 - m.x134 - m.x147 - m.x160 - m.x173 - m.x186 + m.x212 + m.x277 + m.x355 + m.x459
                        == -187)
m.c45 = Constraint(expr= m.x96 - m.x135 - m.x148 - m.x161 - m.x174 - m.x187 + m.x213 + m.x278 + m.x356 + m.x460 == 21)
m.c46 = Constraint(expr= m.x97 - m.x136 - m.x149 - m.x162 - m.x175 - m.x188 + m.x214 + m.x279 + m.x357 + m.x461 == 12)
m.c47 = Constraint(expr= m.x98 - m.x137 - m.x150 - m.x163 - m.x176 - m.x189 + m.x215 + m.x280 + m.x358 + m.x462 == 6)
m.c48 = Constraint(expr= m.x99 - m.x138 - m.x151 - m.x164 - m.x177 - m.x190 + m.x216 + m.x281 + m.x359 + m.x463 == 11)
m.c49 = Constraint(expr= m.x100 - m.x139 - m.x152 - m.x165 - m.x178 - m.x191 + m.x217 + m.x282 + m.x360 + m.x464
                        == 19)
m.c50 = Constraint(expr= m.x101 - m.x140 - m.x153 - m.x166 - m.x179 - m.x192 + m.x218 + m.x283 + m.x361 + m.x465 == 9)
m.c51 = Constraint(expr= m.x102 - m.x141 - m.x154 - m.x167 - m.x180 - m.x193 + m.x219 + m.x284 + m.x362 + m.x466
                        == 17)
m.c52 = Constraint(expr= m.x103 - m.x142 - m.x155 - m.x168 - m.x181 - m.x194 + m.x220 + m.x285 + m.x363 + m.x467
                        == 23)
m.c53 = Constraint(expr= m.x104 - m.x143 - m.x156 - m.x169 - m.x182 - m.x195 + m.x221 + m.x286 + m.x364 + m.x468
                        == 21)
m.c54 = Constraint(expr= m.x105 + m.x144 - m.x196 - m.x209 - m.x222 + m.x287 == 14)
m.c55 = Constraint(expr= m.x106 + m.x145 - m.x197 - m.x210 - m.x223 + m.x288 == 7)
m.c56 = Constraint(expr= m.x107 + m.x146 - m.x198 - m.x211 - m.x224 + m.x289 == 22)
m.c57 = Constraint(expr= m.x108 + m.x147 - m.x199 - m.x212 - m.x225 + m.x290 == 14)
m.c58 = Constraint(expr= m.x109 + m.x148 - m.x200 - m.x213 - m.x226 + m.x291 == -170)
m.c59 = Constraint(expr= m.x110 + m.x149 - m.x201 - m.x214 - m.x227 + m.x292 == 12)
m.c60 = Constraint(expr= m.x111 + m.x150 - m.x202 - m.x215 - m.x228 + m.x293 == 13)
m.c61 = Constraint(expr= m.x112 + m.x151 - m.x203 - m.x216 - m.x229 + m.x294 == 10)
m.c62 = Constraint(expr= m.x113 + m.x152 - m.x204 - m.x217 - m.x230 + m.x295 == 15)
m.c63 = Constraint(expr= m.x114 + m.x153 - m.x205 - m.x218 - m.x231 + m.x296 == 9)
m.c64 = Constraint(expr= m.x115 + m.x154 - m.x206 - m.x219 - m.x232 + m.x297 == 14)
m.c65 = Constraint(expr= m.x116 + m.x155 - m.x207 - m.x220 - m.x233 + m.x298 == 16)
m.c66 = Constraint(expr= m.x117 + m.x156 - m.x208 - m.x221 - m.x234 + m.x299 == 8)
m.c67 = Constraint(expr= m.x27 + m.x66 + m.x118 + m.x157 + m.x222 - m.x235 - m.x248 - m.x261 - m.x274 - m.x287
                         - m.x300 - m.x313 + m.x326 + m.x417 == 13)
m.c68 = Constraint(expr= m.x28 + m.x67 + m.x119 + m.x158 + m.x223 - m.x236 - m.x249 - m.x262 - m.x275 - m.x288
                         - m.x301 - m.x314 + m.x327 + m.x418 == 22)
m.c69 = Constraint(expr= m.x29 + m.x68 + m.x120 + m.x159 + m.x224 - m.x237 - m.x250 - m.x263 - m.x276 - m.x289
                         - m.x302 - m.x315 + m.x328 + m.x419 == 23)
m.c70 = Constraint(expr= m.x30 + m.x69 + m.x121 + m.x160 + m.x225 - m.x238 - m.x251 - m.x264 - m.x277 - m.x290
                         - m.x303 - m.x316 + m.x329 + m.x420 == 7)
m.c71 = Constraint(expr= m.x31 + m.x70 + m.x122 + m.x161 + m.x226 - m.x239 - m.x252 - m.x265 - m.x278 - m.x291
                         - m.x304 - m.x317 + m.x330 + m.x421 == 16)
m.c72 = Constraint(expr= m.x32 + m.x71 + m.x123 + m.x162 + m.x227 - m.x240 - m.x253 - m.x266 - m.x279 - m.x292
                         - m.x305 - m.x318 + m.x331 + m.x422 == -169)
m.c73 = Constraint(expr= m.x33 + m.x72 + m.x124 + m.x163 + m.x228 - m.x241 - m.x254 - m.x267 - m.x280 - m.x293
                         - m.x306 - m.x319 + m.x332 + m.x423 == 20)
m.c74 = Constraint(expr= m.x34 + m.x73 + m.x125 + m.x164 + m.x229 - m.x242 - m.x255 - m.x268 - m.x281 - m.x294
                         - m.x307 - m.x320 + m.x333 + m.x424 == 14)
m.c75 = Constraint(expr= m.x35 + m.x74 + m.x126 + m.x165 + m.x230 - m.x243 - m.x256 - m.x269 - m.x282 - m.x295
                         - m.x308 - m.x321 + m.x334 + m.x425 == 11)
m.c76 = Constraint(expr= m.x36 + m.x75 + m.x127 + m.x166 + m.x231 - m.x244 - m.x257 - m.x270 - m.x283 - m.x296
                         - m.x309 - m.x322 + m.x335 + m.x426 == 13)
m.c77 = Constraint(expr= m.x37 + m.x76 + m.x128 + m.x167 + m.x232 - m.x245 - m.x258 - m.x271 - m.x284 - m.x297
                         - m.x310 - m.x323 + m.x336 + m.x427 == 10)
m.c78 = Constraint(expr= m.x38 + m.x77 + m.x129 + m.x168 + m.x233 - m.x246 - m.x259 - m.x272 - m.x285 - m.x298
                         - m.x311 - m.x324 + m.x337 + m.x428 == 13)
m.c79 = Constraint(expr= m.x39 + m.x78 + m.x130 + m.x169 + m.x234 - m.x247 - m.x260 - m.x273 - m.x286 - m.x299
                         - m.x312 - m.x325 + m.x338 + m.x429 == 12)
m.c80 = Constraint(expr= m.x300 - m.x326 - m.x339 + m.x469 == 6)
m.c81 = Constraint(expr= m.x301 - m.x327 - m.x340 + m.x470 == 16)
m.c82 = Constraint(expr= m.x302 - m.x328 - m.x341 + m.x471 == 22)
m.c83 = Constraint(expr= m.x303 - m.x329 - m.x342 + m.x472 == 9)
m.c84 = Constraint(expr= m.x304 - m.x330 - m.x343 + m.x473 == 13)
m.c85 = Constraint(expr= m.x305 - m.x331 - m.x344 + m.x474 == 7)
m.c86 = Constraint(expr= m.x306 - m.x332 - m.x345 + m.x475 == -156)
m.c87 = Constraint(expr= m.x307 - m.x333 - m.x346 + m.x476 == 20)
m.c88 = Constraint(expr= m.x308 - m.x334 - m.x347 + m.x477 == 19)
m.c89 = Constraint(expr= m.x309 - m.x335 - m.x348 + m.x478 == 24)
m.c90 = Constraint(expr= m.x310 - m.x336 - m.x349 + m.x479 == 8)
m.c91 = Constraint(expr= m.x311 - m.x337 - m.x350 + m.x480 == 21)
m.c92 = Constraint(expr= m.x312 - m.x338 - m.x351 + m.x481 == 6)
m.c93 = Constraint(expr= m.x170 - m.x352 - m.x365 + m.x391 == 15)
m.c94 = Constraint(expr= m.x171 - m.x353 - m.x366 + m.x392 == 15)
m.c95 = Constraint(expr= m.x172 - m.x354 - m.x367 + m.x393 == 23)
m.c96 = Constraint(expr= m.x173 - m.x355 - m.x368 + m.x394 == 25)
m.c97 = Constraint(expr= m.x174 - m.x356 - m.x369 + m.x395 == 20)
m.c98 = Constraint(expr= m.x175 - m.x357 - m.x370 + m.x396 == 7)
m.c99 = Constraint(expr= m.x176 - m.x358 - m.x371 + m.x397 == 19)
m.c100 = Constraint(expr= m.x177 - m.x359 - m.x372 + m.x398 == -177)
m.c101 = Constraint(expr= m.x178 - m.x360 - m.x373 + m.x399 == 7)
m.c102 = Constraint(expr= m.x179 - m.x361 - m.x374 + m.x400 == 18)
m.c103 = Constraint(expr= m.x180 - m.x362 - m.x375 + m.x401 == 25)
m.c104 = Constraint(expr= m.x181 - m.x363 - m.x376 + m.x402 == 20)
m.c105 = Constraint(expr= m.x182 - m.x364 - m.x377 + m.x403 == 18)
m.c106 = Constraint(expr= m.x40 + m.x365 - m.x378 - m.x391 - m.x404 + m.x430 == 8)
m.c107 = Constraint(expr= m.x41 + m.x366 - m.x379 - m.x392 - m.x405 + m.x431 == 11)
m.c108 = Constraint(expr= m.x42 + m.x367 - m.x380 - m.x393 - m.x406 + m.x432 == 23)
m.c109 = Constraint(expr= m.x43 + m.x368 - m.x381 - m.x394 - m.x407 + m.x433 == 7)
m.c110 = Constraint(expr= m.x44 + m.x369 - m.x382 - m.x395 - m.x408 + m.x434 == 5)
m.c111 = Constraint(expr= m.x45 + m.x370 - m.x383 - m.x396 - m.x409 + m.x435 == 15)
m.c112 = Constraint(expr= m.x46 + m.x371 - m.x384 - m.x397 - m.x410 + m.x436 == 7)
m.c113 = Constraint(expr= m.x47 + m.x372 - m.x385 - m.x398 - m.x411 + m.x437 == 10)
m.c114 = Constraint(expr= m.x48 + m.x373 - m.x386 - m.x399 - m.x412 + m.x438 == -179)
m.c115 = Constraint(expr= m.x49 + m.x374 - m.x387 - m.x400 - m.x413 + m.x439 == 20)
m.c116 = Constraint(expr= m.x50 + m.x375 - m.x388 - m.x401 - m.x414 + m.x440 == 18)
m.c117 = Constraint(expr= m.x51 + m.x376 - m.x389 - m.x402 - m.x415 + m.x441 == 8)
m.c118 = Constraint(expr= m.x52 + m.x377 - m.x390 - m.x403 - m.x416 + m.x442 == 12)
m.c119 = Constraint(expr= m.x313 + m.x404 - m.x417 - m.x430 - m.x443 + m.x521 == 9)
m.c120 = Constraint(expr= m.x314 + m.x405 - m.x418 - m.x431 - m.x444 + m.x522 == 12)
m.c121 = Constraint(expr= m.x315 + m.x406 - m.x419 - m.x432 - m.x445 + m.x523 == 24)
m.c122 = Constraint(expr= m.x316 + m.x407 - m.x420 - m.x433 - m.x446 + m.x524 == 21)
m.c123 = Constraint(expr= m.x317 + m.x408 - m.x421 - m.x434 - m.x447 + m.x525 == 8)
m.c124 = Constraint(expr= m.x318 + m.x409 - m.x422 - m.x435 - m.x448 + m.x526 == 9)
m.c125 = Constraint(expr= m.x319 + m.x410 - m.x423 - m.x436 - m.x449 + m.x527 == 11)
m.c126 = Constraint(expr= m.x320 + m.x411 - m.x424 - m.x437 - m.x450 + m.x528 == 13)
m.c127 = Constraint(expr= m.x321 + m.x412 - m.x425 - m.x438 - m.x451 + m.x529 == 11)
m.c128 = Constraint(expr= m.x322 + m.x413 - m.x426 - m.x439 - m.x452 + m.x530 == -183)
m.c129 = Constraint(expr= m.x323 + m.x414 - m.x427 - m.x440 - m.x453 + m.x531 == 16)
m.c130 = Constraint(expr= m.x324 + m.x415 - m.x428 - m.x441 - m.x454 + m.x532 == 14)
m.c131 = Constraint(expr= m.x325 + m.x416 - m.x429 - m.x442 - m.x455 + m.x533 == 17)
m.c132 = Constraint(expr= m.x183 + m.x339 - m.x456 - m.x469 - m.x482 + m.x495 == 22)
m.c133 = Constraint(expr= m.x184 + m.x340 - m.x457 - m.x470 - m.x483 + m.x496 == 12)
m.c134 = Constraint(expr= m.x185 + m.x341 - m.x458 - m.x471 - m.x484 + m.x497 == 7)
m.c135 = Constraint(expr= m.x186 + m.x342 - m.x459 - m.x472 - m.x485 + m.x498 == 12)
m.c136 = Constraint(expr= m.x187 + m.x343 - m.x460 - m.x473 - m.x486 + m.x499 == 12)
m.c137 = Constraint(expr= m.x188 + m.x344 - m.x461 - m.x474 - m.x487 + m.x500 == 10)
m.c138 = Constraint(expr= m.x189 + m.x345 - m.x462 - m.x475 - m.x488 + m.x501 == 11)
m.c139 = Constraint(expr= m.x190 + m.x346 - m.x463 - m.x476 - m.x489 + m.x502 == 17)
m.c140 = Constraint(expr= m.x191 + m.x347 - m.x464 - m.x477 - m.x490 + m.x503 == 17)
m.c141 = Constraint(expr= m.x192 + m.x348 - m.x465 - m.x478 - m.x491 + m.x504 == 12)
m.c142 = Constraint(expr= m.x193 + m.x349 - m.x466 - m.x479 - m.x492 + m.x505 == -185)
m.c143 = Constraint(expr= m.x194 + m.x350 - m.x467 - m.x480 - m.x493 + m.x506 == 10)
m.c144 = Constraint(expr= m.x195 + m.x351 - m.x468 - m.x481 - m.x494 + m.x507 == 21)
m.c145 = Constraint(expr= m.x482 - m.x495 - m.x508 + m.x534 == 8)
m.c146 = Constraint(expr= m.x483 - m.x496 - m.x509 + m.x535 == 20)
m.c147 = Constraint(expr= m.x484 - m.x497 - m.x510 + m.x536 == 23)
m.c148 = Constraint(expr= m.x485 - m.x498 - m.x511 + m.x537 == 18)
m.c149 = Constraint(expr= m.x486 - m.x499 - m.x512 + m.x538 == 15)
m.c150 = Constraint(expr= m.x487 - m.x500 - m.x513 + m.x539 == 22)
m.c151 = Constraint(expr= m.x488 - m.x501 - m.x514 + m.x540 == 17)
m.c152 = Constraint(expr= m.x489 - m.x502 - m.x515 + m.x541 == 24)
m.c153 = Constraint(expr= m.x490 - m.x503 - m.x516 + m.x542 == 7)
m.c154 = Constraint(expr= m.x491 - m.x504 - m.x517 + m.x543 == 16)
m.c155 = Constraint(expr= m.x492 - m.x505 - m.x518 + m.x544 == 24)
m.c156 = Constraint(expr= m.x493 - m.x506 - m.x519 + m.x545 == -200)
m.c157 = Constraint(expr= m.x494 - m.x507 - m.x520 + m.x546 == 8)
m.c158 = Constraint(expr= m.x443 + m.x508 - m.x521 - m.x534 == 19)
m.c159 = Constraint(expr= m.x444 + m.x509 - m.x522 - m.x535 == 15)
m.c160 = Constraint(expr= m.x445 + m.x510 - m.x523 - m.x536 == 10)
m.c161 = Constraint(expr= m.x446 + m.x511 - m.x524 - m.x537 == 13)
m.c162 = Constraint(expr= m.x447 + m.x512 - m.x525 - m.x538 == 11)
m.c163 = Constraint(expr= m.x448 + m.x513 - m.x526 - m.x539 == 8)
m.c164 = Constraint(expr= m.x449 + m.x514 - m.x527 - m.x540 == 13)
m.c165 = Constraint(expr= m.x450 + m.x515 - m.x528 - m.x541 == 23)
m.c166 = Constraint(expr= m.x451 + m.x516 - m.x529 - m.x542 == 23)
m.c167 = Constraint(expr= m.x452 + m.x517 - m.x530 - m.x543 == 14)
m.c168 = Constraint(expr= m.x453 + m.x518 - m.x531 - m.x544 == 8)
m.c169 = Constraint(expr= m.x454 + m.x519 - m.x532 - m.x545 == 25)
m.c170 = Constraint(expr= m.x455 + m.x520 - m.x533 - m.x546 == -157)
# Constraints c171-c212: define each aggregate variable x632-x673 as an upper
# bound on the total flow of its group of 13 flow variables, i.e.
# x63N >= sum of the group's flows. One constraint per consecutive run of 13
# flow indices (x1-x13 -> x632, x14-x26 -> x633, ..., x534-x546 -> x673).
m.c171 = Constraint(expr= - m.x1 - m.x2 - m.x3 - m.x4 - m.x5 - m.x6 - m.x7 - m.x8 - m.x9 - m.x10 - m.x11 - m.x12 - m.x13
                          + m.x632 >= 0)
m.c172 = Constraint(expr= - m.x14 - m.x15 - m.x16 - m.x17 - m.x18 - m.x19 - m.x20 - m.x21 - m.x22 - m.x23 - m.x24
                          - m.x25 - m.x26 + m.x633 >= 0)
m.c173 = Constraint(expr= - m.x27 - m.x28 - m.x29 - m.x30 - m.x31 - m.x32 - m.x33 - m.x34 - m.x35 - m.x36 - m.x37
                          - m.x38 - m.x39 + m.x634 >= 0)
m.c174 = Constraint(expr= - m.x40 - m.x41 - m.x42 - m.x43 - m.x44 - m.x45 - m.x46 - m.x47 - m.x48 - m.x49 - m.x50
                          - m.x51 - m.x52 + m.x635 >= 0)
m.c175 = Constraint(expr= - m.x53 - m.x54 - m.x55 - m.x56 - m.x57 - m.x58 - m.x59 - m.x60 - m.x61 - m.x62 - m.x63
                          - m.x64 - m.x65 + m.x636 >= 0)
m.c176 = Constraint(expr= - m.x66 - m.x67 - m.x68 - m.x69 - m.x70 - m.x71 - m.x72 - m.x73 - m.x74 - m.x75 - m.x76
                          - m.x77 - m.x78 + m.x637 >= 0)
m.c177 = Constraint(expr= - m.x79 - m.x80 - m.x81 - m.x82 - m.x83 - m.x84 - m.x85 - m.x86 - m.x87 - m.x88 - m.x89
                          - m.x90 - m.x91 + m.x638 >= 0)
m.c178 = Constraint(expr= - m.x92 - m.x93 - m.x94 - m.x95 - m.x96 - m.x97 - m.x98 - m.x99 - m.x100 - m.x101 - m.x102
                          - m.x103 - m.x104 + m.x639 >= 0)
m.c179 = Constraint(expr= - m.x105 - m.x106 - m.x107 - m.x108 - m.x109 - m.x110 - m.x111 - m.x112 - m.x113 - m.x114
                          - m.x115 - m.x116 - m.x117 + m.x640 >= 0)
m.c180 = Constraint(expr= - m.x118 - m.x119 - m.x120 - m.x121 - m.x122 - m.x123 - m.x124 - m.x125 - m.x126 - m.x127
                          - m.x128 - m.x129 - m.x130 + m.x641 >= 0)
m.c181 = Constraint(expr= - m.x131 - m.x132 - m.x133 - m.x134 - m.x135 - m.x136 - m.x137 - m.x138 - m.x139 - m.x140
                          - m.x141 - m.x142 - m.x143 + m.x642 >= 0)
m.c182 = Constraint(expr= - m.x144 - m.x145 - m.x146 - m.x147 - m.x148 - m.x149 - m.x150 - m.x151 - m.x152 - m.x153
                          - m.x154 - m.x155 - m.x156 + m.x643 >= 0)
m.c183 = Constraint(expr= - m.x157 - m.x158 - m.x159 - m.x160 - m.x161 - m.x162 - m.x163 - m.x164 - m.x165 - m.x166
                          - m.x167 - m.x168 - m.x169 + m.x644 >= 0)
m.c184 = Constraint(expr= - m.x170 - m.x171 - m.x172 - m.x173 - m.x174 - m.x175 - m.x176 - m.x177 - m.x178 - m.x179
                          - m.x180 - m.x181 - m.x182 + m.x645 >= 0)
m.c185 = Constraint(expr= - m.x183 - m.x184 - m.x185 - m.x186 - m.x187 - m.x188 - m.x189 - m.x190 - m.x191 - m.x192
                          - m.x193 - m.x194 - m.x195 + m.x646 >= 0)
m.c186 = Constraint(expr= - m.x196 - m.x197 - m.x198 - m.x199 - m.x200 - m.x201 - m.x202 - m.x203 - m.x204 - m.x205
                          - m.x206 - m.x207 - m.x208 + m.x647 >= 0)
m.c187 = Constraint(expr= - m.x209 - m.x210 - m.x211 - m.x212 - m.x213 - m.x214 - m.x215 - m.x216 - m.x217 - m.x218
                          - m.x219 - m.x220 - m.x221 + m.x648 >= 0)
m.c188 = Constraint(expr= - m.x222 - m.x223 - m.x224 - m.x225 - m.x226 - m.x227 - m.x228 - m.x229 - m.x230 - m.x231
                          - m.x232 - m.x233 - m.x234 + m.x649 >= 0)
m.c189 = Constraint(expr= - m.x235 - m.x236 - m.x237 - m.x238 - m.x239 - m.x240 - m.x241 - m.x242 - m.x243 - m.x244
                          - m.x245 - m.x246 - m.x247 + m.x650 >= 0)
m.c190 = Constraint(expr= - m.x248 - m.x249 - m.x250 - m.x251 - m.x252 - m.x253 - m.x254 - m.x255 - m.x256 - m.x257
                          - m.x258 - m.x259 - m.x260 + m.x651 >= 0)
m.c191 = Constraint(expr= - m.x261 - m.x262 - m.x263 - m.x264 - m.x265 - m.x266 - m.x267 - m.x268 - m.x269 - m.x270
                          - m.x271 - m.x272 - m.x273 + m.x652 >= 0)
m.c192 = Constraint(expr= - m.x274 - m.x275 - m.x276 - m.x277 - m.x278 - m.x279 - m.x280 - m.x281 - m.x282 - m.x283
                          - m.x284 - m.x285 - m.x286 + m.x653 >= 0)
m.c193 = Constraint(expr= - m.x287 - m.x288 - m.x289 - m.x290 - m.x291 - m.x292 - m.x293 - m.x294 - m.x295 - m.x296
                          - m.x297 - m.x298 - m.x299 + m.x654 >= 0)
m.c194 = Constraint(expr= - m.x300 - m.x301 - m.x302 - m.x303 - m.x304 - m.x305 - m.x306 - m.x307 - m.x308 - m.x309
                          - m.x310 - m.x311 - m.x312 + m.x655 >= 0)
m.c195 = Constraint(expr= - m.x313 - m.x314 - m.x315 - m.x316 - m.x317 - m.x318 - m.x319 - m.x320 - m.x321 - m.x322
                          - m.x323 - m.x324 - m.x325 + m.x656 >= 0)
m.c196 = Constraint(expr= - m.x326 - m.x327 - m.x328 - m.x329 - m.x330 - m.x331 - m.x332 - m.x333 - m.x334 - m.x335
                          - m.x336 - m.x337 - m.x338 + m.x657 >= 0)
m.c197 = Constraint(expr= - m.x339 - m.x340 - m.x341 - m.x342 - m.x343 - m.x344 - m.x345 - m.x346 - m.x347 - m.x348
                          - m.x349 - m.x350 - m.x351 + m.x658 >= 0)
m.c198 = Constraint(expr= - m.x352 - m.x353 - m.x354 - m.x355 - m.x356 - m.x357 - m.x358 - m.x359 - m.x360 - m.x361
                          - m.x362 - m.x363 - m.x364 + m.x659 >= 0)
m.c199 = Constraint(expr= - m.x365 - m.x366 - m.x367 - m.x368 - m.x369 - m.x370 - m.x371 - m.x372 - m.x373 - m.x374
                          - m.x375 - m.x376 - m.x377 + m.x660 >= 0)
m.c200 = Constraint(expr= - m.x378 - m.x379 - m.x380 - m.x381 - m.x382 - m.x383 - m.x384 - m.x385 - m.x386 - m.x387
                          - m.x388 - m.x389 - m.x390 + m.x661 >= 0)
m.c201 = Constraint(expr= - m.x391 - m.x392 - m.x393 - m.x394 - m.x395 - m.x396 - m.x397 - m.x398 - m.x399 - m.x400
                          - m.x401 - m.x402 - m.x403 + m.x662 >= 0)
m.c202 = Constraint(expr= - m.x404 - m.x405 - m.x406 - m.x407 - m.x408 - m.x409 - m.x410 - m.x411 - m.x412 - m.x413
                          - m.x414 - m.x415 - m.x416 + m.x663 >= 0)
m.c203 = Constraint(expr= - m.x417 - m.x418 - m.x419 - m.x420 - m.x421 - m.x422 - m.x423 - m.x424 - m.x425 - m.x426
                          - m.x427 - m.x428 - m.x429 + m.x664 >= 0)
m.c204 = Constraint(expr= - m.x430 - m.x431 - m.x432 - m.x433 - m.x434 - m.x435 - m.x436 - m.x437 - m.x438 - m.x439
                          - m.x440 - m.x441 - m.x442 + m.x665 >= 0)
m.c205 = Constraint(expr= - m.x443 - m.x444 - m.x445 - m.x446 - m.x447 - m.x448 - m.x449 - m.x450 - m.x451 - m.x452
                          - m.x453 - m.x454 - m.x455 + m.x666 >= 0)
m.c206 = Constraint(expr= - m.x456 - m.x457 - m.x458 - m.x459 - m.x460 - m.x461 - m.x462 - m.x463 - m.x464 - m.x465
                          - m.x466 - m.x467 - m.x468 + m.x667 >= 0)
m.c207 = Constraint(expr= - m.x469 - m.x470 - m.x471 - m.x472 - m.x473 - m.x474 - m.x475 - m.x476 - m.x477 - m.x478
                          - m.x479 - m.x480 - m.x481 + m.x668 >= 0)
m.c208 = Constraint(expr= - m.x482 - m.x483 - m.x484 - m.x485 - m.x486 - m.x487 - m.x488 - m.x489 - m.x490 - m.x491
                          - m.x492 - m.x493 - m.x494 + m.x669 >= 0)
m.c209 = Constraint(expr= - m.x495 - m.x496 - m.x497 - m.x498 - m.x499 - m.x500 - m.x501 - m.x502 - m.x503 - m.x504
                          - m.x505 - m.x506 - m.x507 + m.x670 >= 0)
m.c210 = Constraint(expr= - m.x508 - m.x509 - m.x510 - m.x511 - m.x512 - m.x513 - m.x514 - m.x515 - m.x516 - m.x517
                          - m.x518 - m.x519 - m.x520 + m.x671 >= 0)
m.c211 = Constraint(expr= - m.x521 - m.x522 - m.x523 - m.x524 - m.x525 - m.x526 - m.x527 - m.x528 - m.x529 - m.x530
                          - m.x531 - m.x532 - m.x533 + m.x672 >= 0)
m.c212 = Constraint(expr= - m.x534 - m.x535 - m.x536 - m.x537 - m.x538 - m.x539 - m.x540 - m.x541 - m.x542 - m.x543
                          - m.x544 - m.x545 - m.x546 + m.x673 >= 0)
# Constraints c213-c254: bilinear (nonconvex) coupling of each triple
# (aggregate x632..x673, binary b547..b588, capacity-like x589..x630):
#   K*x63N*bM - K*bM*x58P + x63N*x58P <= 0,
# algebraically (x63N)*(K*bM + x58P) <= K*bM*x58P, with K a generated
# constant matching the big-M of the same binary in c256 onward.
# NOTE(review): these products make the model an MINLP; presumably a
# congestion/delay linearization from the source model — confirm there.
m.c213 = Constraint(expr=166*m.x632*m.b547 - 166*m.b547*m.x589 + m.x632*m.x589 <= 0)
m.c214 = Constraint(expr=463*m.x633*m.b548 - 463*m.b548*m.x590 + m.x633*m.x590 <= 0)
m.c215 = Constraint(expr=522*m.x634*m.b549 - 522*m.b549*m.x591 + m.x634*m.x591 <= 0)
m.c216 = Constraint(expr=141*m.x635*m.b550 - 141*m.b550*m.x592 + m.x635*m.x592 <= 0)
m.c217 = Constraint(expr=166*m.x636*m.b551 - 166*m.b551*m.x593 + m.x636*m.x593 <= 0)
m.c218 = Constraint(expr=265*m.x637*m.b552 - 265*m.b552*m.x594 + m.x637*m.x594 <= 0)
m.c219 = Constraint(expr=463*m.x638*m.b553 - 463*m.b553*m.x595 + m.x638*m.x595 <= 0)
m.c220 = Constraint(expr=456*m.x639*m.b554 - 456*m.b554*m.x596 + m.x639*m.x596 <= 0)
m.c221 = Constraint(expr=526*m.x640*m.b555 - 526*m.b555*m.x597 + m.x640*m.x597 <= 0)
m.c222 = Constraint(expr=152*m.x641*m.b556 - 152*m.b556*m.x598 + m.x641*m.x598 <= 0)
m.c223 = Constraint(expr=456*m.x642*m.b557 - 456*m.b557*m.x599 + m.x642*m.x599 <= 0)
m.c224 = Constraint(expr=384*m.x643*m.b558 - 384*m.b558*m.x600 + m.x643*m.x600 <= 0)
m.c225 = Constraint(expr=441*m.x644*m.b559 - 441*m.b559*m.x601 + m.x644*m.x601 <= 0)
m.c226 = Constraint(expr=309*m.x645*m.b560 - 309*m.b560*m.x602 + m.x645*m.x602 <= 0)
m.c227 = Constraint(expr=233*m.x646*m.b561 - 233*m.b561*m.x603 + m.x646*m.x603 <= 0)
m.c228 = Constraint(expr=526*m.x647*m.b562 - 526*m.b562*m.x604 + m.x647*m.x604 <= 0)
m.c229 = Constraint(expr=384*m.x648*m.b563 - 384*m.b563*m.x605 + m.x648*m.x605 <= 0)
m.c230 = Constraint(expr=203*m.x649*m.b564 - 203*m.b564*m.x606 + m.x649*m.x606 <= 0)
m.c231 = Constraint(expr=522*m.x650*m.b565 - 522*m.b565*m.x607 + m.x650*m.x607 <= 0)
m.c232 = Constraint(expr=265*m.x651*m.b566 - 265*m.b566*m.x608 + m.x651*m.x608 <= 0)
m.c233 = Constraint(expr=152*m.x652*m.b567 - 152*m.b567*m.x609 + m.x652*m.x609 <= 0)
m.c234 = Constraint(expr=441*m.x653*m.b568 - 441*m.b568*m.x610 + m.x653*m.x610 <= 0)
m.c235 = Constraint(expr=203*m.x654*m.b569 - 203*m.b569*m.x611 + m.x654*m.x611 <= 0)
m.c236 = Constraint(expr=284*m.x655*m.b570 - 284*m.b570*m.x612 + m.x655*m.x612 <= 0)
m.c237 = Constraint(expr=426*m.x656*m.b571 - 426*m.b571*m.x613 + m.x656*m.x613 <= 0)
m.c238 = Constraint(expr=284*m.x657*m.b572 - 284*m.b572*m.x614 + m.x657*m.x614 <= 0)
m.c239 = Constraint(expr=109*m.x658*m.b573 - 109*m.b573*m.x615 + m.x658*m.x615 <= 0)
m.c240 = Constraint(expr=309*m.x659*m.b574 - 309*m.b574*m.x616 + m.x659*m.x616 <= 0)
m.c241 = Constraint(expr=434*m.x660*m.b575 - 434*m.b575*m.x617 + m.x660*m.x617 <= 0)
m.c242 = Constraint(expr=141*m.x661*m.b576 - 141*m.b576*m.x618 + m.x661*m.x618 <= 0)
m.c243 = Constraint(expr=434*m.x662*m.b577 - 434*m.b577*m.x619 + m.x662*m.x619 <= 0)
m.c244 = Constraint(expr=403*m.x663*m.b578 - 403*m.b578*m.x620 + m.x663*m.x620 <= 0)
m.c245 = Constraint(expr=426*m.x664*m.b579 - 426*m.b579*m.x621 + m.x664*m.x621 <= 0)
m.c246 = Constraint(expr=403*m.x665*m.b580 - 403*m.b580*m.x622 + m.x665*m.x622 <= 0)
m.c247 = Constraint(expr=151*m.x666*m.b581 - 151*m.b581*m.x623 + m.x666*m.x623 <= 0)
m.c248 = Constraint(expr=233*m.x667*m.b582 - 233*m.b582*m.x624 + m.x667*m.x624 <= 0)
m.c249 = Constraint(expr=109*m.x668*m.b583 - 109*m.b583*m.x625 + m.x668*m.x625 <= 0)
m.c250 = Constraint(expr=367*m.x669*m.b584 - 367*m.b584*m.x626 + m.x669*m.x626 <= 0)
m.c251 = Constraint(expr=367*m.x670*m.b585 - 367*m.b585*m.x627 + m.x670*m.x627 <= 0)
m.c252 = Constraint(expr=382*m.x671*m.b586 - 382*m.b586*m.x628 + m.x671*m.x628 <= 0)
m.c253 = Constraint(expr=151*m.x672*m.b587 - 151*m.b587*m.x629 + m.x672*m.x629 <= 0)
m.c254 = Constraint(expr=382*m.x673*m.b588 - 382*m.b588*m.x630 + m.x673*m.x630 <= 0)
# Constraint c255: global budget — the capacity-like variables x589-x630 may
# not exceed 18536 in total.
m.c255 = Constraint(expr= m.x589 + m.x590 + m.x591 + m.x592 + m.x593 + m.x594 + m.x595 + m.x596 + m.x597 + m.x598
                          + m.x599 + m.x600 + m.x601 + m.x602 + m.x603 + m.x604 + m.x605 + m.x606 + m.x607 + m.x608
                          + m.x609 + m.x610 + m.x611 + m.x612 + m.x613 + m.x614 + m.x615 + m.x616 + m.x617 + m.x618
                          + m.x619 + m.x620 + m.x621 + m.x622 + m.x623 + m.x624 + m.x625 + m.x626 + m.x627 + m.x628
                          + m.x629 + m.x630 <= 18536)
# Constraints c256 onward: big-M activation links — the 13 flows of a group
# may be nonzero only if the group's binary is switched on, with the big-M
# constant equal to that group's capacity (same K as in c213-c254):
#   sum(group flows) <= K * b.
m.c256 = Constraint(expr= m.x1 + m.x2 + m.x3 + m.x4 + m.x5 + m.x6 + m.x7 + m.x8 + m.x9 + m.x10 + m.x11 + m.x12 + m.x13
                          - 166*m.b547 <= 0)
m.c257 = Constraint(expr= m.x14 + m.x15 + m.x16 + m.x17 + m.x18 + m.x19 + m.x20 + m.x21 + m.x22 + m.x23 + m.x24
                          + m.x25 + m.x26 - 463*m.b548 <= 0)
m.c258 = Constraint(expr= m.x27 + m.x28 + m.x29 + m.x30 + m.x31 + m.x32 + m.x33 + m.x34 + m.x35 + m.x36 + m.x37
                          + m.x38 + m.x39 - 522*m.b549 <= 0)
m.c259 = Constraint(expr= m.x40 + m.x41 + m.x42 + m.x43 + m.x44 + m.x45 + m.x46 + m.x47 + m.x48 + m.x49 + m.x50
                          + m.x51 + m.x52 - 141*m.b550 <= 0)
m.c260 = Constraint(expr= m.x53 + m.x54 + m.x55 + m.x56 + m.x57 + m.x58 + m.x59 + m.x60 + m.x61 + m.x62 + m.x63
                          + m.x64 + m.x65 - 166*m.b551 <= 0)
m.c261 = Constraint(expr= m.x66 + m.x67 + m.x68 + m.x69 + m.x70 + m.x71 + m.x72 + m.x73 + m.x74 + m.x75 + m.x76
                          + m.x77 + m.x78 - 265*m.b552 <= 0)
m.c262 = Constraint(expr= m.x79 + m.x80 + m.x81 + m.x82 + m.x83 + m.x84 + m.x85 + m.x86 + m.x87 + m.x88 + m.x89
                          + m.x90 + m.x91 - 463*m.b553 <= 0)
m.c263 = Constraint(expr= m.x92 + m.x93 + m.x94 + m.x95 + m.x96 + m.x97 + m.x98 + m.x99 + m.x100 + m.x101 + m.x102
                          + m.x103 + m.x104 - 456*m.b554 <= 0)
m.c264 = Constraint(expr= m.x105 + m.x106 + m.x107 + m.x108 + m.x109 + m.x110 + m.x111 + m.x112 + m.x113 + m.x114
                          + m.x115 + m.x116 + m.x117 - 526*m.b555 <= 0)
m.c265 = Constraint(expr= m.x118 + m.x119 + m.x120 + m.x121 + m.x122 + m.x123 + m.x124 + m.x125 + m.x126 + m.x127
                          + m.x128 + m.x129 + m.x130 - 152*m.b556 <= 0)
m.c266 = Constraint(expr= m.x131 + m.x132 + m.x133 + m.x134 + m.x135 + m.x136 + m.x137 + m.x138 + m.x139 + m.x140
                          + m.x141 + m.x142 + m.x143 - 456*m.b557 <= 0)
m.c267 = Constraint(expr= m.x144 + m.x145 + m.x146 + m.x147 + m.x148 + m.x149 + m.x150 + m.x151 + m.x152 + m.x153
                          + m.x154 + m.x155 + m.x156 - 384*m.b558 <= 0)
m.c268 = Constraint(expr= m.x157 + m.x158 + m.x159 + m.x160 + m.x161 + m.x162 + m.x163 + m.x164 + m.x165 + m.x166
                          + m.x167 + m.x168 + m.x169 - 441*m.b559 <= 0)
m.c269 = Constraint(expr= m.x170 + m.x171 + m.x172 + m.x173 + m.x174 + m.x175 + m.x176 + m.x177 + m.x178 + m.x179
                          + m.x180 + m.x181 + m.x182 - 309*m.b560 <= 0)
m.c270 = Constraint(expr= m.x183 + m.x184 + m.x185 + m.x186 + m.x187 + m.x188 + m.x189 + m.x190 + m.x191 + m.x192
                          + m.x193 + m.x194 + m.x195 - 233*m.b561 <= 0)
m.c271 = Constraint(expr= m.x196 + m.x197 + m.x198 + m.x199 + m.x200 + m.x201 + m.x202 + m.x203 + m.x204 + m.x205
+ m.x206 + m.x207 + m.x208 - 526*m.b562 <= 0)
m.c272 = Constraint(expr= m.x209 + m.x210 + m.x211 + m.x212 + m.x213 + m.x214 + m.x215 + m.x216 + m.x217 + m.x218
+ m.x219 + m.x220 + m.x221 - 384*m.b563 <= 0)
m.c273 = Constraint(expr= m.x222 + m.x223 + m.x224 + m.x225 + m.x226 + m.x227 + m.x228 + m.x229 + m.x230 + m.x231
+ m.x232 + m.x233 + m.x234 - 203*m.b564 <= 0)
m.c274 = Constraint(expr= m.x235 + m.x236 + m.x237 + m.x238 + m.x239 + m.x240 + m.x241 + m.x242 + m.x243 + m.x244
+ m.x245 + m.x246 + m.x247 - 522*m.b565 <= 0)
m.c275 = Constraint(expr= m.x248 + m.x249 + m.x250 + m.x251 + m.x252 + m.x253 + m.x254 + m.x255 + m.x256 + m.x257
+ m.x258 + m.x259 + m.x260 - 265*m.b566 <= 0)
m.c276 = Constraint(expr= m.x261 + m.x262 + m.x263 + m.x264 + m.x265 + m.x266 + m.x267 + m.x268 + m.x269 + m.x270
+ m.x271 + m.x272 + m.x273 - 152*m.b567 <= 0)
m.c277 = Constraint(expr= m.x274 + m.x275 + m.x276 + m.x277 + m.x278 + m.x279 + m.x280 + m.x281 + m.x282 + m.x283
+ m.x284 + m.x285 + m.x286 - 441*m.b568 <= 0)
m.c278 = Constraint(expr= m.x287 + m.x288 + m.x289 + m.x290 + m.x291 + m.x292 + m.x293 + m.x294 + m.x295 + m.x296
+ m.x297 + m.x298 + m.x299 - 203*m.b569 <= 0)
m.c279 = Constraint(expr= m.x300 + m.x301 + m.x302 + m.x303 + m.x304 + m.x305 + m.x306 + m.x307 + m.x308 + m.x309
+ m.x310 + m.x311 + m.x312 - 284*m.b570 <= 0)
m.c280 = Constraint(expr= m.x313 + m.x314 + m.x315 + m.x316 + m.x317 + m.x318 + m.x319 + m.x320 + m.x321 + m.x322
+ m.x323 + m.x324 + m.x325 - 426*m.b571 <= 0)
m.c281 = Constraint(expr= m.x326 + m.x327 + m.x328 + m.x329 + m.x330 + m.x331 + m.x332 + m.x333 + m.x334 + m.x335
+ m.x336 + m.x337 + m.x338 - 284*m.b572 <= 0)
m.c282 = Constraint(expr= m.x339 + m.x340 + m.x341 + m.x342 + m.x343 + m.x344 + m.x345 + m.x346 + m.x347 + m.x348
+ m.x349 + m.x350 + m.x351 - 109*m.b573 <= 0)
m.c283 = Constraint(expr= m.x352 + m.x353 + m.x354 + m.x355 + m.x356 + m.x357 + m.x358 + m.x359 + m.x360 + m.x361
+ m.x362 + m.x363 + m.x364 - 309*m.b574 <= 0)
m.c284 = Constraint(expr= m.x365 + m.x366 + m.x367 + m.x368 + m.x369 + m.x370 + m.x371 + m.x372 + m.x373 + m.x374
+ m.x375 + m.x376 + m.x377 - 434*m.b575 <= 0)
m.c285 = Constraint(expr= m.x378 + m.x379 + m.x380 + m.x381 + m.x382 + m.x383 + m.x384 + m.x385 + m.x386 + m.x387
+ m.x388 + m.x389 + m.x390 - 141*m.b576 <= 0)
m.c286 = Constraint(expr= m.x391 + m.x392 + m.x393 + m.x394 + m.x395 + m.x396 + m.x397 + m.x398 + m.x399 + m.x400
+ m.x401 + m.x402 + m.x403 - 434*m.b577 <= 0)
m.c287 = Constraint(expr= m.x404 + m.x405 + m.x406 + m.x407 + m.x408 + m.x409 + m.x410 + m.x411 + m.x412 + m.x413
+ m.x414 + m.x415 + m.x416 - 403*m.b578 <= 0)
m.c288 = Constraint(expr= m.x417 + m.x418 + m.x419 + m.x420 + m.x421 + m.x422 + m.x423 + m.x424 + m.x425 + m.x426
+ m.x427 + m.x428 + m.x429 - 426*m.b579 <= 0)
m.c289 = Constraint(expr= m.x430 + m.x431 + m.x432 + m.x433 + m.x434 + m.x435 + m.x436 + m.x437 + m.x438 + m.x439
+ m.x440 + m.x441 + m.x442 - 403*m.b580 <= 0)
m.c290 = Constraint(expr= m.x443 + m.x444 + m.x445 + m.x446 + m.x447 + m.x448 + m.x449 + m.x450 + m.x451 + m.x452
+ m.x453 + m.x454 + m.x455 - 151*m.b581 <= 0)
m.c291 = Constraint(expr= m.x456 + m.x457 + m.x458 + m.x459 + m.x460 + m.x461 + m.x462 + m.x463 + m.x464 + m.x465
+ m.x466 + m.x467 + m.x468 - 233*m.b582 <= 0)
m.c292 = Constraint(expr= m.x469 + m.x470 + m.x471 + m.x472 + m.x473 + m.x474 + m.x475 + m.x476 + m.x477 + m.x478
+ m.x479 + m.x480 + m.x481 - 109*m.b583 <= 0)
m.c293 = Constraint(expr= m.x482 + m.x483 + m.x484 + m.x485 + m.x486 + m.x487 + m.x488 + m.x489 + m.x490 + m.x491
+ m.x492 + m.x493 + m.x494 - 367*m.b584 <= 0)
m.c294 = Constraint(expr= m.x495 + m.x496 + m.x497 + m.x498 + m.x499 + m.x500 + m.x501 + m.x502 + m.x503 + m.x504
+ m.x505 + m.x506 + m.x507 - 367*m.b585 <= 0)
m.c295 = Constraint(expr= m.x508 + m.x509 + m.x510 + m.x511 + m.x512 + m.x513 + m.x514 + m.x515 + m.x516 + m.x517
+ m.x518 + m.x519 + m.x520 - 382*m.b586 <= 0)
m.c296 = Constraint(expr= m.x521 + m.x522 + m.x523 + m.x524 + m.x525 + m.x526 + m.x527 + m.x528 + m.x529 + m.x530
+ m.x531 + m.x532 + m.x533 - 151*m.b587 <= 0)
m.c297 = Constraint(expr= m.x534 + m.x535 + m.x536 + m.x537 + m.x538 + m.x539 + m.x540 + m.x541 + m.x542 + m.x543
+ m.x544 + m.x545 + m.x546 - 382*m.b588 <= 0)
| 53.085288 | 120 | 0.629634 |
c7d378679d5e763e0a3427a5a59048ba70934d41 | 4,322 | py | Python | tests/pytests/scenarios/multimaster/conftest.py | lllamnyp/salt | de112e5b362191e3708e170b7eb8e990787ad412 | [
"Apache-2.0"
] | null | null | null | tests/pytests/scenarios/multimaster/conftest.py | lllamnyp/salt | de112e5b362191e3708e170b7eb8e990787ad412 | [
"Apache-2.0"
] | null | null | null | tests/pytests/scenarios/multimaster/conftest.py | lllamnyp/salt | de112e5b362191e3708e170b7eb8e990787ad412 | [
"Apache-2.0"
] | null | null | null | import logging
import os
import shutil
import subprocess
import pytest
import salt.utils.platform
log = logging.getLogger(__name__)
| 31.547445 | 85 | 0.679084 |
c7d37af76275d31df153580818ea0db96b86762e | 1,210 | py | Python | supermario/supermario 1117/start_state.py | Kimmiryeong/2DGP_GameProject | ad3fb197aab27227fc92fd404b2c310f8d0827ca | [
"MIT"
] | null | null | null | supermario/supermario 1117/start_state.py | Kimmiryeong/2DGP_GameProject | ad3fb197aab27227fc92fd404b2c310f8d0827ca | [
"MIT"
] | null | null | null | supermario/supermario 1117/start_state.py | Kimmiryeong/2DGP_GameProject | ad3fb197aab27227fc92fd404b2c310f8d0827ca | [
"MIT"
] | null | null | null | import game_framework
from pico2d import *
import title_state
name = "StartState"
image = None
logo_time = 0.0
from pico2d import *
import title_state
name = "StartState"
image = None
logo_time = 0.0
| 11.747573 | 48 | 0.634711 |
c7d524f7dbf8736dbbb40f3bb15a61c60aba8191 | 22,620 | py | Python | egs/librispeech/ASR/transducer/test_rnn.py | rosrad/icefall | 6f282731286a6855658c6882c3c938437448e05e | [
"Apache-2.0"
] | null | null | null | egs/librispeech/ASR/transducer/test_rnn.py | rosrad/icefall | 6f282731286a6855658c6882c3c938437448e05e | [
"Apache-2.0"
] | null | null | null | egs/librispeech/ASR/transducer/test_rnn.py | rosrad/icefall | 6f282731286a6855658c6882c3c938437448e05e | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
# Copyright 2021 Xiaomi Corp. (authors: Fangjun Kuang)
#
# See ../../../../LICENSE for clarification regarding multiple authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
import torch.nn as nn
from transducer.rnn import (
LayerNormGRU,
LayerNormGRUCell,
LayerNormGRULayer,
LayerNormLSTM,
LayerNormLSTMCell,
LayerNormLSTMLayer,
)
torch.set_num_threads(1)
torch.set_num_interop_threads(1)
if __name__ == "__main__":
torch.manual_seed(20211202)
main()
| 29.530026 | 79 | 0.642706 |
c7d594ecefc0ecfe585fc9557bf2ed8617f874e6 | 1,944 | py | Python | settings.py | SalinderSidhu/CHIP8 | 46a01aa7675805b84809d1e9762905de8fdccc66 | [
"MIT"
] | 4 | 2015-12-22T15:03:43.000Z | 2016-07-28T08:11:48.000Z | settings.py | SalinderSidhu/CHIP8 | 46a01aa7675805b84809d1e9762905de8fdccc66 | [
"MIT"
] | null | null | null | settings.py | SalinderSidhu/CHIP8 | 46a01aa7675805b84809d1e9762905de8fdccc66 | [
"MIT"
] | null | null | null | import configparser
| 36.679245 | 79 | 0.646091 |
c7d59e3cde73fd0dad74b149197ee60ec8e8c83b | 3,900 | py | Python | demisto_sdk/commands/common/hook_validations/release_notes.py | yalonso7/demisto-sdk | 4b832078cdadb0b604a064532975e8be68ac726a | [
"MIT"
] | null | null | null | demisto_sdk/commands/common/hook_validations/release_notes.py | yalonso7/demisto-sdk | 4b832078cdadb0b604a064532975e8be68ac726a | [
"MIT"
] | null | null | null | demisto_sdk/commands/common/hook_validations/release_notes.py | yalonso7/demisto-sdk | 4b832078cdadb0b604a064532975e8be68ac726a | [
"MIT"
] | null | null | null | from __future__ import print_function
import itertools
from demisto_sdk.commands.common.constants import VALIDATED_PACK_ITEM_TYPES
from demisto_sdk.commands.common.errors import Errors
from demisto_sdk.commands.common.hook_validations.base_validator import \
BaseValidator
from demisto_sdk.commands.common.tools import (get_latest_release_notes_text,
get_release_notes_file_path)
from demisto_sdk.commands.update_release_notes.update_rn import UpdateRN
| 50 | 120 | 0.661795 |
c7d5fc15217b2b0e024e35082215227dc7639d0e | 14,326 | py | Python | PyOpenGL/PyGame/ex06/src/mathematics.py | hoppfull/Legacy-Python | 43f465bfdb76c91f2ac16aabb0783fdf5f459adb | [
"MIT"
] | null | null | null | PyOpenGL/PyGame/ex06/src/mathematics.py | hoppfull/Legacy-Python | 43f465bfdb76c91f2ac16aabb0783fdf5f459adb | [
"MIT"
] | null | null | null | PyOpenGL/PyGame/ex06/src/mathematics.py | hoppfull/Legacy-Python | 43f465bfdb76c91f2ac16aabb0783fdf5f459adb | [
"MIT"
] | null | null | null | import numpy as np
| 34.603865 | 100 | 0.454279 |
c7d672fb0397af44cf591c05913dd9f20b250483 | 1,652 | py | Python | test_utils/mocks.py | radomd92/botjagwar | 1dc96600c40041057a9f9afde38c31ca34b8db38 | [
"MIT"
] | 7 | 2015-01-23T17:24:04.000Z | 2022-01-12T16:54:24.000Z | test_utils/mocks.py | radomd92/botjagwar | 1dc96600c40041057a9f9afde38c31ca34b8db38 | [
"MIT"
] | 18 | 2017-12-09T01:11:23.000Z | 2021-09-22T13:26:24.000Z | test_utils/mocks.py | radomd92/botjagwar | 1dc96600c40041057a9f9afde38c31ca34b8db38 | [
"MIT"
] | 1 | 2015-06-22T02:17:55.000Z | 2015-06-22T02:17:55.000Z | from xml.dom import minidom
import pywikibot
from api.decorator import time_this
SiteMock = pywikibot.Site
p = PageMock(SiteMock('en', 'wiktionary'), 'gaon')
e = p.get()
| 36.711111 | 78 | 0.624092 |
c7d6da38ffc0a1fb86619973f197115c4b076c8a | 5,796 | py | Python | dl_tensorflow/deepdream.py | jarvisqi/deep_learning | 988a5b0551ccf2c480a519c66aca149053826d30 | [
"MIT"
] | 32 | 2017-10-26T13:37:36.000Z | 2021-03-24T09:06:45.000Z | dl_tensorflow/deepdream.py | 2892778775/deep_learning | 988a5b0551ccf2c480a519c66aca149053826d30 | [
"MIT"
] | 3 | 2018-11-19T05:55:46.000Z | 2019-03-01T05:20:43.000Z | dl_tensorflow/deepdream.py | 2892778775/deep_learning | 988a5b0551ccf2c480a519c66aca149053826d30 | [
"MIT"
] | 38 | 2017-11-08T15:42:48.000Z | 2021-05-10T00:42:33.000Z | import os
from functools import partial
from io import BytesIO
import numpy as np
import PIL.Image
import scipy.misc
import tensorflow as tf
graph = tf.Graph()
sess = tf.InteractiveSession(graph=graph)
model_fn = "./models/tensorflow_inception_graph.pb"
with tf.gfile.FastGFile(model_fn, 'rb') as f:
graph_def = tf.GraphDef()
graph_def.ParseFromString(f.read())
t_input = tf.placeholder(tf.float32, name="input")
imagenet_mean = 117.0
t_preprocessed = tf.expand_dims(t_input-imagenet_mean, 0)
tf.import_graph_def(graph_def, {"input": t_preprocessed})
k = np.float32([1, 4, 6, 4, 1])
k = np.outer(k, k)
k5x5 = k[:, :, None, None] / k.sum() * np.eye(3, dtype=np.float32)
#
# img
#
#
# imgn
if __name__ == '__main__':
img0 = PIL.Image.open('./images/test.jpg')
img0 = np.float32(img0)
render_deepdream(img0)
| 30.031088 | 104 | 0.619393 |
c7d6e3bbbed972de89ca1f857b7b3b2178ada3d2 | 1,829 | py | Python | admin.py | BlueBlock/usage-reporter | e30bbef6d281944d62f716c37aff17861a653967 | [
"MIT"
] | 4 | 2018-08-30T06:16:35.000Z | 2022-02-18T08:06:21.000Z | admin.py | BlueBlock/usage-reporter | e30bbef6d281944d62f716c37aff17861a653967 | [
"MIT"
] | 1 | 2018-03-29T17:04:44.000Z | 2018-03-29T17:04:44.000Z | admin.py | BlueBlock/usage-reporter | e30bbef6d281944d62f716c37aff17861a653967 | [
"MIT"
] | 4 | 2018-01-31T06:55:32.000Z | 2022-01-16T10:39:18.000Z | import calendar
import datetime
import logging
import os
import webapp2
import dbmodel
TESTING = os.environ.get('SERVER_SOFTWARE', '').startswith('Development')
app = webapp2.WSGIApplication([
('/tasks/admin/reset', ResetHandler)
], debug=TESTING)
| 35.173077 | 115 | 0.632586 |
c7d717769a7df13adf5117eb840b41a6b41f5506 | 2,708 | py | Python | napari/utils/colormaps/categorical_colormap_utils.py | Zac-HD/napari | 102a7e8f845893c874d2b86f9371d41130100b89 | [
"BSD-3-Clause"
] | 1 | 2021-04-24T10:10:54.000Z | 2021-04-24T10:10:54.000Z | napari/utils/colormaps/categorical_colormap_utils.py | Zac-HD/napari | 102a7e8f845893c874d2b86f9371d41130100b89 | [
"BSD-3-Clause"
] | 2 | 2021-05-17T02:15:08.000Z | 2022-03-12T21:19:52.000Z | napari/utils/colormaps/categorical_colormap_utils.py | Zac-HD/napari | 102a7e8f845893c874d2b86f9371d41130100b89 | [
"BSD-3-Clause"
] | null | null | null | from dataclasses import dataclass
from itertools import cycle
from typing import Dict, Union
import numpy as np
from ...layers.utils.color_transformations import (
transform_color,
transform_color_cycle,
)
| 25.308411 | 73 | 0.64771 |
c7d75d84ab48e0f55426fa5ef9b76cbde3951e30 | 7,027 | py | Python | src/ipywidgets_toggle_buttons/abc_toggle_buttons_with_hide.py | stas-prokopiev/ipywidgets_toggle_buttons | 84d1afde1d02c19fb6a41b20e17b9d2b1c7980e2 | [
"MIT"
] | null | null | null | src/ipywidgets_toggle_buttons/abc_toggle_buttons_with_hide.py | stas-prokopiev/ipywidgets_toggle_buttons | 84d1afde1d02c19fb6a41b20e17b9d2b1c7980e2 | [
"MIT"
] | null | null | null | src/ipywidgets_toggle_buttons/abc_toggle_buttons_with_hide.py | stas-prokopiev/ipywidgets_toggle_buttons | 84d1afde1d02c19fb6a41b20e17b9d2b1c7980e2 | [
"MIT"
] | null | null | null | """Abstract class for all toggle buttons"""
# Standard library imports
import logging
from collections import OrderedDict
# Third party imports
import ipywidgets
# Local imports
from .abc_toggle_buttons import BaseToggleButtons
from .layouts import DICT_LAYOUT_HBOX_ANY
LOGGER = logging.getLogger(__name__)
def _update_buttons_for_new_options(self):
"""Update buttons if options were changed"""
self._create_buttons_for_visible_options()
self._bool_is_hidden_options_created = False
# self._create_buttons_for_hidden_options()
def _create_scaffold_for_widget(self):
"""Create scaffold of ipywidget Boxes for self"""
# Main buttons box
self._widget_hbox_main = ipywidgets.HBox()
self._widget_hbox_main.layout = ipywidgets.Layout(**DICT_LAYOUT_HBOX_ANY)
# self._widget_hbox_main.layout.flex_flow = "row wrap"
# Middle buttons box
self._widget_hbox_middle_buttons = ipywidgets.HBox()
self._widget_hbox_middle_buttons.layout = ipywidgets.Layout(**DICT_LAYOUT_HBOX_ANY)
self._create_middle_buttons()
# Hidden buttons box
self._widget_hbox_hidden = ipywidgets.HBox()
self._widget_hbox_hidden.layout = ipywidgets.Layout(**DICT_LAYOUT_HBOX_ANY)
# self._widget_hbox_hidden.layout.flex_flow = "row wrap"
def _create_buttons_for_visible_options(self):
"""Create buttons for all visible options"""
self._dict_visible_button_by_option = OrderedDict()
int_button_width = self._get_button_width(self.options_visible)
list_buttons = []
for str_option in list(self.options_visible):
but_wid = ipywidgets.Button(
description=str_option,
layout={"width": "%dpx" % int_button_width}
)
but_wid.on_click(self._on_click_button_to_choose_option)
self._dict_visible_button_by_option[str_option] = but_wid
list_buttons.append(but_wid)
self._widget_hbox_main.children = list_buttons
def _create_middle_buttons(self):
"""Create buttons which are in charge what to do with hidden buttons"""
self._wid_but_hide_show = ipywidgets.ToggleButton(
value=False,
description="Show Hidden options",
button_style="info",
)
self._wid_but_hide_show.layout.width = "40%"
self._wid_but_hide_show.observe(
lambda _: self._update_widget_view(), "value")
self._widget_but_hidden_option_selected = ipywidgets.Button(
description="...", disabled=True)
self._widget_but_hidden_option_selected.layout.width = "40%"
self._widget_hbox_middle_buttons.children = [
self._widget_but_hidden_option_selected, self._wid_but_hide_show]
def _create_buttons_for_hidden_options(self):
"""Create buttons for all hidden options"""
self._dict_hidden_button_by_option = OrderedDict()
int_button_width = self._get_button_width(self.options_hidden)
list_buttons = []
for str_option in list(self.options_hidden):
but_wid = ipywidgets.Button(
description=str_option,
layout={"width": "%dpx" % int_button_width}
)
if str_option in self.value:
but_wid.button_style = "success"
but_wid.on_click(self._on_click_button_to_choose_option)
self._dict_hidden_button_by_option[str_option] = but_wid
list_buttons.append(but_wid)
self._widget_hbox_hidden.children = list_buttons
| 40.154286 | 91 | 0.672549 |
c7d7886d9a5f7ae38bdb7d01f1fc136b75bb2a50 | 3,899 | py | Python | Players/DWPMPlayer.py | jokvedaras/game-framework | 9ff60e15d1beff54f94e280501929664ce59afe7 | [
"Apache-2.0"
] | null | null | null | Players/DWPMPlayer.py | jokvedaras/game-framework | 9ff60e15d1beff54f94e280501929664ce59afe7 | [
"Apache-2.0"
] | null | null | null | Players/DWPMPlayer.py | jokvedaras/game-framework | 9ff60e15d1beff54f94e280501929664ce59afe7 | [
"Apache-2.0"
] | null | null | null | __author__ = 'Pat McClernan and Dan Wegmann'
import Player
import Message
# input
#0 for rock
#1 for paper
#2 for scissors
# past move is array of numbers
# our move followed by their move
#Our strategy is to look at all past moves
#In a large number of games, you would expect
# each move to be seen an even amount of times
#So our strategy is to take the least seen move
# and expect it to show up soon
# so we will play to beat that move
# Test driver
# Run by typing "python3 RpsPlayerExample.py"
if __name__ == "__main__":
player = PatAndDansRPSPlayer()
opponent = PatAndDansRPSPlayer()
players = [opponent, player]
fakemoves = (1, 2)
fakeresult = (0, 1)
player.notify(Message.Message.get_match_start_message(players))
player.notify(Message.Message.get_round_start_message(players))
move = player.play()
print ("Move played: ", move)
player.notify(Message.Message.get_round_end_message(players, fakemoves, fakeresult))
| 32.22314 | 108 | 0.598359 |
c7d7ef9a92fb0bfab05a3bc1de9e8efb6f62b67d | 1,023 | py | Python | example/example.py | mowshon/age-and-gender | e5c912f6ba739f30a45c04208b6d16500e4488cd | [
"MIT"
] | 81 | 2020-06-17T12:53:03.000Z | 2022-03-11T20:02:46.000Z | example/example.py | mowshon/age-and-gender | e5c912f6ba739f30a45c04208b6d16500e4488cd | [
"MIT"
] | 4 | 2020-06-18T09:28:12.000Z | 2021-07-13T09:16:29.000Z | example/example.py | mowshon/age-and-gender | e5c912f6ba739f30a45c04208b6d16500e4488cd | [
"MIT"
] | 17 | 2020-06-18T07:08:09.000Z | 2022-03-31T03:56:58.000Z | from age_and_gender import *
from PIL import Image, ImageDraw, ImageFont
data = AgeAndGender()
data.load_shape_predictor('models/shape_predictor_5_face_landmarks.dat')
data.load_dnn_gender_classifier('models/dnn_gender_classifier_v1.dat')
data.load_dnn_age_predictor('models/dnn_age_predictor_v1.dat')
filename = 'test-image.jpg'
img = Image.open(filename).convert("RGB")
result = data.predict(img)
font = ImageFont.truetype("Acme-Regular.ttf", 20)
for info in result:
shape = [(info['face'][0], info['face'][1]), (info['face'][2], info['face'][3])]
draw = ImageDraw.Draw(img)
gender = info['gender']['value'].title()
gender_percent = int(info['gender']['confidence'])
age = info['age']['value']
age_percent = int(info['age']['confidence'])
draw.text(
(info['face'][0] - 10, info['face'][3] + 10), f"{gender} (~{gender_percent}%)\n{age} y.o. (~{age_percent}%).",
fill='white', font=font, align='center'
)
draw.rectangle(shape, outline="red", width=5)
img.show()
| 31 | 118 | 0.672532 |
c7d86ca9e9717fc1914525f4cf4555781fc27cb0 | 1,463 | py | Python | code/generate_games.py | jppg/pygame-tictactoe | f7283a71bb289601b4b8ee0b0bdbe731e67fa8a7 | [
"MIT"
] | null | null | null | code/generate_games.py | jppg/pygame-tictactoe | f7283a71bb289601b4b8ee0b0bdbe731e67fa8a7 | [
"MIT"
] | null | null | null | code/generate_games.py | jppg/pygame-tictactoe | f7283a71bb289601b4b8ee0b0bdbe731e67fa8a7 | [
"MIT"
] | null | null | null | from tictactoe import TicTacToe
import random
import csv
import os
gameNr = 1
gameLimit = 10000
lst_moves_1 = []
lst_moves_2 = []
while gameNr <= gameLimit:
print("+++++++++++")
print("Game#", gameNr)
game = TicTacToe()
tmp_moves_1 = []
tmp_moves_2 = []
while game.get_winner() == 0 and game.possible_moves() > 0:
pos = game.get_positions().copy()
while game.possible_moves() > 0:
move = random.randint(0,9)
if game.play(int(move)):
if game.get_player() == 1:
tmp_moves_2.append([gameNr] + [game.get_turn() - 1] + pos + [move])
else:
tmp_moves_1.append([gameNr] + [game.get_turn() - 1] + pos + [move])
break
print("Winner of game ", gameNr, "is", game.get_winner())
if game.get_winner() == 1:
lst_moves_1.append(tmp_moves_1)
#lst_moves_1.append(tmp_moves_1[len(tmp_moves_1) - 1])
else:
#lst_moves_2.append(tmp_moves_2[len(tmp_moves_2) - 1])
lst_moves_2.append(tmp_moves_2)
#print("List X: ", lst_moves_1)
#print("List O: ", lst_moves_2)
game.print_board()
gameNr = gameNr + 1
with open('moves_1.csv', 'w', newline='') as f:
writer = csv.writer(f)
for row in lst_moves_1:
writer.writerows(row)
with open('moves_2.csv', 'w', newline='') as f:
writer = csv.writer(f)
for row in lst_moves_2:
writer.writerows(row) | 27.603774 | 87 | 0.580314 |
c7d9eaf5171771685897ba7e8ba2988b57091181 | 350 | py | Python | applications/CoSimulationApplication/custom_data_structure/pyKratos/IntervalUtility.py | lcirrott/Kratos | 8406e73e0ad214c4f89df4e75e9b29d0eb4a47ea | [
"BSD-4-Clause"
] | 2 | 2019-10-25T09:28:10.000Z | 2019-11-21T12:51:46.000Z | applications/CoSimulationApplication/custom_data_structure/pyKratos/IntervalUtility.py | lcirrott/Kratos | 8406e73e0ad214c4f89df4e75e9b29d0eb4a47ea | [
"BSD-4-Clause"
] | 13 | 2019-10-07T12:06:51.000Z | 2020-02-18T08:48:33.000Z | applications/CoSimulationApplication/custom_data_structure/pyKratos/IntervalUtility.py | lcirrott/Kratos | 8406e73e0ad214c4f89df4e75e9b29d0eb4a47ea | [
"BSD-4-Clause"
] | null | null | null | from __future__ import print_function, absolute_import, division # makes these scripts backward compatible with python 2.6 and 2.7
# TODO this should be implemented, see "kratos/utilities/interval_utility.h" | 38.888889 | 131 | 0.757143 |
c7dc267a8e2592a1c24d3b8c06a265a370010c46 | 2,906 | py | Python | stixcore/tmtc/tests/test_packets.py | nicHoch/STIXCore | 16822bbb37046f8e6c03be51909cfc91e9822cf7 | [
"BSD-3-Clause"
] | 1 | 2022-03-31T13:42:43.000Z | 2022-03-31T13:42:43.000Z | stixcore/tmtc/tests/test_packets.py | nicHoch/STIXCore | 16822bbb37046f8e6c03be51909cfc91e9822cf7 | [
"BSD-3-Clause"
] | 192 | 2020-11-03T22:40:19.000Z | 2022-03-31T15:17:13.000Z | stixcore/tmtc/tests/test_packets.py | nicHoch/STIXCore | 16822bbb37046f8e6c03be51909cfc91e9822cf7 | [
"BSD-3-Clause"
] | 3 | 2020-11-09T15:05:18.000Z | 2022-01-21T07:52:51.000Z |
import bitstring
import pytest
from stixcore.data.test import test_data
from stixcore.idb.manager import IDBManager
from stixcore.tmtc.packets import (
SOURCE_PACKET_HEADER_STRUCTURE,
TC_DATA_HEADER_STRUCTURE,
TM_DATA_HEADER_STRUCTURE,
SourcePacketHeader,
TCPacket,
TMDataHeader,
TMPacket,
)
from stixcore.tmtc.tm.tm_1 import TM_1_1
def test_tm_packet(idb):
combind_structures = {**SOURCE_PACKET_HEADER_STRUCTURE, **TM_DATA_HEADER_STRUCTURE}
test_fmt = ', '.join(combind_structures.values())
test_values = {n: 2 ** int(v.split(':')[-1]) - 1 for n, v in
combind_structures.items()}
test_binary = bitstring.pack(test_fmt, *test_values.values())
tmtc_packet = TMPacket(test_binary, idb=idb)
assert all([getattr(tmtc_packet.source_packet_header, key) == test_values[key]
for key in SOURCE_PACKET_HEADER_STRUCTURE.keys() if not key.startswith('spare')])
assert all([getattr(tmtc_packet.data_header, key) == test_values[key]
for key in TM_DATA_HEADER_STRUCTURE.keys() if not key.startswith('spare')])
def test_tc_packet():
combind_structures = {**SOURCE_PACKET_HEADER_STRUCTURE, **TC_DATA_HEADER_STRUCTURE}
test_fmt = ', '.join(combind_structures.values())
test_values = {n: 2 ** int(v.split(':')[-1]) - 1 for n, v in
combind_structures.items()}
test_values['process_id'] = 90
test_values['packet_category'] = 12
test_binary = bitstring.pack(test_fmt, *test_values.values())
tmtc_packet = TCPacket(test_binary)
assert all([getattr(tmtc_packet.source_packet_header, key) == test_values[key]
for key in SOURCE_PACKET_HEADER_STRUCTURE.keys() if not key.startswith('spare')])
assert all([getattr(tmtc_packet.data_header, key) == test_values[key]
for key in TC_DATA_HEADER_STRUCTURE.keys() if not key.startswith('spare')])
def test_tm_1_1(idb):
packet = TM_1_1('0x0da1c066000d100101782628a9c4e71e1dacc0a0', idb=idb)
assert packet.source_packet_header.process_id == 90
assert packet.source_packet_header.packet_category == 1
assert packet.data_header.service_type == 1
assert packet.data_header.service_subtype == 1
| 41.514286 | 97 | 0.699931 |
c7dcc75b55961bd952da5e374d98d1ab7d3f5c96 | 40,969 | py | Python | python/thunder/rdds/fileio/seriesloader.py | broxtronix/thunder | 4dad77721e2c9e225f94a6a5366d51ec83ac4690 | [
"Apache-2.0"
] | null | null | null | python/thunder/rdds/fileio/seriesloader.py | broxtronix/thunder | 4dad77721e2c9e225f94a6a5366d51ec83ac4690 | [
"Apache-2.0"
] | null | null | null | python/thunder/rdds/fileio/seriesloader.py | broxtronix/thunder | 4dad77721e2c9e225f94a6a5366d51ec83ac4690 | [
"Apache-2.0"
] | null | null | null | """Provides SeriesLoader object and helpers, used to read Series data from disk or other filesystems.
"""
from collections import namedtuple
import json
from numpy import array, arange, frombuffer, load, ndarray, unravel_index, vstack
from numpy import dtype as dtypeFunc
from scipy.io import loadmat
from cStringIO import StringIO
import itertools
import struct
import urlparse
import math
from thunder.rdds.fileio.writers import getParallelWriterForPath
from thunder.rdds.keys import Dimensions
from thunder.rdds.fileio.readers import getFileReaderForPath, FileNotFoundError, appendExtensionToPathSpec
from thunder.rdds.imgblocks.blocks import SimpleBlocks
from thunder.rdds.series import Series
from thunder.utils.common import parseMemoryString, smallestFloatType
def writeSeriesConfig(outputDirPath, nkeys, nvalues, keyType='int16', valueType='int16',
confFilename="conf.json", overwrite=True, awsCredentialsOverride=None):
"""
Helper function to write out a conf.json file with required information to load Series binary data.
"""
import json
from thunder.rdds.fileio.writers import getFileWriterForPath
filewriterClass = getFileWriterForPath(outputDirPath)
# write configuration file
# config JSON keys are lowercased "valuetype", "keytype", not valueType, keyType
conf = {'input': outputDirPath,
'nkeys': nkeys, 'nvalues': nvalues,
'valuetype': str(valueType), 'keytype': str(keyType)}
confWriter = filewriterClass(outputDirPath, confFilename, overwrite=overwrite,
awsCredentialsOverride=awsCredentialsOverride)
confWriter.writeFile(json.dumps(conf, indent=2))
# touch "SUCCESS" file as final action
successWriter = filewriterClass(outputDirPath, "SUCCESS", overwrite=overwrite,
awsCredentialsOverride=awsCredentialsOverride)
successWriter.writeFile('')
| 48.772619 | 124 | 0.631648 |
c7dcceeeb44aada8315f0c77d81c291531d15b79 | 3,097 | py | Python | mxnet/local_forward.py | rai-project/onnx_examples | 45db7b3e03dd674f28aeef3fcb1e60f5bca47948 | [
"MIT"
] | null | null | null | mxnet/local_forward.py | rai-project/onnx_examples | 45db7b3e03dd674f28aeef3fcb1e60f5bca47948 | [
"MIT"
] | null | null | null | mxnet/local_forward.py | rai-project/onnx_examples | 45db7b3e03dd674f28aeef3fcb1e60f5bca47948 | [
"MIT"
] | null | null | null | # run local models given a path, default to './mxnet_models/'
import os
import argparse
import time
import mxnet as mx
import numpy as np
file_path = os.path.realpath(__file__)
dir_name = os.path.dirname(file_path)
os.environ["MXNET_CUDNN_AUTOTUNE_DEFAULT"] = "0"
def xprint(s):
pass
parser = argparse.ArgumentParser(
description='Predict ImageNet classes from a given image')
parser.add_argument('--model_name', type=str, required=False, default='resnet50_v1',
help='name of the model to use')
parser.add_argument('--batch_size', type=int, required=False, default=1,
help='batch size to use')
parser.add_argument('--input_dim', type=int, required=False, default=224,
help='input dimension')
parser.add_argument('--input_channels', type=int, required=False, default=3,
help='input channels')
parser.add_argument('--num_iterations', type=int, required=False, default=30,
help='number of iterations to run')
parser.add_argument('--num_warmup', type=int, required=False, default=5,
help='number of warmup iterations to run')
parser.add_argument('--model_idx', type=int, required=False, default=2,
help='model idx')
parser.add_argument('--profile', type=bool, required=False, default=False,
help='enable profiling')
opt = parser.parse_args()
model_name = opt.model_name
batch_size = opt.batch_size
input_dim = opt.input_dim
input_channels = opt.input_channels
num_iterations = opt.num_iterations
num_warmup = opt.num_warmup
model_idx = opt.model_idx
profile = opt.profile
ctx = mx.gpu() if len(mx.test_utils.list_gpus()) else mx.cpu()
sym, arg_params, aux_params = mx.model.load_checkpoint(
dir_name + '/mxnet_models/'+model_name, 0)
data_names = [
graph_input
for graph_input in sym.list_inputs()
if graph_input not in arg_params and graph_input not in aux_params
]
net = mx.mod.Module(
symbol=sym,
data_names=[data_names[0]],
context=ctx,
label_names=None,
)
input_shape = (batch_size, input_channels, input_dim, input_dim)
img = mx.random.uniform(
shape=input_shape, ctx=ctx)
net.bind(for_training=False, data_shapes=[
(data_names[0], input_shape)], label_shapes=net._label_shapes)
net.set_params(arg_params, aux_params, allow_missing=True)
for i in range(num_warmup):
forward_once()
res = []
if profile:
cuda_profiler_start()
for i in range(num_iterations):
t = forward_once()
res.append(t)
if profile:
cuda_profiler_stop()
res = np.multiply(res, 1000)
print("{},{},{},{},{},{}".format(model_idx+1, model_name, batch_size, np.min(res),
np.average(res), np.max(res)))
| 27.651786 | 84 | 0.683242 |
c7de097e9b9739100654b069d9cac10ffe5b515c | 1,198 | py | Python | tests/test_get_angles.py | Mopolino8/lammps-data-file | 5c9015d05fa1484a33c84e6cfb90cd4a7d99d133 | [
"MIT"
] | 13 | 2017-05-30T17:43:10.000Z | 2021-08-06T04:21:44.000Z | tests/test_get_angles.py | njustcodingjs/lammps-data-file | 3a0729b5ab4d2344326d09ac4ee1aab41442f14a | [
"MIT"
] | 2 | 2018-05-28T15:35:32.000Z | 2018-05-28T16:21:09.000Z | tests/test_get_angles.py | njustcodingjs/lammps-data-file | 3a0729b5ab4d2344326d09ac4ee1aab41442f14a | [
"MIT"
] | 10 | 2017-05-23T21:19:21.000Z | 2022-03-08T02:18:00.000Z | from lammps_data.angles import get_angles
| 33.277778 | 87 | 0.520033 |
c7dedb48cc1d235760b585e1ff0e7c005780aeec | 491 | py | Python | api/scheduler/migrations/0001_initial.py | jfaach/stock-app | 9cd0f98d3ec5d31dcd6680c5bf8b7b0fcdf025a6 | [
"CC0-1.0"
] | null | null | null | api/scheduler/migrations/0001_initial.py | jfaach/stock-app | 9cd0f98d3ec5d31dcd6680c5bf8b7b0fcdf025a6 | [
"CC0-1.0"
] | null | null | null | api/scheduler/migrations/0001_initial.py | jfaach/stock-app | 9cd0f98d3ec5d31dcd6680c5bf8b7b0fcdf025a6 | [
"CC0-1.0"
] | null | null | null | # Generated by Django 3.1.1 on 2020-12-16 03:07
from django.db import migrations, models
| 22.318182 | 114 | 0.578411 |
c7e12276bc98092252c4149244dfdf01adca03b0 | 477 | py | Python | 9-Wine-Scaling.py | Pawel762/Class-7_homework | e79d2f8d218980d814443951dae7840f521ba191 | [
"MIT"
] | null | null | null | 9-Wine-Scaling.py | Pawel762/Class-7_homework | e79d2f8d218980d814443951dae7840f521ba191 | [
"MIT"
] | null | null | null | 9-Wine-Scaling.py | Pawel762/Class-7_homework | e79d2f8d218980d814443951dae7840f521ba191 | [
"MIT"
] | null | null | null | from sklearn.preprocessing import StandardScaler
from sklearn.datasets import load_wine
from sklearn.model_selection import train_test_split

# Load the wine dataset: X is the feature matrix, y the class labels.
wine = load_wine()
columns_names = wine.feature_names
y = wine.target
X = wine.data

print('Pre scaling X')
print(X)

# Standardize every feature to zero mean and unit variance.
# StandardScaler.fit() returns the scaler itself, so transform can be chained.
scaler = StandardScaler()
scaled_features = scaler.fit(X).transform(X)

print('Post scaling X')
print(scaled_features)

# Reserve 37.5% of the samples as a held-out test split.
X_train, X_test, y_train, y_test = train_test_split(scaled_features, y, test_size=0.375)
| 21.681818 | 88 | 0.796646 |
c7e14941f3967e5d720a9a0637e48720262f173d | 4,057 | py | Python | tests/conftest.py | szkkteam/flask-starter | 7019036e7ee017ca5df9059d0b4a0d29005beab5 | [
"MIT"
] | null | null | null | tests/conftest.py | szkkteam/flask-starter | 7019036e7ee017ca5df9059d0b4a0d29005beab5 | [
"MIT"
] | 2 | 2021-03-31T19:36:44.000Z | 2021-12-13T20:30:11.000Z | tests/conftest.py | szkkteam/flask-starter | 7019036e7ee017ca5df9059d0b4a0d29005beab5 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Common Python library imports
import os
import pytest
# Pip package imports
from collections import namedtuple
from flask import template_rendered
from flask_security.signals import (
reset_password_instructions_sent,
user_confirmed,
user_registered,
)
# Internal package imports
from backend.app import _create_app
from backend.config import TestConfig
from backend.extensions import db as db_ext
from backend.extensions.mail import mail
from ._client import (
ApiTestClient,
ApiTestResponse,
HtmlTestClient,
HtmlTestResponse,
)
from ._model_factory import ModelFactory
| 22.792135 | 79 | 0.689426 |
c7e1894d1594534627afedcd4ba2104fda1ac3a6 | 927 | py | Python | setup.py | YiuRULE/nats.py | 3a78ba4c385e2069daf5ff560aadc30968af1ccd | [
"Apache-2.0"
] | null | null | null | setup.py | YiuRULE/nats.py | 3a78ba4c385e2069daf5ff560aadc30968af1ccd | [
"Apache-2.0"
] | null | null | null | setup.py | YiuRULE/nats.py | 3a78ba4c385e2069daf5ff560aadc30968af1ccd | [
"Apache-2.0"
] | null | null | null | from setuptools import setup
from nats.aio.client import __version__

# Optional dependency groups: `pip install nats-py[nkeys]` pulls in NKEYS support.
EXTRAS = {
    'nkeys': ['nkeys'],
}

# Trove classifiers: audience plus the supported Python versions.
CLASSIFIERS = [
    'Intended Audience :: Developers',
    'Programming Language :: Python',
    'Programming Language :: Python :: 3',
] + ['Programming Language :: Python :: 3.%s' % minor for minor in ('7', '8', '9', '10')]

setup(
    name='nats-py',
    version=__version__,
    description='NATS client for Python',
    long_description='Python client for NATS, a lightweight, high-performance cloud native messaging system',
    classifiers=CLASSIFIERS,
    url='https://github.com/nats-io/nats.py',
    author='Waldemar Quevedo',
    author_email='wally@synadia.com',
    license='Apache 2 License',
    packages=['nats', 'nats.aio', 'nats.protocol', 'nats.js'],
    zip_safe=True,
    extras_require=EXTRAS,
)
| 29.903226 | 109 | 0.636462 |
c7e2f163fdb11300c85e2c17e27cb56d8ee3f07e | 12,844 | py | Python | example_python_files/MagicDAQ,MABoard,FullDemo.py | MagicDAQ/magicdaq_docs | 896a2565a28d80c733d8a137211212816ef3fbe2 | [
"MIT"
] | 1 | 2021-05-20T21:11:13.000Z | 2021-05-20T21:11:13.000Z | example_python_files/MagicDAQ,MABoard,FullDemo.py | MagicDAQ/magicdaq_docs | 896a2565a28d80c733d8a137211212816ef3fbe2 | [
"MIT"
] | null | null | null | example_python_files/MagicDAQ,MABoard,FullDemo.py | MagicDAQ/magicdaq_docs | 896a2565a28d80c733d8a137211212816ef3fbe2 | [
"MIT"
] | null | null | null | ##############################################################
#*** MagicDAQ USB DAQ and M&A Board General Demo Script ***
##############################################################
# NOTE: This demo is interactive. It pauses with input() between steps and
# requires real MagicDAQ hardware (USB DAQ, optionally the M&A board).
#*** Websites ***
# MagicDAQ Website:
# https://www.magicdaq.com/
# API Docs Website:
# https://magicdaq.github.io/magicdaq_docs/
#*** Install MagicDAQ ***
# Download the MagicDAQ python package from pypi
# Run this command in a command prompt:
# python -m pip install magicdaq
# Further docs: https://magicdaq.github.io/magicdaq_docs/#/Install_MagicDAQ
# MagicDAQ is only compatible with Python 3 on Windows. It does not work on Linux at the moment. It does not work with Python 2.
#*** Using Auto Code Complete With PyCharm ***
# Using a code editor like Pycharm and want to get auto complete working for the MagicDAQ package?
# Docs: https://magicdaq.github.io/magicdaq_docs/#/PyCharmCodeCompletion
##############################################################
#*** Imports ***
##############################################################
import sys
import time
# Import MagicDAQ
print('*** MagicDAQ Install Check ***')
print('')
try:
    # Import MagicDAQDevice object
    from magicdaq.api_class import MagicDAQDevice
    # Create daq_one object
    daq_one = MagicDAQDevice()
    print('GOOD: MagicDAQ API is installed properly.')
    # Get MagicDAQ Driver Version
    driver_version = daq_one.get_driver_version()
    if driver_version == 1.0:
        print('GOOD: MagicDAQ Driver is installed properly.')
        print('You are ready to use MagicDAQ!')
    else:
        print('ERROR: MagicDAQ Driver version not expected value: '+str(driver_version))
        print('Try installing MagicDAQ using pip again.')
        print('https://magicdaq.github.io/magicdaq_docs/#/Install_MagicDAQ')
        print('Feel free to email MagicDAQ Support at: support@magicdaq.com')
except Exception as exception_text:
    # Any failure above (missing package or driver) lands here.
    print('Original exception: ')
    print(exception_text)
    print('')
    print('ERROR: Unable to import MagicDAQ API.')
    print('Mostly likely, MagicDAQ has not been properly downloaded and installed using pip.')
    print('Please consult MagicDAQ API Docs: https://magicdaq.github.io/magicdaq_docs/#/Install_MagicDAQ')
    print('Feel free to email MagicDAQ Support at: support@magicdaq.com')
    sys.exit(0)
##############################################################
#*** MagicDAQ USB DAQ MDAQ300 Features Demo ***
##############################################################
# This portion of the script shows off some of the USB DAQ's features
# Hardware docs: https://www.magicdaq.com/product/magic-daq/
print('')
print('*** MagicDAQ USB DAQ Demo ***')
print('Ensure the USB DAQ is plugged into the computer using the USB cable.')
print('The DAQ does not need to be connected to the M&A board.')
print('')
# input() is only used to pause until the user is ready; the value is unused.
user_input = input('Press any key to continue.')
#*** Open DAQ Device ***
# Remember, the daq_one object has already been created in the above 'Imports' section
# We must open the daq device before performing any hardware feature manipulation
# https://magicdaq.github.io/magicdaq_docs/#/MagicDAQ_Basics
daq_one.open_daq_device()
###############################################################
#*** Analog Output Demo: Constant, Sine, and PWM on AO1 Pin ***
###############################################################
print('')
print('--- Analog Output Demo: Constant, Sine, and PWM Output ---')
# Set constant 3 volt output voltage on AO1 pin
daq_one.set_analog_output(1,3)
print('Using an oscilloscope, place the scope probe on pin AO1 and connect the scope probe GND to one of the USB DAQs AGND pins')
print('You should now observe a constant 3V')
print('')
user_input = input('Press any key to continue.')
# Configure and start 300Hz sine wave with 2V amplitude on AO1 pin
daq_one.configure_analog_output_sine_wave(1,300,amplitude=2)
daq_one.start_analog_output_wave(1)
print('You should now observe a 300Hz sine wave with 2V amplitude.')
print('')
user_input = input('Press any key to continue.')
# Stop previous wave
daq_one.stop_analog_output_wave(1)
# Configure and start PWM wave, 200 Hz, 50% duty cycle, 3.3V amplitude
daq_one.configure_analog_output_pwm_wave(1,200,50,amplitude=3.3)
daq_one.start_analog_output_wave(1)
print('You should now observe a 200Hz PWM wave, 50% duty cycle, with 3.3V amplitude.')
print('')
user_input = input('Press any key to continue.')
# Stop the wave
daq_one.stop_analog_output_wave(1)
# NOTE(review): the message below misspells set_analog_output; the API call name is correct.
print('The wave should now stop. You could set it to GND using set_analog_ouput() if you wanted.')
print('')
user_input = input('Press any key to continue.')
###############################################################
#*** Pulse Counter Pin Demo: PWM waves ***
###############################################################
print('')
print('--- Pulse Counter Pin Demo: PWM Waves ---')
# Configure a 50 KHz frequency, 75% duty cycle, continuous PWM Wave on the counter pin (CTR0)
# Note that unlike the analog output pins, the CTR0 pin always outputs at an amplitude of 3.3v when producing PWM waves
daq_one.configure_counter_pwm(50000,75)
# Start counter wave
daq_one.start_counter_pwm()
print('Place your scope probe on pin CTR0')
print('You should see a 50kHz, 75% duty cycle PWM wave.')
print('')
user_input = input('Press any key to continue.')
# Now stopping the counter PWM wave
daq_one.stop_counter_pwm()
print('The PWM wave will now stop.')
print('')
user_input = input('Press any key to continue.')
###############################################################
#*** Pulse Counter Pin Demo: Pulse Counting ***
###############################################################
print('')
print('--- Pulse Counter Pin Demo: Pulse Counting ---')
print('Use a piece of wire to bridge CTR0 to DGND several times')
print('CTR0 has an internal pull up resistor. You are simulating a pulse pulling the voltage to GND.')
print('You will have 8 sec to simulate some pulses.')
print('')
user_input = input('Press any key when you are ready to start.')
# Start the Pulse Counter
# Pulses will be counted on the falling edge
daq_one.enable_pulse_counter()
# Sleep for 8 sec
time.sleep(8)
# Read number of pulses
print('Number of pulses counted: '+str(daq_one.read_pulse_counter()))
print('You are using a piece of wire, so it is likely bouncing on and off the screw terminal, counting many pulses')
print('')
user_input = input('Stop simulating pulses. Press any key to continue.')
print('')
print('Now clearing the pulse counter')
daq_one.clear_pulse_counter()
print('Pulse count after clearing: '+str(daq_one.read_pulse_counter()))
###############################################################
#*** Digital Pin Demo ***
###############################################################
print('')
print('--- Digital Pin Demo ---')
# Set P0.0 pin LOW
daq_one.set_digital_output(0,0)
print('Place scope probe on pin P0.0, pin should be LOW')
print('')
user_input = input('Press any key to continue.')
# Set P0.0 pin HIGH
daq_one.set_digital_output(0,1)
print('Place scope probe on pin P0.0, pin should be HIGH')
print('')
user_input = input('Press any key to continue.')
###############################################################
#*** Analog Input Pin Demo ***
###############################################################
print('')
print('--- Analog Input Pin Demo ---')
# Single ended voltage measurement
print('Apply voltage to AI0 pin. If you dont have a power supply handy, you can run a wire from the +5V pin to the AI0 pin.')
print('')
user_input = input('Press any key to continue.')
print('Voltage measured at AI0: '+str(daq_one.read_analog_input(0)))
print('If you are using the +5V pin, remember that this voltage is derived from the USB Power supply, so it will be what ever your USB bus ir producing, probably something slightly less than 5V.')
# If you want to perform a differential input measurement
# daq_one.read_diff_analog_input()
# https://magicdaq.github.io/magicdaq_docs/#/read_diff_analog_input
###############################################################
#*** M&A Board Demo ***
###############################################################
# M&A Board hardware spec:
# https://www.magicdaq.com/product/ma-board-full-kit/
print('')
print('*** M&A Board Demo ***')
print('Ensure the USB DAQ is connected to the M&A board using the ribbon cable.')
print('Ribbon cable pin out on page 6 of: ')
print('https://www.magicdaq.com/mdaq350datasheet/')
print('Use the provided power cable to apply power to the M&A board.')
print('')
user_input = input('Press any key to continue.')
###############################################################
#*** Relay Demo ***
###############################################################
print('')
print('--- Relay Demo ---')
print('Setting all relays to closed.')
daq_one.set_digital_output(7, 1)
daq_one.set_digital_output(6, 1)
daq_one.set_digital_output(5, 1)
daq_one.set_digital_output(4, 1)
time.sleep(1)
relay_count = 1
digital_pin_count = 7
while relay_count <= 4:
    print('Relay #: ' + str(relay_count) + ' Digital Pin #: ' + str(digital_pin_count))
    # Set relay to open
    print('Setting relay to OPEN.')
    daq_one.set_digital_output(digital_pin_count, 0)
    time.sleep(1)
    # Advance to the next relay: relay numbers count up while pin numbers count down
    relay_count += 1
    digital_pin_count -= 1
    print('')
print('')
user_input = input('Press any key to continue.')
###############################################################
#*** Vout Demo ***
###############################################################
print('')
print('--- Vout Demo ---')
print('Vout provides a variable voltage power output capable of up to 2A')
print('By characterizing your M&A board, or building a feedback loop; voltage accuracy of Vout can be made quite good.')
print('See notes on page 4 of the M&A data sheet.')
print('https://www.magicdaq.com/mdaq350datasheet/')
# See the M&A board data sheet for the equation that describes the Vout to Vout_set (0 and 2.77 here) relationship
print('')
print('Vout_set Set to 0V.')
print('Measure Vout with a multimeter. It should be about 10V')
daq_one.set_analog_output(0, 0)
print('')
user_input = input('Press any key to continue.')
print('Vout_set Set to 2.77V')
print('Measure Vout with a multimeter. It should be about 5V')
daq_one.set_analog_output(0, 2.77)
print('')
user_input = input('Press any key to continue.')
###############################################################
#*** Low Current Measurement Demo: A1 ***
###############################################################
print('')
print('--- A1 Low Current Measurement Demo ---')
print('Use the 3.3V board voltage and a 20K resistor to put 165uA through A1.')
print('')
user_input = input('Press any key to continue.')
# See the M&A board data sheet for the equation that describes the Vout to current relationship
pin_4_voltage = daq_one.read_analog_input(4)
print('Read voltage: ' + str(pin_4_voltage))
calculated_current_amps = pin_4_voltage / (332 * 97.863)
ua_current = round((calculated_current_amps / .000001), 3)
print('Calculated uA current: ' + str(ua_current))
###############################################################
#*** Current Measurement Demo: A2 ***
###############################################################
print('')
print('--- A2 Current Measurement Demo (+/- 5A max) ---')
print('Use an external 5V power supply and 5 ohm power resistor to put 1 Amp through A2.')
print('')
user_input = input('Press any key to continue.')
# See the M&A board data sheet for the equation that describes the Vout to current relationship
pin_5_voltage = daq_one.read_analog_input(5)
print('Read voltage: ' + str(pin_5_voltage))
calculated_current_amps = pin_5_voltage / (.01 * 200)
# ma_current = round((calculated_current_amps / .001), 3)
print('Calculated A current: ' + str(calculated_current_amps))
###############################################################
#*** Current Measurement Demo: A3 ***
###############################################################
print('')
print('--- A3 Current Measurement Demo (+/- 1.5A max) ---')
print('Use an external 5V power supply and 5 ohm power resistor to put 1 Amp through A3.')
print('')
user_input = input('Press any key to continue.')
# See the M&A board data sheet for the equation that describes the Vout to current relationship
pin_6_voltage = daq_one.read_analog_input(6)
print('Read voltage: ' + str(pin_6_voltage))
calculated_current_amps = pin_6_voltage / (.033 * 200)
ma_current = round((calculated_current_amps / .001), 3)
print('Calculated mA current: ' + str(ma_current))
###############################################################
#*** Demo Complete. ***
###############################################################
# Close connection to daq
daq_one.close_daq_device()
| 34.342246 | 196 | 0.617642 |
c7e321ea7df7191ba4707163a3bf9a97bdfd5999 | 252 | py | Python | src/onenutil/schemas/__init__.py | LemurPwned/onenote-utils | 07778e6b2433cf28fab2afdbb01a318f284989dc | [
"MIT"
] | null | null | null | src/onenutil/schemas/__init__.py | LemurPwned/onenote-utils | 07778e6b2433cf28fab2afdbb01a318f284989dc | [
"MIT"
] | null | null | null | src/onenutil/schemas/__init__.py | LemurPwned/onenote-utils | 07778e6b2433cf28fab2afdbb01a318f284989dc | [
"MIT"
] | null | null | null | from .results import (ArticleSearchResult, EmbeddingsResult, SearchResult,
TagResult, ZoteroExtractionResult)
__all__ = [
"TagResult", "EmbeddingsResult", "ZoteroExtractionResult", "SearchResult",
"ArticleSearchResult"
]
| 31.5 | 78 | 0.714286 |
c7e32e60b520a7528f6c33e61490ce039febd1e0 | 2,257 | py | Python | src/account/api/serializers.py | amirpsd/drf_blog_api | 58be081a450840114af021e7412e469fad90456d | [
"MIT"
] | 33 | 2022-02-11T12:16:29.000Z | 2022-03-26T15:08:47.000Z | src/account/api/serializers.py | amirpsd/django_blog_api | 58be081a450840114af021e7412e469fad90456d | [
"MIT"
] | null | null | null | src/account/api/serializers.py | amirpsd/django_blog_api | 58be081a450840114af021e7412e469fad90456d | [
"MIT"
] | 5 | 2022-02-11T13:03:52.000Z | 2022-03-28T16:04:32.000Z | from django.contrib.auth import get_user_model
from rest_framework import serializers
| 23.030612 | 70 | 0.613646 |
c7e5a0b18daf16984d985969f34fb443eae76979 | 3,733 | py | Python | generate_figure9.py | IBM/Simultaneous-diagonalization | 385545401395a2e07f109441db4751a5dcf8f0a4 | [
"Apache-2.0"
] | null | null | null | generate_figure9.py | IBM/Simultaneous-diagonalization | 385545401395a2e07f109441db4751a5dcf8f0a4 | [
"Apache-2.0"
] | null | null | null | generate_figure9.py | IBM/Simultaneous-diagonalization | 385545401395a2e07f109441db4751a5dcf8f0a4 | [
"Apache-2.0"
] | 1 | 2022-03-14T18:36:12.000Z | 2022-03-14T18:36:12.000Z | # Copyright 2022 IBM Inc. All rights reserved
# SPDX-License-Identifier: Apache2.0
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This file is part of the code to reproduce the results in the paper:
# E. van den Berg and Kristan Temme, "Circuit optimization of Hamiltonian
# simulation by simultaneous diagonalization of Pauli clusters," Quantum 4,
# p. 322, 2020. https://doi.org/10.22331/q-2020-09-12-322
import os
import cl
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.colors as colors
from matplotlib.ticker import FuncFormatter
from itertools import permutations
# Create the output directory for the figures if it is not already present.
cl.ensureDirExists('fig')

# Build the test problem: a basic 7-qubit instance whose rows are mixed by a
# random full-rank binary weight matrix C (arithmetic over GF(2)).
M = cl.create_basic_problem(7, 0)
C = cl.generate_full_rank_weights(20, 7, seed=1)
M = np.dot(C, M) % 2

# Diagonalize: record the operations the zeroX algorithm applies to one
# tableau, then replay them on a fresh tableau and extract the final Z matrix.
T = cl.Tableau(M)
R = cl.RecordOperations(T.n)
T.addRecorder(R)
cl.zeroX_algorithm1_cz(T)
T = cl.Tableau(M)
R.apply(T)
Z = T.getZ()

# Plot Z before and after row ordering, reporting the CNOT count for each.
plotZ(Z, 'fig/Figure_9a')
print("Original: %d" % cl.countCNot(Z))
idx = cl.orderZ(Z)
plotZ(Z[idx, :], 'fig/Figure_9b')
print("Sorted  : %d" % cl.countCNot(Z[idx, :]))

# Exhaustively try every column permutation of Z and record the CNOT count
# obtained after re-ordering the rows for that permutation.
qubit_indices = list(range(7))
cnot_counts = []
for col_perm in permutations(qubit_indices):
    row_perm = cl.orderZ(Z[:, col_perm])
    cnot_counts.append(cl.countCNot(Z[row_perm, :][:, col_perm]))

# CNOT counts are always even, so the histogram bins straddle even values.
plt.hist(cnot_counts, bins=list(range(min(cnot_counts) - 1, max(cnot_counts) + 2, 2)),
         rwidth=0.9, density=True)
axes = plt.gca()
axes.set_xticklabels([str(x) for x in range(min(cnot_counts), max(cnot_counts) + 1, 2)], fontsize=16)
axes.set_xticks(list(range(min(cnot_counts), max(cnot_counts) + 1, 2)))
axes.yaxis.set_major_formatter(FuncFormatter(format_percentage))
plt.xlabel('Number of CNOT gates', fontsize=16)
plt.ylabel("Percentage", fontsize=16)
for tick in axes.yaxis.get_major_ticks():
    tick.label.set_fontsize(16)
plt.gcf().tight_layout()

# Force a 2:1 width/height aspect ratio, save, and crop the margins.
ratio = 0.5
xleft, xright = axes.get_xlim()
ybottom, ytop = axes.get_ylim()
axes.set_aspect(abs((xright - xleft) / (ybottom - ytop)) * ratio)
plt.savefig("fig/Figure_9c-uncropped.pdf", transparent=True)
plt.close()
os.system("pdfcrop fig/Figure_9c-uncropped.pdf fig/Figure_9c.pdf")
| 31.905983 | 103 | 0.682561 |
c7e5bf2a376cfb8077d1056296fc71ad74e416d7 | 793 | py | Python | undeployed/legacy/Landsat/L7GapFiller_ArcInterface.py | NASA-DEVELOP/dnppy | 8f7ef6f0653f5a4ea730ee557c72a2c89c06ce0b | [
"NASA-1.3"
] | 65 | 2015-09-10T12:59:56.000Z | 2022-02-27T22:09:03.000Z | undeployed/legacy/Landsat/L7GapFiller_ArcInterface.py | snowzm/dnppy | 8f7ef6f0653f5a4ea730ee557c72a2c89c06ce0b | [
"NASA-1.3"
] | 40 | 2015-04-08T19:23:30.000Z | 2015-08-04T15:53:11.000Z | undeployed/legacy/Landsat/L7GapFiller_ArcInterface.py | snowzm/dnppy | 8f7ef6f0653f5a4ea730ee557c72a2c89c06ce0b | [
"NASA-1.3"
] | 45 | 2015-08-14T19:09:38.000Z | 2022-02-15T18:53:16.000Z | #-------------------------------------------------------------------------------
# Name: module1
# Purpose:
#
# Author: qgeddes
#
# Created: 25/04/2013
# Copyright: (c) qgeddes 2013
# Licence: <your licence>
#-------------------------------------------------------------------------------
import L7GapFiller
# Read the script-tool parameters supplied by the ArcGIS tool dialog.
# NOTE(review): ``arcpy`` is used below but never imported in this module;
# presumably the hosting ArcGIS environment provides it -- confirm.
Scenes=arcpy.GetParameterAsText(0)
Scenes=Scenes.split(";")
OutputFolder=arcpy.GetParameterAsText(1)
OutputFile= arcpy.GetParameterAsText(2)
# Full path of the gap-filled output.
Output=OutputFolder+"\\"+OutputFile
CloudMasks= arcpy.GetParameterAsText(3)
CloudMasks= CloudMasks.split(";")
Z=arcpy.GetParameter(4)
arcpy.AddMessage(Z)
# The gap filler needs a scratch workspace and the Spatial Analyst extension.
arcpy.env.scratchWorkspace=OutputFolder
arcpy.CheckOutExtension("Spatial")
arcpy.env.overwriteOutput=True
L7GapFiller.L7GapFill(Scenes, Output,CloudMasks,Z)
| 26.433333 | 80 | 0.600252 |
c7e62258b56e4e6157b37bc5877b4350133a63c1 | 1,676 | py | Python | tests/sentry/api/serializers/test_saved_search.py | practo/sentry | 82f530970ce205696469fa702246396acfd947a1 | [
"BSD-3-Clause"
] | 4 | 2019-05-27T13:55:07.000Z | 2021-03-30T07:05:09.000Z | tests/sentry/api/serializers/test_saved_search.py | practo/sentry | 82f530970ce205696469fa702246396acfd947a1 | [
"BSD-3-Clause"
] | 99 | 2019-05-20T14:16:33.000Z | 2021-01-19T09:25:15.000Z | tests/sentry/api/serializers/test_saved_search.py | practo/sentry | 82f530970ce205696469fa702246396acfd947a1 | [
"BSD-3-Clause"
] | 1 | 2020-08-10T07:55:40.000Z | 2020-08-10T07:55:40.000Z | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import six
from sentry.api.serializers import serialize
from sentry.models import SavedSearch
from sentry.models.savedsearch import DEFAULT_SAVED_SEARCHES
from sentry.testutils import TestCase
| 33.52 | 70 | 0.648568 |
c7e63e3b77d732305764d664c862b2625865bf3a | 864 | py | Python | xastropy/files/general.py | bpholden/xastropy | 66aff0995a84c6829da65996d2379ba4c946dabe | [
"BSD-3-Clause"
] | 3 | 2015-08-23T00:32:58.000Z | 2020-12-31T02:37:52.000Z | xastropy/files/general.py | Kristall-WangShiwei/xastropy | 723fe56cb48d5a5c4cdded839082ee12ef8c6732 | [
"BSD-3-Clause"
] | 104 | 2015-07-17T18:31:54.000Z | 2018-06-29T17:04:09.000Z | xastropy/files/general.py | Kristall-WangShiwei/xastropy | 723fe56cb48d5a5c4cdded839082ee12ef8c6732 | [
"BSD-3-Clause"
] | 16 | 2015-07-17T15:50:37.000Z | 2019-04-21T03:42:47.000Z | """
#;+
#; NAME:
#; general
#; Version 1.0
#;
#; PURPOSE:
#; Module for monkeying with files and filenames
#; 172Sep-2014 by JXP
#;-
#;------------------------------------------------------------------------------
"""
# Import libraries
import numpy as np
from astropy.io import fits
from astropy.io import ascii
import os, pdb
#### ###############################
# Deal with .gz extensions, usually on FITS files
# See if filenm exists, if so pass it back
#
| 19.2 | 80 | 0.508102 |
c7e69418daeb84532c16aa76c96e7a0136b72521 | 655 | py | Python | setup.py | muatik/genderizer | 9866bf0371d1d984f6c4465ff78025d911f6a648 | [
"MIT"
] | 54 | 2015-01-19T22:53:48.000Z | 2021-06-23T03:48:05.000Z | setup.py | nejdetckenobi/genderizer | 9866bf0371d1d984f6c4465ff78025d911f6a648 | [
"MIT"
] | 4 | 2016-05-23T13:52:12.000Z | 2021-05-14T10:24:37.000Z | setup.py | nejdetckenobi/genderizer | 9866bf0371d1d984f6c4465ff78025d911f6a648 | [
"MIT"
] | 18 | 2015-01-30T00:06:40.000Z | 2021-03-12T14:56:12.000Z | #!/usr/bin/env python
try:
    # BUG FIX: the original import was ``from setuptools.core import setup``.
    # ``setuptools.core`` does not exist, so the ImportError branch always ran
    # and the distutils fallback was silently used. ``setup`` lives at the
    # top level of the setuptools package.
    from setuptools import setup
except ImportError:
    # Standard-library fallback for environments without setuptools.
    from distutils.core import setup

setup(name='genderizer',
      version='0.1.2.3',
      license='MIT',
      description='Genderizer tries to infer gender information looking at first name and/or making text analysis',
      long_description=open('README.md').read(),
      url='https://github.com/muatik/genderizer',
      author='Mustafa Atik',
      author_email='muatik@gmail.com',
      maintainer='Mustafa Atik',
      maintainer_email='muatik@gmail.com',
      packages=['genderizer'],
      package_data={'genderizer': ['data/*']},
      platforms='any')
c7e75b487c0cdec2958e2495ad3a66ff9804a5e3 | 1,855 | py | Python | ingestion/tests/unit/great_expectations/test_ometa_validation_action.py | ulixius9/OpenMetadata | f121698d968717f0932f685ef2a512c2a4d92438 | [
"Apache-2.0"
] | null | null | null | ingestion/tests/unit/great_expectations/test_ometa_validation_action.py | ulixius9/OpenMetadata | f121698d968717f0932f685ef2a512c2a4d92438 | [
"Apache-2.0"
] | null | null | null | ingestion/tests/unit/great_expectations/test_ometa_validation_action.py | ulixius9/OpenMetadata | f121698d968717f0932f685ef2a512c2a4d92438 | [
"Apache-2.0"
] | null | null | null | # Copyright 2022 Collate
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Test suite for the action module implementation
"""
import os
from unittest import mock
from jinja2 import Environment
from pytest import mark
from metadata.great_expectations.action import OpenMetadataValidationAction
from metadata.great_expectations.utils.ometa_config_handler import render_template
def test_create_jinja_environment(fixture_jinja_environment):
"""Test create jinja environment"""
assert isinstance(fixture_jinja_environment, Environment)
| 34.351852 | 82 | 0.755256 |
c7e7bdfc8b236f444e8faf6ff083ca3ec5dec358 | 1,285 | py | Python | tests/integration/Containers.py | adnrs96/runtime | e824224317e6aa108cf06968474fc44fa33488d6 | [
"Apache-2.0"
] | null | null | null | tests/integration/Containers.py | adnrs96/runtime | e824224317e6aa108cf06968474fc44fa33488d6 | [
"Apache-2.0"
] | null | null | null | tests/integration/Containers.py | adnrs96/runtime | e824224317e6aa108cf06968474fc44fa33488d6 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
from storyruntime.Containers import Containers
from storyruntime.constants.ServiceConstants import ServiceConstants
import storyscript
def test_containers_format_command(story):
"""
Ensures a simple resolve can be performed
"""
story_text = 'alpine echo msg:"foo"\n'
story.context = {}
story.app.services = {
'alpine': {
ServiceConstants.config: {
'actions': {
'echo': {
'arguments': {'msg': {'type': 'string'}}
}
}
}
}
}
story.tree = storyscript.Api.loads(story_text).result()['tree']
assert Containers.format_command(
story, story.line('1'), 'alpine', 'echo'
) == ['echo', '{"msg":"foo"}']
| 26.770833 | 68 | 0.529183 |
c7e91e12c70be5743a54ddceae5d419516ca3301 | 1,367 | py | Python | project_name/core/admin.py | cosmunsoftwares/django-boilerplate | 147aa7f59901d0fb95d41acf8ec118c6830267f8 | [
"MIT"
] | 3 | 2018-11-30T19:51:35.000Z | 2020-10-20T00:28:49.000Z | project_name/core/admin.py | cosmun-softwares/django-boilerplate | 147aa7f59901d0fb95d41acf8ec118c6830267f8 | [
"MIT"
] | 6 | 2020-04-09T20:00:45.000Z | 2022-02-10T08:25:47.000Z | project_name/core/admin.py | cosmunsoftwares/django-boilerplate | 147aa7f59901d0fb95d41acf8ec118c6830267f8 | [
"MIT"
] | 1 | 2018-08-27T21:44:44.000Z | 2018-08-27T21:44:44.000Z | from django.contrib import admin
from django.shortcuts import redirect
from django.utils.safestring import mark_safe
from django.contrib.admin.widgets import AdminFileWidget
def redirect_one_object(model, obj):
response = redirect(f'/admin/{model._meta.app_label}/{model._meta.model_name}/add/')
if obj:
response = redirect(f'/admin/{model._meta.app_label}/{model._meta.model_name}/{obj.pk}/change/')
return response
def thumbnail(obj, size='col-md-2'):
return mark_safe('<img src="{}" class="img-thumbnail {} p-0">'.format(obj.url, size))
| 37.972222 | 104 | 0.688369 |
c7e9c8cc7086c2b1fd149895cfcda90298ab4af1 | 1,222 | py | Python | src/5vents.py | subhash686/aoc-2021 | a01fa07f94148b7072c3ba4c854b546862d3486a | [
"Apache-2.0"
] | null | null | null | src/5vents.py | subhash686/aoc-2021 | a01fa07f94148b7072c3ba4c854b546862d3486a | [
"Apache-2.0"
] | null | null | null | src/5vents.py | subhash686/aoc-2021 | a01fa07f94148b7072c3ba4c854b546862d3486a | [
"Apache-2.0"
] | null | null | null | import os
plane = [[0 for i in range(1000)] for j in range(1000)]
count = [0]
if __name__ == "__main__":
overlapping_vents()
| 22.218182 | 55 | 0.488543 |
c7eb057d4134335a7eb1bab05618a4866e334bff | 1,217 | py | Python | problems/test_0073_m_plus_n_space.py | chrisxue815/leetcode_python | dec3c160d411a5c19dc8e9d96e7843f0e4c36820 | [
"Unlicense"
] | 1 | 2017-06-17T23:47:17.000Z | 2017-06-17T23:47:17.000Z | problems/test_0073_m_plus_n_space.py | chrisxue815/leetcode_python | dec3c160d411a5c19dc8e9d96e7843f0e4c36820 | [
"Unlicense"
] | null | null | null | problems/test_0073_m_plus_n_space.py | chrisxue815/leetcode_python | dec3c160d411a5c19dc8e9d96e7843f0e4c36820 | [
"Unlicense"
] | null | null | null | import unittest
if __name__ == '__main__':
unittest.main()
| 23.403846 | 76 | 0.419063 |
c7eb49aae87e95e2b4d243e5c05c7251bfbcbd52 | 2,508 | py | Python | xlsxwriter/test/worksheet/test_write_print_options.py | Aeon1/XlsxWriter | 6871b6c3fe6c294632054ea91f23d9e27068bcc1 | [
"BSD-2-Clause-FreeBSD"
] | 2 | 2019-07-25T06:08:09.000Z | 2019-11-01T02:33:56.000Z | xlsxwriter/test/worksheet/test_write_print_options.py | Aeon1/XlsxWriter | 6871b6c3fe6c294632054ea91f23d9e27068bcc1 | [
"BSD-2-Clause-FreeBSD"
] | 13 | 2019-07-14T00:29:05.000Z | 2019-11-26T06:16:46.000Z | xlsxwriter/test/worksheet/test_write_print_options.py | Aeon1/XlsxWriter | 6871b6c3fe6c294632054ea91f23d9e27068bcc1 | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | ###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2019, John McNamara, jmcnamara@cpan.org
#
import unittest
from ...compatibility import StringIO
from ...worksheet import Worksheet
| 28.179775 | 79 | 0.637161 |
c7ebfcaf02d689a33ed8274d051230038106dff7 | 1,011 | py | Python | neo4j_helper.py | smartaec/OpenBridgeGraph | 61ca64ed339af4e77d928f83934a308277a79d81 | [
"MIT"
] | null | null | null | neo4j_helper.py | smartaec/OpenBridgeGraph | 61ca64ed339af4e77d928f83934a308277a79d81 | [
"MIT"
] | null | null | null | neo4j_helper.py | smartaec/OpenBridgeGraph | 61ca64ed339af4e77d928f83934a308277a79d81 | [
"MIT"
] | null | null | null | from neo4j.v1 import GraphDatabase #neo4j==1.7.0
uri="bolt://localhost:7687"
driver=GraphDatabase.driver(uri, auth=("neo4j", "testneo4j"))
#execute_read(print_query,'Alice') | 29.735294 | 105 | 0.69634 |
c7edb1043a4f03dfdc950843e15b617197779da3 | 9,077 | py | Python | tests/unit/test_juju.py | KellenRenshaw/hotsos | e3fc51ab7f8af606a5846a3486a7fda23d761583 | [
"Apache-2.0"
] | null | null | null | tests/unit/test_juju.py | KellenRenshaw/hotsos | e3fc51ab7f8af606a5846a3486a7fda23d761583 | [
"Apache-2.0"
] | null | null | null | tests/unit/test_juju.py | KellenRenshaw/hotsos | e3fc51ab7f8af606a5846a3486a7fda23d761583 | [
"Apache-2.0"
] | null | null | null | import os
import tempfile
import mock
from . import utils
from hotsos.core.config import setup_config
from hotsos.core.ycheck.scenarios import YScenarioChecker
from hotsos.core.issues.utils import KnownBugsStore, IssuesStore
from hotsos.plugin_extensions.juju import summary
JOURNALCTL_CAPPEDPOSITIONLOST = """
Dec 21 14:07:53 juju-1 mongod.37017[17873]: [replication-18] CollectionCloner ns:juju.txns.log finished cloning with status: QueryPlanKilled: PlanExecutor killed: CappedPositionLost: CollectionScan died due to position in capped collection being deleted. Last seen record id: RecordId(204021366)
Dec 21 14:07:53 juju-1 mongod.37017[17873]: [replication-18] collection clone for 'juju.txns.log' failed due to QueryPlanKilled: While cloning collection 'juju.txns.log' there was an error 'PlanExecutor killed: CappedPositionLost: CollectionScan died due to position in capped collection being deleted. Last seen record id: RecordId(204021366)'
""" # noqa
RABBITMQ_CHARM_LOGS = """
2021-02-17 08:18:44 ERROR juju.worker.dependency engine.go:671 "uniter" manifold worker returned unexpected error: failed to initialize uniter for "unit-rabbitmq-server-0": cannot create relation state tracker: cannot remove persisted state, relation 236 has members
2021-02-17 08:20:34 ERROR juju.worker.dependency engine.go:671 "uniter" manifold worker returned unexpected error: failed to initialize uniter for "unit-rabbitmq-server-0": cannot create relation state tracker: cannot remove persisted state, relation 236 has members
""" # noqa
UNIT_LEADERSHIP_ERROR = """
2021-09-16 10:28:25 WARNING leader-elected ERROR cannot write leadership settings: cannot write settings: failed to merge leadership settings: application "keystone": prerequisites failed: "keystone/2" is not leader of "keystone"
2021-09-16 10:28:47 WARNING leader-elected ERROR cannot write leadership settings: cannot write settings: failed to merge leadership settings: application "keystone": prerequisites failed: "keystone/2" is not leader of "keystone"
2021-09-16 10:29:06 WARNING leader-elected ERROR cannot write leadership settings: cannot write settings: failed to merge leadership settings: application "keystone": prerequisites failed: "keystone/2" is not leader of "keystone"
2021-09-16 10:29:53 WARNING leader-elected ERROR cannot write leadership settings: cannot write settings: failed to merge leadership settings: application "keystone": prerequisites failed: "keystone/2" is not leader of "keystone"
2021-09-16 10:30:41 WARNING leader-elected ERROR cannot write leadership settings: cannot write settings: failed to merge leadership settings: application "keystone": prerequisites failed: "keystone/2" is not leader of "keystone"
""" # noqa
| 51.282486 | 344 | 0.637435 |
c7ef7d842b61d4e084cbe5d2d84903334c53e8d0 | 9,626 | py | Python | tools/SPGAN/main.py | by-liu/OpenUnReID | 2260d8e16588a992631c9c84e6cee4304ae8593d | [
"Apache-2.0"
] | null | null | null | tools/SPGAN/main.py | by-liu/OpenUnReID | 2260d8e16588a992631c9c84e6cee4304ae8593d | [
"Apache-2.0"
] | null | null | null | tools/SPGAN/main.py | by-liu/OpenUnReID | 2260d8e16588a992631c9c84e6cee4304ae8593d | [
"Apache-2.0"
] | null | null | null | import argparse
import collections
import shutil
import sys
import time
from datetime import timedelta
from pathlib import Path
import torch
from torch.nn.parallel import DataParallel, DistributedDataParallel
try:
# PyTorch >= 1.6 supports mixed precision training
from torch.cuda.amp import autocast
amp_support = True
except:
amp_support = False
from openunreid.apis import GANBaseRunner, set_random_seed, infer_gan
from openunreid.core.solvers import build_lr_scheduler, build_optimizer
from openunreid.data import (
build_test_dataloader,
build_train_dataloader,
build_val_dataloader,
)
from openunreid.models import build_gan_model
from openunreid.models.losses import build_loss
from openunreid.models.utils.extract import extract_features
from openunreid.utils.config import (
cfg,
cfg_from_list,
cfg_from_yaml_file,
log_config_to_file,
)
from openunreid.utils.dist_utils import init_dist, synchronize
from openunreid.utils.file_utils import mkdir_if_missing
from openunreid.utils.logger import Logger
if __name__ == '__main__':
main()
| 31.980066 | 117 | 0.60108 |
c7efcc01c957ea47bff3471d2bc47b9aa1291cde | 1,907 | py | Python | utility/data_download.py | LatvianPython/wind-experience | b634c020dff0a01152bb95b38e5f6f0e368d47f5 | [
"MIT"
] | 2 | 2018-12-20T20:31:21.000Z | 2018-12-29T14:51:42.000Z | utility/data_download.py | LatvianPython/wind-experience | b634c020dff0a01152bb95b38e5f6f0e368d47f5 | [
"MIT"
] | null | null | null | utility/data_download.py | LatvianPython/wind-experience | b634c020dff0a01152bb95b38e5f6f0e368d47f5 | [
"MIT"
] | null | null | null | import logging
import requests
import multiprocessing
import pathlib
from typing import List
from typing import Optional
from typing import Tuple
from typing import Dict
from joblib import delayed
from joblib import Parallel
from datetime import date
from datetime import timedelta
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
| 32.322034 | 89 | 0.677504 |
c7f2afbcc386f15d0c1677f0f7647f383dcc88bb | 7,625 | py | Python | model/net_qspline_A.py | jercoco/QSQF | 6c435f8d4e1baf1937b06a52e63446f9a29f5ad8 | [
"Apache-2.0"
] | null | null | null | model/net_qspline_A.py | jercoco/QSQF | 6c435f8d4e1baf1937b06a52e63446f9a29f5ad8 | [
"Apache-2.0"
] | null | null | null | model/net_qspline_A.py | jercoco/QSQF | 6c435f8d4e1baf1937b06a52e63446f9a29f5ad8 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Wed Oct 21 19:52:22 2020
#Plan A
@author: 18096
"""
'''Defines the neural network, loss function and metrics'''
#from functools import reduce
import torch
import torch.nn as nn
from torch.nn.functional import pad
from torch.autograd import Variable
import logging
logger = logging.getLogger('DeepAR.Net')
| 40.131579 | 89 | 0.571148 |
c7f39bdc2218cef3b2fe963ee01b122a395a8bc3 | 227 | py | Python | tests/repositories/helpers/methods/test_reinstall_if_needed.py | traibnn/integration | cf5920a677fdaa8408074e533371141828b0b30f | [
"MIT"
] | 1 | 2021-07-31T00:34:30.000Z | 2021-07-31T00:34:30.000Z | tests/repositories/helpers/methods/test_reinstall_if_needed.py | traibnn/integration | cf5920a677fdaa8408074e533371141828b0b30f | [
"MIT"
] | 45 | 2021-07-21T13:32:44.000Z | 2022-03-28T06:15:40.000Z | tests/repositories/helpers/methods/test_reinstall_if_needed.py | traibnn/integration | cf5920a677fdaa8408074e533371141828b0b30f | [
"MIT"
] | null | null | null | import pytest
| 25.222222 | 55 | 0.784141 |
c7f3bbfe8ecf852146009a98359ee99148f7760a | 11,124 | py | Python | workflow_parser/datasource/log_engine.py | cyx1231st/workflow_parser | d2e78c191c75c7addda89e6e336be90f6ca9717d | [
"Apache-2.0"
] | null | null | null | workflow_parser/datasource/log_engine.py | cyx1231st/workflow_parser | d2e78c191c75c7addda89e6e336be90f6ca9717d | [
"Apache-2.0"
] | null | null | null | workflow_parser/datasource/log_engine.py | cyx1231st/workflow_parser | d2e78c191c75c7addda89e6e336be90f6ca9717d | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2017 Yingxin Cheng
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import print_function
from abc import ABCMeta
from abc import abstractmethod
from collections import defaultdict
import os
from os import path
import sys
from .. import reserved_vars as rv
from ..service_registry import Component
from ..service_registry import ServiceRegistry
from . import Line
from . import Source
from .exc import LogError
# step1: load related log files
# step2: read sources
| 36.352941 | 89 | 0.567242 |
c7f405a9090e4db54d759cf9f413be8921191675 | 3,890 | py | Python | IPython/lib/tests/test_irunner_pylab_magic.py | dchichkov/ipython | 8096bb8640ee7e7c5ebdf3f428fe69cd390e1cd4 | [
"BSD-3-Clause-Clear"
] | null | null | null | IPython/lib/tests/test_irunner_pylab_magic.py | dchichkov/ipython | 8096bb8640ee7e7c5ebdf3f428fe69cd390e1cd4 | [
"BSD-3-Clause-Clear"
] | 3 | 2015-04-01T13:14:57.000Z | 2015-05-26T16:01:37.000Z | IPython/lib/tests/test_irunner_pylab_magic.py | dchichkov/ipython | 8096bb8640ee7e7c5ebdf3f428fe69cd390e1cd4 | [
"BSD-3-Clause-Clear"
] | 1 | 2021-10-06T07:59:25.000Z | 2021-10-06T07:59:25.000Z | """Test suite for pylab_import_all magic
Modified from the irunner module but using regex.
"""
# Global to make tests extra verbose and help debugging
VERBOSE = True
# stdlib imports
import StringIO
import sys
import unittest
import re
# IPython imports
from IPython.lib import irunner
from IPython.testing import decorators
def pylab_not_importable():
    """Return True when importing pylab raises RuntimeError (i.e. no display)."""
    importable = True
    try:
        import pylab  # noqa: F401
    except RuntimeError:
        importable = False
    return not importable
# Testing code begins
| 32.689076 | 87 | 0.608226 |
c7f4992bb494868e3842c501796146ce55443adc | 2,241 | py | Python | checkpoint.py | GooLee0123/MBRNN | c313bc286b34a2f6e0cbc1ec0941c511ff8dc8d3 | [
"MIT"
] | 1 | 2021-12-07T03:59:51.000Z | 2021-12-07T03:59:51.000Z | checkpoint.py | GooLee0123/MBRNN | c313bc286b34a2f6e0cbc1ec0941c511ff8dc8d3 | [
"MIT"
] | null | null | null | checkpoint.py | GooLee0123/MBRNN | c313bc286b34a2f6e0cbc1ec0941c511ff8dc8d3 | [
"MIT"
] | 1 | 2022-02-23T02:15:56.000Z | 2022-02-23T02:15:56.000Z | import logging
import os
import shutil
import time
import torch
model_state = 'model_state.pt'
trainer_state = 'trainer_state.pt'
| 30.69863 | 76 | 0.583222 |
c7f4e1c0cff8588ab79a5f138125b800da16d5b8 | 4,250 | py | Python | test/eval_mines_color.py | alalagong/LEDNet | 5dee5ee4edc75c24e6cda50dc1661d8f0b1e6469 | [
"MIT"
] | 3 | 2019-08-13T07:21:23.000Z | 2020-06-27T16:18:22.000Z | test/eval_mines_color.py | alalagong/LEDNet | 5dee5ee4edc75c24e6cda50dc1661d8f0b1e6469 | [
"MIT"
] | 1 | 2020-12-14T05:56:44.000Z | 2020-12-14T05:56:44.000Z | test/eval_mines_color.py | alalagong/LEDNet | 5dee5ee4edc75c24e6cda50dc1661d8f0b1e6469 | [
"MIT"
] | 1 | 2019-11-13T12:09:58.000Z | 2019-11-13T12:09:58.000Z | import numpy as np
import torch
import os
import cv2
import importlib
from dataset import *
from PIL import Image
from argparse import ArgumentParser
from torch.autograd import Variable
from torch.utils.data import DataLoader
from torchvision.transforms import Compose, CenterCrop, Normalize, Resize
from torchvision.transforms import ToTensor, ToPILImage
from dataset import cityscapes
from lednet import Net
from transform import Relabel, ToLabel, Colorize
import visdom
NUM_CHANNELS = 3
NUM_CLASSES = 20
#* ***********************************************
image_transform = ToPILImage()
input_transform_cityscapes = Compose([
Resize((512, 1024), Image.BILINEAR),
ToTensor(),
# Normalize([.485, .456, .406], [.229, .224, .225]),
])
if __name__ == '__main__':
parser = ArgumentParser()
parser.add_argument('--state')
parser.add_argument('--loadDir', default="../save/logs(KITTI)/")
parser.add_argument('--loadWeights', default="model_best.pth")
parser.add_argument('--loadModel', default="lednet.py")
parser.add_argument('--subset', default="val") # can be val, test, train, demoSequence
parser.add_argument('--datadir', default="")
parser.add_argument('--num-workers', type=int, default=4)
parser.add_argument('--batch-size', type=int, default=1)
parser.add_argument('--cpu', action='store_true')
parser.add_argument('--visualize', action='store_true')
main(parser.parse_args())
| 31.481481 | 141 | 0.675059 |
1bdbd0dddd803ccbb1c990600d899d8ab9de0788 | 2,440 | py | Python | tests/test_resource_linkage.py | firesock/pydantic-jsonapi | b7dc891892ab3439a71f78a9a5fd067c4d651ca8 | [
"MIT"
] | null | null | null | tests/test_resource_linkage.py | firesock/pydantic-jsonapi | b7dc891892ab3439a71f78a9a5fd067c4d651ca8 | [
"MIT"
] | null | null | null | tests/test_resource_linkage.py | firesock/pydantic-jsonapi | b7dc891892ab3439a71f78a9a5fd067c4d651ca8 | [
"MIT"
] | null | null | null | import pytest
from pytest import raises
from pydantic_jsonapi.resource_linkage import ResourceLinkage
from pydantic import BaseModel, ValidationError
| 34.857143 | 97 | 0.527869 |
1bdd2e9e5e9fd87db022a69e90bc6723cd058b21 | 2,046 | py | Python | src/tensorflow/keras_cnn.py | del680202/MachineLearning-memo | 29284ca24041969eeb59851a43ab6c28c685fae5 | [
"Apache-2.0"
] | 4 | 2017-04-24T15:01:55.000Z | 2019-11-03T11:11:54.000Z | src/tensorflow/keras_cnn.py | aasd145tw/MachineLearning-memo | 29284ca24041969eeb59851a43ab6c28c685fae5 | [
"Apache-2.0"
] | null | null | null | src/tensorflow/keras_cnn.py | aasd145tw/MachineLearning-memo | 29284ca24041969eeb59851a43ab6c28c685fae5 | [
"Apache-2.0"
] | 12 | 2017-05-10T13:39:17.000Z | 2019-12-15T14:01:05.000Z | import numpy as np
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation
from keras.optimizers import SGD
from keras.utils import np_utils
import keras.callbacks
import keras.backend.tensorflow_backend as KTF
import tensorflow as tf
batch_size = 128
nb_classes = 10
nb_epoch = 20
nb_data = 28*28
log_filepath = '/tmp/keras_log'
# load data
(X_train, y_train), (X_test, y_test) = mnist.load_data()
# reshape
X_train = X_train.reshape(X_train.shape[0], X_train.shape[1]*X_train.shape[2])
X_test = X_test.reshape(X_test.shape[0], X_test.shape[1]*X_test.shape[2])
# rescale
X_train = X_train.astype(np.float32)
X_train /= 255
X_test = X_test.astype(np.float32)
X_test /= 255
# convert class vectors to binary class matrices (one hot vectors)
Y_train = np_utils.to_categorical(y_train, nb_classes)
Y_test = np_utils.to_categorical(y_test, nb_classes)
old_session = KTF.get_session()
with tf.Graph().as_default():
session = tf.Session('')
KTF.set_session(session)
KTF.set_learning_phase(1)
# build model
model = Sequential()
model.add(Dense(512, input_shape=(nb_data,), init='normal',name='dense1'))
model.add(Activation('relu', name='relu1'))
model.add(Dropout(0.2, name='dropout1'))
model.add(Dense(512, init='normal', name='dense2'))
model.add(Activation('relu', name='relu2'))
model.add(Dropout(0.2, name='dropout2'))
model.add(Dense(10, init='normal', name='dense3'))
model.add(Activation('softmax', name='softmax1'))
model.summary()
model.compile(loss='categorical_crossentropy', optimizer=SGD(lr=0.001), metrics=['accuracy'])
tb_cb = keras.callbacks.TensorBoard(log_dir=log_filepath, histogram_freq=1)
cbks = [tb_cb]
history = model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch = nb_epoch, verbose=1, callbacks=cbks)
score = model.evaluate(X_test, Y_test, verbose=0)
print('Test score:', score[0])
print('Test accuracy;', score[1])
KTF.set_session(old_session)
| 31 | 112 | 0.725806 |
1be156b5a97033cae1d2dce7ad771f398dbde2ad | 4,942 | py | Python | tests/blas/nodes/ger_test.py | xiacijie/dace | 2d942440b1d7b139ba112434bfa78f754e10bfe5 | [
"BSD-3-Clause"
] | 1 | 2021-07-26T07:58:06.000Z | 2021-07-26T07:58:06.000Z | tests/blas/nodes/ger_test.py | xiacijie/dace | 2d942440b1d7b139ba112434bfa78f754e10bfe5 | [
"BSD-3-Clause"
] | null | null | null | tests/blas/nodes/ger_test.py | xiacijie/dace | 2d942440b1d7b139ba112434bfa78f754e10bfe5 | [
"BSD-3-Clause"
] | 1 | 2021-03-04T13:01:48.000Z | 2021-03-04T13:01:48.000Z | #!/usr/bin/env python3
# Copyright 2019-2021 ETH Zurich and the DaCe authors. All rights reserved.
from dace.transformation.dataflow.streaming_memory import StreamingMemory
from dace.transformation.interstate.sdfg_nesting import InlineSDFG
from dace.transformation.interstate.fpga_transform_sdfg import FPGATransformSDFG
import numpy as np
import argparse
import scipy
import dace
from dace.memlet import Memlet
import dace.libraries.blas as blas
from dace.libraries.standard.memory import aligned_ndarray
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("N", type=int, nargs="?", default=256)
parser.add_argument("M", type=int, nargs="?", default=512)
parser.add_argument("tile_size_x", type=int, nargs="?", default=16)
parser.add_argument("tile_size_y", type=int, nargs="?", default=32)
parser.add_argument("alpha", type=np.float32, nargs="?", default=1.0)
parser.add_argument("--target", dest="target", default="pure")
parser.add_argument("--eps", type=float, default=1e-6)
parser.add_argument("--veclen", type=int, default=8)
args = parser.parse_args()
n = args.N
m = args.M
tile_size_x = args.tile_size_x
tile_size_y = args.tile_size_y
alpha = args.alpha
veclen = args.veclen
if args.target == "pure":
ger_node, state, sdfg = pure_graph("pure", dace.float32, veclen)
ger_node.expand(sdfg, state)
sdfg.apply_transformations_repeated([InlineSDFG])
elif args.target == "fpga":
sdfg = fpga_graph(dace.float32, veclen, tile_size_x, tile_size_y)
else:
print("Unsupported target")
exit(-1)
x = aligned_ndarray(np.random.rand(m).astype(np.float32), alignment=4*veclen)
y = aligned_ndarray(np.random.rand(n).astype(np.float32), alignment=4*veclen)
A = aligned_ndarray(np.random.rand(m, n).astype(np.float32), alignment=4*veclen)
res = aligned_ndarray(np.empty(A.shape, dtype=A.dtype), alignment=4*veclen)
ref = aligned_ndarray(np.empty(A.shape, dtype=A.dtype), alignment=4*veclen)
res[:] = A[:]
ref[:] = A[:]
sdfg(x=x, y=y, A=A, res=res, m=dace.int32(m), n=dace.int32(n), alpha=alpha)
ref = scipy.linalg.blas.sger(alpha=alpha, x=x, y=y, a=ref)
diff = np.linalg.norm(res - ref)
if diff >= args.eps * n * m:
raise RuntimeError(f"Validation failed: {diff}")
else:
print("Validation successful.")
| 33.849315 | 84 | 0.633347 |
1be16c8b647df2316a1c8f8f394a926e8273c86d | 1,925 | py | Python | spp.py | ninfueng/torch-cifar | f829c3375a9d9823cef4659f8bdfbd3800d51e80 | [
"MIT"
] | null | null | null | spp.py | ninfueng/torch-cifar | f829c3375a9d9823cef4659f8bdfbd3800d51e80 | [
"MIT"
] | null | null | null | spp.py | ninfueng/torch-cifar | f829c3375a9d9823cef4659f8bdfbd3800d51e80 | [
"MIT"
] | null | null | null | import math
from typing import List, Union
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch import Tensor
if __name__ == "__main__":
input = torch.zeros(1, 512, 13, 13)
output = spatial_pyramid_pool(input, [1, 2, 3], "max")
print(output.shape)
| 29.166667 | 88 | 0.603636 |
1be1d0ad6c2cd6a6b3082cd64ad7f9633b3033de | 21,417 | py | Python | src/SparseSC/utils/AzureBatch/azure_batch_client.py | wofein/SparseSC | fd8125015c65829458bfee2ae94c24981112d2d8 | [
"MIT"
] | null | null | null | src/SparseSC/utils/AzureBatch/azure_batch_client.py | wofein/SparseSC | fd8125015c65829458bfee2ae94c24981112d2d8 | [
"MIT"
] | null | null | null | src/SparseSC/utils/AzureBatch/azure_batch_client.py | wofein/SparseSC | fd8125015c65829458bfee2ae94c24981112d2d8 | [
"MIT"
] | null | null | null | """
usage requires these additional modules
pip install azure-batch azure-storage-blob jsonschema pyyaml && pip install git+https://github.com/microsoft/SparseSC.git@ad4bf27edb28f517508f6934f21eb65d17fb6543 && scgrad start
usage:
from SparseSC import fit, aggregate_batch_results
from SparseSC.utils.azure_batch_client import BatchConfig, run
_TIMESTAMP = datetime.utcnow().strftime("%Y%m%d%H%M%S")
BATCH_DIR= "path/to/my/batch_config/"
fit(x=x,..., batchDir=BATCH_DIR)
my_config = BatchConfig(
BATCH_ACCOUNT_NAME="MySecret",
BATCH_ACCOUNT_KEY="MySecret",
BATCH_ACCOUNT_URL="MySecret",
STORAGE_ACCOUNT_NAME="MySecret",
STORAGE_ACCOUNT_KEY="MySecret",
POOL_ID="my-compute-pool",
POOL_NODE_COUNT=0,
POOL_LOW_PRIORITY_NODE_COUNT=20,
POOL_VM_SIZE="STANDARD_A1_v2",
DELETE_POOL_WHEN_DONE=False,
JOB_ID="my-job" + _TIMESTAMP,
DELETE_JOB_WHEN_DONE=False,
CONTAINER_NAME="my-blob-container",
BATCH_DIRECTORY=BATCH_DIR,
)
run(my_config)
fitted_model = aggregate_batch_results("path/to/my/batch_config")
"""
# pylint: disable=differing-type-doc, differing-param-doc, missing-param-doc, missing-raises-doc, missing-return-doc
from __future__ import print_function
import datetime
import io
import os
import sys
import time
import pathlib
import importlib
from collections import defaultdict
import azure.storage.blob as azureblob
from azure.storage.blob.models import ContainerPermissions
import azure.batch.batch_service_client as batch
import azure.batch.batch_auth as batch_auth
import azure.batch.models as models
from SparseSC.cli.stt import get_config
from ..print_progress import print_progress
from .BatchConfig import BatchConfig, validate_config
from yaml import load
try:
from yaml import CLoader as Loader
except ImportError:
from yaml import Loader
from .constants import (
_STANDARD_OUT_FILE_NAME,
_CONTAINER_OUTPUT_FILE,
_CONTAINER_INPUT_FILE,
_BATCH_CV_FILE_NAME,
)
# Blob name template for each fold's result file in the output container.
FOLD_FILE_PATTERN = "fold_{}.yaml"
# pylint: disable=bad-continuation, invalid-name, protected-access, line-too-long, fixme
# Allow running this module as a script from the package dir or its parent.
sys.path.append(".")
sys.path.append("..")
# Update the Batch and Storage account credential strings in config.py with values
# unique to your accounts. These are used when constructing connection strings
# for the Batch and Storage client objects.
def build_output_sas_url(config, _blob_client):
    """Build a SAS URL granting read/write/delete/list access to the output container.

    :param config: a validated :class:`BatchConfig` with storage account details.
    :param _blob_client: an ``azure.storage.blob.BlockBlobService`` instance.
    :return: the container URL with the SAS token appended as a query string.
    """
    permissions = (
        ContainerPermissions.READ
        + ContainerPermissions.WRITE
        + ContainerPermissions.DELETE
        + ContainerPermissions.LIST
    )
    expiry = datetime.datetime.utcnow() + datetime.timedelta(
        hours=config.STORAGE_ACCESS_DURATION_HRS
    )
    sas_token = _blob_client.generate_container_shared_access_signature(
        config.CONTAINER_NAME,
        permissions,
        expiry,
        start=datetime.datetime.utcnow(),
    )
    return "https://{}.blob.core.windows.net/{}?{}".format(
        config.STORAGE_ACCOUNT_NAME, config.CONTAINER_NAME, sas_token
    )
def print_batch_exception(batch_exception):
    """Print the message and detail key/value pairs of a Batch exception.

    :param batch_exception: an ``azure.batch.models.BatchErrorException``.
    """
    divider = "-------------------------------------------"
    print(divider)
    print("Exception encountered:")
    error = batch_exception.error
    if error and error.message and error.message.value:
        print(error.message.value)
        if error.values:
            print()
            for detail in error.values:
                print("{}:\t{}".format(detail.key, detail.value))
    print(divider)
def build_output_file(container_sas_url, fold_number):
    """Describe where Batch should upload a task's output file.

    :param str container_sas_url: SAS URL of the destination blob container.
    :param int fold_number: fold index used to name the uploaded blob.
    :rtype: `azure.batch.models.OutputFile`
    :return: An OutputFile mapping the task's output to the container blob
        ``fold_<n>.yaml``.
    """
    blob_path = FOLD_FILE_PATTERN.format(fold_number)
    destination = models.OutputFileDestination(
        container=models.OutputFileBlobContainerDestination(
            container_url=container_sas_url, path=blob_path
        )
    )
    # Only attempt the upload for tasks that succeeded; see
    # https://docs.microsoft.com/en-us/azure/batch/batch-task-output-files#specify-output-files-for-task-output
    options = models.OutputFileUploadOptions(
        upload_condition=models.OutputFileUploadCondition.task_success
    )
    return models.OutputFile(
        file_pattern=_CONTAINER_OUTPUT_FILE,
        destination=destination,
        upload_options=options,
    )
def upload_file_to_container(block_blob_client, container_name, file_path, duration_hours=24):
    """Upload a local file to a container and return a readable ResourceFile.

    :param block_blob_client: A blob service client.
    :type block_blob_client: `azure.storage.blob.BlockBlobService`
    :param str container_name: The name of the Azure Blob storage container.
    :param str file_path: The local path to the file.
    :param int duration_hours: lifetime of the read-only SAS token.
    :rtype: `azure.batch.models.ResourceFile`
    :return: A ResourceFile initialized with a SAS URL appropriate for Batch
        tasks.
    """
    blob_name = os.path.basename(file_path)
    print("Uploading file {} to container [{}]...".format(file_path, container_name))
    block_blob_client.create_blob_from_path(container_name, blob_name, file_path)
    expiry = datetime.datetime.utcnow() + datetime.timedelta(hours=duration_hours)
    sas_token = block_blob_client.generate_blob_shared_access_signature(
        container_name,
        blob_name,
        permission=azureblob.BlobPermissions.READ,
        expiry=expiry,
    )
    sas_url = block_blob_client.make_blob_url(
        container_name, blob_name, sas_token=sas_token
    )
    # Inside the container the file always appears under the fixed input name.
    return models.ResourceFile(http_url=sas_url, file_path=_CONTAINER_INPUT_FILE)
def create_pool(config, batch_service_client):
    """Create the pool of container-enabled Ubuntu compute nodes.

    :param config: batch run configuration providing the pool id, VM size,
        node counts, docker image, and optional private registry credentials.
    :param batch_service_client: A Batch service client.
    :type batch_service_client: `azure.batch.BatchServiceClient`
    """
    # Pools of Linux nodes are built from an Azure VM Marketplace image; see
    # https://azure.microsoft.com/documentation/articles/batch-linux-nodes/
    image_reference = models.ImageReference(
        publisher="microsoft-azure-batch",
        offer="ubuntu-server-container",
        sku="16-04-lts",
        version="latest",
    )
    if config.REGISTRY_USERNAME:
        # Private registry: pass credentials so nodes can pull the image.
        registry = batch.models.ContainerRegistry(
            user_name=config.REGISTRY_USERNAME,
            password=config.REGISTRY_PASSWORD,
            registry_server=config.REGISTRY_SERVER,
        )
        container_configuration = batch.models.ContainerConfiguration(
            container_image_names=[config.DOCKER_CONTAINER],
            container_registries=[registry],
        )
    else:
        container_configuration = batch.models.ContainerConfiguration(
            container_image_names=[config.DOCKER_CONTAINER]
        )
    vm_configuration = batch.models.VirtualMachineConfiguration(
        image_reference=image_reference,
        container_configuration=container_configuration,
        node_agent_sku_id="batch.node.ubuntu 16.04",
    )
    pool_parameters = batch.models.PoolAddParameter(
        id=config.POOL_ID,
        virtual_machine_configuration=vm_configuration,
        vm_size=config.POOL_VM_SIZE,
        target_dedicated_nodes=config.POOL_NODE_COUNT,
        target_low_priority_nodes=config.POOL_LOW_PRIORITY_NODE_COUNT,
    )
    batch_service_client.pool.add(pool_parameters)
def create_job(batch_service_client, job_id, pool_id):
    """Create a Batch job bound to the given pool.

    :param batch_service_client: A Batch service client.
    :type batch_service_client: `azure.batch.BatchServiceClient`
    :param str job_id: The ID for the new job.
    :param str pool_id: The ID of the pool the job's tasks will run on.
    """
    print("Creating job [{}]...".format(job_id))
    pool_information = batch.models.PoolInformation(pool_id=pool_id)
    batch_service_client.job.add(
        batch.models.JobAddParameter(id=job_id, pool_info=pool_information)
    )
def add_tasks(
    config,
    _blob_client,
    batch_service_client,
    container_sas_url,
    job_id,
    _input_file,
    count,
):
    """Add one cross-validation task per fold to the specified job.

    :param config: batch run configuration providing the docker image name.
    :param _blob_client: unused; kept for call-site compatibility.
    :param batch_service_client: A Batch service client.
    :type batch_service_client: `azure.batch.BatchServiceClient`
    :param str container_sas_url: SAS URL the tasks upload their results to.
    :param str job_id: The ID of the job to which to add the tasks.
    :param _input_file: the shared input ``ResourceFile`` for every task.
    :param int count: number of folds, i.e. tasks to create.
    """
    print("Adding {} tasks to job [{}]...".format(count, job_id))
    tasks = []
    for fold_number in range(count):
        # Each task runs the `stt` CLI on one fold inside the docker container.
        command_line = "/bin/bash -c 'stt {} {} {}'".format(
            _CONTAINER_INPUT_FILE, _CONTAINER_OUTPUT_FILE, fold_number
        )
        container_settings = models.TaskContainerSettings(
            image_name=config.DOCKER_CONTAINER
        )
        tasks.append(
            batch.models.TaskAddParameter(
                id="Task_{}".format(fold_number),
                command_line=command_line,
                resource_files=[_input_file],
                output_files=[build_output_file(container_sas_url, fold_number)],
                container_settings=container_settings,
            )
        )
    batch_service_client.task.add_collection(job_id, tasks)
def wait_for_tasks_to_complete(batch_service_client, job_id, timeout):
    """Block until every task in the specified job reaches the Completed state.

    Polls the job once per second, rendering a progress bar with elapsed time,
    until all tasks complete, a task reports a non-zero exit code, or the
    timeout expires.

    :param batch_service_client: A Batch service client.
    :type batch_service_client: `azure.batch.BatchServiceClient`
    :param str job_id: The id of the job whose tasks should be monitored.
    :param timedelta timeout: The duration to wait for task completion.
    :return: True once every task has completed.
    :raises RuntimeError: if any task exits with a non-zero exit code, or if
        all tasks do not reach Completed state within the timeout period.
    """
    _start_time = datetime.datetime.now()
    timeout_expiration = _start_time + timeout
    while datetime.datetime.now() < timeout_expiration:
        sys.stdout.flush()
        tasks = list(batch_service_client.task.list(job_id))
        incomplete_tasks = [
            task for task in tasks if task.state != models.TaskState.completed
        ]
        # NOTE: .seconds ignores whole days, so the clock display wraps after
        # 24h; display-only, the timeout comparison above is still correct.
        hours, remainder = divmod((datetime.datetime.now() - _start_time).seconds, 3600)
        minutes, seconds = divmod(remainder, 60)
        print_progress(
            len(tasks) - len(incomplete_tasks),
            len(tasks),
            prefix="Time elapsed {:02}:{:02}:{:02}".format(
                int(hours), int(minutes), int(seconds)
            ),
            decimals=1,
            bar_length=min(len(tasks), 50),
        )
        # A non-zero exit code means a fold failed; abort with a tally of codes.
        error_codes = [
            t.execution_info.exit_code
            for t in tasks
            if t.execution_info and t.execution_info.exit_code
        ]
        if error_codes:
            codes = defaultdict(int)  # exit code -> occurrence count
            for code in error_codes:
                codes[code] += 1
            raise RuntimeError(
                "\nSome tasks have exited with a non-zero exit code including: "
                + ", ".join(["{}({})".format(k, v) for k, v in codes.items()])
            )
        if not incomplete_tasks:
            print()
            return True
        time.sleep(1)
    print()
    raise RuntimeError(
        "ERROR: Tasks did not reach 'Completed' state within "
        "timeout period of " + str(timeout)
    )
def print_task_output(batch_service_client, job_id, encoding=None):
    """Print the stdout.txt file for each task in the job.

    :param batch_service_client: The batch client to use.
    :type batch_service_client: `batchserviceclient.BatchServiceClient`
    :param str job_id: The id of the job with task output files to print.
    :param str encoding: text encoding of the task output (utf-8 when None).
    """
    print("Printing task output...")
    for task in batch_service_client.task.list(job_id):
        task_details = batch_service_client.task.get(job_id, task.id)
        print("Task: {}".format(task.id))
        print("Node: {}".format(task_details.node_info.node_id))
        stream = batch_service_client.file.get_from_task(
            job_id, task.id, _STANDARD_OUT_FILE_NAME
        )
        print("Standard output:")
        print(_read_stream_as_string(stream, encoding))
def _read_stream_as_string(stream, encoding):
"""Read stream as string
:param stream: input stream generator
:param str encoding: The encoding of the file. The default is utf-8.
:return: The file content.
:rtype: str
"""
output = io.BytesIO()
try:
for data in stream:
output.write(data)
if encoding is None:
encoding = "utf-8"
return output.getvalue().decode(encoding)
finally:
output.close()
raise RuntimeError("could not write data to stream or decode bytes")
def run(config: BatchConfig, wait=True) -> None:
    r"""Submit a Synthetic Controls cross-validation job to Azure Batch.

    Uploads the local CV input file to blob storage, creates (or reuses) the
    pool, creates the job, adds the tasks, and — when *wait* is true — blocks
    until completion and downloads the results.

    :param config: A :class:`BatchConfig` instance with the Azure Batch run parameters
    :type config: :class:BatchConfig
    :param boolean wait: If true, wait for the batch to complete and then
        download the results to file
    :raises BatchErrorException: If raised by the Azure Batch Python SDK
    """
    # pylint: disable=too-many-locals
    # replace any missing values in the configuration with environment variables
    config = validate_config(config)
    start_time = datetime.datetime.now().replace(microsecond=0)
    print(
        'Synthetic Controls Run "{}" start time: {}'.format(config.JOB_ID, start_time)
    )
    print()
    # Serialized CV input that the remote tasks will consume.
    _LOCAL_INPUT_FILE = os.path.join(config.BATCH_DIRECTORY, _BATCH_CV_FILE_NAME)
    v_pen, w_pen, model_data = get_config(_LOCAL_INPUT_FILE)
    # One task per (fold, v-penalty, w-penalty) combination.
    n_folds = len(model_data["folds"]) * len(v_pen) * len(w_pen)
    # Create the blob client, for use in obtaining references to
    # blob storage containers and uploading files to containers.
    blob_client = azureblob.BlockBlobService(
        account_name=config.STORAGE_ACCOUNT_NAME, account_key=config.STORAGE_ACCOUNT_KEY
    )
    # Use the blob client to create the containers in Azure Storage if they
    # don't yet exist.
    blob_client.create_container(config.CONTAINER_NAME, fail_on_exist=False)
    # SAS URL granting the tasks write access to the output container.
    CONTAINER_SAS_URL = build_output_sas_url(config, blob_client)
    # The collection of data files that are to be processed by the tasks.
    input_file_path = os.path.join(sys.path[0], _LOCAL_INPUT_FILE)
    # Upload the data files.
    input_file = upload_file_to_container(
        blob_client, config.CONTAINER_NAME, input_file_path, config.STORAGE_ACCESS_DURATION_HRS
    )
    # Create a Batch service client. We'll now be interacting with the Batch
    # service in addition to Storage
    credentials = batch_auth.SharedKeyCredentials(
        config.BATCH_ACCOUNT_NAME, config.BATCH_ACCOUNT_KEY
    )
    batch_client = batch.BatchServiceClient(
        credentials, batch_url=config.BATCH_ACCOUNT_URL
    )
    try:
        # Create the pool that will contain the compute nodes that will execute the
        # tasks.
        try:
            create_pool(config, batch_client)
            print("Created pool: ", config.POOL_ID)
        except models.BatchErrorException:
            # NOTE(review): any BatchErrorException here is treated as "pool
            # already exists" and the pool is reused — confirm this is intended.
            print("Using pool: ", config.POOL_ID)
        # Create the job that will run the tasks.
        create_job(batch_client, config.JOB_ID, config.POOL_ID)
        # Add the tasks to the job.
        add_tasks(
            config,
            blob_client,
            batch_client,
            CONTAINER_SAS_URL,
            config.JOB_ID,
            input_file,
            n_folds,
        )
        if not wait:
            # Fire-and-forget: results can be collected later via load_results().
            return
        # Pause execution until tasks reach Completed state.
        wait_for_tasks_to_complete(
            batch_client, config.JOB_ID, datetime.timedelta(hours=config.STORAGE_ACCESS_DURATION_HRS)
        )
        _download_files(config, blob_client, config.BATCH_DIRECTORY, n_folds)
    except models.BatchErrorException as err:
        print_batch_exception(err)
        raise err
    # Clean up storage resources
    # TODO: re-enable this and delete the output container too
    # -- print("Deleting container [{}]...".format(input_container_name))
    # -- blob_client.delete_container(input_container_name)
    # Print out some timing info
    end_time = datetime.datetime.now().replace(microsecond=0)
    print()
    print("Sample end: {}".format(end_time))
    print("Elapsed time: {}".format(end_time - start_time))
    print()
    # Clean up Batch resources (if the user so chooses).
    if config.DELETE_POOL_WHEN_DONE:
        batch_client.pool.delete(config.POOL_ID)
    if config.DELETE_JOB_WHEN_DONE:
        batch_client.job.delete(config.JOB_ID)
def load_results(config: BatchConfig) -> None:
    r"""Wait for a previously submitted batch job and download its results.

    Mirrors the tail end of :func:`run` for a job that was started with
    ``wait=False``: waits for task completion, then downloads the result files.

    :param config: A :class:`BatchConfig` instance with the Azure Batch run parameters
    :type config: :class:BatchConfig
    :raises BatchErrorException: If raised by the Azure Batch Python SDK
    """
    # pylint: disable=too-many-locals
    # replace any missing values in the configuration with environment variables
    config = validate_config(config)
    start_time = datetime.datetime.now().replace(microsecond=0)
    print('Load result for job "{}" start time: {}'.format(config.JOB_ID, start_time))
    print()
    # Re-read the local CV input so the expected result count (n_folds) is
    # computed exactly as it was in run().
    _LOCAL_INPUT_FILE = os.path.join(config.BATCH_DIRECTORY, _BATCH_CV_FILE_NAME)
    v_pen, w_pen, model_data = get_config(_LOCAL_INPUT_FILE)
    n_folds = len(model_data["folds"]) * len(v_pen) * len(w_pen)
    # Create the blob client, for use in obtaining references to
    # blob storage containers and uploading files to containers.
    blob_client = azureblob.BlockBlobService(
        account_name=config.STORAGE_ACCOUNT_NAME, account_key=config.STORAGE_ACCOUNT_KEY
    )
    # Create a Batch service client. We'll now be interacting with the Batch
    # service in addition to Storage
    credentials = batch_auth.SharedKeyCredentials(
        config.BATCH_ACCOUNT_NAME, config.BATCH_ACCOUNT_KEY
    )
    batch_client = batch.BatchServiceClient(
        credentials, batch_url=config.BATCH_ACCOUNT_URL
    )
    try:
        # Pause execution until tasks reach Completed state.
        wait_for_tasks_to_complete(
            batch_client, config.JOB_ID, datetime.timedelta(hours=config.STORAGE_ACCESS_DURATION_HRS)
        )
        _download_files(config, blob_client, config.BATCH_DIRECTORY, n_folds)
    except models.BatchErrorException as err:
        print_batch_exception(err)
        raise err
    # Clean up storage resources
    # TODO: re-enable this and delete the output container too
    # -- print("Deleting container [{}]...".format(input_container_name))
    # -- blob_client.delete_container(input_container_name)
    # Print out some timing info
    end_time = datetime.datetime.now().replace(microsecond=0)
    print()
    print("Sample end: {}".format(end_time))
    print("Elapsed time: {}".format(end_time - start_time))
    print()
    # Clean up Batch resources (if the user so chooses).
    if config.DELETE_POOL_WHEN_DONE:
        batch_client.pool.delete(config.POOL_ID)
    if config.DELETE_JOB_WHEN_DONE:
        batch_client.job.delete(config.JOB_ID)
if __name__ == "__main__":
    # TODO: this is not an ideal API
    # importlib.import_module is the documented public API for importing a
    # module by name; importlib.__import__ is the low-level hook backing the
    # import statement and is not meant to be called directly. For a simple
    # top-level name like "config" both return the same module object.
    config_module = importlib.import_module("config")
    run(config_module.config)
| 34.487923 | 178 | 0.693561 |
1be2bb16aca1a3770cbb4668f10786667f95971a | 63 | py | Python | src/vilbert/datasets/__init__.py | NoOneUST/COMP5212 | 171b564f08841e426545f58e3b52870c0e090586 | [
"MIT"
] | 3 | 2020-04-05T06:50:46.000Z | 2020-04-05T08:20:33.000Z | src/vilbert/datasets/__init__.py | NoOneUST/COMP5212Project | 171b564f08841e426545f58e3b52870c0e090586 | [
"MIT"
] | 2 | 2021-05-21T16:24:54.000Z | 2022-02-10T01:21:54.000Z | src/vilbert/datasets/__init__.py | NoOneUST/COMP5212Project | 171b564f08841e426545f58e3b52870c0e090586 | [
"MIT"
] | 1 | 2020-06-15T16:22:20.000Z | 2020-06-15T16:22:20.000Z | from .visual_entailment_dataset import VisualEntailmentDataset
| 31.5 | 62 | 0.920635 |
1be2fe74c868aa22cedb699484c807fd62b32107 | 14,174 | py | Python | Dungeoneer/Treasure.py | jameslemon81/Dungeoneer | 8a2a1bfea06ae09f1898583999bf449c82ba4ce9 | [
"BSD-3-Clause"
] | 12 | 2015-01-29T17:15:46.000Z | 2022-02-23T05:58:49.000Z | Dungeoneer/Treasure.py | jameslemon81/Dungeoneer | 8a2a1bfea06ae09f1898583999bf449c82ba4ce9 | [
"BSD-3-Clause"
] | null | null | null | Dungeoneer/Treasure.py | jameslemon81/Dungeoneer | 8a2a1bfea06ae09f1898583999bf449c82ba4ce9 | [
"BSD-3-Clause"
] | 8 | 2016-07-04T18:09:50.000Z | 2022-02-23T05:58:48.000Z | # Basic Fantasy RPG Dungeoneer Suite
# Copyright 2007-2012 Chris Gonnerman
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# Redistributions of source code must retain the above copyright
# notice, self list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, self list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# Neither the name of the author nor the names of any contributors
# may be used to endorse or promote products derived from self software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# AUTHOR OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###############################################################################
# Treasure.py -- generate treasures for Basic Fantasy RPG
###############################################################################
import Gems, Art, Coins, Magic, Unknown
import Dice
import string
_treasure_table = {
# lair treasure
'A': [
(50, _gen_coins, ("cp", 5, 6, 0, 100)),
(60, _gen_coins, ("sp", 5, 6, 0, 100)),
(40, _gen_coins, ("ep", 5, 4, 0, 100)),
(70, _gen_coins, ("gp", 10, 6, 0, 100)),
(50, _gen_coins, ("pp", 1, 10, 0, 100)),
(50, _gen_gems, (6, 6, 0, 1)),
(50, _gen_art, (6, 6, 0, 1)),
(30, _gen_magic, ("Any", 0, 0, 3, 1)),
],
'B': [
(75, _gen_coins, ("cp", 5, 10, 0, 100)),
(50, _gen_coins, ("sp", 5, 6, 0, 100)),
(50, _gen_coins, ("ep", 5, 4, 0, 100)),
(50, _gen_coins, ("gp", 3, 6, 0, 100)),
(25, _gen_gems, (1, 6, 0, 1)),
(25, _gen_art, (1, 6, 0, 1)),
(10, _gen_magic, ("AW", 0, 0, 1, 1)),
],
'C': [
(60, _gen_coins, ("cp", 6, 6, 0, 100)),
(60, _gen_coins, ("sp", 5, 4, 0, 100)),
(30, _gen_coins, ("ep", 2, 6, 0, 100)),
(25, _gen_gems, (1, 4, 0, 1)),
(25, _gen_art, (1, 4, 0, 1)),
(15, _gen_magic, ("Any", 1, 2, 0, 1)),
],
'D': [
(30, _gen_coins, ("cp", 4, 6, 0, 100)),
(45, _gen_coins, ("sp", 6, 6, 0, 100)),
(90, _gen_coins, ("gp", 5, 8, 0, 100)),
(30, _gen_gems, (1, 8, 0, 1)),
(30, _gen_art, (1, 8, 0, 1)),
(20, _gen_magic, [
("Any", 1, 2, 0, 1),
("Potion", 0, 0, 1, 1),
]
),
],
'E': [
(30, _gen_coins, ("cp", 2, 8, 0, 100)),
(60, _gen_coins, ("sp", 6, 10, 0, 100)),
(50, _gen_coins, ("ep", 3, 8, 0, 100)),
(50, _gen_coins, ("gp", 4, 10, 0, 100)),
(10, _gen_gems, (1, 10, 0, 1)),
(10, _gen_art, (1, 10, 0, 1)),
(30, _gen_magic, [
("Any", 1, 4, 0, 1),
("Scroll", 0, 0, 1, 1),
]
),
],
'F': [
(40, _gen_coins, ("sp", 3, 8, 0, 100)),
(50, _gen_coins, ("ep", 4, 8, 0, 100)),
(85, _gen_coins, ("gp", 6, 10, 0, 100)),
(70, _gen_coins, ("pp", 2, 8, 0, 100)),
(20, _gen_gems, (2, 12, 0, 1)),
(20, _gen_art, (1, 12, 0, 1)),
(35, _gen_magic, [
("Non-Weapon", 1, 4, 0, 1),
("Scroll", 0, 0, 1, 1),
("Potion", 0, 0, 1, 1),
]
),
],
'G': [
(90, _gen_coins, ("gp", 4, 6, 0, 1000)),
(75, _gen_coins, ("pp", 5, 8, 0, 100)),
(25, _gen_gems, (3, 6, 0, 1)),
(25, _gen_art, (1, 10, 0, 1)),
(50, _gen_magic, [
("Any", 1, 4, 0, 1),
("Scroll", 0, 0, 1, 1),
]
),
],
'H': [
(75, _gen_coins, ("cp", 8, 10, 0, 100)),
(75, _gen_coins, ("sp", 6, 10, 0, 1000)),
(75, _gen_coins, ("ep", 3, 10, 0, 1000)),
(75, _gen_coins, ("gp", 5, 8, 0, 1000)),
(75, _gen_coins, ("pp", 9, 8, 0, 100)),
(50, _gen_gems, ( 1, 100, 0, 1)),
(50, _gen_art, (10, 4, 0, 1)),
(20, _gen_magic, [
("Any", 1, 4, 0, 1),
("Scroll", 0, 0, 1, 1),
("Potion", 0, 0, 1, 1),
]
),
],
'I': [
(80, _gen_coins, ("pp", 3, 10, 0, 100)),
(50, _gen_gems, (2, 6, 0, 1)),
(50, _gen_art, (2, 6, 0, 1)),
(15, _gen_magic, ("Any", 0, 0, 1, 1)),
],
'J': [
(45, _gen_coins, ("cp", 3, 8, 0, 100)),
(45, _gen_coins, ("sp", 1, 8, 0, 100)),
],
'K': [
(90, _gen_coins, ("cp", 2, 10, 0, 100)),
(35, _gen_coins, ("sp", 1, 8, 0, 100)),
],
'L': [
(50, _gen_gems, (1, 4, 0, 1)),
],
'M': [
(90, _gen_coins, ("gp", 4, 10, 0, 100)),
(90, _gen_coins, ("pp", 2, 8, 0, 1000)),
],
'N': [
(40, _gen_magic, ("Potion", 2, 4, 0, 1)),
],
'O': [
(50, _gen_magic, ("Scroll", 1, 4, 0, 1)),
],
# personal treasure
'P': [
(100, _gen_coins, ("cp", 3, 8, 0, 1)),
],
'Q': [
(100, _gen_coins, ("sp", 3, 6, 0, 1)),
],
'R': [
(100, _gen_coins, ("ep", 2, 6, 0, 1)),
],
'S': [
(100, _gen_coins, ("gp", 2, 4, 0, 1)),
],
'T': [
(100, _gen_coins, ("pp", 1, 6, 0, 1)),
],
'U': [
( 50, _gen_coins, ("cp", 1, 20, 0, 1)),
( 50, _gen_coins, ("sp", 1, 20, 0, 1)),
( 25, _gen_coins, ("gp", 1, 20, 0, 1)),
( 5, _gen_gems, (1, 4, 0, 1)),
( 5, _gen_art, (1, 4, 0, 1)),
( 2, _gen_magic, ("Any", 0, 0, 1, 1)),
],
'V': [
( 25, _gen_coins, ("sp", 1, 20, 0, 1)),
( 25, _gen_coins, ("ep", 1, 20, 0, 1)),
( 50, _gen_coins, ("gp", 1, 20, 0, 1)),
( 25, _gen_coins, ("pp", 1, 20, 0, 1)),
( 10, _gen_gems, (1, 4, 0, 1)),
( 10, _gen_art, (1, 4, 0, 1)),
( 5, _gen_magic, ("Any", 0, 0, 1, 1)),
],
'U1': [
( 75, _gen_coins, ("cp", 1, 8, 0, 100)),
( 50, _gen_coins, ("sp", 1, 6, 0, 100)),
( 25, _gen_coins, ("ep", 1, 4, 0, 100)),
( 7, _gen_coins, ("gp", 1, 4, 0, 100)),
( 1, _gen_coins, ("pp", 1, 4, 0, 100)),
( 7, _gen_gems, (1, 4, 0, 1)),
( 3, _gen_art, (1, 4, 0, 1)),
( 2, _gen_magic, ("Any", 0, 0, 1, 1)),
],
'U2': [
( 50, _gen_coins, ("cp", 1, 10, 0, 100)),
( 50, _gen_coins, ("sp", 1, 8, 0, 100)),
( 25, _gen_coins, ("ep", 1, 6, 0, 100)),
( 20, _gen_coins, ("gp", 1, 6, 0, 100)),
( 2, _gen_coins, ("pp", 1, 4, 0, 100)),
( 10, _gen_gems, (1, 6, 0, 1)),
( 7, _gen_art, (1, 4, 0, 1)),
( 5, _gen_magic, ("Any", 0, 0, 1, 1)),
],
'U3': [
( 30, _gen_coins, ("cp", 2, 6, 0, 100)),
( 50, _gen_coins, ("sp", 1, 10, 0, 100)),
( 25, _gen_coins, ("ep", 1, 8, 0, 100)),
( 50, _gen_coins, ("gp", 1, 6, 0, 100)),
( 4, _gen_coins, ("pp", 1, 4, 0, 100)),
( 15, _gen_gems, (1, 6, 0, 1)),
( 7, _gen_art, (1, 6, 0, 1)),
( 8, _gen_magic, ("Any", 0, 0, 1, 1)),
],
'U45': [
( 20, _gen_coins, ("cp", 3, 6, 0, 100)),
( 50, _gen_coins, ("sp", 2, 6, 0, 100)),
( 25, _gen_coins, ("ep", 1, 10, 0, 100)),
( 50, _gen_coins, ("gp", 2, 6, 0, 100)),
( 8, _gen_coins, ("pp", 1, 4, 0, 100)),
( 20, _gen_gems, (1, 8, 0, 1)),
( 10, _gen_art, (1, 6, 0, 1)),
( 12, _gen_magic, ("Any", 0, 0, 1, 1)),
],
'U67': [
( 15, _gen_coins, ("cp", 4, 6, 0, 100)),
( 50, _gen_coins, ("sp", 3, 6, 0, 100)),
( 25, _gen_coins, ("ep", 1, 12, 0, 100)),
( 70, _gen_coins, ("gp", 2, 8, 0, 100)),
( 15, _gen_coins, ("pp", 1, 4, 0, 100)),
( 30, _gen_gems, (1, 8, 0, 1)),
( 15, _gen_art, (1, 6, 0, 1)),
( 16, _gen_magic, ("Any", 0, 0, 1, 1)),
],
'U8': [
( 10, _gen_coins, ("cp", 5, 6, 0, 100)),
( 50, _gen_coins, ("sp", 5, 6, 0, 100)),
( 25, _gen_coins, ("ep", 2, 8, 0, 100)),
( 75, _gen_coins, ("gp", 4, 6, 0, 100)),
( 30, _gen_coins, ("pp", 1, 4, 0, 100)),
( 40, _gen_gems, (1, 8, 0, 1)),
( 30, _gen_art, (1, 8, 0, 1)),
( 20, _gen_magic, ("Any", 0, 0, 1, 1)),
],
# coinage
'cp': [
(100, _gen_coins, ("cp", 0, 0, 1, 1)),
],
'sp': [
(100, _gen_coins, ("sp", 0, 0, 1, 1)),
],
'ep': [
(100, _gen_coins, ("ep", 0, 0, 1, 1)),
],
'gp': [
(100, _gen_coins, ("gp", 0, 0, 1, 1)),
],
'pp': [
(100, _gen_coins, ("pp", 0, 0, 1, 1)),
],
# magic classes
'MAGIC': [ (100, _gen_magic, ("Any", 0, 0, 1, 1)), ],
'POTION': [ (100, _gen_magic, ("Potion", 0, 0, 1, 1)), ],
'SCROLL': [ (100, _gen_magic, ("Scroll", 0, 0, 1, 1)), ],
'RING': [ (100, _gen_magic, ("Ring", 0, 0, 1, 1)), ],
'WSR': [ (100, _gen_magic, ("WSR", 0, 0, 1, 1)), ],
'MISC': [ (100, _gen_magic, ("Misc", 0, 0, 1, 1)), ],
'ARMOR': [ (100, _gen_magic, ("Armor", 0, 0, 1, 1)), ],
'WEAPON': [ (100, _gen_magic, ("Weapon", 0, 0, 1, 1)), ],
}
_treasure_table['U4'] = _treasure_table['U45']
_treasure_table['U5'] = _treasure_table['U45']
_treasure_table['U6'] = _treasure_table['U67']
_treasure_table['U7'] = _treasure_table['U67']
if __name__ == "__main__":
import sys
if len(sys.argv) < 2:
print "Usage: Treasure.py treasuretype [ treasuretype ... ]"
sys.exit(0)
types, tr = Factory(sys.argv[1:])
print "Treasure Type " + string.upper(types)
vtot = 0.0
ocat = ''
qty_len = 1
for t in tr:
qty_len = max(len(str(t.qty)), qty_len)
qty_fmt = "%" + str(qty_len) + "d"
for t in tr:
if t.cat != ocat:
print t.cat
ocat = t.cat
if t.value != 0:
print " ", qty_fmt % t.qty, t.name, t.value, "GP ea.", \
t.value * t.qty, "GP total"
else:
print " ", qty_fmt % t.qty, t.name
for i in t.desc:
print " ", i
vtot = vtot + (t.qty * t.value)
print "----- Total Value", vtot, "GP\n"
# end of script.
| 32.734411 | 79 | 0.417172 |
1be31bb2955f81221fbda20bbf33d2351c12d6c3 | 20,773 | py | Python | covid19/COVID19/code/controller/main.py | zhanqingheng/COVID-19 | d050ad2effedb9090865d1104ccd5c5d04343f53 | [
"MIT"
] | 16 | 2020-06-08T10:14:13.000Z | 2022-03-30T02:44:04.000Z | covid19/COVID19/code/controller/main.py | zhanqingheng/COVID-19 | d050ad2effedb9090865d1104ccd5c5d04343f53 | [
"MIT"
] | 1 | 2021-11-18T10:03:42.000Z | 2021-11-18T10:03:42.000Z | covid19/COVID19/code/controller/main.py | zhanqingheng/COVID-19 | d050ad2effedb9090865d1104ccd5c5d04343f53 | [
"MIT"
] | 4 | 2021-03-06T04:44:03.000Z | 2021-12-09T07:22:50.000Z | from flask import Flask, current_app
from flask import render_template
from flask import jsonify
from jieba.analyse import extract_tags
import string
from DB import chinaSQL
from DB import worldSQL
app = Flask(__name__, template_folder='../../web', static_folder='../../static')
if __name__ == '__main__':
app.run()
| 33.078025 | 98 | 0.588264 |
1be38ec637c07219a45f7c7ba15326a16a343d58 | 396 | py | Python | T2API/migrations/0008_product_weight.py | hackhb18-T2/api | c42be466492d07d6451ff3145985cd8cc0927257 | [
"Apache-2.0"
] | null | null | null | T2API/migrations/0008_product_weight.py | hackhb18-T2/api | c42be466492d07d6451ff3145985cd8cc0927257 | [
"Apache-2.0"
] | null | null | null | T2API/migrations/0008_product_weight.py | hackhb18-T2/api | c42be466492d07d6451ff3145985cd8cc0927257 | [
"Apache-2.0"
] | null | null | null | # Generated by Django 2.0.2 on 2018-02-17 10:50
from django.db import migrations, models
| 20.842105 | 63 | 0.60101 |
1be41a8ed3e94194a6131c0c94be533e83696d98 | 3,402 | py | Python | contrib/cirrus/podbot.py | juhp/libpod | bc7afd6d71da4173e4894ff352667a25987fa2ea | [
"Apache-2.0"
] | 2 | 2021-09-20T00:29:06.000Z | 2021-11-28T08:36:20.000Z | contrib/cirrus/podbot.py | juhp/libpod | bc7afd6d71da4173e4894ff352667a25987fa2ea | [
"Apache-2.0"
] | 2 | 2020-01-04T03:31:18.000Z | 2021-05-17T09:54:03.000Z | contrib/cirrus/podbot.py | juhp/libpod | bc7afd6d71da4173e4894ff352667a25987fa2ea | [
"Apache-2.0"
] | 1 | 2019-04-08T21:58:07.000Z | 2019-04-08T21:58:07.000Z | #!/usr/bin/env python3
# Simple and dumb script to send a message to the #podman IRC channel on frenode
# Based on example from: https://pythonspot.com/building-an-irc-bot/
import os
import time
import random
import errno
import socket
import sys
if len(sys.argv) < 3:
print("Error: Must pass desired nick and message as parameters")
else:
irc = IRC("irc.freenode.net", sys.argv[1], "#podman")
err = irc.connect(*os.environ.get('IRCID', 'Big Bug').split(" ", 2))
if not err:
irc.message(" ".join(sys.argv[2:]))
time.sleep(5.0) # avoid join/quit spam
irc.quit()
| 34.363636 | 87 | 0.569959 |
1be5b77cc2bbea8d65329992b137d52e24f4e227 | 441 | py | Python | changes/api/build_coverage.py | vault-the/changes | 37e23c3141b75e4785cf398d015e3dbca41bdd56 | [
"Apache-2.0"
] | 443 | 2015-01-03T16:28:39.000Z | 2021-04-26T16:39:46.000Z | changes/api/build_coverage.py | vault-the/changes | 37e23c3141b75e4785cf398d015e3dbca41bdd56 | [
"Apache-2.0"
] | 12 | 2015-07-30T19:07:16.000Z | 2016-11-07T23:11:21.000Z | changes/api/build_coverage.py | vault-the/changes | 37e23c3141b75e4785cf398d015e3dbca41bdd56 | [
"Apache-2.0"
] | 47 | 2015-01-09T10:04:00.000Z | 2020-11-18T17:58:19.000Z | from changes.api.base import APIView
from changes.lib.coverage import get_coverage_by_build_id, merged_coverage_data
from changes.models.build import Build
| 25.941176 | 79 | 0.730159 |
1be723fadb484c2875b98748f51d456625b23262 | 5,251 | py | Python | topopt/mechanisms/problems.py | arnavbansal2764/topopt | 74d8f17568a9d3349632e23840a9dc5b0d6c4d1f | [
"MIT"
] | 53 | 2020-04-14T10:13:04.000Z | 2022-02-24T03:16:57.000Z | topopt/mechanisms/problems.py | arnavbansal2764/topopt | 74d8f17568a9d3349632e23840a9dc5b0d6c4d1f | [
"MIT"
] | 5 | 2020-11-12T23:56:30.000Z | 2021-09-30T19:24:06.000Z | topopt/mechanisms/problems.py | arnavbansal2764/topopt | 74d8f17568a9d3349632e23840a9dc5b0d6c4d1f | [
"MIT"
] | 15 | 2020-02-12T01:32:07.000Z | 2022-02-20T02:44:55.000Z | """Compliant mechanism synthesis problems using topology optimization."""
import numpy
import scipy.sparse
from ..problems import ElasticityProblem
from .boundary_conditions import MechanismSynthesisBoundaryConditions
from ..utils import deleterowcol
| 33.234177 | 79 | 0.591316 |