hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 11 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 251 | max_stars_repo_name stringlengths 4 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 251 | max_issues_repo_name stringlengths 4 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 251 | max_forks_repo_name stringlengths 4 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.05M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.04M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0bb8e30ded6e839a96a8ac9f64f609621cb56e4a | 2,055 | py | Python | S4/S4 Library/simulation/careers/pick_career_by_agent_interaction.py | NeonOcean/Environment | ca658cf66e8fd6866c22a4a0136d415705b36d26 | [
"CC-BY-4.0"
] | 1 | 2021-05-20T19:33:37.000Z | 2021-05-20T19:33:37.000Z | S4/S4 Library/simulation/careers/pick_career_by_agent_interaction.py | NeonOcean/Environment | ca658cf66e8fd6866c22a4a0136d415705b36d26 | [
"CC-BY-4.0"
] | null | null | null | S4/S4 Library/simulation/careers/pick_career_by_agent_interaction.py | NeonOcean/Environment | ca658cf66e8fd6866c22a4a0136d415705b36d26 | [
"CC-BY-4.0"
] | null | null | null | from event_testing.resolver import SingleSimResolver
from sims4.resources import Types
from sims4.tuning.tunable import TunableList, TunableReference
from sims4.tuning.tunable_base import GroupNames
from traits.trait_tracker import TraitPickerSuperInteraction
import services
| 60.441176 | 583 | 0.734793 |
0bb9728183f6cd95e86f2c16d976742c14283f39 | 149 | py | Python | api/urls.py | kirklennon/Clickbait | 9ce97d38b3dce78ce151b285a0cc55ddbb7b58be | [
"MIT"
] | 1 | 2020-08-29T09:31:22.000Z | 2020-08-29T09:31:22.000Z | api/urls.py | kirklennon/Clickbait | 9ce97d38b3dce78ce151b285a0cc55ddbb7b58be | [
"MIT"
] | null | null | null | api/urls.py | kirklennon/Clickbait | 9ce97d38b3dce78ce151b285a0cc55ddbb7b58be | [
"MIT"
] | null | null | null | from django.urls import path
from . import views
urlpatterns = [
path('', views.index, name='index'),
path('json', views.api, name='api'),
] | 21.285714 | 40 | 0.644295 |
0bba1e28f68dedeccae5371afea0ac4ab68e2473 | 68,549 | py | Python | tests/examples/minlplib/waterno2_03.py | ouyang-w-19/decogo | 52546480e49776251d4d27856e18a46f40c824a1 | [
"MIT"
] | 2 | 2021-07-03T13:19:10.000Z | 2022-02-06T10:48:13.000Z | tests/examples/minlplib/waterno2_03.py | ouyang-w-19/decogo | 52546480e49776251d4d27856e18a46f40c824a1 | [
"MIT"
] | 1 | 2021-07-04T14:52:14.000Z | 2021-07-15T10:17:11.000Z | tests/examples/minlplib/waterno2_03.py | ouyang-w-19/decogo | 52546480e49776251d4d27856e18a46f40c824a1 | [
"MIT"
] | null | null | null | # MINLP written by GAMS Convert at 04/21/18 13:55:18
#
# Equation counts
# Total E G L N X C B
# 617 367 103 147 0 0 0 0
#
# Variable counts
# x b i s1s s2s sc si
# Total cont binary integer sos1 sos2 scont sint
# 499 472 27 0 0 0 0 0
# FX 6 6 0 0 0 0 0 0
#
# Nonzero counts
# Total const NL DLL
# 1636 1333 303 0
#
# Reformulation has removed 1 variable and 1 equation
from pyomo.environ import *
model = m = ConcreteModel()
m.b2 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b3 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b4 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b5 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b6 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b7 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b8 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b9 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b10 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b11 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b12 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b13 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b14 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b15 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b16 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b17 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b18 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b19 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b20 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b21 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b22 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b23 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b24 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b25 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b26 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b27 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b28 = Var(within=Binary,bounds=(0,1),initialize=0)
m.x29 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x30 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x31 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x32 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x33 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x34 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x35 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x36 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x37 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x38 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x39 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x40 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x41 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x42 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x43 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x44 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x45 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x46 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x47 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x48 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x49 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x50 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x51 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x52 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x53 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x54 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x55 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x56 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x57 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x58 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x59 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x60 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x61 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x62 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x63 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x64 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x65 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x66 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x67 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x68 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x69 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x70 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x71 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x72 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x73 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x74 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x75 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x76 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x77 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x78 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x79 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x80 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x81 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x82 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x83 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x84 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x85 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x86 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x87 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x88 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x89 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x90 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x91 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x92 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x93 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x94 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x95 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x96 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x97 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x98 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x99 = Var(within=Reals,bounds=(0,2.4),initialize=0)
m.x100 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x101 = Var(within=Reals,bounds=(0,2.4),initialize=0)
m.x102 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x103 = Var(within=Reals,bounds=(0,2.4),initialize=0)
m.x104 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x105 = Var(within=Reals,bounds=(0,2.4),initialize=0)
m.x106 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x107 = Var(within=Reals,bounds=(0,2.4),initialize=0)
m.x108 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x109 = Var(within=Reals,bounds=(0,2.4),initialize=0)
m.x110 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x111 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x112 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x113 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x114 = Var(within=Reals,bounds=(0,1.16),initialize=0)
m.x115 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x116 = Var(within=Reals,bounds=(0,1.16),initialize=0)
m.x117 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x118 = Var(within=Reals,bounds=(0,1.16),initialize=0)
m.x119 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x120 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x121 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x122 = Var(within=Reals,bounds=(3.5,3.5),initialize=3.5)
m.x123 = Var(within=Reals,bounds=(2,5),initialize=2)
m.x124 = Var(within=Reals,bounds=(2,5),initialize=2)
m.x125 = Var(within=Reals,bounds=(2,5),initialize=2)
m.x126 = Var(within=Reals,bounds=(2,5),initialize=2)
m.x127 = Var(within=Reals,bounds=(2,5),initialize=2)
m.x128 = Var(within=Reals,bounds=(4.1,4.1),initialize=4.1)
m.x129 = Var(within=Reals,bounds=(2.5,5),initialize=2.5)
m.x130 = Var(within=Reals,bounds=(2.5,5),initialize=2.5)
m.x131 = Var(within=Reals,bounds=(2.5,5),initialize=2.5)
m.x132 = Var(within=Reals,bounds=(2.5,5),initialize=2.5)
m.x133 = Var(within=Reals,bounds=(2.5,5),initialize=2.5)
m.x134 = Var(within=Reals,bounds=(4,4),initialize=4)
m.x135 = Var(within=Reals,bounds=(2,6),initialize=2)
m.x136 = Var(within=Reals,bounds=(2,6),initialize=2)
m.x137 = Var(within=Reals,bounds=(2,6),initialize=2)
m.x138 = Var(within=Reals,bounds=(2,6),initialize=2)
m.x139 = Var(within=Reals,bounds=(2,6),initialize=2)
m.x140 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x141 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x142 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x143 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x144 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x145 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x146 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x147 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x148 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x149 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x150 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x151 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x152 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x153 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x154 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x155 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x156 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x157 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x158 = Var(within=Reals,bounds=(0,0.5),initialize=0)
m.x159 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x160 = Var(within=Reals,bounds=(0,0.5),initialize=0)
m.x161 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x162 = Var(within=Reals,bounds=(0,0.5),initialize=0)
m.x163 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x164 = Var(within=Reals,bounds=(0,0.5),initialize=0)
m.x165 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x166 = Var(within=Reals,bounds=(0,0.5),initialize=0)
m.x167 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x168 = Var(within=Reals,bounds=(0,0.5),initialize=0)
m.x169 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x170 = Var(within=Reals,bounds=(0,0.7),initialize=0)
m.x171 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x172 = Var(within=Reals,bounds=(0,0.7),initialize=0)
m.x173 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x174 = Var(within=Reals,bounds=(0,0.7),initialize=0)
m.x175 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x176 = Var(within=Reals,bounds=(0,0.7),initialize=0)
m.x177 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x178 = Var(within=Reals,bounds=(0,0.7),initialize=0)
m.x179 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x180 = Var(within=Reals,bounds=(0,0.7),initialize=0)
m.x181 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x182 = Var(within=Reals,bounds=(0,0.58),initialize=0)
m.x183 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x184 = Var(within=Reals,bounds=(0,0.58),initialize=0)
m.x185 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x186 = Var(within=Reals,bounds=(0,0.58),initialize=0)
m.x187 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x188 = Var(within=Reals,bounds=(0,0.58),initialize=0)
m.x189 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x190 = Var(within=Reals,bounds=(0,0.58),initialize=0)
m.x191 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x192 = Var(within=Reals,bounds=(0,0.58),initialize=0)
m.x193 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x194 = Var(within=Reals,bounds=(62,65),initialize=62)
m.x195 = Var(within=Reals,bounds=(62,65),initialize=62)
m.x196 = Var(within=Reals,bounds=(62,65),initialize=62)
m.x197 = Var(within=Reals,bounds=(92.5,95),initialize=92.5)
m.x198 = Var(within=Reals,bounds=(92.5,95),initialize=92.5)
m.x199 = Var(within=Reals,bounds=(92.5,95),initialize=92.5)
m.x200 = Var(within=Reals,bounds=(105,109),initialize=105)
m.x201 = Var(within=Reals,bounds=(105,109),initialize=105)
m.x202 = Var(within=Reals,bounds=(105,109),initialize=105)
m.x203 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x204 = Var(within=Reals,bounds=(-125,125),initialize=0)
m.x205 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x206 = Var(within=Reals,bounds=(-125,125),initialize=0)
m.x207 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x208 = Var(within=Reals,bounds=(-125,125),initialize=0)
m.x209 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x210 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x211 = Var(within=Reals,bounds=(-100,100),initialize=0)
m.x212 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x213 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x214 = Var(within=Reals,bounds=(-100,100),initialize=0)
m.x215 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x216 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x217 = Var(within=Reals,bounds=(-100,100),initialize=0)
m.x218 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x219 = Var(within=Reals,bounds=(-125,125),initialize=0)
m.x220 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x221 = Var(within=Reals,bounds=(-125,125),initialize=0)
m.x222 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x223 = Var(within=Reals,bounds=(-125,125),initialize=0)
m.x224 = Var(within=Reals,bounds=(49,49),initialize=49)
m.x225 = Var(within=Reals,bounds=(-49,1000),initialize=0)
m.x226 = Var(within=Reals,bounds=(49,49),initialize=49)
m.x227 = Var(within=Reals,bounds=(-49,1000),initialize=0)
m.x228 = Var(within=Reals,bounds=(49,49),initialize=49)
m.x229 = Var(within=Reals,bounds=(-49,1000),initialize=0)
m.x230 = Var(within=Reals,bounds=(-65,1000),initialize=0)
m.x231 = Var(within=Reals,bounds=(-65,1000),initialize=0)
m.x232 = Var(within=Reals,bounds=(-65,1000),initialize=0)
m.x233 = Var(within=Reals,bounds=(-95,1000),initialize=0)
m.x234 = Var(within=Reals,bounds=(-95,1000),initialize=0)
m.x235 = Var(within=Reals,bounds=(-95,1000),initialize=0)
m.x236 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x237 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x238 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x239 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x240 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x241 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x242 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x243 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x244 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x245 = Var(within=Reals,bounds=(0.25,0.5),initialize=0.25)
m.x246 = Var(within=Reals,bounds=(0.25,0.5),initialize=0.25)
m.x247 = Var(within=Reals,bounds=(0.25,0.5),initialize=0.25)
m.x248 = Var(within=Reals,bounds=(0.25,0.5),initialize=0.25)
m.x249 = Var(within=Reals,bounds=(0.25,0.5),initialize=0.25)
m.x250 = Var(within=Reals,bounds=(0.25,0.5),initialize=0.25)
m.x251 = Var(within=Reals,bounds=(0.4,0.7),initialize=0.4)
m.x252 = Var(within=Reals,bounds=(0.4,0.7),initialize=0.4)
m.x253 = Var(within=Reals,bounds=(0.4,0.7),initialize=0.4)
m.x254 = Var(within=Reals,bounds=(0.4,0.7),initialize=0.4)
m.x255 = Var(within=Reals,bounds=(0.4,0.7),initialize=0.4)
m.x256 = Var(within=Reals,bounds=(0.4,0.7),initialize=0.4)
m.x257 = Var(within=Reals,bounds=(0.24,0.58),initialize=0.24)
m.x258 = Var(within=Reals,bounds=(0.24,0.58),initialize=0.24)
m.x259 = Var(within=Reals,bounds=(0.24,0.58),initialize=0.24)
m.x260 = Var(within=Reals,bounds=(0.24,0.58),initialize=0.24)
m.x261 = Var(within=Reals,bounds=(0.24,0.58),initialize=0.24)
m.x262 = Var(within=Reals,bounds=(0.24,0.58),initialize=0.24)
m.x263 = Var(within=Reals,bounds=(0.6,1),initialize=0.6)
m.x264 = Var(within=Reals,bounds=(0.6,1),initialize=0.6)
m.x265 = Var(within=Reals,bounds=(0.6,1),initialize=0.6)
m.x266 = Var(within=Reals,bounds=(0.8,1),initialize=0.8)
m.x267 = Var(within=Reals,bounds=(0.8,1),initialize=0.8)
m.x268 = Var(within=Reals,bounds=(0.8,1),initialize=0.8)
m.x269 = Var(within=Reals,bounds=(0.85,1),initialize=0.85)
m.x270 = Var(within=Reals,bounds=(0.85,1),initialize=0.85)
m.x271 = Var(within=Reals,bounds=(0.85,1),initialize=0.85)
m.x272 = Var(within=Reals,bounds=(0.7,1),initialize=0.7)
m.x273 = Var(within=Reals,bounds=(0.7,1),initialize=0.7)
m.x274 = Var(within=Reals,bounds=(0.7,1),initialize=0.7)
m.x275 = Var(within=Reals,bounds=(100,1000),initialize=100)
m.x276 = Var(within=Reals,bounds=(100,1000),initialize=100)
m.x277 = Var(within=Reals,bounds=(100,1000),initialize=100)
m.x278 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0)
m.x279 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x280 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x281 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x282 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x283 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0)
m.x284 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x285 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x286 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x287 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x288 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0)
m.x289 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x290 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x291 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x292 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x293 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0)
m.x294 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x295 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x296 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x297 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x298 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0)
m.x299 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x300 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x301 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x302 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x303 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0)
m.x304 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x305 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x306 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x307 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x308 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0)
m.x309 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x310 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x311 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x312 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x313 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0)
m.x314 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x315 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x316 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x317 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x318 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0)
m.x319 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x320 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x321 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x322 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x323 = Var(within=Reals,bounds=(0,93.045051789432),initialize=0)
m.x324 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x325 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x326 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x327 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x328 = Var(within=Reals,bounds=(0,93.045051789432),initialize=0)
m.x329 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x330 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x331 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x332 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x333 = Var(within=Reals,bounds=(0,93.045051789432),initialize=0)
m.x334 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x335 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x336 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x337 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x338 = Var(within=Reals,bounds=(0,93.045051789432),initialize=0)
m.x339 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x340 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x341 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x342 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x343 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x344 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x345 = Var(within=Reals,bounds=(0,93.045051789432),initialize=0)
m.x346 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x347 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x348 = Var(within=Reals,bounds=(0,93.045051789432),initialize=0)
m.x349 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x350 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x351 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x352 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x353 = Var(within=Reals,bounds=(0,112.384987749469),initialize=0)
m.x354 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x355 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x356 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x357 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x358 = Var(within=Reals,bounds=(0,112.384987749469),initialize=0)
m.x359 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x360 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x361 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x362 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x363 = Var(within=Reals,bounds=(0,112.384987749469),initialize=0)
m.x364 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x365 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x366 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x367 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x368 = Var(within=Reals,bounds=(0,112.384987749469),initialize=0)
m.x369 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x370 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x371 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x372 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x373 = Var(within=Reals,bounds=(0,112.384987749469),initialize=0)
m.x374 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x375 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x376 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x377 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x378 = Var(within=Reals,bounds=(0,112.384987749469),initialize=0)
m.x379 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x380 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x381 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x382 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x383 = Var(within=Reals,bounds=(0,42.066542469172),initialize=0)
m.x384 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x385 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x386 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x387 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x388 = Var(within=Reals,bounds=(0,42.066542469172),initialize=0)
m.x389 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x390 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x391 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x392 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x393 = Var(within=Reals,bounds=(0,42.066542469172),initialize=0)
m.x394 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x395 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x396 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x397 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x398 = Var(within=Reals,bounds=(0,42.066542469172),initialize=0)
m.x399 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x400 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x401 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x402 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x403 = Var(within=Reals,bounds=(0,42.066542469172),initialize=0)
m.x404 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x405 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x406 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x407 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x408 = Var(within=Reals,bounds=(0,42.066542469172),initialize=0)
m.x409 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x410 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x411 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x412 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x413 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x414 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x415 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x416 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x417 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x418 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x419 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x420 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x421 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x422 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x423 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x424 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x425 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x426 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x427 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x428 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x429 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x430 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x431 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x432 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x433 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x434 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x435 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x436 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x437 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x438 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x439 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x440 = Var(within=Reals,bounds=(0,0.25),initialize=0)
m.x441 = Var(within=Reals,bounds=(0,0.125),initialize=0)
m.x442 = Var(within=Reals,bounds=(0,0.25),initialize=0)
m.x443 = Var(within=Reals,bounds=(0,0.125),initialize=0)
m.x444 = Var(within=Reals,bounds=(0,0.25),initialize=0)
m.x445 = Var(within=Reals,bounds=(0,0.125),initialize=0)
m.x446 = Var(within=Reals,bounds=(0,0.25),initialize=0)
m.x447 = Var(within=Reals,bounds=(0,0.125),initialize=0)
m.x448 = Var(within=Reals,bounds=(0,0.25),initialize=0)
m.x449 = Var(within=Reals,bounds=(0,0.125),initialize=0)
m.x450 = Var(within=Reals,bounds=(0,0.25),initialize=0)
m.x451 = Var(within=Reals,bounds=(0,0.125),initialize=0)
# Continuous auxiliary variables x452-x499 (generated model, one declaration per
# variable).  The bounds come in (u**2, u**3) pairs — e.g. 0.49 = 0.7**2 and
# 0.343 = 0.7**3, 0.3364 = 0.58**2 and 0.195112 = 0.58**3, 0.64 = 0.8**2 and
# 0.512 = 0.8**3 — consistent with the x**2 / x**3 defining constraints later
# in this file (c426 onward), which equate each pair to the square and cube of
# a base variable.  NOTE(review): pairing inferred from the bound arithmetic
# and the defining constraints below; confirm against the model generator.
m.x452 = Var(within=Reals,bounds=(0,0.49),initialize=0)
m.x453 = Var(within=Reals,bounds=(0,0.343),initialize=0)
m.x454 = Var(within=Reals,bounds=(0,0.49),initialize=0)
m.x455 = Var(within=Reals,bounds=(0,0.343),initialize=0)
m.x456 = Var(within=Reals,bounds=(0,0.49),initialize=0)
m.x457 = Var(within=Reals,bounds=(0,0.343),initialize=0)
m.x458 = Var(within=Reals,bounds=(0,0.49),initialize=0)
m.x459 = Var(within=Reals,bounds=(0,0.343),initialize=0)
m.x460 = Var(within=Reals,bounds=(0,0.49),initialize=0)
m.x461 = Var(within=Reals,bounds=(0,0.343),initialize=0)
m.x462 = Var(within=Reals,bounds=(0,0.49),initialize=0)
m.x463 = Var(within=Reals,bounds=(0,0.343),initialize=0)
m.x464 = Var(within=Reals,bounds=(0,0.3364),initialize=0)
m.x465 = Var(within=Reals,bounds=(0,0.195112),initialize=0)
m.x466 = Var(within=Reals,bounds=(0,0.3364),initialize=0)
m.x467 = Var(within=Reals,bounds=(0,0.195112),initialize=0)
m.x468 = Var(within=Reals,bounds=(0,0.3364),initialize=0)
m.x469 = Var(within=Reals,bounds=(0,0.195112),initialize=0)
m.x470 = Var(within=Reals,bounds=(0,0.3364),initialize=0)
m.x471 = Var(within=Reals,bounds=(0,0.195112),initialize=0)
m.x472 = Var(within=Reals,bounds=(0,0.3364),initialize=0)
m.x473 = Var(within=Reals,bounds=(0,0.195112),initialize=0)
m.x474 = Var(within=Reals,bounds=(0,0.3364),initialize=0)
m.x475 = Var(within=Reals,bounds=(0,0.195112),initialize=0)
# These are initialized at their lower bounds (0.36 = 0.6**2, 0.216 = 0.6**3,
# 0.64 = 0.8**2, 0.512 = 0.8**3, 0.7225 = 0.85**2, 0.614125 = 0.85**3,
# 0.49 = 0.7**2, 0.343 = 0.7**3) rather than at 0.
m.x476 = Var(within=Reals,bounds=(0.36,1),initialize=0.36)
m.x477 = Var(within=Reals,bounds=(0.216,1),initialize=0.216)
m.x478 = Var(within=Reals,bounds=(0.36,1),initialize=0.36)
m.x479 = Var(within=Reals,bounds=(0.216,1),initialize=0.216)
m.x480 = Var(within=Reals,bounds=(0.36,1),initialize=0.36)
m.x481 = Var(within=Reals,bounds=(0.216,1),initialize=0.216)
m.x482 = Var(within=Reals,bounds=(0.64,1),initialize=0.64)
m.x483 = Var(within=Reals,bounds=(0.512,1),initialize=0.512)
m.x484 = Var(within=Reals,bounds=(0.64,1),initialize=0.64)
m.x485 = Var(within=Reals,bounds=(0.512,1),initialize=0.512)
m.x486 = Var(within=Reals,bounds=(0.64,1),initialize=0.64)
m.x487 = Var(within=Reals,bounds=(0.512,1),initialize=0.512)
m.x488 = Var(within=Reals,bounds=(0.7225,1),initialize=0.7225)
m.x489 = Var(within=Reals,bounds=(0.614125,1),initialize=0.614125)
m.x490 = Var(within=Reals,bounds=(0.7225,1),initialize=0.7225)
m.x491 = Var(within=Reals,bounds=(0.614125,1),initialize=0.614125)
m.x492 = Var(within=Reals,bounds=(0.7225,1),initialize=0.7225)
m.x493 = Var(within=Reals,bounds=(0.614125,1),initialize=0.614125)
m.x494 = Var(within=Reals,bounds=(0.49,1),initialize=0.49)
m.x495 = Var(within=Reals,bounds=(0.343,1),initialize=0.343)
m.x496 = Var(within=Reals,bounds=(0.49,1),initialize=0.49)
m.x497 = Var(within=Reals,bounds=(0.343,1),initialize=0.343)
m.x498 = Var(within=Reals,bounds=(0.49,1),initialize=0.49)
m.x499 = Var(within=Reals,bounds=(0.343,1),initialize=0.343)
# Objective: minimize the sum of 27 selected x-variables (x278, x283, ...,
# x408 — every 5th index plus x345/x348).  Each of these appears with a
# -0.309838295393634 coefficient in the polynomial rows c321-c347 below.
m.obj = Objective(expr= m.x278 + m.x283 + m.x288 + m.x293 + m.x298 + m.x303 + m.x308 + m.x313 + m.x318 + m.x323
    + m.x328 + m.x333 + m.x338 + m.x345 + m.x348 + m.x353 + m.x358 + m.x363 + m.x368 + m.x373
    + m.x378 + m.x383 + m.x388 + m.x393 + m.x398 + m.x403 + m.x408, sense=minimize)
# c2-c28: linear equalities; each row combines one free term with three
# fixed-coefficient terms (coefficient sets repeat in groups of rows, e.g.
# 27.42831624 / 37.5407324 / -57.2814121 for c2-c10).
m.c2 = Constraint(expr= m.x141 + 27.42831624*m.x143 + 37.5407324*m.x145 - 57.2814121*m.x147 == 0)
m.c3 = Constraint(expr= m.x149 + 27.42831624*m.x151 - 57.2814121*m.x153 + 37.5407324*m.x155 == 0)
m.c4 = Constraint(expr= m.x157 + 27.42831624*m.x159 - 57.2814121*m.x161 + 37.5407324*m.x163 == 0)
m.c5 = Constraint(expr= - 57.2814121*m.x147 + m.x165 + 27.42831624*m.x167 + 37.5407324*m.x169 == 0)
m.c6 = Constraint(expr= - 57.2814121*m.x153 + m.x171 + 37.5407324*m.x173 + 27.42831624*m.x175 == 0)
m.c7 = Constraint(expr= - 57.2814121*m.x161 + m.x177 + 37.5407324*m.x179 + 27.42831624*m.x181 == 0)
m.c8 = Constraint(expr= - 57.2814121*m.x147 + m.x183 + 37.5407324*m.x185 + 27.42831624*m.x187 == 0)
m.c9 = Constraint(expr= - 57.2814121*m.x153 + m.x189 + 27.42831624*m.x191 + 37.5407324*m.x193 == 0)
m.c10 = Constraint(expr= m.x29 + 27.42831624*m.x30 + 37.5407324*m.x31 - 57.2814121*m.x161 == 0)
m.c11 = Constraint(expr= m.x32 - 76.45219958*m.x33 + 43.14087708*m.x34 + 50.37356589*m.x35 == 0)
m.c12 = Constraint(expr= m.x36 + 50.37356589*m.x37 - 76.45219958*m.x38 + 43.14087708*m.x39 == 0)
m.c13 = Constraint(expr= m.x40 + 43.14087708*m.x41 + 50.37356589*m.x42 - 76.45219958*m.x43 == 0)
m.c14 = Constraint(expr= - 76.45219958*m.x33 + m.x44 + 43.14087708*m.x45 + 50.37356589*m.x46 == 0)
m.c15 = Constraint(expr= - 76.45219958*m.x38 + m.x47 + 50.37356589*m.x48 + 43.14087708*m.x49 == 0)
m.c16 = Constraint(expr= - 76.45219958*m.x43 + m.x50 + 43.14087708*m.x51 + 50.37356589*m.x52 == 0)
m.c17 = Constraint(expr= m.x53 + 58.31011875*m.x54 - 69.39622571*m.x55 - 25.39911174*m.x56 == 0)
m.c18 = Constraint(expr= m.x57 - 25.39911174*m.x58 + 58.31011875*m.x59 - 69.39622571*m.x60 == 0)
m.c19 = Constraint(expr= m.x61 - 69.39622571*m.x62 + 58.31011875*m.x63 - 25.39911174*m.x64 == 0)
m.c20 = Constraint(expr= - 69.39622571*m.x55 + m.x65 + 58.31011875*m.x66 - 25.39911174*m.x67 == 0)
m.c21 = Constraint(expr= - 69.39622571*m.x60 + m.x68 - 25.39911174*m.x69 + 58.31011875*m.x70 == 0)
m.c22 = Constraint(expr= - 69.39622571*m.x62 + m.x71 + 58.31011875*m.x72 - 25.39911174*m.x73 == 0)
m.c23 = Constraint(expr= m.x74 - 2.03724124*m.x75 + 63.61644904*m.x76 - 34.92732674*m.x77 == 0)
m.c24 = Constraint(expr= m.x78 - 2.03724124*m.x79 - 34.92732674*m.x80 + 63.61644904*m.x81 == 0)
m.c25 = Constraint(expr= m.x82 - 2.03724124*m.x83 - 34.92732674*m.x84 + 63.61644904*m.x85 == 0)
m.c26 = Constraint(expr= - 34.92732674*m.x77 + m.x86 + 63.61644904*m.x87 - 2.03724124*m.x88 == 0)
m.c27 = Constraint(expr= - 34.92732674*m.x80 + m.x89 + 63.61644904*m.x90 - 2.03724124*m.x91 == 0)
m.c28 = Constraint(expr= - 34.92732674*m.x84 + m.x92 - 2.03724124*m.x93 + 63.61644904*m.x94 == 0)
# c29: minimum total over x95+x96+x97; the only >= row in this group.
m.c29 = Constraint(expr= m.x95 + m.x96 + m.x97 >= 0.875)
# c30-c41: pairwise identity links between variables (x_a == x_b).
m.c30 = Constraint(expr= - m.x98 + m.x99 == 0)
m.c31 = Constraint(expr= - m.x100 + m.x101 == 0)
m.c32 = Constraint(expr= - m.x102 + m.x103 == 0)
m.c33 = Constraint(expr= - m.x104 + m.x105 == 0)
m.c34 = Constraint(expr= - m.x106 + m.x107 == 0)
m.c35 = Constraint(expr= - m.x108 + m.x109 == 0)
m.c36 = Constraint(expr= m.x104 - m.x110 == 0)
m.c37 = Constraint(expr= m.x106 - m.x111 == 0)
m.c38 = Constraint(expr= m.x108 - m.x112 == 0)
m.c39 = Constraint(expr= - m.x113 + m.x114 == 0)
m.c40 = Constraint(expr= - m.x115 + m.x116 == 0)
m.c41 = Constraint(expr= - m.x117 + m.x118 == 0)
# c42-c44: fix x119-x121 to constants.
m.c42 = Constraint(expr= m.x119 == 0.296666667)
m.c43 = Constraint(expr= m.x120 == 0.294444444)
m.c44 = Constraint(expr= m.x121 == 0.283888889)
m.c45 = Constraint(expr= m.x95 - m.x99 == 0)
m.c46 = Constraint(expr= m.x96 - m.x101 == 0)
m.c47 = Constraint(expr= m.x97 - m.x103 == 0)
# c48-c56: scaled balance rows (3600*in - 3600*out + k*up - k*down == 0 with
# k in {1800, 720, 1600}).  NOTE(review): looks like time-scaled flow
# balances; confirm the units against the model source.
m.c48 = Constraint(expr= 3600*m.x98 - 3600*m.x105 + 1800*m.x122 - 1800*m.x123 == 0)
m.c49 = Constraint(expr= 3600*m.x100 - 3600*m.x107 + 1800*m.x124 - 1800*m.x125 == 0)
m.c50 = Constraint(expr= 3600*m.x102 - 3600*m.x109 + 1800*m.x126 - 1800*m.x127 == 0)
m.c51 = Constraint(expr= 3600*m.x110 - 3600*m.x114 + 720*m.x128 - 720*m.x129 == 0)
m.c52 = Constraint(expr= 3600*m.x111 - 3600*m.x116 + 720*m.x130 - 720*m.x131 == 0)
m.c53 = Constraint(expr= 3600*m.x112 - 3600*m.x118 + 720*m.x132 - 720*m.x133 == 0)
m.c54 = Constraint(expr= 3600*m.x113 - 3600*m.x119 + 1600*m.x134 - 1600*m.x135 == 0)
m.c55 = Constraint(expr= 3600*m.x115 - 3600*m.x120 + 1600*m.x136 - 1600*m.x137 == 0)
m.c56 = Constraint(expr= 3600*m.x117 - 3600*m.x121 + 1600*m.x138 - 1600*m.x139 == 0)
# c57-c62: chain successive balance terms together.
m.c57 = Constraint(expr= - m.x123 + m.x124 == 0)
m.c58 = Constraint(expr= - m.x125 + m.x126 == 0)
m.c59 = Constraint(expr= - m.x129 + m.x130 == 0)
m.c60 = Constraint(expr= - m.x131 + m.x132 == 0)
m.c61 = Constraint(expr= - m.x135 + m.x136 == 0)
m.c62 = Constraint(expr= - m.x137 + m.x138 == 0)
# c63-c116: binary-scaled bounds on continuous variables.  c63-c89 impose
# x >= l*b (lower bound active only when binary b = 1); c90-c116 impose
# x <= u*b (forces x to 0 when b = 0).  Together each (x, b) pair behaves
# like a semicontinuous variable on [l, u] — NOTE(review): standard on/off
# linking pattern inferred from the coefficients; confirm against the source.
m.c63 = Constraint(expr= - 0.2*m.b2 + m.x140 >= 0)
m.c64 = Constraint(expr= - 0.2*m.b3 + m.x142 >= 0)
m.c65 = Constraint(expr= - 0.2*m.b4 + m.x144 >= 0)
m.c66 = Constraint(expr= - 0.2*m.b5 + m.x146 >= 0)
m.c67 = Constraint(expr= - 0.2*m.b6 + m.x148 >= 0)
m.c68 = Constraint(expr= - 0.2*m.b7 + m.x150 >= 0)
m.c69 = Constraint(expr= - 0.2*m.b8 + m.x152 >= 0)
m.c70 = Constraint(expr= - 0.2*m.b9 + m.x154 >= 0)
m.c71 = Constraint(expr= - 0.2*m.b10 + m.x156 >= 0)
m.c72 = Constraint(expr= - 0.25*m.b11 + m.x158 >= 0)
m.c73 = Constraint(expr= - 0.25*m.b12 + m.x160 >= 0)
m.c74 = Constraint(expr= - 0.25*m.b13 + m.x162 >= 0)
m.c75 = Constraint(expr= - 0.25*m.b14 + m.x164 >= 0)
m.c76 = Constraint(expr= - 0.25*m.b15 + m.x166 >= 0)
m.c77 = Constraint(expr= - 0.25*m.b16 + m.x168 >= 0)
m.c78 = Constraint(expr= - 0.4*m.b17 + m.x170 >= 0)
m.c79 = Constraint(expr= - 0.4*m.b18 + m.x172 >= 0)
m.c80 = Constraint(expr= - 0.4*m.b19 + m.x174 >= 0)
m.c81 = Constraint(expr= - 0.4*m.b20 + m.x176 >= 0)
m.c82 = Constraint(expr= - 0.4*m.b21 + m.x178 >= 0)
m.c83 = Constraint(expr= - 0.4*m.b22 + m.x180 >= 0)
m.c84 = Constraint(expr= - 0.24*m.b23 + m.x182 >= 0)
m.c85 = Constraint(expr= - 0.24*m.b24 + m.x184 >= 0)
m.c86 = Constraint(expr= - 0.24*m.b25 + m.x186 >= 0)
m.c87 = Constraint(expr= - 0.24*m.b26 + m.x188 >= 0)
m.c88 = Constraint(expr= - 0.24*m.b27 + m.x190 >= 0)
m.c89 = Constraint(expr= - 0.24*m.b28 + m.x192 >= 0)
# Matching upper bounds: x <= u*b with u in {0.8, 0.5, 0.7, 0.58}.
m.c90 = Constraint(expr= - 0.8*m.b2 + m.x140 <= 0)
m.c91 = Constraint(expr= - 0.8*m.b3 + m.x142 <= 0)
m.c92 = Constraint(expr= - 0.8*m.b4 + m.x144 <= 0)
m.c93 = Constraint(expr= - 0.8*m.b5 + m.x146 <= 0)
m.c94 = Constraint(expr= - 0.8*m.b6 + m.x148 <= 0)
m.c95 = Constraint(expr= - 0.8*m.b7 + m.x150 <= 0)
m.c96 = Constraint(expr= - 0.8*m.b8 + m.x152 <= 0)
m.c97 = Constraint(expr= - 0.8*m.b9 + m.x154 <= 0)
m.c98 = Constraint(expr= - 0.8*m.b10 + m.x156 <= 0)
m.c99 = Constraint(expr= - 0.5*m.b11 + m.x158 <= 0)
m.c100 = Constraint(expr= - 0.5*m.b12 + m.x160 <= 0)
m.c101 = Constraint(expr= - 0.5*m.b13 + m.x162 <= 0)
m.c102 = Constraint(expr= - 0.5*m.b14 + m.x164 <= 0)
m.c103 = Constraint(expr= - 0.5*m.b15 + m.x166 <= 0)
m.c104 = Constraint(expr= - 0.5*m.b16 + m.x168 <= 0)
m.c105 = Constraint(expr= - 0.7*m.b17 + m.x170 <= 0)
m.c106 = Constraint(expr= - 0.7*m.b18 + m.x172 <= 0)
m.c107 = Constraint(expr= - 0.7*m.b19 + m.x174 <= 0)
m.c108 = Constraint(expr= - 0.7*m.b20 + m.x176 <= 0)
m.c109 = Constraint(expr= - 0.7*m.b21 + m.x178 <= 0)
m.c110 = Constraint(expr= - 0.7*m.b22 + m.x180 <= 0)
m.c111 = Constraint(expr= - 0.58*m.b23 + m.x182 <= 0)
m.c112 = Constraint(expr= - 0.58*m.b24 + m.x184 <= 0)
m.c113 = Constraint(expr= - 0.58*m.b25 + m.x186 <= 0)
m.c114 = Constraint(expr= - 0.58*m.b26 + m.x188 <= 0)
m.c115 = Constraint(expr= - 0.58*m.b27 + m.x190 <= 0)
m.c116 = Constraint(expr= - 0.58*m.b28 + m.x192 <= 0)
# c117-c125: constant offsets (x_new - x_old == 60 / 90 / 103).
m.c117 = Constraint(expr= - m.x122 + m.x194 == 60)
m.c118 = Constraint(expr= - m.x124 + m.x195 == 60)
m.c119 = Constraint(expr= - m.x126 + m.x196 == 60)
m.c120 = Constraint(expr= - m.x128 + m.x197 == 90)
m.c121 = Constraint(expr= - m.x130 + m.x198 == 90)
m.c122 = Constraint(expr= - m.x132 + m.x199 == 90)
m.c123 = Constraint(expr= - m.x134 + m.x200 == 103)
m.c124 = Constraint(expr= - m.x136 + m.x201 == 103)
m.c125 = Constraint(expr= - m.x138 + m.x202 == 103)
# c126-c143: three-term splitting/balance equalities (a == b + c form).
m.c126 = Constraint(expr= - m.x194 + m.x203 - m.x204 == 0)
m.c127 = Constraint(expr= - m.x195 + m.x205 - m.x206 == 0)
m.c128 = Constraint(expr= - m.x196 + m.x207 - m.x208 == 0)
m.c129 = Constraint(expr= m.x209 - m.x210 - m.x211 == 0)
m.c130 = Constraint(expr= m.x212 - m.x213 - m.x214 == 0)
m.c131 = Constraint(expr= m.x215 - m.x216 - m.x217 == 0)
m.c132 = Constraint(expr= - m.x200 + m.x218 - m.x219 == 0)
m.c133 = Constraint(expr= - m.x201 + m.x220 - m.x221 == 0)
m.c134 = Constraint(expr= - m.x202 + m.x222 - m.x223 == 0)
m.c135 = Constraint(expr= m.x203 - m.x224 - m.x225 == 0)
m.c136 = Constraint(expr= m.x205 - m.x226 - m.x227 == 0)
m.c137 = Constraint(expr= m.x207 - m.x228 - m.x229 == 0)
m.c138 = Constraint(expr= - m.x194 + m.x209 - m.x230 == 0)
m.c139 = Constraint(expr= - m.x195 + m.x212 - m.x231 == 0)
m.c140 = Constraint(expr= - m.x196 + m.x215 - m.x232 == 0)
m.c141 = Constraint(expr= - m.x197 + m.x218 - m.x233 == 0)
m.c142 = Constraint(expr= - m.x198 + m.x220 - m.x234 == 0)
m.c143 = Constraint(expr= - m.x199 + m.x222 - m.x235 == 0)
# c144-c170: x236-x262 are coupled to the on/off variables from c63-c116:
# l*b - x + y <= l, i.e. y <= x + l*(1 - b).
m.c144 = Constraint(expr= 0.2*m.b2 - m.x140 + m.x236 <= 0.2)
m.c145 = Constraint(expr= 0.2*m.b3 - m.x142 + m.x237 <= 0.2)
m.c146 = Constraint(expr= 0.2*m.b4 - m.x144 + m.x238 <= 0.2)
m.c147 = Constraint(expr= 0.2*m.b5 - m.x146 + m.x239 <= 0.2)
m.c148 = Constraint(expr= 0.2*m.b6 - m.x148 + m.x240 <= 0.2)
m.c149 = Constraint(expr= 0.2*m.b7 - m.x150 + m.x241 <= 0.2)
m.c150 = Constraint(expr= 0.2*m.b8 - m.x152 + m.x242 <= 0.2)
m.c151 = Constraint(expr= 0.2*m.b9 - m.x154 + m.x243 <= 0.2)
m.c152 = Constraint(expr= 0.2*m.b10 - m.x156 + m.x244 <= 0.2)
m.c153 = Constraint(expr= 0.25*m.b11 - m.x158 + m.x245 <= 0.25)
m.c154 = Constraint(expr= 0.25*m.b12 - m.x160 + m.x246 <= 0.25)
m.c155 = Constraint(expr= 0.25*m.b13 - m.x162 + m.x247 <= 0.25)
m.c156 = Constraint(expr= 0.25*m.b14 - m.x164 + m.x248 <= 0.25)
m.c157 = Constraint(expr= 0.25*m.b15 - m.x166 + m.x249 <= 0.25)
m.c158 = Constraint(expr= 0.25*m.b16 - m.x168 + m.x250 <= 0.25)
m.c159 = Constraint(expr= 0.4*m.b17 - m.x170 + m.x251 <= 0.4)
m.c160 = Constraint(expr= 0.4*m.b18 - m.x172 + m.x252 <= 0.4)
m.c161 = Constraint(expr= 0.4*m.b19 - m.x174 + m.x253 <= 0.4)
m.c162 = Constraint(expr= 0.4*m.b20 - m.x176 + m.x254 <= 0.4)
m.c163 = Constraint(expr= 0.4*m.b21 - m.x178 + m.x255 <= 0.4)
m.c164 = Constraint(expr= 0.4*m.b22 - m.x180 + m.x256 <= 0.4)
m.c165 = Constraint(expr= 0.24*m.b23 - m.x182 + m.x257 <= 0.24)
m.c166 = Constraint(expr= 0.24*m.b24 - m.x184 + m.x258 <= 0.24)
m.c167 = Constraint(expr= 0.24*m.b25 - m.x186 + m.x259 <= 0.24)
m.c168 = Constraint(expr= 0.24*m.b26 - m.x188 + m.x260 <= 0.24)
m.c169 = Constraint(expr= 0.24*m.b27 - m.x190 + m.x261 <= 0.24)
m.c170 = Constraint(expr= 0.24*m.b28 - m.x192 + m.x262 <= 0.24)
# c171-c197: lower side of the same coupling, y >= x.
m.c171 = Constraint(expr= - m.x140 + m.x236 >= 0)
m.c172 = Constraint(expr= - m.x142 + m.x237 >= 0)
m.c173 = Constraint(expr= - m.x144 + m.x238 >= 0)
m.c174 = Constraint(expr= - m.x146 + m.x239 >= 0)
m.c175 = Constraint(expr= - m.x148 + m.x240 >= 0)
m.c176 = Constraint(expr= - m.x150 + m.x241 >= 0)
m.c177 = Constraint(expr= - m.x152 + m.x242 >= 0)
m.c178 = Constraint(expr= - m.x154 + m.x243 >= 0)
m.c179 = Constraint(expr= - m.x156 + m.x244 >= 0)
m.c180 = Constraint(expr= - m.x158 + m.x245 >= 0)
m.c181 = Constraint(expr= - m.x160 + m.x246 >= 0)
m.c182 = Constraint(expr= - m.x162 + m.x247 >= 0)
m.c183 = Constraint(expr= - m.x164 + m.x248 >= 0)
m.c184 = Constraint(expr= - m.x166 + m.x249 >= 0)
m.c185 = Constraint(expr= - m.x168 + m.x250 >= 0)
m.c186 = Constraint(expr= - m.x170 + m.x251 >= 0)
m.c187 = Constraint(expr= - m.x172 + m.x252 >= 0)
m.c188 = Constraint(expr= - m.x174 + m.x253 >= 0)
m.c189 = Constraint(expr= - m.x176 + m.x254 >= 0)
m.c190 = Constraint(expr= - m.x178 + m.x255 >= 0)
m.c191 = Constraint(expr= - m.x180 + m.x256 >= 0)
m.c192 = Constraint(expr= - m.x182 + m.x257 >= 0)
m.c193 = Constraint(expr= - m.x184 + m.x258 >= 0)
m.c194 = Constraint(expr= - m.x186 + m.x259 >= 0)
m.c195 = Constraint(expr= - m.x188 + m.x260 >= 0)
m.c196 = Constraint(expr= - m.x190 + m.x261 >= 0)
m.c197 = Constraint(expr= - m.x192 + m.x262 >= 0)
# c198-c224: binary-relaxed caps on x236-x262 (y <= c + d*b).
m.c198 = Constraint(expr= - 0.6*m.b2 + m.x236 <= 0.2)
m.c199 = Constraint(expr= - 0.6*m.b3 + m.x237 <= 0.2)
m.c200 = Constraint(expr= - 0.6*m.b4 + m.x238 <= 0.2)
m.c201 = Constraint(expr= - 0.6*m.b5 + m.x239 <= 0.2)
m.c202 = Constraint(expr= - 0.6*m.b6 + m.x240 <= 0.2)
m.c203 = Constraint(expr= - 0.6*m.b7 + m.x241 <= 0.2)
m.c204 = Constraint(expr= - 0.6*m.b8 + m.x242 <= 0.2)
m.c205 = Constraint(expr= - 0.6*m.b9 + m.x243 <= 0.2)
m.c206 = Constraint(expr= - 0.6*m.b10 + m.x244 <= 0.2)
m.c207 = Constraint(expr= - 0.25*m.b11 + m.x245 <= 0.25)
m.c208 = Constraint(expr= - 0.25*m.b12 + m.x246 <= 0.25)
m.c209 = Constraint(expr= - 0.25*m.b13 + m.x247 <= 0.25)
m.c210 = Constraint(expr= - 0.25*m.b14 + m.x248 <= 0.25)
m.c211 = Constraint(expr= - 0.25*m.b15 + m.x249 <= 0.25)
m.c212 = Constraint(expr= - 0.25*m.b16 + m.x250 <= 0.25)
m.c213 = Constraint(expr= - 0.3*m.b17 + m.x251 <= 0.4)
m.c214 = Constraint(expr= - 0.3*m.b18 + m.x252 <= 0.4)
m.c215 = Constraint(expr= - 0.3*m.b19 + m.x253 <= 0.4)
m.c216 = Constraint(expr= - 0.3*m.b20 + m.x254 <= 0.4)
m.c217 = Constraint(expr= - 0.3*m.b21 + m.x255 <= 0.4)
m.c218 = Constraint(expr= - 0.3*m.b22 + m.x256 <= 0.4)
m.c219 = Constraint(expr= - 0.34*m.b23 + m.x257 <= 0.24)
m.c220 = Constraint(expr= - 0.34*m.b24 + m.x258 <= 0.24)
m.c221 = Constraint(expr= - 0.34*m.b25 + m.x259 <= 0.24)
m.c222 = Constraint(expr= - 0.34*m.b26 + m.x260 <= 0.24)
m.c223 = Constraint(expr= - 0.34*m.b27 + m.x261 <= 0.24)
m.c224 = Constraint(expr= - 0.34*m.b28 + m.x262 <= 0.24)
# c225-c236: similar caps on x263-x274 (complement variables used in the
# bilinear products at the end of the file).
m.c225 = Constraint(expr= - 0.4*m.b2 + m.x263 <= 0.6)
m.c226 = Constraint(expr= - 0.4*m.b3 + m.x264 <= 0.6)
m.c227 = Constraint(expr= - 0.4*m.b4 + m.x265 <= 0.6)
m.c228 = Constraint(expr= - 0.2*m.b11 + m.x266 <= 0.8)
m.c229 = Constraint(expr= - 0.2*m.b12 + m.x267 <= 0.8)
m.c230 = Constraint(expr= - 0.2*m.b13 + m.x268 <= 0.8)
m.c231 = Constraint(expr= - 0.15*m.b17 + m.x269 <= 0.85)
m.c232 = Constraint(expr= - 0.15*m.b18 + m.x270 <= 0.85)
m.c233 = Constraint(expr= - 0.15*m.b19 + m.x271 <= 0.85)
m.c234 = Constraint(expr= - 0.3*m.b23 + m.x272 <= 0.7)
m.c235 = Constraint(expr= - 0.3*m.b24 + m.x273 <= 0.7)
m.c236 = Constraint(expr= - 0.3*m.b25 + m.x274 <= 0.7)
# c237-c251: precedence/ordering between binaries: b_i >= b_j, so the later
# binary can only switch on if the earlier one is on.
m.c237 = Constraint(expr= m.b2 - m.b5 >= 0)
m.c238 = Constraint(expr= m.b3 - m.b6 >= 0)
m.c239 = Constraint(expr= m.b4 - m.b7 >= 0)
m.c240 = Constraint(expr= m.b5 - m.b8 >= 0)
m.c241 = Constraint(expr= m.b6 - m.b9 >= 0)
m.c242 = Constraint(expr= m.b7 - m.b10 >= 0)
m.c243 = Constraint(expr= m.b11 - m.b14 >= 0)
m.c244 = Constraint(expr= m.b12 - m.b15 >= 0)
m.c245 = Constraint(expr= m.b13 - m.b16 >= 0)
m.c246 = Constraint(expr= m.b17 - m.b20 >= 0)
m.c247 = Constraint(expr= m.b18 - m.b21 >= 0)
m.c248 = Constraint(expr= m.b19 - m.b22 >= 0)
m.c249 = Constraint(expr= m.b23 - m.b26 >= 0)
m.c250 = Constraint(expr= m.b24 - m.b27 >= 0)
m.c251 = Constraint(expr= m.b25 - m.b28 >= 0)
# c252-c260: aggregation rows — each x99/x101/... equals the sum of the
# on/off variables from c63-c116 feeding into it.
m.c252 = Constraint(expr= m.x99 - m.x140 - m.x146 - m.x152 == 0)
m.c253 = Constraint(expr= m.x101 - m.x142 - m.x148 - m.x154 == 0)
m.c254 = Constraint(expr= m.x103 - m.x144 - m.x150 - m.x156 == 0)
m.c255 = Constraint(expr= m.x105 - m.x158 - m.x164 - m.x170 - m.x176 == 0)
m.c256 = Constraint(expr= m.x107 - m.x160 - m.x166 - m.x172 - m.x178 == 0)
m.c257 = Constraint(expr= m.x109 - m.x162 - m.x168 - m.x174 - m.x180 == 0)
m.c258 = Constraint(expr= m.x114 - m.x182 - m.x188 == 0)
m.c259 = Constraint(expr= m.x116 - m.x184 - m.x190 == 0)
m.c260 = Constraint(expr= m.x118 - m.x186 - m.x192 == 0)
# c261-c287: indicator-style lower bounds with constant M = 2000:
# -M*b + u - v >= -M, i.e. u >= v whenever b = 1 (big-M form;
# NOTE(review): inferred from the algebra — confirm against the source).
m.c261 = Constraint(expr= - 2000*m.b2 + m.x141 - m.x225 >= -2000)
m.c262 = Constraint(expr= - 2000*m.b3 + m.x149 - m.x227 >= -2000)
m.c263 = Constraint(expr= - 2000*m.b4 + m.x157 - m.x229 >= -2000)
m.c264 = Constraint(expr= - 2000*m.b5 + m.x165 - m.x225 >= -2000)
m.c265 = Constraint(expr= - 2000*m.b6 + m.x171 - m.x227 >= -2000)
m.c266 = Constraint(expr= - 2000*m.b7 + m.x177 - m.x229 >= -2000)
m.c267 = Constraint(expr= - 2000*m.b8 + m.x183 - m.x225 >= -2000)
m.c268 = Constraint(expr= - 2000*m.b9 + m.x189 - m.x227 >= -2000)
m.c269 = Constraint(expr= - 2000*m.b10 + m.x29 - m.x229 >= -2000)
m.c270 = Constraint(expr= - 2000*m.b11 + m.x32 - m.x230 >= -2000)
m.c271 = Constraint(expr= - 2000*m.b12 + m.x36 - m.x231 >= -2000)
m.c272 = Constraint(expr= - 2000*m.b13 + m.x40 - m.x232 >= -2000)
m.c273 = Constraint(expr= - 2000*m.b14 + m.x44 - m.x230 >= -2000)
m.c274 = Constraint(expr= - 2000*m.b15 + m.x47 - m.x231 >= -2000)
m.c275 = Constraint(expr= - 2000*m.b16 + m.x50 - m.x232 >= -2000)
m.c276 = Constraint(expr= - 2000*m.b17 + m.x53 - m.x230 >= -2000)
m.c277 = Constraint(expr= - 2000*m.b18 + m.x57 - m.x231 >= -2000)
m.c278 = Constraint(expr= - 2000*m.b19 + m.x61 - m.x232 >= -2000)
m.c279 = Constraint(expr= - 2000*m.b20 + m.x65 - m.x230 >= -2000)
m.c280 = Constraint(expr= - 2000*m.b21 + m.x68 - m.x231 >= -2000)
m.c281 = Constraint(expr= - 2000*m.b22 + m.x71 - m.x232 >= -2000)
m.c282 = Constraint(expr= - 2000*m.b23 + m.x74 - m.x233 >= -2000)
m.c283 = Constraint(expr= - 2000*m.b24 + m.x78 - m.x234 >= -2000)
m.c284 = Constraint(expr= - 2000*m.b25 + m.x82 - m.x235 >= -2000)
m.c285 = Constraint(expr= - 2000*m.b26 + m.x86 - m.x233 >= -2000)
m.c286 = Constraint(expr= - 2000*m.b27 + m.x89 - m.x234 >= -2000)
m.c287 = Constraint(expr= - 2000*m.b28 + m.x92 - m.x235 >= -2000)
# c288-c314: matching indicator upper bounds, M in {1049, 1065, 1095}:
# M*b + u - v <= M, i.e. u <= v whenever b = 1.
m.c288 = Constraint(expr= 1049*m.b2 + m.x141 - m.x225 <= 1049)
m.c289 = Constraint(expr= 1049*m.b3 + m.x149 - m.x227 <= 1049)
m.c290 = Constraint(expr= 1049*m.b4 + m.x157 - m.x229 <= 1049)
m.c291 = Constraint(expr= 1049*m.b5 + m.x165 - m.x225 <= 1049)
m.c292 = Constraint(expr= 1049*m.b6 + m.x171 - m.x227 <= 1049)
m.c293 = Constraint(expr= 1049*m.b7 + m.x177 - m.x229 <= 1049)
m.c294 = Constraint(expr= 1049*m.b8 + m.x183 - m.x225 <= 1049)
m.c295 = Constraint(expr= 1049*m.b9 + m.x189 - m.x227 <= 1049)
m.c296 = Constraint(expr= 1049*m.b10 + m.x29 - m.x229 <= 1049)
m.c297 = Constraint(expr= 1065*m.b11 + m.x32 - m.x230 <= 1065)
m.c298 = Constraint(expr= 1065*m.b12 + m.x36 - m.x231 <= 1065)
m.c299 = Constraint(expr= 1065*m.b13 + m.x40 - m.x232 <= 1065)
m.c300 = Constraint(expr= 1065*m.b14 + m.x44 - m.x230 <= 1065)
m.c301 = Constraint(expr= 1065*m.b15 + m.x47 - m.x231 <= 1065)
m.c302 = Constraint(expr= 1065*m.b16 + m.x50 - m.x232 <= 1065)
m.c303 = Constraint(expr= 1065*m.b17 + m.x53 - m.x230 <= 1065)
m.c304 = Constraint(expr= 1065*m.b18 + m.x57 - m.x231 <= 1065)
m.c305 = Constraint(expr= 1065*m.b19 + m.x61 - m.x232 <= 1065)
m.c306 = Constraint(expr= 1065*m.b20 + m.x65 - m.x230 <= 1065)
m.c307 = Constraint(expr= 1065*m.b21 + m.x68 - m.x231 <= 1065)
m.c308 = Constraint(expr= 1065*m.b22 + m.x71 - m.x232 <= 1065)
m.c309 = Constraint(expr= 1095*m.b23 + m.x74 - m.x233 <= 1095)
m.c310 = Constraint(expr= 1095*m.b24 + m.x78 - m.x234 <= 1095)
m.c311 = Constraint(expr= 1095*m.b25 + m.x82 - m.x235 <= 1095)
m.c312 = Constraint(expr= 1095*m.b26 + m.x86 - m.x233 <= 1095)
m.c313 = Constraint(expr= 1095*m.b27 + m.x89 - m.x234 <= 1095)
m.c314 = Constraint(expr= 1095*m.b28 + m.x92 - m.x235 <= 1095)
# c315-c320: simple ordering inequalities between paired variables.
m.c315 = Constraint(expr= - m.x197 + m.x210 >= 0)
m.c316 = Constraint(expr= - m.x198 + m.x213 >= 0)
m.c317 = Constraint(expr= - m.x199 + m.x216 >= 0)
m.c318 = Constraint(expr= m.x200 - m.x275 >= 0)
m.c319 = Constraint(expr= m.x201 - m.x276 >= 0)
m.c320 = Constraint(expr= m.x202 - m.x277 >= 0)
# c321-c347: the rows that drive the objective.  Each constrains an
# objective variable (coefficient -0.309838295393634) against a weighted
# combination of four auxiliary variables; those auxiliaries are defined
# below (c348 onward) as powers/products of model variables, so these rows
# are linear here but polynomial in the original variables.  Coefficient
# sets repeat in groups (c321-c329, c330-c335, c336-c341, c342-c347).
m.c321 = Constraint(expr= - 0.309838295393634*m.x278 + 13.94696158*m.x279 + 24.46510819*m.x280 - 7.28623839*m.x281
    - 23.57687014*m.x282 <= 0)
m.c322 = Constraint(expr= - 0.309838295393634*m.x283 + 13.94696158*m.x284 + 24.46510819*m.x285 - 7.28623839*m.x286
    - 23.57687014*m.x287 <= 0)
m.c323 = Constraint(expr= - 0.309838295393634*m.x288 + 13.94696158*m.x289 + 24.46510819*m.x290 - 7.28623839*m.x291
    - 23.57687014*m.x292 <= 0)
m.c324 = Constraint(expr= - 0.309838295393634*m.x293 + 13.94696158*m.x294 + 24.46510819*m.x295 - 7.28623839*m.x296
    - 23.57687014*m.x297 <= 0)
m.c325 = Constraint(expr= - 0.309838295393634*m.x298 + 13.94696158*m.x299 + 24.46510819*m.x300 - 7.28623839*m.x301
    - 23.57687014*m.x302 <= 0)
m.c326 = Constraint(expr= - 0.309838295393634*m.x303 + 13.94696158*m.x304 + 24.46510819*m.x305 - 7.28623839*m.x306
    - 23.57687014*m.x307 <= 0)
m.c327 = Constraint(expr= - 0.309838295393634*m.x308 + 13.94696158*m.x309 + 24.46510819*m.x310 - 7.28623839*m.x311
    - 23.57687014*m.x312 <= 0)
m.c328 = Constraint(expr= - 0.309838295393634*m.x313 + 13.94696158*m.x314 + 24.46510819*m.x315 - 7.28623839*m.x316
    - 23.57687014*m.x317 <= 0)
m.c329 = Constraint(expr= - 0.309838295393634*m.x318 + 13.94696158*m.x319 + 24.46510819*m.x320 - 7.28623839*m.x321
    - 23.57687014*m.x322 <= 0)
m.c330 = Constraint(expr= - 0.309838295393634*m.x323 + 29.29404529*m.x324 - 108.39408287*m.x325 + 442.21990639*m.x326
    - 454.58448169*m.x327 <= 0)
m.c331 = Constraint(expr= - 0.309838295393634*m.x328 + 29.29404529*m.x329 - 108.39408287*m.x330 + 442.21990639*m.x331
    - 454.58448169*m.x332 <= 0)
m.c332 = Constraint(expr= - 0.309838295393634*m.x333 + 29.29404529*m.x334 - 108.39408287*m.x335 + 442.21990639*m.x336
    - 454.58448169*m.x337 <= 0)
m.c333 = Constraint(expr= - 0.309838295393634*m.x338 + 29.29404529*m.x339 - 108.39408287*m.x340 + 442.21990639*m.x341
    - 454.58448169*m.x342 <= 0)
m.c334 = Constraint(expr= 442.21990639*m.x343 - 454.58448169*m.x344 - 0.309838295393634*m.x345 + 29.29404529*m.x346
    - 108.39408287*m.x347 <= 0)
m.c335 = Constraint(expr= - 0.309838295393634*m.x348 + 29.29404529*m.x349 - 108.39408287*m.x350 + 442.21990639*m.x351
    - 454.58448169*m.x352 <= 0)
m.c336 = Constraint(expr= - 0.309838295393634*m.x353 + 25.92674585*m.x354 + 18.13482123*m.x355 + 22.12766012*m.x356
    - 42.68950769*m.x357 <= 0)
m.c337 = Constraint(expr= - 0.309838295393634*m.x358 + 25.92674585*m.x359 + 18.13482123*m.x360 + 22.12766012*m.x361
    - 42.68950769*m.x362 <= 0)
m.c338 = Constraint(expr= - 0.309838295393634*m.x363 + 25.92674585*m.x364 + 18.13482123*m.x365 + 22.12766012*m.x366
    - 42.68950769*m.x367 <= 0)
m.c339 = Constraint(expr= - 0.309838295393634*m.x368 + 25.92674585*m.x369 + 18.13482123*m.x370 + 22.12766012*m.x371
    - 42.68950769*m.x372 <= 0)
m.c340 = Constraint(expr= - 0.309838295393634*m.x373 + 25.92674585*m.x374 + 18.13482123*m.x375 + 22.12766012*m.x376
    - 42.68950769*m.x377 <= 0)
m.c341 = Constraint(expr= - 0.309838295393634*m.x378 + 25.92674585*m.x379 + 18.13482123*m.x380 + 22.12766012*m.x381
    - 42.68950769*m.x382 <= 0)
m.c342 = Constraint(expr= - 0.309838295393634*m.x383 + 17.4714791*m.x384 - 39.98407808*m.x385 + 134.55943082*m.x386
    - 135.88441782*m.x387 <= 0)
m.c343 = Constraint(expr= - 0.309838295393634*m.x388 + 17.4714791*m.x389 - 39.98407808*m.x390 + 134.55943082*m.x391
    - 135.88441782*m.x392 <= 0)
m.c344 = Constraint(expr= - 0.309838295393634*m.x393 + 17.4714791*m.x394 - 39.98407808*m.x395 + 134.55943082*m.x396
    - 135.88441782*m.x397 <= 0)
m.c345 = Constraint(expr= - 0.309838295393634*m.x398 + 17.4714791*m.x399 - 39.98407808*m.x400 + 134.55943082*m.x401
    - 135.88441782*m.x402 <= 0)
m.c346 = Constraint(expr= - 0.309838295393634*m.x403 + 17.4714791*m.x404 - 39.98407808*m.x405 + 134.55943082*m.x406
    - 135.88441782*m.x407 <= 0)
m.c347 = Constraint(expr= - 0.309838295393634*m.x408 + 17.4714791*m.x409 - 39.98407808*m.x410 + 134.55943082*m.x411
    - 135.88441782*m.x412 <= 0)
# c348-c544: nonlinear defining constraints.  The generator isolates every
# nonlinearity into a dedicated auxiliary variable: one constraint defines
# the auxiliary as a power (x**2, x**3) or a bilinear product, and a second
# linear equality aliases that auxiliary into the variable used by the
# linear rows above.  Only these rows make the model nonconvex.
m.c348 = Constraint(expr=m.x98**2 - m.x413 == 0)
m.c349 = Constraint(expr= m.x204 - 5*m.x413 == 0)
m.c350 = Constraint(expr=m.x100**2 - m.x414 == 0)
m.c351 = Constraint(expr= m.x206 - 5*m.x414 == 0)
m.c352 = Constraint(expr=m.x102**2 - m.x415 == 0)
m.c353 = Constraint(expr= m.x208 - 5*m.x415 == 0)
m.c354 = Constraint(expr=m.x104**2 - m.x416 == 0)
m.c355 = Constraint(expr= m.x211 - 4*m.x416 == 0)
m.c356 = Constraint(expr=m.x106**2 - m.x417 == 0)
m.c357 = Constraint(expr= m.x214 - 4*m.x417 == 0)
m.c358 = Constraint(expr=m.x108**2 - m.x418 == 0)
m.c359 = Constraint(expr= m.x217 - 4*m.x418 == 0)
m.c360 = Constraint(expr=m.x113**2 - m.x419 == 0)
m.c361 = Constraint(expr= m.x219 - 5*m.x419 == 0)
m.c362 = Constraint(expr=m.x115**2 - m.x420 == 0)
m.c363 = Constraint(expr= m.x221 - 5*m.x420 == 0)
m.c364 = Constraint(expr=m.x117**2 - m.x421 == 0)
m.c365 = Constraint(expr= m.x223 - 5*m.x421 == 0)
# Square/cube pairs: x422 = x140**2, x423 = x140**3, etc.; each aliased
# into the linear system (e.g. x143 == x422, x282 == x423).
m.c366 = Constraint(expr=m.x140**2 - m.x422 == 0)
m.c367 = Constraint(expr= m.x143 - m.x422 == 0)
m.c368 = Constraint(expr=m.x140**3 - m.x423 == 0)
m.c369 = Constraint(expr= m.x282 - m.x423 == 0)
m.c370 = Constraint(expr=m.x142**2 - m.x424 == 0)
m.c371 = Constraint(expr= m.x151 - m.x424 == 0)
m.c372 = Constraint(expr=m.x142**3 - m.x425 == 0)
m.c373 = Constraint(expr= m.x287 - m.x425 == 0)
m.c374 = Constraint(expr=m.x144**2 - m.x426 == 0)
m.c375 = Constraint(expr= m.x159 - m.x426 == 0)
m.c376 = Constraint(expr=m.x144**3 - m.x427 == 0)
m.c377 = Constraint(expr= m.x292 - m.x427 == 0)
m.c378 = Constraint(expr=m.x146**2 - m.x428 == 0)
m.c379 = Constraint(expr= m.x167 - m.x428 == 0)
m.c380 = Constraint(expr=m.x146**3 - m.x429 == 0)
m.c381 = Constraint(expr= m.x297 - m.x429 == 0)
m.c382 = Constraint(expr=m.x148**2 - m.x430 == 0)
m.c383 = Constraint(expr= m.x175 - m.x430 == 0)
m.c384 = Constraint(expr=m.x148**3 - m.x431 == 0)
m.c385 = Constraint(expr= m.x302 - m.x431 == 0)
m.c386 = Constraint(expr=m.x150**2 - m.x432 == 0)
m.c387 = Constraint(expr= m.x181 - m.x432 == 0)
m.c388 = Constraint(expr=m.x150**3 - m.x433 == 0)
m.c389 = Constraint(expr= m.x307 - m.x433 == 0)
m.c390 = Constraint(expr=m.x152**2 - m.x434 == 0)
m.c391 = Constraint(expr= m.x187 - m.x434 == 0)
m.c392 = Constraint(expr=m.x152**3 - m.x435 == 0)
m.c393 = Constraint(expr= m.x312 - m.x435 == 0)
m.c394 = Constraint(expr=m.x154**2 - m.x436 == 0)
m.c395 = Constraint(expr= m.x191 - m.x436 == 0)
m.c396 = Constraint(expr=m.x154**3 - m.x437 == 0)
m.c397 = Constraint(expr= m.x317 - m.x437 == 0)
m.c398 = Constraint(expr=m.x156**2 - m.x438 == 0)
m.c399 = Constraint(expr= m.x30 - m.x438 == 0)
m.c400 = Constraint(expr=m.x156**3 - m.x439 == 0)
m.c401 = Constraint(expr= m.x322 - m.x439 == 0)
m.c402 = Constraint(expr=m.x158**2 - m.x440 == 0)
m.c403 = Constraint(expr= m.x35 - m.x440 == 0)
m.c404 = Constraint(expr=m.x158**3 - m.x441 == 0)
m.c405 = Constraint(expr= m.x327 - m.x441 == 0)
m.c406 = Constraint(expr=m.x160**2 - m.x442 == 0)
m.c407 = Constraint(expr= m.x37 - m.x442 == 0)
m.c408 = Constraint(expr=m.x160**3 - m.x443 == 0)
m.c409 = Constraint(expr= m.x332 - m.x443 == 0)
m.c410 = Constraint(expr=m.x162**2 - m.x444 == 0)
m.c411 = Constraint(expr= m.x42 - m.x444 == 0)
m.c412 = Constraint(expr=m.x162**3 - m.x445 == 0)
m.c413 = Constraint(expr= m.x337 - m.x445 == 0)
m.c414 = Constraint(expr=m.x164**2 - m.x446 == 0)
m.c415 = Constraint(expr= m.x46 - m.x446 == 0)
m.c416 = Constraint(expr=m.x164**3 - m.x447 == 0)
m.c417 = Constraint(expr= m.x342 - m.x447 == 0)
m.c418 = Constraint(expr=m.x166**2 - m.x448 == 0)
m.c419 = Constraint(expr= m.x48 - m.x448 == 0)
m.c420 = Constraint(expr=m.x166**3 - m.x449 == 0)
m.c421 = Constraint(expr= m.x344 - m.x449 == 0)
m.c422 = Constraint(expr=m.x168**2 - m.x450 == 0)
m.c423 = Constraint(expr= m.x52 - m.x450 == 0)
m.c424 = Constraint(expr=m.x168**3 - m.x451 == 0)
m.c425 = Constraint(expr= m.x352 - m.x451 == 0)
# From here the auxiliaries are the x452-x499 block declared above; their
# declared bounds (u**2, u**3) match the bounds of the base variables
# (e.g. x170 <= 0.7 via c105, so x452 = x170**2 <= 0.49).
m.c426 = Constraint(expr=m.x170**2 - m.x452 == 0)
m.c427 = Constraint(expr= m.x56 - m.x452 == 0)
m.c428 = Constraint(expr=m.x170**3 - m.x453 == 0)
m.c429 = Constraint(expr= m.x357 - m.x453 == 0)
m.c430 = Constraint(expr=m.x172**2 - m.x454 == 0)
m.c431 = Constraint(expr= m.x58 - m.x454 == 0)
m.c432 = Constraint(expr=m.x172**3 - m.x455 == 0)
m.c433 = Constraint(expr= m.x362 - m.x455 == 0)
m.c434 = Constraint(expr=m.x174**2 - m.x456 == 0)
m.c435 = Constraint(expr= m.x64 - m.x456 == 0)
m.c436 = Constraint(expr=m.x174**3 - m.x457 == 0)
m.c437 = Constraint(expr= m.x367 - m.x457 == 0)
m.c438 = Constraint(expr=m.x176**2 - m.x458 == 0)
m.c439 = Constraint(expr= m.x67 - m.x458 == 0)
m.c440 = Constraint(expr=m.x176**3 - m.x459 == 0)
m.c441 = Constraint(expr= m.x372 - m.x459 == 0)
m.c442 = Constraint(expr=m.x178**2 - m.x460 == 0)
m.c443 = Constraint(expr= m.x69 - m.x460 == 0)
m.c444 = Constraint(expr=m.x178**3 - m.x461 == 0)
m.c445 = Constraint(expr= m.x377 - m.x461 == 0)
m.c446 = Constraint(expr=m.x180**2 - m.x462 == 0)
m.c447 = Constraint(expr= m.x73 - m.x462 == 0)
m.c448 = Constraint(expr=m.x180**3 - m.x463 == 0)
m.c449 = Constraint(expr= m.x382 - m.x463 == 0)
m.c450 = Constraint(expr=m.x182**2 - m.x464 == 0)
m.c451 = Constraint(expr= m.x76 - m.x464 == 0)
m.c452 = Constraint(expr=m.x182**3 - m.x465 == 0)
m.c453 = Constraint(expr= m.x387 - m.x465 == 0)
m.c454 = Constraint(expr=m.x184**2 - m.x466 == 0)
m.c455 = Constraint(expr= m.x81 - m.x466 == 0)
m.c456 = Constraint(expr=m.x184**3 - m.x467 == 0)
m.c457 = Constraint(expr= m.x392 - m.x467 == 0)
m.c458 = Constraint(expr=m.x186**2 - m.x468 == 0)
m.c459 = Constraint(expr= m.x85 - m.x468 == 0)
m.c460 = Constraint(expr=m.x186**3 - m.x469 == 0)
m.c461 = Constraint(expr= m.x397 - m.x469 == 0)
m.c462 = Constraint(expr=m.x188**2 - m.x470 == 0)
m.c463 = Constraint(expr= m.x87 - m.x470 == 0)
m.c464 = Constraint(expr=m.x188**3 - m.x471 == 0)
m.c465 = Constraint(expr= m.x402 - m.x471 == 0)
m.c466 = Constraint(expr=m.x190**2 - m.x472 == 0)
m.c467 = Constraint(expr= m.x90 - m.x472 == 0)
m.c468 = Constraint(expr=m.x190**3 - m.x473 == 0)
m.c469 = Constraint(expr= m.x407 - m.x473 == 0)
m.c470 = Constraint(expr=m.x192**2 - m.x474 == 0)
m.c471 = Constraint(expr= m.x94 - m.x474 == 0)
m.c472 = Constraint(expr=m.x192**3 - m.x475 == 0)
m.c473 = Constraint(expr= m.x412 - m.x475 == 0)
# c474 onward: bilinear products of on/off variables with the complement
# variables x263-x268, plus squares/cubes of the complements themselves
# (x476 = x263**2, x477 = x263**3, ...), again aliased into linear rows.
m.c474 = Constraint(expr=m.x140*m.x263 - m.x145 == 0)
m.c475 = Constraint(expr=m.x263*m.x422 - m.x281 == 0)
m.c476 = Constraint(expr=m.x146*m.x263 - m.x169 == 0)
m.c477 = Constraint(expr=m.x263*m.x428 - m.x296 == 0)
m.c478 = Constraint(expr=m.x152*m.x263 - m.x185 == 0)
m.c479 = Constraint(expr=m.x263*m.x434 - m.x311 == 0)
m.c480 = Constraint(expr=m.x263**2 - m.x476 == 0)
m.c481 = Constraint(expr= m.x147 - m.x476 == 0)
m.c482 = Constraint(expr=m.x140*m.x476 - m.x280 == 0)
m.c483 = Constraint(expr=m.x146*m.x476 - m.x295 == 0)
m.c484 = Constraint(expr=m.x152*m.x476 - m.x310 == 0)
m.c485 = Constraint(expr=m.x263**3 - m.x477 == 0)
m.c486 = Constraint(expr=m.b2*m.x477 - m.x279 == 0)
m.c487 = Constraint(expr=m.b5*m.x477 - m.x294 == 0)
m.c488 = Constraint(expr=m.b8*m.x477 - m.x309 == 0)
m.c489 = Constraint(expr=m.x142*m.x264 - m.x155 == 0)
m.c490 = Constraint(expr=m.x264*m.x424 - m.x286 == 0)
m.c491 = Constraint(expr=m.x148*m.x264 - m.x173 == 0)
m.c492 = Constraint(expr=m.x264*m.x430 - m.x301 == 0)
m.c493 = Constraint(expr=m.x154*m.x264 - m.x193 == 0)
m.c494 = Constraint(expr=m.x264*m.x436 - m.x316 == 0)
m.c495 = Constraint(expr=m.x264**2 - m.x478 == 0)
m.c496 = Constraint(expr= m.x153 - m.x478 == 0)
m.c497 = Constraint(expr=m.x142*m.x478 - m.x285 == 0)
m.c498 = Constraint(expr=m.x148*m.x478 - m.x300 == 0)
m.c499 = Constraint(expr=m.x154*m.x478 - m.x315 == 0)
m.c500 = Constraint(expr=m.x264**3 - m.x479 == 0)
m.c501 = Constraint(expr=m.b3*m.x479 - m.x284 == 0)
m.c502 = Constraint(expr=m.b6*m.x479 - m.x299 == 0)
m.c503 = Constraint(expr=m.b9*m.x479 - m.x314 == 0)
m.c504 = Constraint(expr=m.x144*m.x265 - m.x163 == 0)
m.c505 = Constraint(expr=m.x265*m.x426 - m.x291 == 0)
m.c506 = Constraint(expr=m.x150*m.x265 - m.x179 == 0)
m.c507 = Constraint(expr=m.x265*m.x432 - m.x306 == 0)
m.c508 = Constraint(expr=m.x156*m.x265 - m.x31 == 0)
m.c509 = Constraint(expr=m.x265*m.x438 - m.x321 == 0)
m.c510 = Constraint(expr=m.x265**2 - m.x480 == 0)
m.c511 = Constraint(expr= m.x161 - m.x480 == 0)
m.c512 = Constraint(expr=m.x144*m.x480 - m.x290 == 0)
m.c513 = Constraint(expr=m.x150*m.x480 - m.x305 == 0)
m.c514 = Constraint(expr=m.x156*m.x480 - m.x320 == 0)
m.c515 = Constraint(expr=m.x265**3 - m.x481 == 0)
m.c516 = Constraint(expr=m.b4*m.x481 - m.x289 == 0)
m.c517 = Constraint(expr=m.b7*m.x481 - m.x304 == 0)
m.c518 = Constraint(expr=m.b10*m.x481 - m.x319 == 0)
m.c519 = Constraint(expr=m.x158*m.x266 - m.x34 == 0)
m.c520 = Constraint(expr=m.x266*m.x440 - m.x326 == 0)
m.c521 = Constraint(expr=m.x164*m.x266 - m.x45 == 0)
m.c522 = Constraint(expr=m.x266*m.x446 - m.x341 == 0)
m.c523 = Constraint(expr=m.x266**2 - m.x482 == 0)
m.c524 = Constraint(expr= m.x33 - m.x482 == 0)
m.c525 = Constraint(expr=m.x158*m.x482 - m.x325 == 0)
m.c526 = Constraint(expr=m.x164*m.x482 - m.x340 == 0)
m.c527 = Constraint(expr=m.x266**3 - m.x483 == 0)
m.c528 = Constraint(expr=m.b11*m.x483 - m.x324 == 0)
m.c529 = Constraint(expr=m.b14*m.x483 - m.x339 == 0)
m.c530 = Constraint(expr=m.x160*m.x267 - m.x39 == 0)
m.c531 = Constraint(expr=m.x267*m.x442 - m.x331 == 0)
m.c532 = Constraint(expr=m.x166*m.x267 - m.x49 == 0)
m.c533 = Constraint(expr=m.x267*m.x448 - m.x343 == 0)
m.c534 = Constraint(expr=m.x267**2 - m.x484 == 0)
m.c535 = Constraint(expr= m.x38 - m.x484 == 0)
m.c536 = Constraint(expr=m.x160*m.x484 - m.x330 == 0)
m.c537 = Constraint(expr=m.x166*m.x484 - m.x347 == 0)
m.c538 = Constraint(expr=m.x267**3 - m.x485 == 0)
m.c539 = Constraint(expr=m.b12*m.x485 - m.x329 == 0)
m.c540 = Constraint(expr=m.b15*m.x485 - m.x346 == 0)
m.c541 = Constraint(expr=m.x162*m.x268 - m.x41 == 0)
m.c542 = Constraint(expr=m.x268*m.x444 - m.x336 == 0)
m.c543 = Constraint(expr=m.x168*m.x268 - m.x51 == 0)
m.c544 = Constraint(expr=m.x268*m.x450 - m.x351 == 0)
m.c545 = Constraint(expr=m.x268**2 - m.x486 == 0)
m.c546 = Constraint(expr= m.x43 - m.x486 == 0)
m.c547 = Constraint(expr=m.x162*m.x486 - m.x335 == 0)
m.c548 = Constraint(expr=m.x168*m.x486 - m.x350 == 0)
m.c549 = Constraint(expr=m.x268**3 - m.x487 == 0)
m.c550 = Constraint(expr=m.b13*m.x487 - m.x334 == 0)
m.c551 = Constraint(expr=m.b16*m.x487 - m.x349 == 0)
m.c552 = Constraint(expr=m.x170*m.x269 - m.x54 == 0)
m.c553 = Constraint(expr=m.x269*m.x452 - m.x356 == 0)
m.c554 = Constraint(expr=m.x176*m.x269 - m.x66 == 0)
m.c555 = Constraint(expr=m.x269*m.x458 - m.x371 == 0)
m.c556 = Constraint(expr=m.x269**2 - m.x488 == 0)
m.c557 = Constraint(expr= m.x55 - m.x488 == 0)
m.c558 = Constraint(expr=m.x170*m.x488 - m.x355 == 0)
m.c559 = Constraint(expr=m.x176*m.x488 - m.x370 == 0)
m.c560 = Constraint(expr=m.x269**3 - m.x489 == 0)
m.c561 = Constraint(expr=m.b17*m.x489 - m.x354 == 0)
m.c562 = Constraint(expr=m.b20*m.x489 - m.x369 == 0)
m.c563 = Constraint(expr=m.x172*m.x270 - m.x59 == 0)
m.c564 = Constraint(expr=m.x270*m.x454 - m.x361 == 0)
m.c565 = Constraint(expr=m.x178*m.x270 - m.x70 == 0)
m.c566 = Constraint(expr=m.x270*m.x460 - m.x376 == 0)
m.c567 = Constraint(expr=m.x270**2 - m.x490 == 0)
m.c568 = Constraint(expr= m.x60 - m.x490 == 0)
m.c569 = Constraint(expr=m.x172*m.x490 - m.x360 == 0)
m.c570 = Constraint(expr=m.x178*m.x490 - m.x375 == 0)
m.c571 = Constraint(expr=m.x270**3 - m.x491 == 0)
m.c572 = Constraint(expr=m.b18*m.x491 - m.x359 == 0)
m.c573 = Constraint(expr=m.b21*m.x491 - m.x374 == 0)
m.c574 = Constraint(expr=m.x174*m.x271 - m.x63 == 0)
m.c575 = Constraint(expr=m.x271*m.x456 - m.x366 == 0)
m.c576 = Constraint(expr=m.x180*m.x271 - m.x72 == 0)
m.c577 = Constraint(expr=m.x271*m.x462 - m.x381 == 0)
m.c578 = Constraint(expr=m.x271**2 - m.x492 == 0)
m.c579 = Constraint(expr= m.x62 - m.x492 == 0)
m.c580 = Constraint(expr=m.x174*m.x492 - m.x365 == 0)
m.c581 = Constraint(expr=m.x180*m.x492 - m.x380 == 0)
m.c582 = Constraint(expr=m.x271**3 - m.x493 == 0)
m.c583 = Constraint(expr=m.b19*m.x493 - m.x364 == 0)
m.c584 = Constraint(expr=m.b22*m.x493 - m.x379 == 0)
m.c585 = Constraint(expr=m.x182*m.x272 - m.x75 == 0)
m.c586 = Constraint(expr=m.x272*m.x464 - m.x386 == 0)
m.c587 = Constraint(expr=m.x188*m.x272 - m.x88 == 0)
m.c588 = Constraint(expr=m.x272*m.x470 - m.x401 == 0)
m.c589 = Constraint(expr=m.x272**2 - m.x494 == 0)
m.c590 = Constraint(expr= m.x77 - m.x494 == 0)
m.c591 = Constraint(expr=m.x182*m.x494 - m.x385 == 0)
m.c592 = Constraint(expr=m.x188*m.x494 - m.x400 == 0)
m.c593 = Constraint(expr=m.x272**3 - m.x495 == 0)
m.c594 = Constraint(expr=m.b23*m.x495 - m.x384 == 0)
m.c595 = Constraint(expr=m.b26*m.x495 - m.x399 == 0)
m.c596 = Constraint(expr=m.x184*m.x273 - m.x79 == 0)
m.c597 = Constraint(expr=m.x273*m.x466 - m.x391 == 0)
m.c598 = Constraint(expr=m.x190*m.x273 - m.x91 == 0)
m.c599 = Constraint(expr=m.x273*m.x472 - m.x406 == 0)
m.c600 = Constraint(expr=m.x273**2 - m.x496 == 0)
m.c601 = Constraint(expr= m.x80 - m.x496 == 0)
m.c602 = Constraint(expr=m.x184*m.x496 - m.x390 == 0)
m.c603 = Constraint(expr=m.x190*m.x496 - m.x405 == 0)
m.c604 = Constraint(expr=m.x273**3 - m.x497 == 0)
m.c605 = Constraint(expr=m.b24*m.x497 - m.x389 == 0)
m.c606 = Constraint(expr=m.b27*m.x497 - m.x404 == 0)
m.c607 = Constraint(expr=m.x186*m.x274 - m.x83 == 0)
m.c608 = Constraint(expr=m.x274*m.x468 - m.x396 == 0)
m.c609 = Constraint(expr=m.x192*m.x274 - m.x93 == 0)
m.c610 = Constraint(expr=m.x274*m.x474 - m.x411 == 0)
m.c611 = Constraint(expr=m.x274**2 - m.x498 == 0)
m.c612 = Constraint(expr= m.x84 - m.x498 == 0)
m.c613 = Constraint(expr=m.x186*m.x498 - m.x395 == 0)
m.c614 = Constraint(expr=m.x192*m.x498 - m.x410 == 0)
m.c615 = Constraint(expr=m.x274**3 - m.x499 == 0)
m.c616 = Constraint(expr=m.b25*m.x499 - m.x394 == 0)
m.c617 = Constraint(expr=m.b28*m.x499 - m.x409 == 0)
| 38.381299 | 117 | 0.65222 |
0bbab57a58980cab77be4152c0853746383805da | 3,265 | py | Python | examples/pincell_depletion/restart_depletion.py | norberto-schmidt/openmc | ff4844303154a68027b9c746300f5704f73e0875 | [
"MIT"
] | 262 | 2018-08-09T21:27:03.000Z | 2022-03-24T05:02:10.000Z | examples/pincell_depletion/restart_depletion.py | norberto-schmidt/openmc | ff4844303154a68027b9c746300f5704f73e0875 | [
"MIT"
] | 753 | 2018-08-03T15:26:57.000Z | 2022-03-29T23:54:48.000Z | examples/pincell_depletion/restart_depletion.py | norberto-schmidt/openmc | ff4844303154a68027b9c746300f5704f73e0875 | [
"MIT"
] | 196 | 2018-08-06T13:41:14.000Z | 2022-03-29T20:47:12.000Z | import openmc
import openmc.deplete
import matplotlib.pyplot as plt
###############################################################################
#                      Load previous simulation results
###############################################################################
# Recover the model geometry from the last transport statepoint file.
statepoint = 'statepoint.100.h5'
with openmc.StatePoint(statepoint) as sp:
    geometry = sp.summary.geometry
# Load the depletion results of the previous run so the new depletion
# calculation restarts from the end of that history.
previous_results = openmc.deplete.ResultsList.from_hdf5("depletion_results.h5")
###############################################################################
#                Transport calculation settings
###############################################################################
# Instantiate a Settings object and set all runtime parameters.
settings = openmc.Settings()
settings.batches = 100
settings.inactive = 10
settings.particles = 10000
# Create an initial uniform spatial source distribution over fissionable zones
# (bounds are [x0, y0, z0, x1, y1, z1] in cm).
bounds = [-0.62992, -0.62992, -1, 0.62992, 0.62992, 1]
uniform_dist = openmc.stats.Box(bounds[:3], bounds[3:], only_fissionable=True)
settings.source = openmc.source.Source(space=uniform_dist)
# Shannon-entropy mesh used to monitor fission-source convergence.
entropy_mesh = openmc.RegularMesh()
entropy_mesh.lower_left = [-0.39218, -0.39218, -1.e50]
entropy_mesh.upper_right = [0.39218, 0.39218, 1.e50]
entropy_mesh.dimension = [10, 10, 1]
settings.entropy_mesh = entropy_mesh
###############################################################################
# Initialize and run depletion calculation
###############################################################################
# Create the depletion "operator" coupling transport and the depletion chain;
# passing previous_results makes this a restart run.
chain_file = './chain_simple.xml'
op = openmc.deplete.Operator(geometry, settings, chain_file, previous_results)
# Perform the simulation using the predictor time-integration algorithm.
time_steps = [1.0, 1.0, 1.0, 1.0, 1.0]  # days
power = 174  # W/cm, for 2D simulations only (use W for 3D)
integrator = openmc.deplete.PredictorIntegrator(op, time_steps, power, timestep_units='d')
integrator.integrate()
###############################################################################
# Read depletion calculation results
###############################################################################
# Open the (updated) results file.
results = openmc.deplete.ResultsList.from_hdf5("depletion_results.h5")
# Obtain K_eff as a function of time.
time, keff = results.get_eigenvalue()
# Obtain U235 concentration as a function of time (material '1').
time, n_U235 = results.get_atoms('1', 'U235')
# Obtain Xe135 capture reaction rate as a function of time.
time, Xe_capture = results.get_reaction_rate('1', 'Xe135', '(n,gamma)')
###############################################################################
# Generate plots
###############################################################################
days = 24*60*60  # seconds per day: 'time' is in seconds, plotted in days
plt.figure()
plt.plot(time/days, keff, label="K-effective")
plt.xlabel("Time (days)")
plt.ylabel("Keff")
plt.show()
plt.figure()
plt.plot(time/days, n_U235, label="U 235")
plt.xlabel("Time (days)")
plt.ylabel("n U5 (-)")
plt.show()
plt.figure()
plt.plot(time/days, Xe_capture, label="Xe135 capture")
plt.xlabel("Time (days)")
plt.ylabel("RR (-)")
plt.show()
plt.close('all')
| 35.879121 | 90 | 0.543951 |
0bbb896c1f766d40e02d03530e5012bd42f6b56e | 660 | py | Python | app/schemas/treatment_type.py | DzhonPetrus/Treatment-Management | 6b08c59d2d4e79181bbae4e951b7a5fd2e3162f1 | [
"MIT"
] | null | null | null | app/schemas/treatment_type.py | DzhonPetrus/Treatment-Management | 6b08c59d2d4e79181bbae4e951b7a5fd2e3162f1 | [
"MIT"
] | null | null | null | app/schemas/treatment_type.py | DzhonPetrus/Treatment-Management | 6b08c59d2d4e79181bbae4e951b7a5fd2e3162f1 | [
"MIT"
] | null | null | null | from datetime import datetime as dt
from typing import Optional, List
from pydantic import BaseModel
from ..utils.schemaHelper import Base, as_form
| 19.411765 | 46 | 0.721212 |
0bbbc45ba4c350c8c90d7bb728eaa10783237f8b | 2,211 | py | Python | app/daemon.py | mika-koivusaari/mqtt_db_gateway | c2e6a0f97d340f5a9d8a2f530f3ae0145064fd2b | [
"MIT"
] | 1 | 2017-12-02T17:38:23.000Z | 2017-12-02T17:38:23.000Z | app/daemon.py | mika-koivusaari/mqtt_db_gateway | c2e6a0f97d340f5a9d8a2f530f3ae0145064fd2b | [
"MIT"
] | null | null | null | app/daemon.py | mika-koivusaari/mqtt_db_gateway | c2e6a0f97d340f5a9d8a2f530f3ae0145064fd2b | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from pep3143daemon import DaemonContext, PidFile
import signal
import os
import sys
import time
| 35.66129 | 99 | 0.502035 |
0bc0a0c5b56516ed3c7366dbc0aa3ccecc32fda3 | 623 | py | Python | src/posts/forms.py | trivvet/djangoAdvance | 28891893869c1c0c3cf67d7f496dda96322de18c | [
"MIT"
] | null | null | null | src/posts/forms.py | trivvet/djangoAdvance | 28891893869c1c0c3cf67d7f496dda96322de18c | [
"MIT"
] | null | null | null | src/posts/forms.py | trivvet/djangoAdvance | 28891893869c1c0c3cf67d7f496dda96322de18c | [
"MIT"
] | null | null | null | from django import forms
from crispy_forms.helper import FormHelper
from pagedown.widgets import PagedownWidget
from .models import Post
| 23.961538 | 72 | 0.622793 |
0bc0b1a713ee07a7da22300f41d7eef91e9cf3f3 | 1,621 | py | Python | games/migrations/0004_auto_20150726_1430.py | rnelson/library | 5f327c188f2847151dcfc92de0dc4f43f24096bf | [
"MIT"
] | null | null | null | games/migrations/0004_auto_20150726_1430.py | rnelson/library | 5f327c188f2847151dcfc92de0dc4f43f24096bf | [
"MIT"
] | null | null | null | games/migrations/0004_auto_20150726_1430.py | rnelson/library | 5f327c188f2847151dcfc92de0dc4f43f24096bf | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
| 27.016667 | 60 | 0.52992 |
0bc0f8ad9a5e857c61031c1ca0a45f2bb10b8808 | 783 | py | Python | Exareme-Docker/src/mip-algorithms/HEALTH_CHECK/global.py | tchamabe1979/exareme | 462983e4feec7808e1fd447d02901502588a8879 | [
"MIT"
] | null | null | null | Exareme-Docker/src/mip-algorithms/HEALTH_CHECK/global.py | tchamabe1979/exareme | 462983e4feec7808e1fd447d02901502588a8879 | [
"MIT"
] | null | null | null | Exareme-Docker/src/mip-algorithms/HEALTH_CHECK/global.py | tchamabe1979/exareme | 462983e4feec7808e1fd447d02901502588a8879 | [
"MIT"
] | null | null | null | import sys
import json
from os import path
from argparse import ArgumentParser
sys.path.append(path.dirname(path.dirname(path.abspath(__file__))) + '/utils/')
from algorithm_utils import set_algorithms_output_data
from health_check_lib import HealthCheckLocalDT
if __name__ == '__main__':
main()
| 27.964286 | 84 | 0.715198 |
0bc10ee1d8cb8fa794fa00533f0e4782089ee855 | 107 | py | Python | app/search/urlmap.py | Hanaasagi/Ushio | 007f8e50e68bf71a1822b09291b1236a1a37c515 | [
"MIT"
] | 5 | 2016-10-24T14:01:48.000Z | 2017-09-26T07:33:20.000Z | app/search/urlmap.py | Hanaasagi/Ushio | 007f8e50e68bf71a1822b09291b1236a1a37c515 | [
"MIT"
] | null | null | null | app/search/urlmap.py | Hanaasagi/Ushio | 007f8e50e68bf71a1822b09291b1236a1a37c515 | [
"MIT"
] | null | null | null | # -*-coding:UTF-8-*-
from handler import SearchHandler
# (url regex, handler) pairs exposed by the search module's router.
urlpattern = (
    (r'/search', SearchHandler),
)
| 13.375 | 33 | 0.654206 |
0bc1b5133ac6d7c68f1be37cb9acd664f71acc62 | 1,601 | py | Python | collect_data/utils/immerseuk/gtr/gtr_extrainfo_awsreduce.py | jaklinger/nesta_dataflow | 5d5647dd8d900a40b460bae0841f7d917e53ae08 | [
"MIT"
] | null | null | null | collect_data/utils/immerseuk/gtr/gtr_extrainfo_awsreduce.py | jaklinger/nesta_dataflow | 5d5647dd8d900a40b460bae0841f7d917e53ae08 | [
"MIT"
] | null | null | null | collect_data/utils/immerseuk/gtr/gtr_extrainfo_awsreduce.py | jaklinger/nesta_dataflow | 5d5647dd8d900a40b460bae0841f7d917e53ae08 | [
"MIT"
] | null | null | null | import logging
from utils.common.datapipeline import DataPipeline
import boto3
import json
from copy import deepcopy
s3 = boto3.resource('s3')
bucket = s3.Bucket('tier-0')
if __name__ == "__main__":
    # NOTE(review): the commented-out lines in this block are leftovers from
    # an earlier chunk-tracking workflow; kept for reference.
    #run()
    #import numpy as np
    #all_numbers = list(np.arange(0,37242,6))
    #all_numbers.append(37242)
    # Report how many whitespace-separated tokens the local "not_done" file
    # still contains.
    print(len(open("not_done").read().split()))
    # Count S3 objects in the bucket whose key contains exactly two
    # underscores (i.e. splits into three fields).
    n = 0
    for obj in bucket.objects.all():
        n += int(len(obj.key.split("_")) == 3)
        #if key not in all_numbers:
        #    continue
        #print(key,"!!")
        #else:
        #    all_numbers.remove(key)
    print(n)
    # with open("not_done","w") as f:
    #     for n in all_numbers:
    #         print("-->",n,"<--")
    #         f.write(str(n)+" ")
    #data = obj.get()['Body'].read().decode("utf-8")
    #orgs += json.loads(data)
| 25.822581 | 56 | 0.519675 |
0bc1b87155af7211f7ef4f7bb261c76723b7c1da | 3,595 | py | Python | src/features/helpers/processing_v4.py | askoki/nfl_dpi_prediction | dc3256f24ddc0b6725eace2081d1fb1a7e5ce805 | [
"MIT"
] | null | null | null | src/features/helpers/processing_v4.py | askoki/nfl_dpi_prediction | dc3256f24ddc0b6725eace2081d1fb1a7e5ce805 | [
"MIT"
] | null | null | null | src/features/helpers/processing_v4.py | askoki/nfl_dpi_prediction | dc3256f24ddc0b6725eace2081d1fb1a7e5ce805 | [
"MIT"
] | null | null | null | import math
import numpy as np
from matplotlib.patches import FancyArrowPatch
def arrow(x, y, s, ax, color):
    """Draw movement arrows along a trajectory.

    Every 5th sample gets an arrow drawn from the previous point to the
    current one; the arrow size scales with the speed at that sample.

    :param x: positions on the x-axis
    :param y: positions on the y-axis
    :param s: speed in yards/s (one value per point)
    :param ax: matplotlib axes to draw on
    :param color: color of the arrows
    :return: None (patches are added to ``ax``)
    """
    step = 5  # spacing between consecutive arrows
    for idx in range(1, len(x), step):
        patch = FancyArrowPatch(
            (x[idx - 1], y[idx - 1]),
            (x[idx], y[idx]),
            arrowstyle='->',
            mutation_scale=convert_speed_to_marker_size(s[idx]),
            color=color,
        )
        ax.add_patch(patch)
def arrow_o(x, y, o, s, ax, color):
    """Draw orientation arrows along a trajectory.

    Starting at the 6th sample, every 3rd point gets an arrow anchored at
    that point and pointing toward the position computed from the
    orientation angle; the arrow size scales with the speed at that sample.

    :param x: positions on the x-axis
    :param y: positions on the y-axis
    :param o: orientations in degrees (0-360)
    :param s: speed in yards/s (one value per point)
    :param ax: matplotlib axes to draw on
    :param color: color of the arrows
    :return: None (patches are added to ``ax``)
    """
    step = 3  # spacing between consecutive arrows
    for idx in range(5, len(x), step):
        tip_x, tip_y = calculate_arrow_xy(x[idx], y[idx], o[idx])
        patch = FancyArrowPatch(
            (x[idx], y[idx]),
            (tip_x, tip_y),
            arrowstyle='-|>',
            mutation_scale=convert_speed_to_marker_size(s[idx]),
            alpha=0.6,
            color=color,
        )
        ax.add_patch(patch)
| 27.868217 | 94 | 0.569958 |
0bc25237116d36d1b3724261d878f108f7fb3326 | 1,103 | py | Python | abc199/d/main.py | KeiNishikawa218/atcoder | 0af5e091f8b1fd64d5ca7b46b06b9356eacfe601 | [
"MIT"
] | null | null | null | abc199/d/main.py | KeiNishikawa218/atcoder | 0af5e091f8b1fd64d5ca7b46b06b9356eacfe601 | [
"MIT"
] | null | null | null | abc199/d/main.py | KeiNishikawa218/atcoder | 0af5e091f8b1fd64d5ca7b46b06b9356eacfe601 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
main() | 23.978261 | 54 | 0.481414 |
0bc25628bdeee646aae0cedd3efc79f8829fa812 | 4,963 | py | Python | scripts/corpinfo.py | HiroshiOhta/GetCorporationInfo | 3c64ba44a15d481c652da70d62f7127372ac6d1e | [
"Apache-2.0"
] | 1 | 2020-05-24T02:41:24.000Z | 2020-05-24T02:41:24.000Z | scripts/corpinfo.py | HiroshiOhta/GetCorporationInfo | 3c64ba44a15d481c652da70d62f7127372ac6d1e | [
"Apache-2.0"
] | null | null | null | scripts/corpinfo.py | HiroshiOhta/GetCorporationInfo | 3c64ba44a15d481c652da70d62f7127372ac6d1e | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
#
from pathlib import Path
from re import search, sub
from sys import exit, argv
from xml.etree import ElementTree as ET
import csv
#
from requests import get
from requests.exceptions import Timeout, RequestException
#
from constants import ENC_API_KEY, NTA_API_URL
from crypt_string import decrypt_strings
def validate_number(corp_number: str) -> bool:
    """Verify the check digit of a 13-digit corporate number.

    The first character is the check digit; the remaining 12 digits are
    the payload.  The expected check digit is ``9 - S % 9`` where ``S``
    sums the payload digits with every second digit (counted from the
    right) doubled.

    Parameters
    ----------
    corp_number : str
        13-character string of decimal digits.

    Returns
    -------
    bool
        True when the leading check digit matches the computed one.
    """
    digits = [int(ch) for ch in corp_number]
    check_digit = digits[0]
    payload = digits[1:]
    # STEP 1: double every second payload digit counted from the right end.
    weighted_sum = sum(payload[-2::-2]) * 2 + sum(payload[-1::-2])
    # STEP 2/3: expected check digit is 9 minus the sum modulo 9.
    expected = 9 - weighted_sum % 9
    return check_digit == expected
def get_corp_info(api_key: str, corp_number: str) -> None:
    """Fetch corporation details from the web API, print them, log to CSV.

    Queries ``NTA_API_URL`` (presumably the Japanese National Tax Agency
    corporate-number Web-API -- the ``id``/``number``/``type``/``history``
    parameters match that service; TODO confirm against its spec), prints
    the fields of every returned corporation to stdout and writes all rows
    to ``../log/corp_info.csv``.

    Parameters
    ----------
    api_key : str
        Application ID sent as the ``id`` query parameter.
    corp_number : str
        Corporate number(s) to look up; the ``__main__`` block passes a
        comma-separated string for multiple numbers.

    Returns
    -------
    None
        Results are printed and written to the CSV file only.
    """
    # Query parameters for the lookup request ('type' selects the response
    # format and 'history' the inclusion of historical records -- verify
    # the exact meaning against the API documentation).
    params = {
        'id': api_key,
        'number': corp_number,
        'type': '12',
        'history': '0',
    }
    # Issue the HTTP request; exit with a distinct code per failure mode.
    try:
        response = get(NTA_API_URL, params=params, timeout=3.0)
        response.raise_for_status()
    except Timeout as err:
        # TODO: logging
        print(err)
        print("")
        exit(11)
    except RequestException as err:
        # TODO: logging
        print(err)
        exit(12)
    # Parse the XML payload.  Corporation elements are expected from child
    # index 4 onward; earlier children appear to be response metadata --
    # TODO confirm against the response schema.
    root = ET.fromstring(response.text)
    num = 4
    # First row is the CSV header (the header labels were left empty).
    corp_info_list = [["", "", "",
                       "", "", ""]]
    if num >= len(root):
        # No corporation elements found in the response.
        # TODO: logging
        print("(" + corp_number + ")")
    else:
        # Build one row per corporation.  The numeric child indices address
        # fixed positions in the XML schema (indices 9-11 look like address
        # parts and index 15 like a 7-digit postal code that gets a hyphen
        # inserted -- verify against the schema).
        while num < len(root):
            corp_info_list.append([root[num][1].text,
                                   root[num][4].text,
                                   root[num][6].text,
                                   root[num][9].text +
                                   root[num][10].text +
                                   root[num][11].text,
                                   sub(r'([0-9]{3})([0-9]{4})',
                                       r'\1-\2', root[num][15].text),
                                   root[num][28].text])
            num += 1
        # Print each corporation's fields, labelled with the header row.
        for corp_info in corp_info_list[1:]:
            print("{0:<14} : {1}".format(corp_info_list[0][0], corp_info[0]))
            print("{0:<14} : {1}".format(corp_info_list[0][2], corp_info[2]))
            print("{0:<14} : {1}".format(corp_info_list[0][5], corp_info[5]))
            print("{0:<14} : {1}".format(corp_info_list[0][4], corp_info[4]))
            print("{0:<14} : {1}".format(corp_info_list[0][3], corp_info[3]))
            print("{0:<14} : {1}".format(corp_info_list[0][1], corp_info[1]))
            print("")
    # Persist header + rows; failures are reported but not fatal.
    try:
        with open('../log/corp_info.csv', 'w', encoding='utf-8') as csv_out:
            writer = csv.writer(csv_out, lineterminator='\n')
            writer.writerows(corp_info_list)
    except FileNotFoundError as err:
        # ../log directory does not exist.
        # TODO: logging
        print(err)
    except PermissionError as err:
        # TODO: logging
        print(err)
    except csv.Error as err:
        # TODO: logging
        print(err)
if __name__ == "__main__":
    # If the last CLI argument is an existing file, treat it as the key
    # file used to decrypt the encrypted Web-API application ID; otherwise
    # decrypt with the default key material.
    if Path(argv[-1]).is_file():
        api_key = decrypt_strings(ENC_API_KEY, argv[-1])
        del argv[-1]
    else:
        api_key = decrypt_strings(ENC_API_KEY)
    # The remaining arguments must be one or more corporate numbers.
    if not argv[1:]:
        # TODO: logging
        print("")
        exit(1)
    else:
        for corp_number in argv[1:]:
            # A corporate number is 13 digits and never starts with 0.
            if not search("^[1-9][0-9]{12}$", corp_number):
                # TODO: logging
                print("13")
                exit(2)
            elif not validate_number(corp_number):
                # Check-digit mismatch for this number.
                # TODO: logging
                print("(" + corp_number + ")")
                exit(3)
        # Look up all numbers with a single comma-separated API call.
        corp_numbers = ",".join(map(str, argv[1:]))
        get_corp_info(api_key, corp_numbers)
        exit(0)
| 25.715026 | 84 | 0.518638 |
e7e46d31c42a93c03c2df71128dd11ecc6e4322c | 3,289 | py | Python | lib/misc.py | cripplet/langmuir-hash | 5b4aa8e705b237704dbb99fbaa89af8cc2e7a8b5 | [
"MIT"
] | null | null | null | lib/misc.py | cripplet/langmuir-hash | 5b4aa8e705b237704dbb99fbaa89af8cc2e7a8b5 | [
"MIT"
] | null | null | null | lib/misc.py | cripplet/langmuir-hash | 5b4aa8e705b237704dbb99fbaa89af8cc2e7a8b5 | [
"MIT"
] | null | null | null | # custom libs
from lib.args import getConf
# Python libs
from re import sub
from os import mkdir
from os.path import exists
from getpass import getuser
from socket import gethostname
# given an int (treated as binary list), generate all unique rotational permutations of int (circular shifts)
# http://bit.ly/GLdKmI
# given a string representation of a neighbor configuration, return the number of neighbors in the configuration
# makes a unique directory
# given an array of lines:
# stripping lines that begin with "#"
# stripping the rest of a line with "#" in the middle
# stripping lines that end with ":"
# remove whitespace
# bin() format is "0bxxxxxx"
# [2:] strips "0b"
# [-width:] selects last < width > chars
# renders the configuration file
# def renderConfig(folder):
# if(folder[-1] != "/"):
# folder += "/"
# fp = open(folder + "config.conf", "r")
# s = "config file for " + folder[:-1] + ":\n\n"
# for line in fp:
# s += line
# return(s)
# given a config file, output a CSV line
| 28.353448 | 112 | 0.617817 |
e7e6f4d9ac01c5dc81ed803d1582d06a2e43feb7 | 5,538 | py | Python | actions/geoip.py | cognifloyd/stackstorm-networking_utils | 56bbb6fc55f7662c2e7e7cccd79f1ebbfcb1df38 | [
"Apache-2.0"
] | null | null | null | actions/geoip.py | cognifloyd/stackstorm-networking_utils | 56bbb6fc55f7662c2e7e7cccd79f1ebbfcb1df38 | [
"Apache-2.0"
] | null | null | null | actions/geoip.py | cognifloyd/stackstorm-networking_utils | 56bbb6fc55f7662c2e7e7cccd79f1ebbfcb1df38 | [
"Apache-2.0"
] | null | null | null | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import six
import ipaddress
import geoip2.database
from st2common.runners.base_action import Action
| 36.434211 | 109 | 0.501083 |
e7e747c17639e0dcf83dd1ce0bf4d49fb48d32c9 | 6,372 | py | Python | backend/src/dealer/helpers/result.py | codepals-org/poker | 8b58df2ff4d3d9799c42652a9d6942d8ec6b3707 | [
"MIT"
] | 2 | 2020-11-07T16:37:14.000Z | 2020-11-07T17:11:24.000Z | backend/src/dealer/helpers/result.py | codepals-org/poker | 8b58df2ff4d3d9799c42652a9d6942d8ec6b3707 | [
"MIT"
] | 7 | 2020-11-07T14:04:06.000Z | 2020-11-11T11:49:13.000Z | backend/src/dealer/helpers/result.py | codepals-org/poker | 8b58df2ff4d3d9799c42652a9d6942d8ec6b3707 | [
"MIT"
] | 1 | 2020-11-08T13:00:27.000Z | 2020-11-08T13:00:27.000Z | """ This module comes with functions to decide which poker player out
of all players has the best cards.
"""
import itertools
# full_list in [('A','A'),('B','B')...,('F','F')]
def results(full_list, public_card):
    """Decide which player(s) hold the winning poker hand.

    Each player's cards are combined with the community cards, every
    5-card combination is scored (category * 100 + tiebreak value), and
    the best score per player is compared across players.

    Args:
        full_list: list of per-player card lists, e.g. [['9C', 'AS'], ...].
            NOTE(review): each inner list is extended in place with the
            community cards (side effect visible to the caller).
        public_card: community cards, e.g. ['6H', '6D', '5S', '4S', '8S'].
            Card strings end with the suit letter; tens ('10C') are
            recognised via the '0' in the second-to-last position.

    Returns:
        Tuple ``(winner, winner_card_type)``: the indices of all players
        tied for the best score and the hand-type name for each of them.
    """
    #public_card = ['6H', '6D', '5S', '4S', '8S']
    #full_list = [['9C', 'AS'], ['9H', '5C'], ['4D', '2S'], ['KC', '2D'], ['9D', '10C']]
    high_comb_rank = []
    high_type_rank = []
    high_point_rank = []
    public_card_temp = []
    winner_card_type = []
    public_card_temp.extend(list(public_card))
    total_players = len(full_list)
    for player_card_check in full_list:
        # In-place extension: hole cards + community cards.
        player_card_check += public_card
        # Every possible 5-card hand from the player's cards.
        card_combinations = list(itertools.combinations(player_card_check, 5))
        color_all = []
        size_all = []
        for card_combination in card_combinations:
            # Suits: the last character of each card string.
            color_current = []
            for card in card_combination:
                color_current.append(str(card[-1]))
            color_all.append(color_current)
            # Ranks: second-to-last character; '0' marks a ten, J/Q/K/A
            # map to 11-14.
            size_current = []
            for card in card_combination:
                if card[-2].isdigit():
                    size5 = int(card[-2])
                    if size5 == 0:
                        size5 = 10
                else:
                    if card[-2] == "J":
                        size5 = 11
                    elif card[-2] == "Q":
                        size5 = 12
                    elif card[-2] == "K":
                        size5 = 13
                    elif card[-2] == "A":
                        size5 = 14
                size_current.append(size5)
            size_all.append(size_current)
        card_type_all = []
        type_score_all = []
        high_card_all = []
        win_point = []
        # Classify each 5-card combination.
        for i, card_combination in enumerate(card_combinations):
            color = color_all[i]
            size = size_all[i]
            high_card = []
            card_type = []
            size_set = list(set(size))
            # The while/else works as an if/else: the body always breaks,
            # and the else branch runs only when the suits are NOT all
            # identical (loop condition false on the first check).
            while len(set(color)) == 1:
                if max(size) - min(size) == 4:
                    # Same suit, ranks spanning exactly four: straight flush.
                    card_type = 'Straight flush'
                    high_card = max(size)
                    break
                else:
                    card_type = 'Flush'
                    high_card = sum(size)
                    break
            else:
                if len(set(size)) == 5:
                    # Five distinct ranks: straight, wheel, or high card.
                    if max(size) - min(size) == 4:
                        if sorted(size)[2] == sum(size) / len(size):
                            card_type = 'Straight'
                            high_card = max(size)
                    elif max(size) - min(size) == 12:
                        # Ace + low cards; 2+3+4+5+14 == 28 is the
                        # ace-low straight (ranked as a 5-high straight).
                        if sum(size) == 28:
                            card_type = 'Straight'
                            high_card = 5
                        else:
                            card_type = 'High card'
                            high_card = sum(size)
                    else:
                        card_type = 'High card'
                        high_card = sum(size)
                elif len(size) - 1 == len(set(size)):
                    # Exactly one duplicated rank: the pair's rank is the
                    # max over values already seen earlier in the list.
                    card_type = 'One pair'
                    high_card = max([x for n, x in enumerate(size) if x in size[:n]])
                elif len(size) - 2 == len(set(size)):
                    # Three distinct ranks: trips or two pair.  Zero out
                    # one occurrence of each distinct rank; the two
                    # leftover values tell the shapes apart.
                    size_temp = []
                    size_temp.extend(size)
                    for a in range(0, 5):
                        for b in range(0, 3):
                            if size[a] == size_set[b]:
                                size[a] = 0
                                size_set[b] = 0
                    last = [x for x in size if x != 0]
                    size = []
                    size.extend(size_temp)
                    if last[0] == last[1]:
                        card_type = 'Three of a kind'
                        high_card = max([x for n, x in enumerate(size) if x in size[:n]])
                    else:
                        card_type = 'Two pairs'
                        high_card = sum([x for n, x in enumerate(size) if x in size[:n]])
                elif len(size) - 3 == len(set(size)):
                    # Two distinct ranks: quads or full house.
                    # NOTE(review): unlike the branch above, this one
                    # compares against size[b] (not size_set[b]) and does
                    # not restore `size` before computing high_card --
                    # verify this zeroing logic is intended.
                    for a in range(0, 5):
                        for b in range(0, 2):
                            if size[a] == size[b]:
                                size[a] = 0
                                size_set[b] = 0
                    last = [x for x in size if x != 0]
                    if last[0] == last[1] == last[2]:
                        card_type = 'Four of a kind'
                        high_card = max([x for n, x in enumerate(size) if x in size[:n]])
                    else:
                        card_type = 'Full house'
                        high_card = max([x for n, x in enumerate(size) if x in size[:n]])
            # Map the hand category to its rank, strongest = 9.
            type_score = []
            if card_type == 'Straight flush':
                type_score = 9
            elif card_type == 'Four of a kind':
                type_score = 8
            elif card_type == 'Full house':
                type_score = 7
            elif card_type == 'Flush':
                type_score = 6
            elif card_type == 'Straight':
                type_score = 5
            elif card_type == 'Three of a kind':
                type_score = 4
            elif card_type == 'Two pairs':
                type_score = 3
            elif card_type == 'One pair':
                type_score = 2
            elif card_type == 'High card':
                type_score = 1
            card_type_all.append(card_type)
            high_card_all.append(high_card)
            # Composite score: category dominates, high card breaks ties.
            win_point.append(type_score * int(100) + high_card)
        # Keep the player's best combination, type and score.
        high_point = max(win_point)
        locate = win_point.index(max(win_point))
        high_comb = card_combinations[locate]
        high_type = card_type_all[locate]
        high_point_rank.append(high_point)
        high_comb_rank.append(high_comb)
        high_type_rank.append(high_type)
    # All players whose best score equals the global maximum are winners.
    winner = ()
    for i in range(len(high_point_rank)):
        if high_point_rank[i] == max(high_point_rank):
            winner += (i,)
    for i in winner:
        a = int(i)
        b = high_type_rank[a]
        winner_card_type.append(b)
    return (winner, winner_card_type)
e7e78d1aba44146a11b4493e469f13a8468f2449 | 420 | py | Python | nimble-newts/askgrieves/chatbot/models.py | Vthechamp22/summer-code-jam-2021 | 0a8bf1f22f6c73300891fd779da36efd8e1304c1 | [
"MIT"
] | 40 | 2020-08-02T07:38:22.000Z | 2021-07-26T01:46:50.000Z | nimble-newts/askgrieves/chatbot/models.py | Vthechamp22/summer-code-jam-2021 | 0a8bf1f22f6c73300891fd779da36efd8e1304c1 | [
"MIT"
] | 134 | 2020-07-31T12:15:45.000Z | 2020-12-13T04:42:19.000Z | nimble-newts/askgrieves/chatbot/models.py | AvianAnalyst/summer-code-jam-2020 | c5e2aeb4ce399c438a1b8aad393d9c2e9ef98a75 | [
"MIT"
] | 101 | 2020-07-31T12:00:47.000Z | 2021-11-01T09:06:58.000Z | from django.db import models
| 23.333333 | 57 | 0.7 |
e7e9d221f1fcec4aa818bff540aa8cfe75c86d5f | 1,026 | py | Python | examples/example_wait_for.py | plun1331/discord.py-components-1 | a31b1a0cfbd31b98d01e910ed905c9c70afe0c3e | [
"MIT"
] | 1 | 2021-08-07T18:40:36.000Z | 2021-08-07T18:40:36.000Z | examples/example_wait_for.py | plun1331/discord.py-components-1 | a31b1a0cfbd31b98d01e910ed905c9c70afe0c3e | [
"MIT"
] | null | null | null | examples/example_wait_for.py | plun1331/discord.py-components-1 | a31b1a0cfbd31b98d01e910ed905c9c70afe0c3e | [
"MIT"
] | null | null | null | from discord.ext.commands import Bot
from discord_components import DiscordComponents, Button, ButtonStyle, InteractionType
from asyncio import TimeoutError
bot = Bot("!")
bot.run("TOKEN")
| 24.428571 | 99 | 0.621832 |
e7ea14302b331a9466a14df8ced10e7042b53923 | 7,081 | py | Python | core/data/dataloader/upb_kitti.py | nemodrive/awesome-semantic-segmentation-pytorch | fa0e4174004822ace0560cc046c2fbdb81f1e1b9 | [
"Apache-2.0"
] | null | null | null | core/data/dataloader/upb_kitti.py | nemodrive/awesome-semantic-segmentation-pytorch | fa0e4174004822ace0560cc046c2fbdb81f1e1b9 | [
"Apache-2.0"
] | null | null | null | core/data/dataloader/upb_kitti.py | nemodrive/awesome-semantic-segmentation-pytorch | fa0e4174004822ace0560cc046c2fbdb81f1e1b9 | [
"Apache-2.0"
] | null | null | null | """Pascal VOC Semantic Segmentation Dataset."""
import os
import torch
import numpy as np
from PIL import Image
from .segbase import SegmentationDataset
if __name__ == '__main__':
dataset = KITTISegmentation()
| 36.880208 | 141 | 0.591018 |
e7ea5fbf2a5ea893fa5d02bc075a60e6e8983358 | 4,580 | py | Python | app/request.py | angelakarenzi5/News-Highlight | 3eae6f743f9e5d9eb4ea80b29ae0e2c57dd0aa62 | [
"Unlicense"
] | null | null | null | app/request.py | angelakarenzi5/News-Highlight | 3eae6f743f9e5d9eb4ea80b29ae0e2c57dd0aa62 | [
"Unlicense"
] | null | null | null | app/request.py | angelakarenzi5/News-Highlight | 3eae6f743f9e5d9eb4ea80b29ae0e2c57dd0aa62 | [
"Unlicense"
] | null | null | null | from app import app
import urllib.request,json
from .models import source
from .models import article
Source = source.Source
Article = article.Article
# Getting api key
api_key = app.config['NEWS_API_KEY']
# Getting the source base url
base_url = app.config["SOURCE_API_BASE_URL"]
article_url = app.config["ARTICLE_API_BASE_URL"]
def process_results(source_list):
    '''
    Convert raw news-source dictionaries into Source model objects.

    Args:
        source_list: list of dictionaries with source details from the API

    Returns:
        A list of Source objects, one per entry that has a url.
    '''
    sources = []
    for item in source_list:
        source_url = item.get('url')
        # Entries without a url are dropped.
        if not source_url:
            continue
        sources.append(Source(
            item.get('id'),
            item.get('name'),
            item.get('description'),
            source_url,
            item.get('category'),
            item.get('language'),
            item.get('country'),
        ))
    return sources
def get_sources(category):
    '''
    Fetch news sources for a category from the API and parse the JSON reply.

    Args:
        category: category string interpolated into the source endpoint URL

    Returns :
        A list of Source objects, or None when the 'sources' field is empty
    '''
    request_url = base_url.format(category, api_key)
    with urllib.request.urlopen(request_url) as response:
        payload = json.loads(response.read())
        results = None
        raw_sources = payload['sources']
        if raw_sources:
            results = process_results(raw_sources)
    return results
def get_articles(category):
    '''
    Fetch articles for a category from the API and parse the JSON reply.

    NOTE(review): this definition is shadowed by the later
    ``get_articles(source)`` further down the module, so only that one is
    live at import time; this copy is kept (and fixed) unchanged in interface.

    Args:
        category: value interpolated into the article endpoint URL

    Returns :
        A list of Article objects, or None when no articles were returned
    '''
    get_articles_url = article_url.format(category,api_key)
    with urllib.request.urlopen(get_articles_url) as url:
        get_articles_data = url.read()
        get_articles_response = json.loads(get_articles_data)
        article_results = None
        if get_articles_response['articles']:
            article_results_list = get_articles_response['articles']
            # Bug fix: article payloads must go through process_articles();
            # process_results() builds Source objects and drops article fields.
            article_results = process_articles(article_results_list)
    return article_results
def process_articles(article_list):
    '''
    Convert raw article dictionaries into Article model objects.

    Args:
        article_list: A list of dictionaries, each holding one article's details

    Returns :
        article_results: A list of Article objects (entries without a url are skipped)
    '''
    article_results = []
    for entry in article_list:
        link = entry.get('url')
        # Articles without a link cannot be displayed, so drop them.
        if not link:
            continue
        article_results.append(Article(
            entry.get('author'),
            entry.get('title'),
            entry.get('description'),
            link,
            entry.get('urlToImage'),
            entry.get('publishedAt'),
            entry.get('content'),
        ))
    return article_results
def get_articles(source):
    '''
    Fetch the articles published by a given source and parse the JSON reply.

    NOTE(review): this definition shadows the earlier ``get_articles(category)``
    above; only this one is live at import time.

    Args:
        source: source identifier string interpolated into ``article_url``

    Returns :
        A list of Article objects, or None when the 'articles' field is empty
    '''
    get_articles_url = article_url.format(source,api_key)
    with urllib.request.urlopen(get_articles_url) as url:
        get_articles_data = url.read()
        get_articles_response = json.loads(get_articles_data)
        article_results = None
        if get_articles_response['articles']:
            article_results_list = get_articles_response['articles']
            # Articles are turned into Article objects by process_articles().
            article_results = process_articles(article_results_list)
return article_results | 31.156463 | 98 | 0.691921 |
e7ea9b418ef09dc2361de5d9ada98bfd38198af3 | 19 | py | Python | login.py | XM001-creater/test_one | 1cf96a45c8dfbf988125e3d250d86fb06fe65c34 | [
"MIT"
] | null | null | null | login.py | XM001-creater/test_one | 1cf96a45c8dfbf988125e3d250d86fb06fe65c34 | [
"MIT"
] | null | null | null | login.py | XM001-creater/test_one | 1cf96a45c8dfbf988125e3d250d86fb06fe65c34 | [
"MIT"
] | null | null | null | num1 =1
num2 = 222
| 6.333333 | 10 | 0.631579 |
e7ecc557e33faf2b68bd5445272a43c0e0419ea1 | 445 | py | Python | change_file_name.py | Guzhongren/picuture2thumbnail | 15d58c2e53652e5c5af9ff1bf89883b9038bfa03 | [
"MIT"
] | 1 | 2019-07-07T17:51:37.000Z | 2019-07-07T17:51:37.000Z | change_file_name.py | Guzhongren/picuture2thumbnail | 15d58c2e53652e5c5af9ff1bf89883b9038bfa03 | [
"MIT"
] | null | null | null | change_file_name.py | Guzhongren/picuture2thumbnail | 15d58c2e53652e5c5af9ff1bf89883b9038bfa03 | [
"MIT"
] | 1 | 2020-01-19T08:27:10.000Z | 2020-01-19T08:27:10.000Z | # -*- coding: utf-8 -*-
# Author:Guzhongren
# created: 2017-05-08
import os
path = 'C:\\geoconFailed\\willfix\\'
# Rename every regular file in `path` by dropping the last character of its
# base name while keeping the extension (e.g. "abc1.tif" -> "abc.tif").
for file in os.listdir(path):
    if os.path.isfile(os.path.join(path, file)):
        # Bug fix: os.path.splitext is robust to names with no dot or with
        # several dots; the old split('.') raised IndexError on dotless names
        # and picked the wrong "extension" for multi-dot ones.
        base, ext = os.path.splitext(file)
        new_file_name = base[:-1] + ext
        os.rename(os.path.join(path, file), os.path.join(path, new_file_name))
        print(file + u"")
| 27.8125 | 77 | 0.624719 |
e7ed80b597ccfb79e5e0d84b01e14970f4384658 | 434 | py | Python | day22/day22.py | norbert-e-horn/adventofcode-2017 | 81a6a8eb6f23f2191786d1ea8b2aad1f54d9c12a | [
"Apache-2.0"
] | null | null | null | day22/day22.py | norbert-e-horn/adventofcode-2017 | 81a6a8eb6f23f2191786d1ea8b2aad1f54d9c12a | [
"Apache-2.0"
] | null | null | null | day22/day22.py | norbert-e-horn/adventofcode-2017 | 81a6a8eb6f23f2191786d1ea8b2aad1f54d9c12a | [
"Apache-2.0"
] | null | null | null | import sys
# Heavily golfed Advent of Code 2017 day 22 solution (per the repo path).
# Parse the grid from argv[1]: '#' -> 2 (infected), anything else -> 0.
c=[[2if a=="#"else 0for a in i]for i in sys.argv[1].split("\n")]
n=len(c)
# Embed the parsed n-row grid at the centre of a flattened s*s field of zeros.
s=1001
a=[]
k=(s-n)//2
for i in range(s):a+=[0]*k+c[i-k]+k*[0]if k<=i<(s+n)/2else[0]*s
# Pristine copy so the second run can restart from the original grid.
b=list(a)
# d[1] starts at the centre cell of the flattened grid; d[2] is what gets
# printed as the answer.  NOTE(review): m() (the per-burst step function,
# which presumably mutates a and d in place) is not defined in this chunk --
# it must live elsewhere in the full file; confirm before relying on this.
d=[0,s**2//2,0]
for i in range(10000):m(2)
print(d[2])
a=b
d=[0,s**2//2,0]
for i in range(10000000):m(1)
print(d[2])
| 20.666667 | 64 | 0.495392 |
e7edbdfed8164b295e564361932bcbdae312f33f | 10,178 | py | Python | armory/scenarios/audio_asr.py | GuillaumeLeclerc/armory | c24928701b4ff6fc37cdb994ea784f9733a8e8da | [
"MIT"
] | 1 | 2021-06-17T23:05:58.000Z | 2021-06-17T23:05:58.000Z | armory/scenarios/audio_asr.py | GuillaumeLeclerc/armory | c24928701b4ff6fc37cdb994ea784f9733a8e8da | [
"MIT"
] | null | null | null | armory/scenarios/audio_asr.py | GuillaumeLeclerc/armory | c24928701b4ff6fc37cdb994ea784f9733a8e8da | [
"MIT"
] | null | null | null | """
Automatic speech recognition scenario
"""
import logging
from typing import Optional
from tqdm import tqdm
import numpy as np
from art.preprocessing.audio import LFilter, LFilterPyTorch
from armory.utils.config_loading import (
load_dataset,
load_model,
load_attack,
load_adversarial_dataset,
load_defense_wrapper,
load_defense_internal,
load_label_targeter,
)
from armory.utils import metrics
from armory.scenarios.base import Scenario
from armory.utils.export import SampleExporter
logger = logging.getLogger(__name__)
def load_audio_channel(delay, attenuation, pytorch=True):
    """
    Build an art LFilter modelling a single-echo (multipath) delay channel.

    If attenuation == 0 or delay == 0, return an identity channel;
    otherwise the returned filter has length equal to delay + 1.

    NOTE: lfilter truncates the end of the echo, so output length equals input length
    """
    delay = int(delay)
    attenuation = float(attenuation)
    if delay < 0:
        raise ValueError(f"delay {delay} must be a nonnegative number (of samples)")
    if delay and attenuation:
        if not (-1 <= attenuation <= 1):
            logger.warning(f"filter attenuation {attenuation} not in [-1, 1]")
        # One direct path plus one attenuated echo `delay` samples later.
        numerator_coef = np.zeros(delay + 1)
        numerator_coef[0] = 1.0
        numerator_coef[delay] = attenuation
        denominator_coef = np.zeros_like(numerator_coef)
        denominator_coef[0] = 1.0
    else:
        logger.warning("Using an identity channel")
        numerator_coef = np.array([1.0])
        denominator_coef = np.array([1.0])
    if pytorch:
        try:
            return LFilterPyTorch(
                numerator_coef=numerator_coef, denominator_coef=denominator_coef
            )
        except ImportError:
            logger.exception("PyTorch not available. Resorting to scipy filter")
    logger.warning("Scipy LFilter does not currently implement proper gradients")
    return LFilter(numerator_coef=numerator_coef, denominator_coef=denominator_coef)
| 39.449612 | 91 | 0.599921 |
e7ee6d842483ab8133f076264eb1658607e7ec98 | 5,558 | py | Python | FMWKubernetesMAA/OracleEnterpriseDeploymentAutomation/OracleWebCenterSites/charts/wc-sites/unicast.py | rishiagarwal-oracle/fmw-kubernetes | cf53d0aac782cacaa26cb1f8f1cdb7130f69d64f | [
"UPL-1.0",
"MIT"
] | null | null | null | FMWKubernetesMAA/OracleEnterpriseDeploymentAutomation/OracleWebCenterSites/charts/wc-sites/unicast.py | rishiagarwal-oracle/fmw-kubernetes | cf53d0aac782cacaa26cb1f8f1cdb7130f69d64f | [
"UPL-1.0",
"MIT"
] | null | null | null | FMWKubernetesMAA/OracleEnterpriseDeploymentAutomation/OracleWebCenterSites/charts/wc-sites/unicast.py | rishiagarwal-oracle/fmw-kubernetes | cf53d0aac782cacaa26cb1f8f1cdb7130f69d64f | [
"UPL-1.0",
"MIT"
] | null | null | null | # Copyright (c) 2022, Oracle and/or its affiliates.
#
# Licensed under the Universal Permissive License v 1.0 as shown at
# https://oss.oracle.com/licenses/upl
import xml.dom.minidom
import re
import sys
# Method to uncomment and comment the required tag and save back
# Method to replace the properties
# Method to replace the properties
if __name__ == "__main__":
# calling main function
main()
| 45.933884 | 161 | 0.737496 |
e7ee8f88cffe1a482d5fa7391195738c0119a53d | 2,228 | py | Python | SQLFileGenerator/sqlqueries.py | DataMadeEasy/PySQLFileGenerator | 3efc54fa7b8741f48d00dc199675081b0fc4e04d | [
"BSD-2-Clause"
] | null | null | null | SQLFileGenerator/sqlqueries.py | DataMadeEasy/PySQLFileGenerator | 3efc54fa7b8741f48d00dc199675081b0fc4e04d | [
"BSD-2-Clause"
] | null | null | null | SQLFileGenerator/sqlqueries.py | DataMadeEasy/PySQLFileGenerator | 3efc54fa7b8741f48d00dc199675081b0fc4e04d | [
"BSD-2-Clause"
] | null | null | null | sqlqueries = {
'WeatherForecast':"select concat ('FY', to_char(f.forecasted_timestamp, 'YY')) Fiscal_yr, to_char(f.forecasted_timestamp, 'MON') Fiscal_mth, concat ('Day_', to_char(f.forecasted_timestamp, 'DD')) Fiscal_day, f.zipcode zip, min(f.temp_avg) low, max(f.temp_avg) high, max(f.wind_speed) wind, max(f.humidity) humidity from forecast f where to_char(forecast_timestamp, 'YYYY-MM-DD HH24') = (select max(to_char(forecast_timestamp, 'YYYY-MM-DD HH24')) from forecast) group by to_char(f.forecasted_timestamp, 'YY'), to_char(f.forecasted_timestamp, 'MON'), to_char(f.forecasted_timestamp, 'DD'), f.zipcode;",
'WeatherActDesc':"select concat ('FY', to_char(o.observation_timestamp, 'YY')) Fiscal_yr, to_char(o.observation_timestamp, 'MON') Fiscal_mth, concat ('Day_', to_char(o.observation_timestamp, 'DD')) Fiscal_day, o.zipcode zip, o.weather_description descripion from observations o group by to_char(o.observation_timestamp, 'YY'), to_char(o.observation_timestamp, 'MON'), to_char(o.observation_timestamp, 'DD'), o.zipcode, o.weather_description order by fiscal_yr, fiscal_mth, fiscal_day, zip;",
'WeatherActual':"select concat ('FY', to_char(o.observation_timestamp, 'YY')) Fiscal_yr, to_char(o.observation_timestamp, 'MON') Fiscal_mth, concat ('Day_', to_char(o.observation_timestamp, 'DD')) Fiscal_day, o.zipcode zip, min(o.temp_avg) low, max(o.temp_avg) high, max(o.wind_speed) wind, max(o.humidity) humidity from observations o group by to_char(o.observation_timestamp, 'YY'), to_char(o.observation_timestamp, 'MON') , to_char(o.observation_timestamp, 'DD') , o.zipcode order by fiscal_yr, fiscal_mth, fiscal_day, zip;",
'WeatherDescription':"select concat ('FY', to_char(f.forecasted_timestamp, 'YY')) Fiscal_yr , to_char(f.forecasted_timestamp, 'MON') Fiscal_mth , concat ('Day_', to_char(f.forecasted_timestamp, 'DD')) Fiscal_day , f.zipcode zip , f.weather_description descripion from forecast f where to_char(forecast_timestamp, 'YYYY-MM-DD HH24') = (select max(to_char(forecast_timestamp, 'YYYY-MM-DD HH24')) from forecast) group by to_char(forecasted_timestamp, 'YY') , to_char(f.forecasted_timestamp, 'MON') , to_char(f.forecasted_timestamp, 'DD') , f.zipcode , f.weather_description;"
} | 371.333333 | 604 | 0.763465 |
e7f06cecae55d479e6604b53a295b76a9bdf0276 | 5,005 | py | Python | backend/tests/unit/protocols/application/test_lists.py | pez-globo/pufferfish-software | b42fecd652731dd80fbe366e95983503fced37a4 | [
"Apache-2.0"
] | 1 | 2020-10-20T23:47:23.000Z | 2020-10-20T23:47:23.000Z | backend/tests/unit/protocols/application/test_lists.py | pez-globo/pufferfish-software | b42fecd652731dd80fbe366e95983503fced37a4 | [
"Apache-2.0"
] | 242 | 2020-10-23T06:44:01.000Z | 2022-01-28T05:50:45.000Z | backend/tests/unit/protocols/application/test_lists.py | pez-globo/pufferfish-vent-software | f1e5e47acf1941e7c729adb750b85bf26c38b274 | [
"Apache-2.0"
] | 1 | 2021-04-12T02:10:18.000Z | 2021-04-12T02:10:18.000Z | """Test the functionality of protocols.application.states classes."""
from ventserver.protocols.application import lists
from ventserver.protocols.protobuf import mcu_pb as pb
def test_send_new_elements() -> None:
    """Test adding new elements to a list for sending.

    Drives a SendSynchronizer (capacity 10, segment size 5) with 20 events and
    checks eviction of the oldest events, segment windowing as next_expected
    advances, and delivery of a late-arriving event in a repeated request.
    """
    # 20 update events, each carrying one new LogEvent (ids 0..19).
    example_sequence = [
        lists.UpdateEvent(new_elements=[pb.LogEvent(id=i)])
        for i in range(20)
    ]
    synchronizer = lists.SendSynchronizer(
        segment_type=pb.NextLogEvents,
        max_len=10, max_segment_len=5
    )
    # No output until the peer announces what it expects next.
    assert synchronizer.output() is None
    for update_event in example_sequence:
        synchronizer.input(update_event)
    assert synchronizer.output() is None
    # The first 10 events should've been discarded
    for next_expected in range(10):
        synchronizer.input(lists.UpdateEvent(next_expected=next_expected))
        output = synchronizer.output()
        assert isinstance(output, pb.NextLogEvents)
        assert output.next_expected == next_expected
        assert output.total == 10
        assert output.remaining == 10
        # Requests below the retained window all yield the oldest kept ids.
        for (i, event) in enumerate(output.elements):
            assert event.id == 10 + i
    # Segments should be returned as requested
    for next_expected in range(10, 20):
        synchronizer.input(lists.UpdateEvent(next_expected=next_expected))
        output = synchronizer.output()
        assert isinstance(output, pb.NextLogEvents)
        assert output.next_expected == next_expected
        assert output.total == 10
        assert output.remaining == 10 - (next_expected - 10)
        for (i, event) in enumerate(output.elements):
            assert event.id == next_expected + i
        # Full 5-element segments until fewer than 5 events remain.
        if next_expected <= 15:
            assert len(output.elements) == 5
        else:
            assert len(output.elements) == 5 - (next_expected - 15)
    # New elements should be in the segment resulting from a repeated request
    assert synchronizer.output() is None
    synchronizer.input(lists.UpdateEvent(
        new_elements=[pb.LogEvent(id=20)], next_expected=19
    ))
    output = synchronizer.output()
    assert isinstance(output, pb.NextLogEvents)
    assert output.next_expected == 19
    assert output.total == 10
    assert output.remaining == 2
    # The segment now covers id 19 plus the newly added id 20.
    for (i, event) in enumerate(output.elements):
        assert event.id == 19 + i
    assert len(output.elements) == 2
    # TODO: add a test where we send all events, then reset expected event to 0.
    # All events should be sent again.
def test_receive_new_elements() -> None:
    """Test adding new elements to a list from receiving.

    Feeds NextLogEvents segments into a ReceiveSynchronizer and checks the
    emitted UpdateEvents: next_expected tracking, de-duplication of already
    seen ids, and the reset caused by a new session_id.
    """
    # Session 0: ids 0-4, then 5-9, then an overlapping 7-10 resend, then a
    # stale 0-3 resend; session 1: an empty segment, then ids 0-3 again.
    example_sequence = [
        pb.NextLogEvents(
            session_id=0, elements=[pb.LogEvent(id=i) for i in range(0, 5)]
        ),
        pb.NextLogEvents(
            session_id=0, elements=[pb.LogEvent(id=i) for i in range(5, 10)]
        ),
        pb.NextLogEvents(
            session_id=0, elements=[pb.LogEvent(id=i) for i in range(7, 11)]
        ),
        pb.NextLogEvents(
            session_id=0, elements=[pb.LogEvent(id=i) for i in range(0, 4)]
        ),
        pb.NextLogEvents(session_id=1),
        pb.NextLogEvents(
            session_id=1, elements=[pb.LogEvent(id=i) for i in range(0, 4)]
        ),
    ]
    synchronizer: lists.ReceiveSynchronizer[pb.LogEvent] = \
        lists.ReceiveSynchronizer()
    assert synchronizer.output() is None
    for segment in example_sequence:
        synchronizer.input(segment)
    # Segment 1: ids 0-4 are all new.
    update_event = synchronizer.output()
    assert update_event is not None
    assert update_event.session_id == 0
    assert update_event.next_expected == 5
    assert len(update_event.new_elements) == 5
    for (i, element) in enumerate(update_event.new_elements):
        assert element.id == i
    # Segment 2: ids 5-9 are all new.
    update_event = synchronizer.output()
    assert update_event is not None
    assert update_event.session_id == 0
    assert update_event.next_expected == 10
    assert len(update_event.new_elements) == 5
    for (i, element) in enumerate(update_event.new_elements):
        assert element.id == 5 + i
    # Segment 3 (7-10): only id 10 has not been seen yet.
    update_event = synchronizer.output()
    assert update_event is not None
    assert update_event.session_id == 0
    assert update_event.next_expected == 11
    assert len(update_event.new_elements) == 1
    assert update_event.new_elements[0].id == 10
    # Segment 4 (stale 0-3): nothing new, next_expected unchanged.
    update_event = synchronizer.output()
    assert update_event is not None
    assert update_event.session_id == 0
    assert update_event.next_expected == 11
    assert len(update_event.new_elements) == 0
    # Segment 5: new session resets the expected index to 0.
    update_event = synchronizer.output()
    assert update_event is not None
    assert update_event.session_id == 1
    assert update_event.next_expected == 0
    assert len(update_event.new_elements) == 0
    # Segment 6: ids 0-3 are new again within session 1.
    update_event = synchronizer.output()
    assert update_event is not None
    assert update_event.session_id == 1
    assert update_event.next_expected == 4
    assert len(update_event.new_elements) == 4
    for (i, element) in enumerate(update_event.new_elements):
        assert element.id == i
| 36.532847 | 77 | 0.675524 |
e7f2a75349f080e6ef9556951fc033879ae1e187 | 1,969 | py | Python | application/api.py | DonBlaine/OpenDoorData | 74740c6ff6dca893f0389963f2ef12de42a36829 | [
"MIT"
] | null | null | null | application/api.py | DonBlaine/OpenDoorData | 74740c6ff6dca893f0389963f2ef12de42a36829 | [
"MIT"
] | null | null | null | application/api.py | DonBlaine/OpenDoorData | 74740c6ff6dca893f0389963f2ef12de42a36829 | [
"MIT"
] | null | null | null | # file that contains db models to be exposed via a REST API
from models import room, survey, wifi_log, timetable, module # import db models
from app import app # import Flask app
from auth import auth # import Auth app to provide user authentication
from flask import request # import request object to parse json request data
from flask_peewee.rest import RestAPI,UserAuthentication, RestrictOwnerResource, AdminAuthentication
# create RestrictOwnerResource subclass which prevents users modifying another user's content
# NOTE(review): the subclass itself (SurveyResource, used below) is not
# visible in this chunk -- confirm it is defined in the full file.
# instantiate UserAuthentication
user_auth = UserAuthentication(auth)
# instantiate admin-only auth
admin_auth = AdminAuthentication(auth)
# instantiate our api wrapper, specifying user_auth as the default
api = RestAPI(app, default_auth=user_auth)
# register models so they are exposed via /api/<model>/
# room / wifi_log / timetable / module are read-only and admin-only.
api.register(room, auth=admin_auth, allowed_methods=['GET'])
# Surveys can be read and submitted by authenticated users; SurveyResource
# presumably restricts modification to the owner (see note above).
api.register(survey,SurveyResource,allowed_methods=['GET','POST'])
api.register(wifi_log, auth=admin_auth,allowed_methods=['GET'])
api.register(timetable, auth=admin_auth, allowed_methods=['GET'])
api.register(module, auth=admin_auth, allowed_methods=['GET'])
| 39.38 | 145 | 0.739462 |
e7f30077f490cc616f7f71217c5e89c086968e6a | 1,807 | py | Python | www/purple_admin/urls.py | SubminO/vas | 3096df12e637fc614d18cb3eef43c18be0775e5c | [
"Apache-2.0"
] | null | null | null | www/purple_admin/urls.py | SubminO/vas | 3096df12e637fc614d18cb3eef43c18be0775e5c | [
"Apache-2.0"
] | null | null | null | www/purple_admin/urls.py | SubminO/vas | 3096df12e637fc614d18cb3eef43c18be0775e5c | [
"Apache-2.0"
] | null | null | null | from django.urls import path
from purple_admin import views
urlpatterns = [
    # Admin cabinet landing page.
    path('', views.cabinet, name='admin_panel_cabinet'),
    # Route CRUD, backed by the generic cabinet_* views with {'type': 'route'}.
    path('route_list', views.cabinet_list, {'type': 'route'}, name='admin_panel_route_list'),
    path('route_add', views.cabinet_add, {'type': 'route'}, name='admin_panel_route_add'),
    path('route_edit/<int:pk>/', views.cabinet_edit, {'type': 'route'}, name='admin_panel_route_edit'),
    path('route_delete/<int:pk>/', views.cabinet_delete, {'type': 'route'}, name='admin_panel_route_delete'),
    # Route-platform CRUD, plus an AJAX endpoint for platform-type relations.
    path('route_platform_list', views.cabinet_list, {'type': 'route_platform'}, name='admin_panel_route_platform_list'),
    path('route_platform_add', views.cabinet_add, {'type': 'route_platform'}, name='admin_panel_route_platform_add'),
    path('route_platform_edit/<int:pk>/', views.cabinet_edit, {'type': 'route_platform'},
         name='admin_panel_route_platform_edit'),
    path('route_platform_delete/<int:pk>/', views.cabinet_delete, {'type': 'route_platform'},
         name='admin_panel_route_platform_delete'),
    path('route_relation_add_ajax', views.cabinet_add, {'type': 'route_platform_type'},
         name='admin_panel_route_platform_type_relation_ajax_add'),
    # Vehicle ("ts") CRUD.
    path('ts_list', views.cabinet_list, {'type': 'ts'}, name='admin_panel_ts_list'),
    path('ts_add', views.cabinet_add, {'type': 'ts'}, name='admin_panel_ts_add'),
    path('ts_edit/<int:pk>/', views.cabinet_edit, {'type': 'ts'}, name='admin_panel_ts_edit'),
    path('ts_delete/<int:pk>/', views.cabinet_delete, {'type': 'ts'}, name='admin_panel_ts_delete'),
    # Map-based route editor.
    path('map_route_editor_add', views.mapped_route_add, name='admin_panel_mapped_route_add'),
]
| 62.310345 | 120 | 0.716657 |
e7f60dd013f54bbf4fa181ff948f295cdc87e462 | 1,893 | py | Python | tests/mock_dbcli_config.py | bluelabsio/db-facts | fc8faa59f450a5cc00a0e50160ca57e47291b375 | [
"Apache-2.0"
] | 2 | 2020-11-25T20:11:50.000Z | 2020-12-12T18:39:09.000Z | tests/mock_dbcli_config.py | bluelabsio/db-facts | fc8faa59f450a5cc00a0e50160ca57e47291b375 | [
"Apache-2.0"
] | 5 | 2020-01-24T15:05:50.000Z | 2020-02-29T13:34:40.000Z | tests/mock_dbcli_config.py | bluelabsio/db-facts | fc8faa59f450a5cc00a0e50160ca57e47291b375 | [
"Apache-2.0"
] | 1 | 2021-05-16T17:07:40.000Z | 2021-05-16T17:07:40.000Z | mock_dbcli_config = {
    # Credential-lookup "methods"; each db entry below selects one of these
    # via its own 'exports_from' key.
    'exports_from': {
        'lpass': {
            'pull_lastpass_from': "{{ lastpass_entry }}",
        },
        'lpass_user_and_pass_only': {
            'pull_lastpass_username_password_from': "{{ lastpass_entry }}",
        },
        'my-json-script': {
            'json_script': [
                'some-custom-json-script'
            ]
        },
        # Deliberately empty -- presumably exercises the error path for a
        # method with no recognised lookup mechanism; confirm against tests.
        'invalid-method': {
        },
    },
    # Database definitions keyed by alias.
    'dbs': {
        'baz': {
            'exports_from': 'my-json-script',
        },
        'bing': {
            'exports_from': 'invalid-method',
        },
        'bazzle': {
            'exports_from': 'lpass',
            'lastpass_entry': 'lpass entry name'
        },
        'bazzle-bing': {
            'exports_from': 'lpass',
            'lastpass_entry': 'different lpass entry name'
        },
        # Same LastPass entry as 'bazzle' -- two aliases for one credential.
        'frazzle': {
            'exports_from': 'lpass',
            'lastpass_entry': 'lpass entry name'
        },
        # Combines a lookup method with extra literal exports.
        'frink': {
            'exports_from': 'lpass_user_and_pass_only',
            'lastpass_entry': 'lpass entry name',
            'jinja_context_name': 'standard',
            'exports': {
                'some_additional': 'export',
                'a_numbered_export': 123
            },
        },
        # Pure-exports entry rendered with the env/base64 jinja contexts.
        'gaggle': {
            'jinja_context_name': [
                'env',
                'base64',
            ],
            'exports': {
                'type': 'bigquery',
                'protocol': 'bigquery',
                'bq_account': 'bq_itest',
                'bq_service_account_json':
                    "{{ env('ITEST_BIGQUERY_SERVICE_ACCOUNT_JSON_BASE64') | b64decode }}",
                'bq_default_project_id': 'bluelabs-tools-dev',
                'bq_default_dataset_id': 'bq_itest',
            },
        },
    },
    # Organisation metadata.
    'orgs': {
        'myorg': {
            'full_name': 'MyOrg',
        },
    },
}
| 28.253731 | 86 | 0.43159 |
e7f7aa1ed993e5ba94893e2ddce56e42c0e3c43a | 586 | py | Python | java/test/src/main/resources/test_cross_language_invocation.py | hershg/ray | a1744f67fe954d8408c5b84e28ecccc130157f8e | [
"Apache-2.0"
] | 2 | 2017-12-15T19:36:55.000Z | 2019-02-24T16:56:06.000Z | java/test/src/main/resources/test_cross_language_invocation.py | hershg/ray | a1744f67fe954d8408c5b84e28ecccc130157f8e | [
"Apache-2.0"
] | 4 | 2019-03-04T13:03:24.000Z | 2019-06-06T11:25:07.000Z | java/test/src/main/resources/test_cross_language_invocation.py | hershg/ray | a1744f67fe954d8408c5b84e28ecccc130157f8e | [
"Apache-2.0"
] | 2 | 2017-10-31T23:20:07.000Z | 2019-11-13T20:16:03.000Z | # This file is used by CrossLanguageInvocationTest.java to test cross-language
# invocation.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
import ray
| 21.703704 | 78 | 0.723549 |
e7f8e7564db7dfcbe99ed0496a94327a80f2134b | 534 | py | Python | game_stats.py | DeqianBai/Project-Alien-Invasion | 3beac9eaba6609b8cecce848269b1ffe7b7bf493 | [
"Apache-2.0"
] | 4 | 2019-02-25T13:11:30.000Z | 2019-07-23T11:42:38.000Z | game_stats.py | DeqianBai/Project-Alien-Invasion | 3beac9eaba6609b8cecce848269b1ffe7b7bf493 | [
"Apache-2.0"
] | 1 | 2019-11-22T12:50:01.000Z | 2019-11-22T12:50:01.000Z | game_stats.py | DeqianBai/Project-Alien-Invasion | 3beac9eaba6609b8cecce848269b1ffe7b7bf493 | [
"Apache-2.0"
] | 3 | 2019-06-13T03:00:50.000Z | 2020-03-04T08:46:42.000Z | #/usr/bin/env python
# -*- coding:utf-8 -*-
# author:dabai time:2019/2/24
| 19.777778 | 52 | 0.567416 |
e7f94faea0813341ebda497d2d676c1095cd32fd | 4,464 | py | Python | ros/src/tl_detector/light_classification/carla.py | xiangjiang/Capstone_1 | 68e6d044041f5759f3596d6d547bd871afb1970b | [
"MIT"
] | null | null | null | ros/src/tl_detector/light_classification/carla.py | xiangjiang/Capstone_1 | 68e6d044041f5759f3596d6d547bd871afb1970b | [
"MIT"
] | null | null | null | ros/src/tl_detector/light_classification/carla.py | xiangjiang/Capstone_1 | 68e6d044041f5759f3596d6d547bd871afb1970b | [
"MIT"
] | null | null | null | import tensorflow as tf
from os import path
import numpy as np
from scipy import misc
from styx_msgs.msg import TrafficLight
import cv2
import rospy
import tensorflow as tf
| 43.764706 | 114 | 0.533602 |
e7fca0855906e19926ef43a259b033f9d1d6ddb0 | 542 | py | Python | transform/indexed_transform.py | cviaai/unsupervised-heartbeat-anomaly-detection | 3586bf505256463c030422607e95e4cee40fa086 | [
"MIT"
] | 2 | 2020-10-14T05:50:25.000Z | 2021-05-11T03:42:02.000Z | transform/indexed_transform.py | cviaai/unsupervised-heartbeat-anomaly-detection | 3586bf505256463c030422607e95e4cee40fa086 | [
"MIT"
] | null | null | null | transform/indexed_transform.py | cviaai/unsupervised-heartbeat-anomaly-detection | 3586bf505256463c030422607e95e4cee40fa086 | [
"MIT"
] | null | null | null | from typing import Tuple, List
from transform.transformer import TimeSeriesTransformer
import numpy as np
| 30.111111 | 84 | 0.695572 |
e7fca20cce05d1364eee53a17bec476012eb661d | 2,177 | py | Python | dropconnect/combine_pred_mod.py | zygmuntz/kaggle-cifar | 16936af9cf621d668c50491291e042a7849a1ac3 | [
"BSD-2-Clause"
] | 26 | 2015-01-12T18:00:50.000Z | 2020-12-19T23:49:16.000Z | dropconnect/combine_pred_mod.py | zygmuntz/kaggle-cifar | 16936af9cf621d668c50491291e042a7849a1ac3 | [
"BSD-2-Clause"
] | null | null | null | dropconnect/combine_pred_mod.py | zygmuntz/kaggle-cifar | 16936af9cf621d668c50491291e042a7849a1ac3 | [
"BSD-2-Clause"
] | 26 | 2015-01-10T22:35:01.000Z | 2020-01-15T08:56:53.000Z | #------------------------------------------
# this script combine result of different
# nets and report final result
#------------------------------------------
import sys
import numpy as np
from util import pickle, unpickle
if __name__ == "__main__":
main()
| 30.661972 | 87 | 0.592559 |
e7fcb403c125d5647a5fdcb4339ffbade5bc81e8 | 1,556 | py | Python | goless/__init__.py | ctismer/goless | 02168a40902691264b32c7da6f453819ed7a91cf | [
"Apache-2.0"
] | 1 | 2015-05-28T03:12:47.000Z | 2015-05-28T03:12:47.000Z | goless/__init__.py | ctismer/goless | 02168a40902691264b32c7da6f453819ed7a91cf | [
"Apache-2.0"
] | null | null | null | goless/__init__.py | ctismer/goless | 02168a40902691264b32c7da6f453819ed7a91cf | [
"Apache-2.0"
] | null | null | null | """
``goless`` introduces go-like channels and select to Python,
built on top of Stackless Python (and maybe one day gevent).
Use :func:`goless.chan` to create a synchronous or buffered channel.
Use :func:`goless.select` like you would the ``Select`` function in Go's reflect package
(since Python lacks a switch/case statement, replicating Go's select statement syntax
wasn't very effective).
"""
import logging
import sys
import traceback
from .backends import current as _be
# noinspection PyUnresolvedReferences
from .channels import chan, ChannelClosed
# noinspection PyUnresolvedReferences
from .selecting import dcase, rcase, scase, select
# Package version, exposed both as a tuple and as a dotted string.
version_info = 0, 0, 1
version = '.'.join([str(v) for v in version_info])
def on_panic(etype, value, tb):
    """
    Called when there is an unhandled error in a goroutine.
    By default, logs the formatted traceback and exits the process.

    :param etype: Exception type.
    :param value: Exception instance.
    :param tb: Traceback object.
    """
    # Bug fix: format_exception returns a *list* of strings; join it so the
    # log shows a readable traceback instead of the list's repr.
    logging.critical(''.join(traceback.format_exception(etype, value, tb)))
    _be.propagate_exc(SystemExit, 1)
def go(func, *args, **kwargs):
    """
    Run a function in a new tasklet, like a goroutine.
    If the goroutine raises an unhandled exception (*panics*),
    the :func:`goless.on_panic` will be called,
    which by default logs the error and exits the process.
    :param func: Callable to run in the new tasklet.
    :param args: Positional arguments to ``func``.
    :param kwargs: Keyword arguments to ``func``.
    """
    # NOTE(review): ``safe_wrapped`` is not defined in this chunk; presumably
    # it is a wrapper (defined in the full file) that calls
    # ``func(*args, **kwargs)`` and routes exceptions to on_panic -- confirm.
    _be.start(safe_wrapped, func)
| 30.509804 | 88 | 0.703728 |
e7fcf109cce1b1c57ca682a8b6f5606efb8ee46b | 643 | py | Python | data/test1.py | moses-alexander/simple-python-parser | a15f53a86d61fa5d98f5ade149d8c3a178ebfb50 | [
"BSD-3-Clause"
] | null | null | null | data/test1.py | moses-alexander/simple-python-parser | a15f53a86d61fa5d98f5ade149d8c3a178ebfb50 | [
"BSD-3-Clause"
] | null | null | null | data/test1.py | moses-alexander/simple-python-parser | a15f53a86d61fa5d98f5ade149d8c3a178ebfb50 | [
"BSD-3-Clause"
] | null | null | null | 1+2
3+5
7+8
6>7
abs(-3)
if 8 < 9: min(3,5)
else 4 < 5: abs(-2)
else 4 > 5: max(3, 7)
round(2.1)
round(3.6)
len("jfdgge")
type(4)
any(1, 3, 4)
any(0.0, 0.0, 0.0)
all("abc", "a")
all(0, 1)
bin(45)
lower("ABC")
upper("abc")
join("abc", "abc")
bool(0)
bool("abc")
ord('r')
chr(100)
str(130)
globals()
help()
hex(15)
oct(27)
pow(4,2)
sum(1,2, 3)
id(4)
id("abc")
not False
none()
none(0)
# breaks here ... for now
b = 1
print("a", b); print();
a = 5
#def append_element(self, val): newest =__Node(val);newestprev = self__trailerprev;self__trailerprevnext = newest;self__trailerprev = newest;newestnext = self__trailer;self__size = self__size + 1;
| 14.613636 | 196 | 0.62986 |
e7fd1190b6509c18afc6e8dc44570e03220fb1f1 | 235 | py | Python | python/funciones2.py | Tai-Son/Python-Chile | fd3aa28304caa806ee334686adbb029e81514912 | [
"MIT"
] | null | null | null | python/funciones2.py | Tai-Son/Python-Chile | fd3aa28304caa806ee334686adbb029e81514912 | [
"MIT"
] | null | null | null | python/funciones2.py | Tai-Son/Python-Chile | fd3aa28304caa806ee334686adbb029e81514912 | [
"MIT"
] | null | null | null | # Practica de funciones
#! /usr/bin/python
# -*- coding: iso-8859-15
# Programa que usa la funcion f
n = int(input("Ingrese nmero: "))
for i in range(n):
y = f(i)
print (i,y)
| 13.823529 | 34 | 0.557447 |
e7fdb3f99099bfa047bbe790a686f91e9a3ed33c | 1,070 | py | Python | setup.py | br-g/pyroaman | 86d9a4771e4e0657c96e1c45dacbbde579e527d9 | [
"MIT"
] | 2 | 2021-06-16T01:54:36.000Z | 2021-11-08T13:00:39.000Z | setup.py | br-g/pyroaman | 86d9a4771e4e0657c96e1c45dacbbde579e527d9 | [
"MIT"
] | null | null | null | setup.py | br-g/pyroaman | 86d9a4771e4e0657c96e1c45dacbbde579e527d9 | [
"MIT"
] | 1 | 2021-04-24T17:02:26.000Z | 2021-04-24T17:02:26.000Z | from distutils.core import setup
from setuptools import find_packages
# The PyPI long description is taken verbatim from the README.
with open('README.md', 'r') as fh:
    long_description = fh.read()
# Package metadata for pyroaman ("Roam Research with Python").
setup(
    name='pyroaman',
    version='0.1.1',
    license='MIT',
    description='Roam Research with Python',
    author = 'Bruno Godefroy',
    author_email='brgo@mail.com',
    url = 'https://github.com/br-g/pyroaman',
    download_url = 'https://github.com/br-g/pyroaman/archive/v0.1.1.tar.gz',
    keywords = ['Roam Research'],
    long_description=long_description,
    long_description_content_type='text/markdown',
    packages=find_packages(exclude=['tests']),
    python_requires='>=3.6',
    # NOTE(review): 'dataclasses' is a backport only needed on Python 3.6,
    # and 'pathlib' has been in the stdlib since 3.4 -- the PyPI 'pathlib'
    # pin looks unnecessary; confirm before dropping either.
    install_requires=[
        'cached_property',
        'dataclasses',
        'loguru',
        'tqdm',
        'pathlib',
    ],
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3.6',
    ],
)
| 29.722222 | 76 | 0.627103 |
e7ff7ca7cdc4e23499b3182976ee2bee8f1569cf | 974 | py | Python | pgel_sat.py | AndrewIjano/pgel-sat | 25b6ef5922a9fa79bbcf9896cf9a5eefd9925e45 | [
"MIT"
] | null | null | null | pgel_sat.py | AndrewIjano/pgel-sat | 25b6ef5922a9fa79bbcf9896cf9a5eefd9925e45 | [
"MIT"
] | null | null | null | pgel_sat.py | AndrewIjano/pgel-sat | 25b6ef5922a9fa79bbcf9896cf9a5eefd9925e45 | [
"MIT"
] | null | null | null | import sys
from pgel_sat import ProbabilisticKnowledgeBase, solve
import argparse
if __name__ == '__main__':
main()
| 23.190476 | 79 | 0.637577 |
e7ffb07502a866daacad535d6c162c3df47ed0fa | 1,075 | py | Python | 001-050/029-divide-two-integers.py | bbram10/leetcode-master | 565f5f0cb3c9720e59a78ddf2e5e6e829c70bac6 | [
"MIT"
] | 134 | 2017-01-16T11:17:44.000Z | 2022-03-16T17:13:26.000Z | 001-050/029-divide-two-integers.py | bbram10/leetcode-master | 565f5f0cb3c9720e59a78ddf2e5e6e829c70bac6 | [
"MIT"
] | 1 | 2019-11-18T02:10:51.000Z | 2019-11-18T02:10:51.000Z | 001-050/029-divide-two-integers.py | bbram10/leetcode-master | 565f5f0cb3c9720e59a78ddf2e5e6e829c70bac6 | [
"MIT"
] | 54 | 2017-07-17T01:24:00.000Z | 2022-02-06T05:28:44.000Z | """
STATEMENT
Divide two integers without using multiplication, division and mod operator.
CLARIFICATIONS
- Do I have to handle 32-bit integer overflow? Yes, return the MAX_INT in that case.
- Can the divisor be zero? Yes, return the MAX_INT.
EXAMPLES
34/3 -> 11
COMMENTS
- This solution is by tusizi in Leetcode (picked up from https://discuss.leetcode.com/topic/8714/clear-python-code)
"""
def divide(dividend, divisor):
"""
:type dividend: int
:type divisor: int
:rtype: int
"""
sign = (dividend < 0) is (divisor < 0)
dividend, divisor = abs(dividend), abs(divisor)
INT_MIN, INT_MAX = -2147483648, 2147483647
if (not divisor) or (dividend < INT_MIN and divisor == -1):
return INT_MAX
to_return = 0
while dividend >= divisor:
temp, i = divisor, 1
while dividend >= temp:
dividend -= temp
to_return += i
i <<= 1
temp <<= 1
if not sign:
to_return = -to_return
return min(max(INT_MIN, to_return), INT_MAX)
| 27.564103 | 115 | 0.613953 |
f000c275681d6eb860ca8edd89619bd04e3efa9d | 508 | py | Python | conv/setup.py | hughpyle/GW-BASIC | f0c1ef3c9655b36cd312d18e4620bb076f03afd3 | [
"MIT"
] | 26 | 2020-05-23T18:09:05.000Z | 2022-01-30T10:07:04.000Z | conv/setup.py | hughpyle/GW-BASIC | f0c1ef3c9655b36cd312d18e4620bb076f03afd3 | [
"MIT"
] | 1 | 2020-06-25T06:20:01.000Z | 2020-06-25T06:20:01.000Z | conv/setup.py | hughpyle/GW-BASIC | f0c1ef3c9655b36cd312d18e4620bb076f03afd3 | [
"MIT"
] | 4 | 2020-05-23T12:36:44.000Z | 2022-01-16T00:20:20.000Z | from setuptools import setup, find_packages
"""
https://tia.mat.br/posts/2020/06/21/converting-gwbasic-to-z80.html
"""
setup(
name="z80conv",
version='0.0.1',
author="lp",
description="Porting GW-BASIC from 8086 back to the Z80",
license="GPLv2",
packages=find_packages(),
long_description="Porting GW-BASIC from 8086 back to the Z80",
install_requires=[],
tests_require=['pytest'],
entry_points = {
'console_scripts': ['z80conv=z80conv.conv:main'],
}
)
| 24.190476 | 66 | 0.661417 |
f000f73c7ff791dd3f202fae2e9cd2cdf7773f23 | 8,046 | py | Python | hera_cc_utils/catalog.py | pagano-michael/hera_cc_utils | 2d61f8ab0bb4d75b9a2e5891450256195851db08 | [
"MIT"
] | null | null | null | hera_cc_utils/catalog.py | pagano-michael/hera_cc_utils | 2d61f8ab0bb4d75b9a2e5891450256195851db08 | [
"MIT"
] | 6 | 2021-09-08T21:28:12.000Z | 2021-09-15T18:18:33.000Z | hera_cc_utils/catalog.py | pagano-michael/hera_cc_utils | 2d61f8ab0bb4d75b9a2e5891450256195851db08 | [
"MIT"
] | 1 | 2021-12-01T15:29:55.000Z | 2021-12-01T15:29:55.000Z | # -*- coding: utf-8 -*-
# Copyright (c) 2021 The HERA Collaboration
# Licensed under the MIT License
"""Utilities for dealing with galaxy/QSO catalogs."""
import numpy as np
import matplotlib.pyplot as plt
from astropy.coordinates import SkyCoord
from .util import deg_per_hr
_xshooter_ref = "https://ui.adsabs.harvard.edu/abs/2020ApJ...905...51S/abstract"
# VIKING
_viking_ref1 = "https://ui.adsabs.harvard.edu/abs/2013ApJ...779...24V/abstract"
_viking_ref2 = "https://ui.adsabs.harvard.edu/abs/2015MNRAS.453.2259V/abstract"
_viking = {
"J2348-3054": {
"ra": "23h48m33.34s",
"dec": "-30d54m10.0s",
"z": 6.886,
"ref": _viking_ref1,
},
"J0109-3047": {
"ra": "01h09m53.13s",
"dec": "-30d47m26.3s",
"z": 6.745,
"ref": _viking_ref1,
},
"J0305-3150": {
"ra": "03h05m16.92s",
"dec": "-31d50m56.0s",
"z": 6.604,
"ref": _viking_ref1,
},
"J0328-3253": {
"ra": "03h28m35.511s",
"dec": "-32d53m22.92s",
"z": 5.860,
"ref": _viking_ref2,
},
"J0046-2837": {
"ra": "00h46m23.645s",
"dec": "-28d37m47.34s",
"z": 5.9926,
"ref": _xshooter_ref,
},
"J2211-3206": {
"ra": "22h11m12.391s",
"dec": "-32d06m12.95s",
"z": 6.3394,
"ref": _xshooter_ref,
},
"J2318-3029": {
"ra": "23h18m33.103s",
"dec": "-30d29m33.36s",
"z": 6.1456,
"ref": _xshooter_ref,
},
"J2348-3054_xshooter": {
"ra": "23h48m33.336s",
"dec": "-30d54m10.24s",
"z": 6.9007,
"ref": _xshooter_ref,
},
}
# Pan-STARRS1
_ps1_ref1 = "https://ui.adsabs.harvard.edu/abs/2014AJ....148...14B/abstract"
_ps1_ref2 = "https://ui.adsabs.harvard.edu/abs/2017ApJ...849...91M/abstract"
_ps1 = {
"PSO 231-20": {"ra": "231.6576", "dec": "-20.8335", "z": 6.5864, "ref": _ps1_ref2},
"PSO J037.9706-28.8389": {
"ra": "02h31m52.96s",
"dec": "-28d50m20.1s",
"z": 5.99,
"ref": _ps1_ref1,
},
"PSO J065.4085-26.9543": {
"ra": "04h21m38.049s",
"dec": "-26d57m15.61s",
"z": 6.1871,
"ref": _xshooter_ref,
},
}
# Banados+ 2016 https://ui.adsabs.harvard.edu/abs/2016ApJS..227...11B/abstract
# has table of all z > 5.6 quasars known at that point (March 2016).
# https://ned.ipac.caltech.edu/inrefcode?search_type=Search&refcode=2016ApJS..227...11B
# VLT ATLAS
# https://ui.adsabs.harvard.edu/abs/2015MNRAS.451L..16C/abstract
_atlas_ref1 = "https://ui.adsabs.harvard.edu/abs/2015MNRAS.451L..16C/abstract"
_atlas_ref2 = "https://ui.adsabs.harvard.edu/abs/2018MNRAS.478.1649C/abstract"
_atlas = {
"J025.6821-33.4627": {
"ra": "025.6821",
"dec": "-33.4627",
"z": 6.31,
"ref": _atlas_ref1,
},
"J332.8017-32.1036": {
"ra": "332.8017",
"dec": "-32.1036",
"z": 6.32,
"ref": _atlas_ref2,
},
}
# VHS-DES
_ps1_vhs_des = "https://ui.adsabs.harvard.edu/abs/2019MNRAS.487.1874R/abstract"
_des = {
"VDES J0020-3653": {
"ra": "00h20m31.47s",
"dec": "-36d53m41.8s",
"z": 6.5864,
"ref": _ps1_vhs_des,
},
}
_yang = "https://ui.adsabs.harvard.edu/abs/2020ApJ...904...26Y/abstract"
_decarli = "https://ui.adsabs.harvard.edu/abs/2018ApJ...854...97D/abstract"
_other = {
"J01423327": {"ra": "0142", "dec": "-3327", "z": 6.3379, "ref": _yang},
"J01482826": {"ra": "0148", "dec": "-2826", "z": 6.54, "ref": _yang},
"J20023013": {"ra": "2002", "dec": "-3013", "z": 6.67, "ref": _yang},
"J23183113": {
"ra": "23h18m18.351s",
"dec": "-31d13m46.35s",
"z": 6.444,
"ref": _decarli,
},
}
_qso_catalogs = {"viking": _viking, "panstarrs": _ps1, "atlas": _atlas, "other": _other}
| 28.83871 | 88 | 0.527716 |
f002326f1a28c7e060443caad098a4b8ad312c0c | 216 | py | Python | src/myutils/__init__.py | yyHaker/TextClassification | dc3c5ffe0731609c8f0c7a18a4daa5f149f83e9f | [
"MIT"
] | 3 | 2019-06-08T14:11:56.000Z | 2020-05-26T15:08:23.000Z | src/myutils/__init__.py | yyHaker/TextClassification | dc3c5ffe0731609c8f0c7a18a4daa5f149f83e9f | [
"MIT"
] | null | null | null | src/myutils/__init__.py | yyHaker/TextClassification | dc3c5ffe0731609c8f0c7a18a4daa5f149f83e9f | [
"MIT"
] | null | null | null | #!/usr/bin/python
# coding:utf-8
"""
@author: yyhaker
@contact: 572176750@qq.com
@file: __init__.py
@time: 2019/3/9 15:41
"""
from .util import *
from .functions import *
from .nn import *
from .attention import *
| 14.4 | 26 | 0.685185 |
f0023ff5d4658332709a6d9a26c8392cbad88994 | 1,236 | py | Python | configs/config.py | AcordUch/open-heartmagic | aa76b098cc19b2ac6d1bc149461c421fcbbd3301 | [
"MIT"
] | null | null | null | configs/config.py | AcordUch/open-heartmagic | aa76b098cc19b2ac6d1bc149461c421fcbbd3301 | [
"MIT"
] | null | null | null | configs/config.py | AcordUch/open-heartmagic | aa76b098cc19b2ac6d1bc149461c421fcbbd3301 | [
"MIT"
] | null | null | null | from configparser import ConfigParser
from os import path
_config: ConfigParser = ConfigParser()
_path: str = path.join(path.dirname(__file__), "config.ini")
if not path.exists(_path):
create_config()
print(" configs.ini , api ")
exit()
_config.read(_path)
API_ID = _config['Telegram']['api_id']
API_HASH = _config['Telegram']['api_hash']
USERNAME: str = _config['Telegram']['username']
SESSION_STRING = (None
if _config['Telegram']['session_string'] == "None" or
_config['Telegram']['session_string'] == ""
else _config['Telegram']['session_string'])
| 32.526316 | 71 | 0.673139 |
f0057eec2984c2e77cf59e2e17b878ea511d289d | 609 | py | Python | Python/squirrel-simulation.py | xiaohalo/LeetCode | 68211ba081934b21bb1968046b7e3c1459b3da2d | [
"MIT"
] | 9 | 2019-06-30T07:15:18.000Z | 2022-02-10T20:13:40.000Z | Python/squirrel-simulation.py | pnandini/LeetCode | e746c3298be96dec8e160da9378940568ef631b1 | [
"MIT"
] | 1 | 2018-07-10T03:28:43.000Z | 2018-07-10T03:28:43.000Z | Python/squirrel-simulation.py | pnandini/LeetCode | e746c3298be96dec8e160da9378940568ef631b1 | [
"MIT"
] | 9 | 2019-01-16T22:16:49.000Z | 2022-02-06T17:33:41.000Z | # Time: O(n)
# Space: O(1)
| 26.478261 | 69 | 0.489327 |
f0068035c6bebf4ad8dfcbde5996ed5461d03f51 | 345 | py | Python | scripts/utils/merge.py | GabrielTavernini/TelegramMap | 96879d037a3e65b555a8f13f4f468645a02cf1f2 | [
"MIT"
] | 3 | 2021-02-19T21:43:49.000Z | 2022-03-30T07:50:06.000Z | scripts/utils/merge.py | GabrielTavernini/TelegramMap | 96879d037a3e65b555a8f13f4f468645a02cf1f2 | [
"MIT"
] | null | null | null | scripts/utils/merge.py | GabrielTavernini/TelegramMap | 96879d037a3e65b555a8f13f4f468645a02cf1f2 | [
"MIT"
] | 2 | 2021-02-20T16:50:48.000Z | 2022-01-25T15:15:07.000Z | import pandas as pd
import sys
from dotenv import load_dotenv
load_dotenv()
src = pd.read_csv(sys.argv[1])
dst = pd.read_csv(os.getenv('FILE_PATH'))
fdf = pd.concat([dst, src])
fdf = fdf[~((fdf['user'].duplicated(keep='first')) & (fdf['user']!='Point'))]
fdf = fdf[~fdf.duplicated(keep='first')]
fdf.to_csv(os.getenv('FILE_PATH'), index=False) | 28.75 | 77 | 0.692754 |
f00829ce69ca21d2a75d867579f5065b5c43824d | 395 | py | Python | lib/locator/location_test.py | alt-locator/address-locator-python | 9f052dc7721223bde926723648790a17b06e9d7a | [
"MIT"
] | null | null | null | lib/locator/location_test.py | alt-locator/address-locator-python | 9f052dc7721223bde926723648790a17b06e9d7a | [
"MIT"
] | null | null | null | lib/locator/location_test.py | alt-locator/address-locator-python | 9f052dc7721223bde926723648790a17b06e9d7a | [
"MIT"
] | null | null | null | import location
import unittest
| 30.384615 | 78 | 0.698734 |
f0089faf3980c65d96a9b87de2dfb4cc044e17a8 | 41,489 | py | Python | ProjectiveClusteringCoreset.py | muradtuk/ProjectiveClusteringCoresets | 2dcb59723934dc545da9ff84a1f71eb5e02b49d1 | [
"MIT"
] | null | null | null | ProjectiveClusteringCoreset.py | muradtuk/ProjectiveClusteringCoresets | 2dcb59723934dc545da9ff84a1f71eb5e02b49d1 | [
"MIT"
] | null | null | null | ProjectiveClusteringCoreset.py | muradtuk/ProjectiveClusteringCoresets | 2dcb59723934dc545da9ff84a1f71eb5e02b49d1 | [
"MIT"
] | null | null | null | """*****************************************************************************************
MIT License
Copyright (c) 2022 Murad Tukan, Xuan Wu, Samson Zhou, Vladimir Braverman, Dan Feldman
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*****************************************************************************************"""
import Utils
from helper_functions import Fast_Caratheodory
import numpy as np
from scipy.optimize import linprog
from numpy import linalg as la
from scipy.linalg import null_space
from numpy.linalg import matrix_rank
from sklearn.decomposition import TruncatedSVD
import time
######################################################## Caratheodory ##################################################
def computeInitialWeightVector(P, p):
    """
    Solve the linear program dot(P^T, x) = p, sum(x) = 1, x >= 0, i.e., express p as a
    convex combination of the rows of P.

    :param P: A numpy matrix whose rows are points.
    :param p: A numpy array representing a point inside the convex hull of the rows of P.
    :return: A numpy array of n non-negative weights with respect to each row of P.
    """
    N = P.shape[0]  # number of rows of P
    # Equality constraints: P^T w = p together with sum(w) = 1, stacked as one system.
    Q = P.T
    Q = np.vstack((Q, np.ones((1, N))))
    b = np.hstack((p, 1))
    res = linprog(np.ones((N,)), A_eq=Q, b_eq=b, options={'maxiter': int(1e7), 'tol': 1e-10})
    w = res.x
    # BUG FIX: the original `assert (cond, msg)` asserted a non-empty tuple, which is
    # always True; the check below actually validates that the weights reproduce p.
    assert np.linalg.norm(np.dot(P.T, w) - p) <= 1e-9, np.linalg.norm(np.dot(P.T, w) - p)
    return w
def attainCaratheodorySet(P, p):
    """
    Return at most d+1 indices of rows of P whose convex combination yields p, where d
    denotes the dimension of the rows of P. It calls the algorithms implemented by
    Alaa Maalouf, Ibrahim Jubran and Dan Feldman at
    "Fast and Accurate Least-Mean-Squares Solvers".

    :param P: A numpy matrix whose rows are points.
    :param p: A numpy array denoting a point.
    :return: The indices of points from P of which p is a convex combination.
    """
    d = P.shape[1]
    u = computeInitialWeightVector(P, p)  # compute initial weight vector
    if np.count_nonzero(u) > (d + 1):  # if the number of positive weights exceeds d+1
        u = Fast_Caratheodory(P, u.flatten(), False)  # sparsify to at most d+1 positive weights
    # BUG FIX: the original `assert (cond, msg)` asserted a non-empty tuple, which is
    # always True; the check below actually validates that the weights reproduce p.
    assert np.linalg.norm(p - np.dot(P.T, u)) <= 1e-9, np.linalg.norm(p - np.dot(P.T, u))
    return np.where(u != 0)[0]
############################################################ AMVEE #####################################################
def isPD(B):
    """Return True iff *B* admits a Cholesky factorization, i.e., is positive-definite."""
    try:
        la.cholesky(B)
    except la.LinAlgError:
        return False
    return True
def nearestPD(A):
    """
    Return the nearest positive-definite matrix to *A*.

    A Python/Numpy port of John D'Errico's `nearestSPD` MATLAB code [1], which
    credits [2].

    [1] https://www.mathworks.com/matlabcentral/fileexchange/42885-nearestspd
    [2] N.J. Higham, "Computing a nearest symmetric positive semidefinite
    matrix" (1988): https://doi.org/10.1016/0024-3795(88)90223-6
    """
    # Symmetrize A, then average the symmetric part with its polar factor (Higham's step).
    sym_part = (A + A.T) / 2
    _, sing_vals, Vt = la.svd(sym_part)
    polar_factor = np.dot(Vt.T, np.dot(np.diag(sing_vals), Vt))
    candidate = (sym_part + polar_factor) / 2
    candidate = (candidate + candidate.T) / 2  # enforce exact symmetry

    if isPD(candidate):
        return candidate

    # The above is different from [1]. MATLAB's `chol` accepts matrices with exactly
    # 0-eigenvalue, whereas Numpy's will not, so where [1] uses `eps(mineig)` we use
    # `np.spacing(norm(A))`. CAVEAT: our `spacing` will be much larger than `eps(mineig)`
    # (typically ~1e-16 vs ~1e-34), but in practice both ways converge.
    spacing = np.spacing(la.norm(A))
    identity = np.eye(A.shape[0])
    k = 1
    while not isPD(candidate):
        # Nudge the spectrum upwards until the Cholesky factorization succeeds.
        min_eig = np.min(np.real(la.eigvals(candidate)))
        candidate += identity * (-min_eig * k ** 2 + spacing)
        k += 1
    return candidate
def computeAxesPoints(E, C):
    """
    Find the endpoints of the axes of the ellipsoid given by the p.s.d. matrix *E*
    centered at *C*.

    :param E: A numpy p.s.d. matrix representing an ellipsoid.
    :param C: A numpy array denoting the center of the ellipsoid.
    :return: A numpy matrix whose rows are the 2d axis endpoints of the ellipsoid.
    """
    if not isPD(E):  # guard against numerical issues before factorizing
        E = nearestPD(E)
    # The SVD of E yields the rotation (V) and the squared inverse axis lengths (D).
    _, D, V = np.linalg.svd(E, full_matrices=True)
    # Axis endpoints of the origin-centered ellipsoid: rows of V.T scaled by 1/sqrt(D).
    origin_centered = np.multiply(1.0 / np.sqrt(D[:, np.newaxis]), V.T)
    # Translate both endpoints of every axis by the center C.
    return np.vstack((origin_centered + C.flatten(), -origin_centered + C.flatten()))
def volumeApproximation(P):
    """
    Implementation of Algorithm 4.1 at the paper "On Khachiyan's Algorithm for the
    Computation of Minimum Volume Enclosing Ellipsoids" by Michael J. Todd and
    E. Alper Yildirim. It serves to compute a set of at most 2*d points which will be
    used for computing an initial ellipsoid.

    :param P: A numpy matrix whose rows are points.
    :return: A list of at most 2 * d indices of points from P.
    """
    basis = None
    basis_points = []
    n, d = P.shape  # BUG FIX: was `n, d = P`, which unpacked rows of P, not its dimensions
    if n <= 2 * d:
        # if the number of points is at most 2*d, then return all of their indices
        return [i for i in range(n)]
    v = np.random.randn(d)  # start with a random direction
    # BUG FIX: made the first-iteration guard explicit instead of relying on the value of
    # np.linalg.matrix_rank(None); iterate until the chosen directions span REAL^d.
    while basis is None or np.linalg.matrix_rank(basis) < d:
        if basis is not None:  # if we already have computed basis points
            if basis.shape[1] == d:
                # if this line is reached then it means that there is numerical instability
                print('Numerical Issues!')
                _, _, V = np.linalg.svd(basis[:, :-1], full_matrices=True)
                return list(range(n))
            orth_basis = null_space(basis.T)  # get the orthogonal complement of the basis
            v = orth_basis[:, 0] if orth_basis.ndim > 1 else orth_basis  # next direction
        Q = np.dot(P, v.T)  # the projection of each point onto the current direction
        if len(basis_points) > 0:  # points already chosen must not be chosen again
            Q[basis_points] = np.nan
        # BUG FIX: the original recomputed np.dot(P, v.T) inside nanargmax/nanargmin,
        # discarding the nan masking above; use the masked projections Q instead.
        p_alpha = np.nanargmax(Q)  # index of row with the largest non-nan projection
        p_beta = np.nanargmin(Q)  # index of row with the smallest non-nan projection
        # the next direction is along the segment between the two extreme points
        v = np.expand_dims(P[p_beta, :] - P[p_alpha, :], 1)
        if basis is None:  # if no basis was computed
            basis = v / np.linalg.norm(v)
        else:  # add the normalized direction to the basis
            basis = np.hstack((basis, v / np.linalg.norm(v, 2)))
        basis_points.append(p_alpha)  # add the index of the point with largest projection
        basis_points.append(p_beta)  # add the index of the point with smallest projection
    return basis_points
def computemahalanobisDistance(Q, ellip):
    """
    Compute the Mahalanobis distance of every row of *Q* with respect to the p.s.d.
    matrix *ellip*, i.e., the quadratic form q^T * ellip * q per row q.

    :param Q: A numpy matrix whose rows are points.
    :param ellip: A numpy array representing a p.s.d. matrix (an ellipsoid).
    :return: A numpy array holding the Mahalanobis distance of each row of Q to ellip.
    """
    # Row-wise quadratic form via einsum, avoiding the full matrix Q @ ellip @ Q.T.
    return np.einsum("ij,ij->i", Q.dot(ellip), Q)
def computeEllipsoid(P, weights):
    """
    Compute the ellipsoid (MVEE of P) induced by the given weight vector.

    :param P: A numpy matrix whose rows are points.
    :param weights: A numpy array of weights with respect to the rows of P.
    :return: A tuple of:
        - a p.s.d. matrix representing the MVEE of P;
        - the center of that ellipsoid.
    """
    if weights.ndim == 1:  # work with a column vector of weights
        weights = np.expand_dims(weights, 1)
    center = np.dot(P.T, weights)  # the weighted mean of the points is the MVEE center
    d = P.shape[1]
    support = np.where(weights.flatten() > 0.0)[0]  # only positively-weighted points matter
    Q = P[support, :]
    pos_weights = weights[support, :]
    # The ellipsoid's p.s.d. matrix: 1/d times the inverse of the weighted, centered
    # second-moment matrix of the supported points.
    second_moment = np.dot(np.multiply(Q, pos_weights).T, Q)
    centering = np.multiply.outer(center.T.ravel(), center.T.ravel())
    ellipsoid = 1.0 / d * np.linalg.inv(second_moment - centering)
    return ellipsoid, center
def enlargeByTol(ellipsoid):
    """
    Enlarge the MVEE (the ellipsoid) by a factor of (1 + Utils.TOL).

    :param ellipsoid: A numpy matrix representing a p.s.d. matrix.
    :return: An enlarged version of ellipsoid.
    """
    # Dividing the p.s.d. matrix by the squared factor inflates every axis by (1 + TOL).
    scale = (1 + Utils.TOL) ** 2.0
    return ellipsoid / scale
def getContainedEllipsoid(ellipsoid):
    """
    Dilate the MVEE so that the resulting ellipsoid is contained in the convex hull of
    the points it encloses.

    :param ellipsoid: A p.s.d. matrix which represents the MVEE of a set of points.
    :return: A scaled version of the MVEE that is inscribed in the convex hull of the points.
    """
    d = ellipsoid.shape[1]
    # Multiplying the p.s.d. matrix by (d * (1 + TOL))^2 shrinks every axis accordingly,
    # yielding the inscribed ellipsoid.
    return ellipsoid * d ** 2 * (1 + Utils.TOL) ** 2
def computeEllipInHigherDimension(Q, weights):
    """
    Compute the origin-centered ellipsoid in the lifted (d+1)-dimensional space, using
    only the lifted points with positive weight.

    :param Q: A numpy matrix whose rows are the lifted points.
    :param weights: A numpy array of weights with respect to each lifted point in Q.
    :return: A numpy p.s.d. matrix equal to sum_i w_i * q_i * q_i^T over the support.
    """
    support = np.where(weights > 0.0)[0]  # only points with positive weight contribute
    # Scale each supported point by the square root of its weight so that the sum of
    # outer products below equals the weighted second-moment matrix.
    scaled = np.multiply(Q[support, :], np.expand_dims(np.sqrt(weights[support]), 1))
    return np.sum(np.einsum('bi,bo->bio', scaled, scaled), axis=0)
def optimalityCondition(d, Q, ellip, weights):
    """
    Check whether the current ellipsoid attains the MVEE of the lifted points, in the
    sense of the Todd & Yildirim algorithm.

    :param d: The dimension of the (unlifted) points.
    :param Q: A numpy matrix whose rows are the lifted points.
    :param ellip: A numpy array representing a p.s.d. matrix.
    :param weights: A numpy array of weights with respect to the rows of Q.
    :return: A tuple of:
        - a boolean stating whether the desired MVEE has been attained;
        - the Mahalanobis distances of the rows of Q to ellip.
    """
    support = np.where(weights > 0)[0]  # indices of the points with positive weights
    dists = computemahalanobisDistance(Q, ellip)  # distances of all rows of Q to ellip
    # The MVEE is attained when every distance is at most (1 + TOL) * (d + 1) and every
    # supported point has distance at least (1 - TOL) * (d + 1).
    upper_ok = np.all(dists <= (1.0 + Utils.TOL) * (d + 1))
    lower_ok = np.all(dists[support] >= (1.0 - Utils.TOL) * (d + 1))
    return upper_ok and lower_ok, dists
def yilidrimAlgorithm(P):
    """
    Implementation of Algorithm 4.2 at the paper "On Khachiyan's Algorithm for the
    Computation of Minimum Volume Enclosing Ellipsoids" by Michael J. Todd and
    E. Alper Yildirim. It serves to compute an MVEE of P faster than Khachiyan's
    algorithm.

    :param P: A numpy matrix whose rows are points.
    :return: The MVEE ellipsoid of P (a p.s.d. matrix and its center).
    """
    n, d = P.shape
    Q = np.hstack((P, np.ones((n, 1))))  # lift the points to homogeneous coordinates
    chosen_indices = volumeApproximation(P)  # compute an initial set of points which will
                                             # give the initial ellipsoid
    if len(chosen_indices) == n:  # if all the points were chosen then simply run
                                  # Khachiyan's algorithm. Might occur due to numerical
                                  # instabilities.
        return khachiyanAlgorithm(P)
    weights = np.zeros((n, 1)).flatten()  # initialize the weights to zeros
    weights[chosen_indices] = 1.0 / len(chosen_indices)  # the chosen points get uniform
                                                         # weights
    ellip = np.linalg.inv(computeEllipInHigherDimension(Q, weights))  # initial ellipsoid
    while True:  # run till conditions are fulfilled
        stop_flag, distances = optimalityCondition(d, Q, ellip, weights)  # is the current
                                                                          # ellipsoid the
                                                                          # desired MVEE?
        pos_weights_idx = np.where(weights > 0)[0]  # indices of points with positive weights
        if stop_flag:  # if desired MVEE is achieved
            break
        j_plus = np.argmax(distances)  # index of maximal distance from the ellipsoid
        k_plus = distances[j_plus]  # maximal distance from the ellipsoid
        j_minus = pos_weights_idx[np.argmin(distances[pos_weights_idx])]  # the positively-
                                                                          # weighted point
                                                                          # closest to the
                                                                          # ellipsoid
        k_minus = distances[j_minus]  # the smallest distance among positively-weighted points
        eps_plus = k_plus / (d + 1.0) - 1.0
        eps_minus = 1.0 - k_minus / (d + 1.0)
        if eps_plus > eps_minus:  # a new important point is found: shift weight towards it
            beta_current = (k_plus - d - 1.0) / ((d + 1) * (k_plus - 1.0))
            weights = (1.0 - beta_current) * weights
            weights[j_plus] = weights[j_plus] + beta_current
        else:  # an already-chosen point has too much weight: shift weight away from it
            beta_current = min((d + 1.0 - k_minus) / ((d + 1.0) * (k_minus - 1.0)),
                               weights[j_minus] / (1 - weights[j_minus]))
            weights = weights * (1 + beta_current)
            weights[j_minus] = weights[j_minus] - beta_current
        weights[weights < 0.0] = 0.0  # all negative weights are set to zero
        # BUG FIX: the original called computeEllipInHigherDimension(weights) without the
        # lifted points Q, which raises a TypeError; pass both arguments as in the
        # initialization above.
        ellip = np.linalg.inv(computeEllipInHigherDimension(Q, weights))  # recompute ellipsoid
    return computeEllipsoid(P, weights)
def khachiyanAlgorithm(P):
    """
    Implementation of Algorithm 3.1 at the paper "On Khachiyan's Algorithm for the
    Computation of Minimum Volume Enclosing Ellipsoids" by Michael J. Todd and
    E. Alper Yildirim; computes the MVEE of P via Khachiyan's first-order scheme.

    :param P: A numpy matrix whose rows are points.
    :return: The MVEE ellipsoid of P (a p.s.d. matrix and its center).
    """
    n, d = P.shape
    err = 1
    iteration = 1  # kept for debugging purposes
    u = np.ones((n, 1)) / n  # start from uniform weights over the points
    Q = np.hstack((P, np.ones((n, 1))))  # lift the points to homogeneous coordinates
    while err > Utils.TOL:  # iterate until the weight vector stabilizes
        X = np.dot(np.multiply(Q, u).T, Q)  # weighted second moment of the lifted points
        # Mahalanobis distances between the lifted points and the current ellipsoid.
        M = computemahalanobisDistance(Q, np.linalg.inv(X))
        j = np.argmax(M)  # the farthest point from the current ellipsoid
        max_val = M[j]  # its distance
        # Closed-form optimal step size towards the farthest point.
        step_size = (max_val - d - 1) / ((d + 1) * (max_val - 1))
        updated_u = (1 - step_size) * u  # shrink all weights ...
        updated_u[j, 0] += step_size  # ... and shift the freed mass to the farthest point
        iteration += 1
        err = np.linalg.norm(updated_u - u)  # change between consecutive weight vectors
        u = updated_u
    return computeEllipsoid(P, u)
def computeMVEE(P, alg_type=1):
    """
    Run the desired algorithm, chosen by the user (or by default value), for computing
    the MVEE of P.

    :param P: A numpy matrix whose rows are points.
    :param alg_type: An algorithm type indicator where 1 stands for Yildirim's algorithm
                     and any other value stands for Khachiyan's algorithm.
    :return:
            - The inscribed version of the MVEE of P.
            - The center of the MVEE of P.
            - The vertices (axis endpoints) of the inscribed ellipsoid.
    """
    # NOTE: removed a stray `global ax` declaration left over from deleted plotting code;
    # `ax` was never defined or used.
    if alg_type == 1:  # Yildirim's algorithm is chosen or by default
        E, C = yilidrimAlgorithm(P)
    else:  # Khachiyan's algorithm, slower yet more numerically stable
        E, C = khachiyanAlgorithm(P)
    contained_ellipsoid = getContainedEllipsoid(E)  # get inscribed ellipsoid
    return contained_ellipsoid, C, computeAxesPoints(contained_ellipsoid, C)
################################################## ApproximateCenterProblems ###########################################
def computeLINFCoresetKOne(P):
    """
    Compute an L-infinity coreset for the matrix-vector multiplication (dot product)
    queries, with respect to the weighted set of points P. The last column of P holds
    the original index of each point.

    :param P: A numpy matrix whose rows are points; the last column holds point indices.
    :return:
            - C: the original indices of the coreset points;
            - idx_in_P: the indices of the coreset points with respect to the rows of P;
            - an upper bound on the approximation which our L-infinity coreset is
              associated with.
    """
    # NOTE: removed a dead `global max_time` declaration and an unused `start_time`
    # timing variable; also replaced the deprecated `np.int` alias (removed in
    # NumPy >= 1.24) with the builtin `int` — they are the same type.
    r = matrix_rank(P[:, :-1])  # the rank of P, i.e., the dimension of the span of P
    d = P.shape[1]
    if r < d - 1:  # the points span a proper subspace of REAL^(d-1)
        svd = TruncatedSVD(n_components=r)  # an instance of TruncatedSVD
        Q = svd.fit_transform(P[:, :-1])  # project the points onto their spanning subspace
        Q = np.hstack((Q, np.expand_dims(P[:, -1], 1)))  # re-attach the index column to
                                                         # the "projected" points
    else:  # the points span the whole space
        Q = P
    if r > 1:  # if the dimension of the "projected" points is not a line
        if Q.shape[1] - 1 >= Q.shape[0]:
            # fewer points than dimensions: every point is needed
            return Q, np.arange(Q.shape[0]).astype(int), Utils.UPPER_BOUND(r)
        else:
            _, _, S = computeMVEE(Q[:, :-1], alg_type=0)  # compute the MVEE of Q
    else:  # otherwise, the points lie on a line; both its extreme points suffice
        idx_in_P = np.unique([np.argmin(Q[:, :-1]).astype(int),
                              np.argmax(Q[:, :-1]).astype(int)]).tolist()
        return Q[idx_in_P], idx_in_P, Utils.UPPER_BOUND(r)
    # Preallocate: each of the axis endpoints of the MVEE contributes a Caratheodory set.
    idx_in_P = np.empty((2 * (Utils.J + 1) ** 2,)).astype(int)
    C = np.empty((2 * (Utils.J + 1) ** 2,)).astype(int)
    idx = 0
    for q in S:  # for each boundary point along the axes of the MVEE of Q
        K = attainCaratheodorySet(Q[:, :-1], q)  # get at most d+1 indices of points from Q
                                                 # where q is their convex combination
        idx_in_P[idx:idx + K.shape[0]] = K.astype(int)  # the coreset indices within Q
        C[idx:idx + K.shape[0]] = Q[idx_in_P[idx:idx + K.shape[0]], -1].astype(int)  # the
                                                             # original point indices
        idx += K.shape[0]
    return np.unique(C[:idx]), np.unique(idx_in_P[:idx]), Utils.UPPER_BOUND(r)
####################################################### Bicriteria #####################################################
def attainClosestPointsToSubspaces(P, W, flats, indices):
    """
    Return the closest half of the addressed points to the given flats.

    :param P: A numpy matrix whose rows are points.
    :param W: A numpy array of weights with respect to the rows of P (currently unused).
    :param flats: Either a list of flats, each represented by an orthogonal matrix and a
                  translation vector, or (in the accelerated mode) a single such flat.
    :param indices: A numpy array of indices of points in P.
    :return: A list with the closest half of the addressed points. In the accelerated
             mode these are positions within *indices*; otherwise, indices into P.
    """
    N = indices.shape[0]
    if Utils.ACCELERATE_BICRETERIA:
        # Single flat: pick the N//2 points of smallest distance via a partial sort.
        dists = Utils.computeDistanceToSubspace(P[indices, :], flats[0], flats[1])
        half = np.argpartition(dists, N // 2)[:N // 2]
        return half.tolist()
    # General mode: for every addressed point, its distance to the nearest flat.
    dists = np.empty((P[indices, :].shape[0],))
    for i in range(N):
        per_flat = [Utils.computeDistanceToSubspace(P[np.array([indices[i]]), :],
                                                    flat[0], flat[1])
                    for flat in flats]
        dists[i] = np.min(per_flat)
    return np.array(indices)[np.argsort(dists).astype(np.int)[:int(N / 2)]].tolist()
def sortDistancesToSubspace(P, X, v, points_indices):
    """
    Sort the addressed points in ascending order of their distance to the flat (X, v).

    :param P: A numpy matrix whose rows are points.
    :param X: An orthogonal matrix whose span is a subspace.
    :param v: A numpy array denoting a translation vector.
    :param points_indices: A numpy array of indices addressing a subset of the points.
    :return: A list of the addressed indices, sorted by distance to the flat (X, v).
    """
    # Distance of each addressed point to the flat represented by (X, v).
    dists = Utils.computeDistanceToSubspace(P[points_indices, :], X, v)
    order = np.argsort(dists).astype(np.int)
    return np.array(points_indices)[order].tolist()  # indices in ascending distance order
def computeSubOptimalFlat(P, weights):
    """
    Compute a suboptimal flat with respect to the l2^2 loss function, relying on the SVD
    factorization of the (centered) set of given points.

    :param P: A numpy matrix which denotes the set of points.
    :param weights: A numpy array of weights with respect to each row (point) in P.
    :return: A flat which best fits P w.r.t. the l2^2 loss function, denoted by an
             orthogonal matrix and a translation vector.
    """
    translation = np.average(P, axis=0, weights=weights)  # the weighted mean of the points
    # The top-J right singular vectors of the centered points span the fitting subspace.
    svd = TruncatedSVD(algorithm='randomized', n_iter=1,
                       n_components=Utils.J).fit(P - translation)
    return svd.components_, translation
def clusterIdxsBasedOnKSubspaces(P, B):
    """
    Partition the points into the clusters induced by a list of flats.

    :param P: A numpy matrix whose rows are points.
    :param B: A list of flats, each denoted by an orthogonal matrix and a translation
              vector.
    :return: A numpy array where each entry holds the index of the flat in B which is
             closest to the corresponding point in P.
    """
    row_idxs = np.arange(P.shape[0])  # a numpy array of point indices
    flats = np.array(B)  # a numpy array of the flats
    # Distance between every point and every flat: one row of distances per flat.
    dists = np.apply_along_axis(
        lambda flat: Utils.computeDistanceToSubspace(P[row_idxs, :], flat[0], flat[1]),
        1, flats)
    return np.argmin(dists, axis=0)  # index of the closest flat for each point
def addFlats(P, W, S, B):
    """
    Append to B all flats spanned by every possible choice of one point from each of the
    j+1 given subsets.

    :param P: A numpy matrix whose rows are points.
    :param W: A numpy array of weights with respect to the rows of P.
    :param S: A list of j+1 subsets of point indices.
    :param B: A list of flats to be extended (mutated in place).
    :return: A tuple of:
        - a numpy array of the positions in B of the newly added flats;
        - the extended list B.
    """
    local_ranges = [np.arange(subset.shape[0]) for subset in S]
    grid = np.meshgrid(*local_ranges)  # every ordered combination of one index per subset
    grid = np.array([axis.flatten() for axis in grid])  # flatten into one column per combo
    first_new = len(B)
    for col in range(grid.shape[1]):
        # translate the per-subset positions back into point indices in P
        chosen = [S[row][grid[row, col]][0] for row in range(grid.shape[0])]
        B.append(computeSubOptimalFlat(P[chosen, :], W[chosen]))
    return np.arange(first_new, len(B)), B
def computeBicriteria(P, W):
    """
    Implementation of Algorithm Approx-k-j-Flats(P, k, j) from the paper
    "Bi-criteria Linear-time Approximations for Generalized
    k-Mean/Median/Center". The algorithm returns a
    (2^j, O(log(n) * (jk)^O(j)))-approximation for the (k,j)-projective
    clustering problem under the l2^2 loss function.

    :param P: A numpy matrix whose rows are the points.
    :param W: A numpy array of per-point weights.
    :return: A list of flats forming the bicriteria approximation.
    """
    n = P.shape[0]
    Q = np.arange(0, n, 1)  # indices of the points not yet covered
    t = 1  # iteration counter; only enters sample_size via min(t, loglog n)
    B = []
    # Stop sampling once fewer than k*(j+1) points remain uncovered.
    tol_sample_size = Utils.K * (Utils.J + 1)
    sample_size = (lambda t: int(np.ceil(Utils.K * (Utils.J + 1) * (2 + np.log(Utils.J + 1) +
                                                                    np.log(Utils.K) +
                                                                    min(t, np.log(np.log(n)))))))
    while np.size(Q) >= tol_sample_size:  # loop until only a small set of points remains
        S = []
        for i in range(0, Utils.J+1):  # Sample j + 1 subsets of the points in an i.i.d. fashion
            random_sample = np.random.choice(Q, size=sample_size(t))
            S.append(random_sample[:, np.newaxis])
        if not Utils.ACCELERATE_BICRETERIA:
            # NOTE(review): addFlats returns a (new_indices, B) pair, so in
            # this branch F is a tuple, while in the accelerated branch F is a
            # single flat. attainClosestPointsToSubspaces (defined elsewhere)
            # apparently accepts both forms — confirm.
            F = addFlats(P, W, S, B)
        else:
            # Accelerated variant: fit one flat to the union of the samples.
            S = np.unique(np.vstack(S).flatten())
            F = computeSubOptimalFlat(P[S, :], W[S])
            B.append(F)
        # Remove the points best served by the new flat(s) from Q.
        sorted_indices = attainClosestPointsToSubspaces(P, W, F, Q)
        Q = np.delete(Q, sorted_indices)
        t += 1
    # Handle the leftover points (fewer than tol_sample_size of them).
    if not Utils.ACCELERATE_BICRETERIA:
        _, B = addFlats(P, W, [Q for i in range(Utils.J + 1)], B)
    else:
        F = computeSubOptimalFlat(P[Q.flatten(), :], W[Q.flatten()])
        B.append(F)
    return B
################################################### L1Coreset ##########################################################
def applyBiCriterea(P, W):
    """
    Run the bicriteria approximation and cluster the rows of P by the
    resulting flats.

    :param P: A numpy matrix whose rows are the points.
    :param W: A numpy array of per-point weights.
    :return: A pair ``(B, idxs)``:
        - B: the list of flats produced by the bicriteria algorithm, i.e.
          O((jk)^{j+1}) j-flats attaining a 2^j approximation for the
          (k, j)-projective clustering problem;
        - idxs: a numpy array mapping every row of P to the index of its
          closest flat in B.
    """
    flats = computeBicriteria(P, W)
    assignment = clusterIdxsBasedOnKSubspaces(P, flats)
    return flats, assignment
def initializeSens(P, B, idxs):
    """
    Initialize the additive sensitivity term from the bicriteria solution.

    Within each cluster imposed by B, every point receives its distance to
    the assigned flat divided by the cluster's total distance, scaled by
    2 ** Utils.J.

    NOTE(review): computeSensitivity() multiplies this term by 2 ** Utils.J
    again before adding it, so the factor ends up applied twice overall —
    confirm that is intentional.

    :param P: A numpy matrix whose rows are the points; the last column is a
        bookkeeping index and is excluded from distance computations.
    :param B: A list of flats, each an (orthogonal matrix, translation
        vector) pair.
    :param idxs: A numpy array assigning each row of P to a flat in B.
    :return: A numpy array holding the additive sensitivity term per point.
    """
    additive_term = np.zeros((P.shape[0],))
    for flat_idx in np.unique(idxs):
        members = np.where(idxs == flat_idx)[0]
        V, v = B[flat_idx][0], B[flat_idx][1]
        # Distance of every cluster member to its flat (index column dropped).
        costs = Utils.computeDistanceToSubspace(P[members, :-1], V, v)
        # Normalize within the cluster; nan_to_num guards a zero total cost.
        additive_term[members] = 2 ** Utils.J * np.nan_to_num(costs / np.sum(costs))
    return additive_term
def Level(P, k, V, desired_eps=0.01):
    """
    Implementation of Algorithm 7 of "Coresets for Gaussian Mixture Models of
    Any Shapes" by Zahi Kfir and Dan Feldman.

    :param P: A numpy matrix whose rows are points; the last column holds the
        original (concatenated) index of each point.
    :param k: The number of j-subspaces which defines the (k,j)-projective
        clustering problem.
    :param V: A numpy matrix whose rows span the current affine subspace.
    :param desired_eps: An approximation error, default 0.01 (not referenced
        in the body below).
    :return: A numpy array of row indices of P forming the selected subset
        (union of the masks C and D).
    """
    t = V.shape[0]  # number of points in V
    d = P.shape[1] - 1  # exclude the last entry: it is the concatenated index
    # Boolean masks over the rows of P: C collects directly selected points,
    # D collects points selected by the recursive calls.
    C = np.zeros((P.shape[0], ), dtype="bool")
    D = np.zeros((P.shape[0], ), dtype="bool")
    if k <= 1 or t-1 >= Utils.J:
        return np.array([])
    A, v = Utils.computeAffineSpan(V)
    dists_from_P_to_A = Utils.computeDistanceToSubspace(P[:, :-1], A.T, v)
    # d_0: smallest strictly positive distance to the affine span (0 if all
    # points lie on it, up to the 1e-11 tolerance).
    non_zero_idxs = np.where(dists_from_P_to_A > 1e-11)[0]
    d_0 = 0 if len(non_zero_idxs) < 1 else np.min(dists_from_P_to_A[non_zero_idxs])
    c = 1 / d ** (1.5 * (d + 1))
    M = np.max(np.abs(P[:, :-1]))
    on_j_subspace = np.where(dists_from_P_to_A <= 1e-11)[0]
    B = [[]]
    if on_j_subspace.size != 0:
        # B[0]: the points lying (numerically) on the affine span.
        B[0] = P[on_j_subspace, :]
        if B[0].shape[0] >= Utils.J ** (2 * k):
            indices_in_B = B[0][:, -1]
            # NOTE(review): this Q is never read in this branch — dead
            # assignment, kept as-is.
            Q = np.hstack((B[0][:,:-1], np.arange(B[0].shape[0])[:, np.newaxis]))
            temp = computeLInfCoreset(B[0], k-1)
            # NOTE(review): np.int was removed in NumPy 1.24; on modern NumPy
            # this must become the builtin int.
            C[indices_in_B[temp].astype(np.int)] = True
        else:
            # Few points: take all of them.
            C[B[0][:, -1].astype(np.int)] = True
    if d_0 > 0:
        # Partition the off-subspace points into exponential distance rings
        # [2^(i-1)*d_0, 2^i*d_0].
        for i in range(1, int(np.ceil(8 * np.log(M) + np.log(1.0/c)) + 1)):
            B.append(P[np.where(np.logical_and(2 ** (i-1) * d_0 <= dists_from_P_to_A,
                                               dists_from_P_to_A <= 2 ** i * d_0))[0], :])
            if len(B[i]) > 0:
                if len(B[i]) >= Utils.J ** (2 * k):
                    indices_B = B[i][:, -1]
                    # Re-index the ring locally so the recursive coreset
                    # returns local row numbers.
                    Q_B = np.hstack((B[i][:, :-1], np.arange(B[i].shape[0])[:, np.newaxis]))
                    temp = computeLInfCoreset(Q_B, k-1)
                    if temp.size > 0:
                        C[indices_B[temp].astype(np.int)] = True
                else:
                    # Small ring: take every point; temp then covers all rows.
                    C[B[i][:, -1].astype(np.int)] = True
                    temp = np.arange(B[i].shape[0]).astype(np.int)
                # Stack every non-empty ring (plus B[0]) and re-index locally
                # for the recursive Level calls.
                list_of_coresets = [x for x in B if len(x) > 0]
                Q = np.vstack(list_of_coresets)
                indices_Q = Q[:, -1]
                Q = np.hstack((Q[:, :-1], np.arange(Q.shape[0])[:, np.newaxis]))
                if temp.size > 0:
                    # Recurse with each selected ring point appended to V.
                    for point in B[i][temp, :]:
                        indices = Level(Q, k-1, np.vstack((V, point[np.newaxis, :-1])))
                        if indices.size > 0:
                            # Map local indices back to original row indices.
                            D[indices_Q[indices].astype(np.int)] = True
    # Union of the two boolean masks, returned as row indices.
    return np.where(np.add(C, D))[0]
def computeLInfCoreset(P, k):
    """
    Compute an L_infinity coreset for the (k, j)-projective clustering
    problem.

    For k == 1 this runs the fast direct construction; for k > 1 it builds
    the coreset recursively from the (k-1)-coreset, as a variant of
    Algorithm 6 of "Coresets for Gaussian Mixture Models of Any Shapes" by
    Zahi Kfir and Dan Feldman.

    :param P: A numpy matrix whose rows are the (weighted) points; the last
        column holds each point's original index.
    :param k: The number of j-subspaces which defines the (k,j)-projective
        clustering problem.
    :return: A numpy array of row indices of P forming the L_infinity coreset.
    """
    if k < 1:
        # Degenerate problem: nothing to select.
        return np.array([])
    if k == 1:
        # Base case: fast L_infinity coreset for a single subspace.
        _, idxs_in_Q, upper_bound = computeLINFCoresetKOne(P)
        return idxs_in_Q
    # Recursive case (k > 1): start from a coreset for the (k-1, j) problem.
    temp = computeLInfCoreset(P, k - 1)
    C = np.zeros((P.shape[0], ), dtype="bool")
    # np.int was removed in NumPy 1.24; the builtin int is the same dtype.
    C[P[temp, -1].astype(int)] = True
    for p in P[temp, :]:  # for each point in the (k-1)-coreset
        # Lift the (k-1, j) coreset to a (k, j) coreset around this point.
        recursive_core = Level(P, k, p[np.newaxis, :-1])
        if recursive_core.size > 0:
            C[P[recursive_core, -1].astype(int)] = True
        if np.where(~C)[0].size < 1:
            # Every point is already selected; no need to keep recursing.
            return np.where(C)[0]
    return np.where(C)[0]
def computeSensitivity(P, W):
    """
    Compute the sensitivity of each point using a reduction from L_infinity
    to L1.

    :param P: A numpy matrix whose rows are the points.
    :param W: A numpy array of per-point weights.
    :return: A numpy array holding the sensitivity of each row of P.
    """
    # Append each row's original index as a final bookkeeping column.
    P = np.hstack((P, np.arange(P.shape[0])[:, np.newaxis]))
    # Flats giving a 2^j approximation, plus the induced clustering.
    B, idxs = applyBiCriterea(P[:, :-1], W)
    sensitivity_additive_term = initializeSens(P, B, idxs)
    sensitivity = np.empty((P.shape[0], ))
    # Project every cluster onto its own flat (the L_infinity -> L1
    # conversion), keeping the index column so results can be scattered back.
    # Bug fix: index B by the cluster's actual flat index (`center`), not by
    # the cluster's position in the unique list — the two differ whenever
    # some flat ends up with no assigned points.
    unique_center_idxs = np.unique(idxs)
    Qs = []
    for center in unique_center_idxs:
        members = np.where(idxs == center)[0]
        projected = (P[members, :-1] - B[center][1]).dot(B[center][0].T)
        Qs.append(np.hstack((projected, P[members, -1][:, np.newaxis])))
    for Q in Qs:
        s = computeSensitivityPerCluster(Q)
        # Column 0 holds the sensitivity; the last column the point index.
        # (np.int was removed in NumPy 1.24; builtin int is equivalent.)
        sensitivity[s[:, -1].astype(int)] = s[:, 0]
    # Additive term from the bicriteria solution. NOTE(review):
    # initializeSens already scales this term by 2 ** Utils.J once, so the
    # factor is applied twice in total — confirm that is intentional.
    sensitivity += 2 ** Utils.J * sensitivity_additive_term
    return sensitivity
if __name__ == '__main__':
    # Smoke run: 10k random 5-dimensional points with an appended index
    # column and unit weights.
    num_points = 10000
    points = np.random.randn(num_points, 5)
    points = np.hstack((points, np.arange(num_points)[:, np.newaxis]))
    weights = np.ones((points.shape[0], ))
    sens = computeSensitivity(points, weights)
f00939c44715cbb46e21a3b0bd4e2b066d1b7f29 | 2,549 | py | Python | extras/pyrepl/console.py | dillionhacker/python222 | 205414c33fba8166167fd8a6a03eda1a68f16316 | [
"Apache-2.0"
] | 1 | 2019-05-27T00:58:46.000Z | 2019-05-27T00:58:46.000Z | extras/pyrepl/console.py | tuankien2601/python222 | 205414c33fba8166167fd8a6a03eda1a68f16316 | [
"Apache-2.0"
] | null | null | null | extras/pyrepl/console.py | tuankien2601/python222 | 205414c33fba8166167fd8a6a03eda1a68f16316 | [
"Apache-2.0"
] | null | null | null | # Copyright 2000-2004 Michael Hudson mwh@python.net
#
# All Rights Reserved
#
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose is hereby granted without fee,
# provided that the above copyright notice appear in all copies and
# that both that copyright notice and this permission notice appear in
# supporting documentation.
#
# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO
# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL,
# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
| 27.117021 | 71 | 0.634759 |
f009b3d518e1b8520f28ad27fc966139292e346f | 15,818 | py | Python | robotpy_build/hooks_datacfg.py | ConnectionMaster/robotpy-build | 9571a84fdd6268be5e945b31ea8929d84355071a | [
"BSD-3-Clause"
] | null | null | null | robotpy_build/hooks_datacfg.py | ConnectionMaster/robotpy-build | 9571a84fdd6268be5e945b31ea8929d84355071a | [
"BSD-3-Clause"
] | null | null | null | robotpy_build/hooks_datacfg.py | ConnectionMaster/robotpy-build | 9571a84fdd6268be5e945b31ea8929d84355071a | [
"BSD-3-Clause"
] | null | null | null | #
# Defines data that is consumed by the header2whatever hooks/templates
# to modify the generated files
#
import enum
from typing import Dict, List, Tuple, Optional
from pydantic import validator
from .util import Model, _generating_documentation
if not _generating_documentation:
FunctionData.update_forward_refs()
| 29.845283 | 123 | 0.637249 |
f009d6a3b56d42edfcb8bf537787593ecb613a4c | 27,482 | py | Python | src/auspex/qubit/qubit_exp.py | minhhaiphys/Auspex | 3b9480120f0cdaf8a1e890a59e0e45e0fab5f1dd | [
"Apache-2.0"
] | null | null | null | src/auspex/qubit/qubit_exp.py | minhhaiphys/Auspex | 3b9480120f0cdaf8a1e890a59e0e45e0fab5f1dd | [
"Apache-2.0"
] | null | null | null | src/auspex/qubit/qubit_exp.py | minhhaiphys/Auspex | 3b9480120f0cdaf8a1e890a59e0e45e0fab5f1dd | [
"Apache-2.0"
] | null | null | null | from auspex.log import logger
from auspex.experiment import Experiment, FloatParameter
from auspex.stream import DataStream, DataAxis, SweepAxis, DataStreamDescriptor, InputConnector, OutputConnector
import auspex.instruments
import auspex.filters
import bbndb
import numpy as np
import sys
import os
if sys.platform == 'win32' or 'NOFORKING' in os.environ:
from threading import Thread as Process
from threading import Event
else:
from multiprocessing import Process
from multiprocessing import Event
from multiprocessing import Value
from . import pipeline
import time
import datetime
import json
# Database proxy classes (bbndb) that can appear in a measurement pipeline,
# as consumed by the pipeline-walking code elsewhere in this module.
stream_hierarchy = [
    bbndb.auspex.Demodulate,
    bbndb.auspex.Integrate,
    bbndb.auspex.Average,
    bbndb.auspex.OutputProxy
]
# Each bbndb proxy class paired with the auspex filter class that realizes it.
filter_map = {
    bbndb.auspex.Demodulate: auspex.filters.Channelizer,
    bbndb.auspex.Average: auspex.filters.Averager,
    bbndb.auspex.Integrate: auspex.filters.KernelIntegrator,
    bbndb.auspex.Write: auspex.filters.WriteToFile,
    bbndb.auspex.Buffer: auspex.filters.DataBuffer,
    bbndb.auspex.Display: auspex.filters.Plotter,
    bbndb.auspex.FidelityKernel: auspex.filters.SingleShotMeasurement
}
# Digitizer model name -> stream-selector filter for that hardware.
stream_sel_map = {
    'X6-1000M': auspex.filters.X6StreamSelector,
    'AlazarATS9870': auspex.filters.AlazarStreamSelector
}
# Instrument model name -> auspex instrument driver class.
instrument_map = {
    'DigitalAttenuator': auspex.instruments.DigitalAttenuator,
    'X6-1000M': auspex.instruments.X6,
    'AlazarATS9870': auspex.instruments.AlazarATS9870,
    'APS2': auspex.instruments.APS2,
    'TDM': auspex.instruments.TDM,
    'APS': auspex.instruments.APS,
    'HolzworthHS9000': auspex.instruments.HolzworthHS9000,
    'Labbrick': auspex.instruments.Labbrick,
    'AgilentN5183A': auspex.instruments.AgilentN5183A,
    'BNC845': auspex.instruments.BNC845,
    'SpectrumAnalyzer': auspex.instruments.SpectrumAnalyzer,
    'YokogawaGS200': auspex.instruments.YokogawaGS200
}
| 47.138937 | 282 | 0.640965 |
f00b1f413db4083c2b4c12dfb8af15b799f387ae | 2,288 | py | Python | mtconnect/mtconnect_ros_bridge/scripts/closedoor.py | mtconnect/ros_bridge | b578e8c3edca83ea0de8ed15aff0f7733dd23e04 | [
"Apache-2.0"
] | 5 | 2015-04-30T21:51:46.000Z | 2019-03-18T06:24:38.000Z | mtconnect/mtconnect_ros_bridge/scripts/closedoor.py | CubeSpawn/ros_bridge | b578e8c3edca83ea0de8ed15aff0f7733dd23e04 | [
"Apache-2.0"
] | null | null | null | mtconnect/mtconnect_ros_bridge/scripts/closedoor.py | CubeSpawn/ros_bridge | b578e8c3edca83ea0de8ed15aff0f7733dd23e04 | [
"Apache-2.0"
] | 4 | 2016-02-21T20:04:31.000Z | 2021-01-04T13:48:41.000Z | #! /usr/bin/env python
"""
Copyright 2013 Southwest Research Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import roslib; roslib.load_manifest('mtconnect_msgs')
import rospy
# Brings in the SimpleActionClient
import actionlib
# Brings in the messages used by the material_load action.
import mtconnect_msgs.msg
# NOTE: Python 2 syntax (print statement) — this script predates Python 3
# ROS tooling.
if __name__ == '__main__':
    try:
        # Initializes a rospy node so that the SimpleActionClient can
        # publish and subscribe over ROS.
        rospy.init_node('CloseDoorActionClient')
        # close_door_client (defined elsewhere in this file) sends the goal
        # and blocks until the action completes.
        result = close_door_client()
        rospy.loginfo('Action Result --> %s' % result)
    except rospy.ROSInterruptException:
        # Raised by rospy when the node is shut down mid-action.
        print 'program interrupted before completion'
f00bfebe8f465035bb8191daaed17fe817eb4bdf | 4,112 | py | Python | cte/__main__.py | iqbal-lab-org/covid-truth-eval | a11125538699f21a5483f15bd5aac952340d3797 | [
"MIT"
] | 1 | 2022-01-21T11:54:21.000Z | 2022-01-21T11:54:21.000Z | cte/__main__.py | iqbal-lab-org/covid-truth-eval | a11125538699f21a5483f15bd5aac952340d3797 | [
"MIT"
] | null | null | null | cte/__main__.py | iqbal-lab-org/covid-truth-eval | a11125538699f21a5483f15bd5aac952340d3797 | [
"MIT"
] | 1 | 2022-03-21T09:48:32.000Z | 2022-03-21T09:48:32.000Z | #!/usr/bin/env python3
import argparse
import logging
import sys
import cte
if __name__ == "__main__":
main()
| 34.266667 | 225 | 0.614543 |
f00d8a2ff37a2b007fa4edfda74f6d8657793532 | 3,684 | py | Python | piton/lib/inquirer/questions.py | piton-package-manager/PPM | 19015b76184befe1e2daa63189a13b039787868d | [
"MIT"
] | 19 | 2016-04-08T04:00:07.000Z | 2021-11-12T19:36:56.000Z | piton/lib/inquirer/questions.py | LookLikeAPro/PPM | 19015b76184befe1e2daa63189a13b039787868d | [
"MIT"
] | 9 | 2017-01-03T13:39:47.000Z | 2022-01-15T20:38:20.000Z | piton/lib/inquirer/questions.py | LookLikeAPro/PPM | 19015b76184befe1e2daa63189a13b039787868d | [
"MIT"
] | 6 | 2017-04-01T03:38:45.000Z | 2021-05-06T11:25:31.000Z | # -*- coding: utf-8 -*-
"""
Module that implements the questions types
"""
import json
from . import errors
def load_from_dict(question_dict):
    """
    Load one question from a dict.

    The dict must contain at least the keys 'name' and 'kind'; all entries
    are forwarded as keyword arguments to ``question_factory``.

    :return: The Question object with associated data.
    :return type: Question
    """
    return question_factory(**question_dict)
def load_from_list(question_list):
    """
    Load a list of questions from a list of dicts.

    Each dict is passed to ``load_from_dict`` and therefore requires the
    keys 'name' and 'kind'.

    :return: A list of Question objects with associated data.
    :return type: List
    """
    return [load_from_dict(q) for q in question_list]
def load_from_json(question_json):
    """
    Load Questions from a JSON string.

    :return: A list of Question objects if the JSON contains a list, or a
        single Question if the JSON contains a dict.
    :raises TypeError: If the decoded JSON is neither a list nor a dict.
    """
    data = json.loads(question_json)
    if isinstance(data, list):
        return load_from_list(data)
    if isinstance(data, dict):
        return load_from_dict(data)
    # Bug fix: TypeError was previously given the format string and the type
    # as two separate arguments, so the %s placeholder was never filled in.
    raise TypeError(
        'Json contained a %s variable when a dict or list was expected'
        % type(data))
def _solve(self, prop, *args, **kwargs):
if callable(prop):
return prop(self.answers, *args, **kwargs)
if isinstance(prop, str):
return prop.format(**self.answers)
return prop
# Concrete question types: each subclass only overrides the `kind`
# discriminator used to pick the rendering/handling for the question.
class Text(Question):
    kind = 'text'


class Password(Question):
    kind = 'password'


class Confirm(Question):
    kind = 'confirm'


class List(Question):
    kind = 'list'


class Checkbox(Question):
    kind = 'checkbox'
f00ff90a15569e736314d9e7505d121e6996f894 | 4,216 | py | Python | json_replacer.py | MrMusicMan/json-item-replacer | 04362b5e5ecf3cf9dd12ef3e72a7a1474a5239fa | [
"Apache-2.0"
] | null | null | null | json_replacer.py | MrMusicMan/json-item-replacer | 04362b5e5ecf3cf9dd12ef3e72a7a1474a5239fa | [
"Apache-2.0"
] | null | null | null | json_replacer.py | MrMusicMan/json-item-replacer | 04362b5e5ecf3cf9dd12ef3e72a7a1474a5239fa | [
"Apache-2.0"
] | null | null | null | import os
import json
import string
from tkinter import filedialog, simpledialog
from tkinter import *
if __name__ == '__main__':
root = Tk()
root.csv_filename = filedialog.askopenfilename(
title="Select CSV file with translations",
filetypes=(("CSV Files", "*.csv"),)
)
root.json_filename = filedialog.askopenfilename(
title="Select master JSON file to build tranlated JSON files",
filetypes=(("JSON Files","*.json"),("All Files", "*.*"))
)
target_key = simpledialog.askstring(
"Input",
"What is the target key for the values we are replacing?",
initialvalue="title"
)
base_output_filename = simpledialog.askstring(
"Input",
"What would you like the base file to be named?"
)
# Import CSV.
csv = CsvImporter()
csv_data = csv.import_csv(root.csv_filename)
# Import JSON.
make_json = JsonEditor()
# Make changes per language.
for language in csv_data:
# Edit JSON.
input_json = make_json.import_json(root.json_filename)
for key, value in csv_data[language].items():
updated_json = make_json.update_json(input_json, target_key, key, value)
# Create filename per language.
language_filename = base_output_filename + "_" + language + ".json"
made_json = make_json.export_new_json(language_filename, updated_json)
# Finished.
print("Success!")
| 34.842975 | 89 | 0.57851 |
f01114fcd31b24a944a91cf16636601c7b3cffa8 | 6,134 | py | Python | src/func.py | yygr/datascience_utility | aa6aa37508e46ab3568805dd1bb514ef10652240 | [
"MIT"
] | null | null | null | src/func.py | yygr/datascience_utility | aa6aa37508e46ab3568805dd1bb514ef10652240 | [
"MIT"
] | null | null | null | src/func.py | yygr/datascience_utility | aa6aa37508e46ab3568805dd1bb514ef10652240 | [
"MIT"
] | null | null | null | from pdb import set_trace
from time import time
import matplotlib.pyplot as plt
import numpy as np
from numpy import random
from scipy.stats import chi2
import renom as rm
| 31.137056 | 80 | 0.5 |
f0113aeb5d7960eefb66a0247171970b6a1b3515 | 2,245 | py | Python | portality/cms/implied_attr_list.py | gaybro8777/doaj | 27d9d98ce4f496ae52acbaba6ee8e42c84cf1a58 | [
"Apache-2.0"
] | 47 | 2015-04-24T13:13:39.000Z | 2022-03-06T03:22:42.000Z | portality/cms/implied_attr_list.py | gaybro8777/doaj | 27d9d98ce4f496ae52acbaba6ee8e42c84cf1a58 | [
"Apache-2.0"
] | 1,215 | 2015-01-02T14:29:38.000Z | 2022-03-28T14:19:13.000Z | portality/cms/implied_attr_list.py | gaybro8777/doaj | 27d9d98ce4f496ae52acbaba6ee8e42c84cf1a58 | [
"Apache-2.0"
] | 14 | 2015-11-27T13:01:23.000Z | 2021-05-21T07:57:23.000Z | import markdown
import re
from markdown.extensions import attr_list
| 34.538462 | 117 | 0.629399 |
f0123837d9cb8c6159b0ec92e3dc57d8e6054cf3 | 704 | py | Python | services/web/apps/main/pool/views.py | xUndero/noc | 9fb34627721149fcf7064860bd63887e38849131 | [
"BSD-3-Clause"
] | 1 | 2019-09-20T09:36:48.000Z | 2019-09-20T09:36:48.000Z | services/web/apps/main/pool/views.py | ewwwcha/noc | aba08dc328296bb0e8e181c2ac9a766e1ec2a0bb | [
"BSD-3-Clause"
] | null | null | null | services/web/apps/main/pool/views.py | ewwwcha/noc | aba08dc328296bb0e8e181c2ac9a766e1ec2a0bb | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
# ---------------------------------------------------------------------
# main.pool application
# ---------------------------------------------------------------------
# Copyright (C) 2007-2019 The NOC Project
# See LICENSE for details
# ---------------------------------------------------------------------
# NOC modules
from noc.lib.app.extdocapplication import ExtDocApplication
from noc.main.models.pool import Pool
from noc.core.translation import ugettext as _
| 28.16 | 71 | 0.473011 |
f012b80503a597191471f367c16412e1f714452d | 2,396 | py | Python | new_corpus/_sympy.py | y-akinobu/multiese | e28e6424b9714c5f145f438c8502c4194b70fe25 | [
"MIT"
] | null | null | null | new_corpus/_sympy.py | y-akinobu/multiese | e28e6424b9714c5f145f438c8502c4194b70fe25 | [
"MIT"
] | null | null | null | new_corpus/_sympy.py | y-akinobu/multiese | e28e6424b9714c5f145f438c8502c4194b70fe25 | [
"MIT"
] | null | null | null | import sympy
'''
@test($$;type(sympy))
@alt(||)
@alt(|)
[||][|]
'''
s = 'z'
sympy.symbol(s)
'''
@test(sympy=missing;$$)
s
'''
z = sympy.symbol(s)
'''
@test(sympy=missing;$$;z)
@prefix(z;[|])
s[|][|]z
'''
e = e2 = sympy.symbol(s)
n = 2
e.subs(z, n)
'''
@test(e=missing;e2='e2';z='x';$$)
@prefix(e;)
ezn
'''
e.subs(z, e2)
'''
@test(e=missing;e2='e2';z='x';$$)
eze2
eze2
'''
sympy.expand(e)
'''
@test(sympy=missing;e='e';$$)
e
e
'''
sympy.factor(e)
'''
@test(sympy=missing;e='e';$$)
e
e
'''
sympy.sympify(e)
'''
@test(sympy=missing;e='e';$$)
e[|]
e[|]
e[|]
'''
sympy.apart(e)
'''
@test(sympy=missing;e='e';$$)
e[|]
e
'''
sympy.solve(e)
'''
@test(sympy=missing;e='e';$$)
e[=0|][|]
'''
sympy.solve(e, z)
'''
@test(sympy=missing;e='e';z='x';$$)
e[=0|]z
'''
sympy.solve([e, e2])
'''
@test(sympy=missing;e='e';e2='e2';$$)
e[=0|], e2[|=0]
'''
sympy.limit(e, z, 0)
'''
@test(sympy=missing;e='e';z='x';$$)
@alt(|||)
z0[|]e
'''
sympy.limit(e, z, oo)
'''
@test(sympy=missing;e='e';z='x';oo='oo';$$)
z[|]e
'''
sympy.limit(e, z, -oo)
'''
@test(sympy=missing;e='e';z='x';oo=0;$$)
z[|]e
'''
sympy.diff(e)
'''
@test(sympy=missing;e='e';z='x';$$)
e
e
'''
sympy.diff(e, z)
'''
@test(sympy=missing;e='e';z='x';$$)
ze[|]
ez
'''
sympy.diff(e, z, n)
'''
@test(sympy=missing;e='e';z='x';$$)
{e|z}n
e[z|]n[|]
'''
sympy.integrate(e)
'''
@test(sympy=missing;e='e';z='x';$$)
e
e[|][|]
'''
sympy.integrate(e, z)
'''
@test(sympy=missing;e='e';z='x';$$)
ze
ze[|][|]
'''
float(e)
'''
@test(sympy=missing;e='3.14159';z='x';$$)
e[|]
e
e[|]
'''
__X__ = e
sympy.sqrt(__X__)
'''
@test(sympy=missing;e='e';z='x';$$)
@X(e;z)
@Y(e;z)
__Y__
'''
# sympy.E**(sympy.I * sympy.pi) == -1
# '''
#
# '''
# sympy.summation(e, (z, 1, N))
# '''
# @test(import sympy;z,N=sympy.Symbol('z N');e=z**2;$$)
# e[|]
# '''
| 14.261905 | 55 | 0.604758 |
f013b73782802e7be9ad94ff6ab1e1a0a57d6410 | 1,224 | py | Python | saleor/app/tests/test_models.py | fairhopeweb/saleor | 9ac6c22652d46ba65a5b894da5f1ba5bec48c019 | [
"CC-BY-4.0"
] | 15,337 | 2015-01-12T02:11:52.000Z | 2021-10-05T19:19:29.000Z | saleor/app/tests/test_models.py | fairhopeweb/saleor | 9ac6c22652d46ba65a5b894da5f1ba5bec48c019 | [
"CC-BY-4.0"
] | 7,486 | 2015-02-11T10:52:13.000Z | 2021-10-06T09:37:15.000Z | saleor/app/tests/test_models.py | aminziadna/saleor | 2e78fb5bcf8b83a6278af02551a104cfa555a1fb | [
"CC-BY-4.0"
] | 5,864 | 2015-01-16T14:52:54.000Z | 2021-10-05T23:01:15.000Z | from ...app.models import App
from ...webhook.event_types import WebhookEventType
| 32.210526 | 88 | 0.768791 |
f01546244daef76f91454218d243e57cff9b2fef | 113 | py | Python | feast/DetectionModules/__init__.py | ChandlerKemp/FEAST_PtE | 9551824932379149dd6bc9135cfac6edf60c40c8 | [
"MIT"
] | 3 | 2020-04-21T18:59:01.000Z | 2021-01-14T22:56:17.000Z | feast/DetectionModules/__init__.py | ChandlerKemp/FEAST_PtE | 9551824932379149dd6bc9135cfac6edf60c40c8 | [
"MIT"
] | null | null | null | feast/DetectionModules/__init__.py | ChandlerKemp/FEAST_PtE | 9551824932379149dd6bc9135cfac6edf60c40c8 | [
"MIT"
] | null | null | null | from . import null
from . import abstract_detection_method
from . import tech_detect
from . import tiered_detect
| 22.6 | 39 | 0.823009 |
f015bf5e2e71b04cd941a3ba7f14c687b44c2b00 | 263 | py | Python | apps/transactions/__init__.py | lsdlab/djshop_toturial | 6d450225cc05e6a1ecd161de2b522e1af0b68cc0 | [
"MIT"
] | null | null | null | apps/transactions/__init__.py | lsdlab/djshop_toturial | 6d450225cc05e6a1ecd161de2b522e1af0b68cc0 | [
"MIT"
] | 6 | 2020-06-07T15:18:58.000Z | 2021-09-22T19:07:33.000Z | apps/transactions/__init__.py | lsdlab/djshop_toturial | 6d450225cc05e6a1ecd161de2b522e1af0b68cc0 | [
"MIT"
] | null | null | null | from django.apps import AppConfig
default_app_config = 'apps.transactions.TransactionsConfig'
| 20.230769 | 59 | 0.752852 |
f01636a07a87cf93e98d3a0d5e5e79dd6e4913ce | 1,260 | py | Python | 8/code.py | DeclanOGorman/AdventofCode2021 | 71a25327d5ab1f88124d09ec8ef853610cbff8ef | [
"MIT"
] | null | null | null | 8/code.py | DeclanOGorman/AdventofCode2021 | 71a25327d5ab1f88124d09ec8ef853610cbff8ef | [
"MIT"
] | null | null | null | 8/code.py | DeclanOGorman/AdventofCode2021 | 71a25327d5ab1f88124d09ec8ef853610cbff8ef | [
"MIT"
] | null | null | null | with open('./8/input_a.txt', 'r') as f:
input = [[a.strip().split(' | ')[0].split(' '), a.strip().split(' | ')[1].split(' ')] for a in f]
num = sum([sum([1 if len(a) in {2,3,4,7} else 0 for a in o[1]]) for o in input ])
print(f'Part A: Number of 1,4,7 or 8s in output - {num}')
print(f'Part B: total output sum value - {sum([getoutput(a) for a in input])}') | 57.272727 | 118 | 0.52381 |
f0172d0fc69d85a2da2f03f4a401ed701e820bb2 | 6,144 | py | Python | pythonium/orders/galaxy.py | cacrespo/pythonium | 74cc5d4333212adfb6eedade8fcd8dfe86d221d5 | [
"MIT"
] | null | null | null | pythonium/orders/galaxy.py | cacrespo/pythonium | 74cc5d4333212adfb6eedade8fcd8dfe86d221d5 | [
"MIT"
] | null | null | null | pythonium/orders/galaxy.py | cacrespo/pythonium | 74cc5d4333212adfb6eedade8fcd8dfe86d221d5 | [
"MIT"
] | null | null | null | import logging
from itertools import groupby
import attr
import numpy as np
from ..explosion import Explosion
from .core import GalaxyOrder
logger = logging.getLogger("game")
| 30.415842 | 80 | 0.495605 |
f0178f93e06a5ab22b51ea951cf67bdba0d3c339 | 59 | py | Python | pdip/processing/factories/__init__.py | ahmetcagriakca/pdip | c4c16d5666a740154cabdc6762cd44d98b7bdde8 | [
"MIT"
] | 2 | 2021-12-09T21:07:46.000Z | 2021-12-11T22:18:01.000Z | pdip/processing/factories/__init__.py | PythonDataIntegrator/pdip | c4c16d5666a740154cabdc6762cd44d98b7bdde8 | [
"MIT"
] | null | null | null | pdip/processing/factories/__init__.py | PythonDataIntegrator/pdip | c4c16d5666a740154cabdc6762cd44d98b7bdde8 | [
"MIT"
] | 3 | 2021-11-15T00:47:00.000Z | 2021-12-17T11:35:45.000Z | from .process_manager_factory import ProcessManagerFactory
| 29.5 | 58 | 0.915254 |
f01853fdef99763aa76db241019fe3f05895618d | 4,221 | py | Python | assets/src/ba_data/python/ba/_analytics.py | SahandAslani/ballistica | 7e3814cd2a1920ea8f5820cb1cdbb4dc5420d30e | [
"MIT"
] | 2 | 2020-07-02T22:18:58.000Z | 2020-07-02T22:19:49.000Z | assets/src/ba_data/python/ba/_analytics.py | MalTarDesigns/ballistica | c38ae5c39b3cc7985be166a959245ca060d3bf31 | [
"MIT"
] | null | null | null | assets/src/ba_data/python/ba/_analytics.py | MalTarDesigns/ballistica | c38ae5c39b3cc7985be166a959245ca060d3bf31 | [
"MIT"
] | null | null | null | # Copyright (c) 2011-2020 Eric Froemling
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# -----------------------------------------------------------------------------
"""Functionality related to analytics."""
from __future__ import annotations
from typing import TYPE_CHECKING
import _ba
if TYPE_CHECKING:
pass
def game_begin_analytics() -> None:
    """Update analytics events for the start of a game.

    Sets an analytics screen name for the current game mode and increments
    round-start counters bucketed by the number of human players.
    """
    # pylint: disable=too-many-branches
    # pylint: disable=cyclic-import
    from ba._dualteamsession import DualTeamSession
    from ba._freeforallsession import FreeForAllSession
    from ba._coopsession import CoopSession
    from ba._gameactivity import GameActivity
    activity = _ba.getactivity(False)
    session = _ba.getsession(False)
    # Fail gracefully if we didn't cleanly get a session and game activity.
    if not activity or not session or not isinstance(activity, GameActivity):
        return
    if isinstance(session, CoopSession):
        campaign = session.campaign
        assert campaign is not None
        _ba.set_analytics_screen(
            'Coop Game: ' + campaign.name + ' ' +
            campaign.getlevel(_ba.app.coop_session_args['level']).name)
        _ba.increment_analytics_count('Co-op round start')
        # Co-op buckets: exact count for 1-3 players, then 4+.
        if len(activity.players) == 1:
            _ba.increment_analytics_count('Co-op round start 1 human player')
        elif len(activity.players) == 2:
            _ba.increment_analytics_count('Co-op round start 2 human players')
        elif len(activity.players) == 3:
            _ba.increment_analytics_count('Co-op round start 3 human players')
        elif len(activity.players) >= 4:
            _ba.increment_analytics_count('Co-op round start 4+ human players')
    elif isinstance(session, DualTeamSession):
        _ba.set_analytics_screen('Teams Game: ' + activity.getname())
        _ba.increment_analytics_count('Teams round start')
        # Teams buckets: 1 player, exact count for 2-7, then 8+.
        if len(activity.players) == 1:
            _ba.increment_analytics_count('Teams round start 1 human player')
        elif 1 < len(activity.players) < 8:
            _ba.increment_analytics_count('Teams round start ' +
                                          str(len(activity.players)) +
                                          ' human players')
        elif len(activity.players) >= 8:
            _ba.increment_analytics_count('Teams round start 8+ human players')
    elif isinstance(session, FreeForAllSession):
        _ba.set_analytics_screen('FreeForAll Game: ' + activity.getname())
        _ba.increment_analytics_count('Free-for-all round start')
        # Free-for-all uses the same bucketing as teams.
        if len(activity.players) == 1:
            _ba.increment_analytics_count(
                'Free-for-all round start 1 human player')
        elif 1 < len(activity.players) < 8:
            _ba.increment_analytics_count('Free-for-all round start ' +
                                          str(len(activity.players)) +
                                          ' human players')
        elif len(activity.players) >= 8:
            _ba.increment_analytics_count(
                'Free-for-all round start 8+ human players')
    # For some analytics tracking on the c layer.
    _ba.reset_game_activity_tracking()
| 45.880435 | 79 | 0.664298 |
f01944d27e76d31f7d24bb6d6aee8d5e5c5f6995 | 10,940 | py | Python | todo_app/display.py | WeaverDyl/python-todo | 80c533b79c6170ba9ba4923ba78f4900fece8339 | [
"MIT"
] | 3 | 2020-01-16T09:39:11.000Z | 2021-11-15T08:38:52.000Z | todo_app/display.py | WeaverDyl/python-todo | 80c533b79c6170ba9ba4923ba78f4900fece8339 | [
"MIT"
] | null | null | null | todo_app/display.py | WeaverDyl/python-todo | 80c533b79c6170ba9ba4923ba78f4900fece8339 | [
"MIT"
] | null | null | null | import os
import math
import shutil
import textwrap
from datetime import datetime
from terminaltables import AsciiTable
def print_task_list_formatted(self, rows):
""" Prints each formatted task to the terminal in the form
of a table """
header = [self.color_message(i, 'BOLD') for i in ['ID', 'Added', 'Title', 'Description', 'Due', 'Finished?']]
table_data = [task.values() for task in rows]
table_data.insert(0, header) # The column headers are the first element of the list
table = AsciiTable(table_data) # Create the table -- but test width before printing
table.inner_row_border = True # Separates each task
if not self.check_table_fit(table):
max_width_table = table.table_width
term_width = shutil.get_terminal_size().columns
self.print_message(f'The task list has a width of {max_width_table} and cannot fit in the terminal of width {term_width}.')
return
# The table fits and we can print it
self.print_message('Here are your current tasks:')
print(table.table)
# Methods for ADDING tasks
def ask_user_title(self):
""" Asks the user for the title of the task """
title = ''
while title == '':
title = input(self.color_message('Give your task a name: ', 'BOLD'))
if title == '':
self.print_error('The title can\'t be an empty string!')
return title
def ask_user_description(self):
""" Gets an optional description from the user """
description = input(self.color_message('Optionally, give your task a description: ', 'BOLD'))
return description
def ask_user_due(self):
""" Gets an optional due date for the task from the user """
date = ''
asked = False
while not asked or not self.validate_date(date):
date = input(self.color_message('Optionally, give your task a due date (\'mm/dd/yyyy\' or \'mm-dd-yyyy\'): ', 'BOLD'))
asked = True
if date == '':
return date
if not self.validate_date(date):
self.print_error('That\'s not a valid date format!')
return date
def ask_user_finished(self):
""" Asks a user if a task is finished """
valid_responses = {
'yes': True,
'y': True,
'no': False,
'n': False
}
default_resp = False
while True:
user_resp = input(self.color_message('Is the task already finished? (y/N): ', 'BOLD')).lower()
if user_resp in valid_responses:
return valid_responses[user_resp]
if user_resp == '':
return default_resp
self.print_error('That\'s not a valid answer! Answer (y/N).')
def ask_user_id(self, action):
""" Ask the user for a task ID to remove/finish/unfinish/update """
row_id = input(self.color_message(f'What task would you like to {action}? (Enter an ID or `-1` to cancel): ', 'BOLD'))
return row_id
| 39.927007 | 135 | 0.599543 |
f019487c4d2bfcb30f0598d1b5c51468e7c7807d | 797 | py | Python | linked_list/adding_nodes_value/test.py | Shawn-Ng/algorithms-test | 1ca740d288b9b3fee580f1ac557a1c1b17ea33b1 | [
"BSD-2-Clause"
] | null | null | null | linked_list/adding_nodes_value/test.py | Shawn-Ng/algorithms-test | 1ca740d288b9b3fee580f1ac557a1c1b17ea33b1 | [
"BSD-2-Clause"
] | 1 | 2018-01-12T18:56:58.000Z | 2018-01-13T01:14:51.000Z | linked_list/adding_nodes_value/test.py | Shawn-Ng/algorithms | 1ca740d288b9b3fee580f1ac557a1c1b17ea33b1 | [
"BSD-2-Clause"
] | null | null | null |
list1 = Node(5)
list1.next = Node(6)
list1.next.next = Node(3)
list2 = Node(8)
list2.next = Node(4)
list2.next.next = Node(2)
sumLinkedListNodes(list1, list2)
| 18.97619 | 46 | 0.599749 |
f0199c2ddd6cf1a82c3279d8fee04fa2d5d2f015 | 3,674 | py | Python | env2048.py | qhduan/rl-2048 | 9730d366625ac7ffdd8875586ffbb8615468f110 | [
"MIT"
] | 3 | 2022-02-10T02:19:58.000Z | 2022-03-06T14:39:20.000Z | env2048.py | qhduan/rl-2048 | 9730d366625ac7ffdd8875586ffbb8615468f110 | [
"MIT"
] | null | null | null | env2048.py | qhduan/rl-2048 | 9730d366625ac7ffdd8875586ffbb8615468f110 | [
"MIT"
] | null | null | null | import logic
import numpy as np
import gym
ACTION_MAP = {
0: 'up',
1: 'down',
2: 'left',
3: 'right'
}
if __name__ == '__main__':
main()
| 29.15873 | 107 | 0.531301 |
f019f56e66a32402b7c9862f91bbe2284661cc13 | 1,697 | py | Python | users/views.py | Paulwamaria/instagram | 546c5472bbebd868e647fd600519a91ccfc47054 | [
"MIT"
] | null | null | null | users/views.py | Paulwamaria/instagram | 546c5472bbebd868e647fd600519a91ccfc47054 | [
"MIT"
] | 4 | 2020-06-05T23:46:45.000Z | 2021-06-10T19:06:27.000Z | users/views.py | Paulwamaria/instagram | 546c5472bbebd868e647fd600519a91ccfc47054 | [
"MIT"
] | null | null | null | from django.shortcuts import render,redirect
from django.contrib.auth.decorators import login_required
from .forms import InstaRegistrationForm, UserUpdateForm, ProfileUpdateForm
from django.views.generic import DetailView
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib import messages
from .models import Profile
| 33.27451 | 94 | 0.675899 |
f01a75f5202b2a67529c1984f10926191041214e | 9,865 | py | Python | 1D_CNN.py | alex386/EEGPatternRecognition | d84085880baa9172a7cfd73b2737b93472394f3e | [
"MIT"
] | null | null | null | 1D_CNN.py | alex386/EEGPatternRecognition | d84085880baa9172a7cfd73b2737b93472394f3e | [
"MIT"
] | null | null | null | 1D_CNN.py | alex386/EEGPatternRecognition | d84085880baa9172a7cfd73b2737b93472394f3e | [
"MIT"
] | 1 | 2019-02-25T18:24:37.000Z | 2019-02-25T18:24:37.000Z | # -*- coding: utf-8 -*-
"""
Created on Tue Nov 13 12:55:47 2018
@name: CSVMachLearn.py
@description: 1D CNN using CSV vector for machine learning
@author: Aleksander Dawid
"""
from __future__ import absolute_import, division, print_function
import matplotlib.pyplot as plt
from matplotlib.lines import Line2D
from sklearn.decomposition import PCA
import numpy as np
import tensorflow as tf
import tensorflow.contrib.eager as tfe
from tensorflow import set_random_seed
tf.enable_eager_execution()
set_random_seed(0)
nrds='S0'
#==============================================================================
# Global parameters
#==============================================================================
total_dataset_fp="D:\\AI_experiments\\CSV\\"+nrds+"\\DAT"+nrds+".csv"
pathlog="D:\\AI_experiments\\CSV\\"+nrds+"\\"+nrds+"pub.log"
pathimg="D:\\AI_experiments\\CSV\\"+nrds+"\\IMG"
num_epochs = 1001 # number of epochs
lrate=2e-5 # learning rate
test_procent=0.2 # procentage of test_dataset
learn_batch_size=32 # batch size
print("Local copy of the dataset file: {}".format(total_dataset_fp))
print("TensorFlow version: {}".format(tf.VERSION))
print("Eager execution: {}".format(tf.executing_eagerly()))
#==============================================================================
# Methods
#==============================================================================
def pack_features_vector(features, labels):
    """Stack the per-column feature tensors from the CSV dataset into a
    single (batch, n_features) tensor; labels pass through unchanged."""
    packed = tf.stack([column for column in features.values()], axis=1)
    return packed, labels
with open(total_dataset_fp) as f:
content = f.readlines()
grup=content[0].split(',')
print(grup[1])
f_size=int(grup[1])-1 #number of points in data vector
print("Vector size: "+str(f_size))
filtr1=32
filtr_size1=5
filtr2=32
filtr_size2=5
filtr3=64
filtr_size3=5
filtr4=64
filtr_size4=4
DenseLast=4096
filtr5=512
filtr_size5=5
mapcolor=['red','green','blue']
# column order in CSV file
column_names = []
for a in range(0,f_size):
column_names.append(str(a))
column_names.append('signal')
print(len(column_names))
feature_names = column_names[:-1]
label_name = column_names[-1]
#class_names = ['Left','Right','NONE']
class_names = ['LIP','JAW','NONE']
batch_size = 200000
#train_dataset = tf.data.experimental.make_csv_dataset(
# total_dataset_fp,
# batch_size,
# column_names=column_names,
# label_name=label_name,
# num_epochs=1,
# shuffle=False)
#train_dataset = train_dataset.map(pack_features_vector)
total_dataset = tf.data.experimental.make_csv_dataset(
total_dataset_fp,
batch_size,
column_names=column_names,
label_name=label_name,
num_epochs=1,
shuffle=True)
features, labels = next(iter(total_dataset))
setsize=float(str(labels.shape[0]))
ts_size=setsize*test_procent
tr_size=setsize-ts_size
print("Total_CSV_size: "+str(setsize) )
print("Train_size: "+str(tr_size) )
print("Test_size: "+str(ts_size) )
total_dataset = total_dataset.map(pack_features_vector)
total_dataset=ChangeBatchSize(total_dataset,tr_size)
#==============================================================================
#Split dataset into train_dataset and test_dataset.
#==============================================================================
i=0
for (parts, labels) in total_dataset:
if(i==0):
k1 = parts
l1 = labels
else:
k2 = parts
l2 = labels
i=i+1
train_dataset = tf.data.Dataset.from_tensors((k1, l1))
train_dataset = ChangeBatchSize(train_dataset,learn_batch_size)
test_dataset = tf.data.Dataset.from_tensors((k2, l2))
test_dataset = ChangeBatchSize(test_dataset,ts_size)
#==============================================================================
# Create model object
#==============================================================================
model=create_model()
model.summary()
optimizer = tf.train.AdamOptimizer(learning_rate=lrate)
global_step = tf.train.get_or_create_global_step()
legend_elements = [Line2D([0], [0], marker='o', color='w', label=class_names[0],markerfacecolor='r', markersize=10),
Line2D([0], [0], marker='o', color='w', label=class_names[1],markerfacecolor='g', markersize=10),
Line2D([0], [0], marker='o', color='w', label=class_names[2],markerfacecolor='b', markersize=10)]
# keep results for plotting
train_loss_results = []
train_accuracy_results = []
np.set_printoptions(threshold=np.nan)
#==============================================================================
# Make machine learning process
#==============================================================================
# Sentinel for early stopping: training halts when the epoch loss increases.
old_loss=1000
for epoch in range(num_epochs):
    # Fresh running metrics for this epoch.
    epoch_loss_avg = tfe.metrics.Mean()
    epoch_accuracy = tfe.metrics.Accuracy()
    # Training loop over mini-batches (size set by learn_batch_size above).
    for x, y in train_dataset:
        # Optimize the model: `grad` (defined elsewhere in this file)
        # returns the batch loss and gradients w.r.t. the model variables.
        #print(str(type(x)))
        #print(str(x.shape))
        loss_value, grads = grad(model, x, y)
        optimizer.apply_gradients(zip(grads, model.variables),
                              global_step)
        # Track progress
        epoch_loss_avg(loss_value)  # add current batch loss
        # compare predicted label to actual label
        epoch_accuracy(tf.argmax(model(x), axis=1, output_type=tf.int32), y)
    # end epoch: record metrics for later inspection/plotting.
    train_loss_results.append(epoch_loss_avg.result())
    train_accuracy_results.append(epoch_accuracy.result())
    # Every 5th epoch: evaluate on the held-out set and save a 2-D PCA
    # scatter plot of the logits, coloured by true class.
    if epoch % 5 == 0:
        test_accuracy = tfe.metrics.Accuracy()
        for (x, y) in test_dataset:
            logits = model(x)
            prediction = tf.argmax(logits, axis=1, output_type=tf.int32)
            test_accuracy(prediction, y)
            # NOTE(review): X/Y are overwritten on each iteration, so only
            # the last test batch is plotted. test_dataset is batched to the
            # full test size above, so this covers everything — confirm.
            X=logits.numpy()
            Y=y.numpy()
        # NOTE(review): this bare PCA(...) call constructs an estimator and
        # discards it; the projection actually used is fit_transform below.
        PCA(copy=True, iterated_power='auto', n_components=2, random_state=None, svd_solver='auto', tol=0.0, whiten=False)
        X = PCA(n_components=2).fit_transform(X)
        arrcolor = []
        for cl in Y:
            arrcolor.append(mapcolor[cl])
        plt.scatter(X[:, 0], X[:, 1], s=40, c=arrcolor)
        #plt.show()
        imgfile="{:s}\\epoch{:03d}.png".format(pathimg,epoch)
        plt.title("{:.3%}".format(test_accuracy.result()))
        plt.legend(handles=legend_elements, loc='upper right')
        plt.savefig(imgfile)
        plt.close()
    # Console/log summary. NOTE(review): between evaluations, test_acc
    # reports the most recently computed test accuracy (up to 4 epochs old).
    new_loss=epoch_loss_avg.result()
    accur=epoch_accuracy.result()
    test_acc=test_accuracy.result()
    msg="Epoch {:03d}: Loss: {:.6f}, Accuracy: {:.3%}, Test: {:.3%}".format(epoch,new_loss,accur,test_acc)
    msg2 = "{0} {1:.6f} {2:.6f} {3:.6f} \n".format(epoch,accur,test_acc,new_loss)
    print(msg)
    # Early stop: quit as soon as the loss stops decreasing.
    if new_loss>old_loss:
        break
    file = open(pathlog,"a");
    file.write(msg2)
    file.close();
    old_loss=epoch_loss_avg.result()
#==============================================================================
# Save trained model to disk
#==============================================================================
# Compile before saving so the optimizer/loss configuration is serialized
# into the HDF5 file along with the weights.
model.compile(optimizer=tf.train.AdamOptimizer(),
              loss=tf.keras.losses.sparse_categorical_crossentropy,
              metrics=['accuracy'])
filepath="csvsignal.h5"
tf.keras.models.save_model(
    model,
    filepath,
    overwrite=True,
    include_optimizer=True
)
print("Model csvsignal.h5 saved to disk")
f01db4ce612793fa6669b67b17c501ac73c893ec | 6,037 | py | Python | eslearn/machine_learning/classfication/el_classify_sensitive_person_test.py | dongmengshi/easylearn | df528aaa69c3cf61f5459a04671642eb49421dfb | [
"MIT"
] | null | null | null | eslearn/machine_learning/classfication/el_classify_sensitive_person_test.py | dongmengshi/easylearn | df528aaa69c3cf61f5459a04671642eb49421dfb | [
"MIT"
] | null | null | null | eslearn/machine_learning/classfication/el_classify_sensitive_person_test.py | dongmengshi/easylearn | df528aaa69c3cf61f5459a04671642eb49421dfb | [
"MIT"
] | 1 | 2021-01-11T08:21:35.000Z | 2021-01-11T08:21:35.000Z | # -*- coding: utf-8 -*-
"""
Created on 2020/03/16
Feature selection: Relief-based feature selection algorithm.
------
@author: LI Chao
"""
import numpy as np
from sklearn import preprocessing
import os
from sklearn.externals import joblib
from el_classify_sensitive_person_train_validation import ClassifyFourKindOfPersonTrain
from eslearn.utils.lc_evaluation_model_performances import eval_performance
#
if __name__ == '__main__':
# =============================================================================
# All inputs
data_file = r'D:\workstation_b\Fundation\.xlsx'
path_out = r'D:\workstation_b\Fundation'
models_path = r'D:\workstation_b\Fundation'
# =============================================================================
selftest = ClassifyFourKindOfPersonTest(data_test_file=r'D:\workstation_b\Fundation\feature_test.npy',
label_test_file=r'D:\workstation_b\Fundation\label_test.npy',
data_train_file=r'D:\workstation_b\Fundation\feature_train.npy',
path_out=path_out,
models_path=models_path,
is_feature_selection=1)
selftest.main_function()
| 41.349315 | 164 | 0.630777 |
f01e36c7e52b2f29e3153f9812f722135e5763dd | 2,483 | py | Python | Curso em Video/D_045.py | tonmarcondes/UNIVESP | a66a623d4811e8f3f9e2999f09e38a4470035ae2 | [
"MIT"
] | null | null | null | Curso em Video/D_045.py | tonmarcondes/UNIVESP | a66a623d4811e8f3f9e2999f09e38a4470035ae2 | [
"MIT"
] | null | null | null | Curso em Video/D_045.py | tonmarcondes/UNIVESP | a66a623d4811e8f3f9e2999f09e38a4470035ae2 | [
"MIT"
] | null | null | null | import random
cor = {
'fim':'\033[m',
'amarelo':'\033[1;033m',
'vermelho':'\033[1;031m',
'vermelhof':'\033[7;031m',
'azul':'\033[1;034m',
'verde':'\033[1;32m',
'verdef':'\033[7;32m',
'branco':'\033[1;030m'
}
print('''
Escolha uma das opes abaixo:
\t {}1{} {}PEDRA{}:
\t {}2{} {}PAPEL{}:
\t {}3{} {}TESOURA{}:'''.format(
cor['vermelho'], cor['fim'], cor['azul'], cor['fim'],
cor['vermelho'], cor['fim'], cor['azul'], cor['fim'],
cor['vermelho'], cor['fim'], cor['azul'], cor['fim']
))
eu = int(input('\t '))
if eu == 1:
me = 'PEDRA'
elif eu == 2:
me = 'PAPEL'
else:
me = 'TESOURA'
pc = ['PEDRA', 'PAPEL', 'TESOURA']
random.shuffle(pc)
if eu < 1 or eu > 3:
print('\n\t\t{}ESCOLHA UM VALOR VLIDO{}\n'.format(cor['vermelho'], cor['fim']))
elif eu == 1 and pc[0] == 'PEDRA' or eu == 2 and pc[0] == 'PAPEL' or eu == 3 and pc[0] == 'TESOURA':
print('{}EU{}: {}\t\t{}PC{}: {}'.format(cor['vermelho'], cor['fim'], me, cor['vermelho'], cor['fim'], pc[0]))
print('{} EMPATE, JOGUE OUTRA VEZ {}\n'.format(cor['vermelhof'], cor['fim']))
elif eu == 1 and pc[0] == 'PAPEL':
print('{}EU{}: {}\t\t{}PC{}: {}'.format(cor['vermelho'], cor['fim'], me, cor['vermelho'], cor['fim'], pc[0]))
print('PAPEL {}EMBRULHA{} PEDRA\n'.format(cor['amarelo'], cor['fim']))
elif eu == 1 and pc[0] == 'PAPEL':
print('{}EU{}: {}\t\t{}PC{}: {}'.format(cor['vermelho'], cor['fim'], me, cor['vermelho'], cor['fim'], pc[0]))
print('PEDRA {}QUEBRA{} TESOURA\n'.format(cor['amarelo'], cor['fim']))
elif eu == 2 and pc[0] == 'PEDRA':
print('{}EU{}: {}\t\t{}PC{}: {}'.format(cor['vermelho'], cor['fim'], me, cor['vermelho'], cor['fim'], pc[0]))
print('PAPEL {}EMBRULHA{} PEDRA\n'.format(cor['amarelo'], cor['fim']))
elif eu == 2 and pc[0] == 'TESOURA':
print('{}EU{}: {}\t\t{}PC{}: {}'.format(cor['vermelho'], cor['fim'], me, cor['vermelho'], cor['fim'], pc[0]))
print('TESOURA {}CORTA{} PAPEL\n'.format(cor['amarelo'], cor['fim']))
elif eu == 3 and pc[0] == 'PEDRA':
print('{}EU{}: {}\t\t{}PC{}: {}'.format(cor['vermelho'], cor['fim'], me, cor['vermelho'], cor['fim'], pc[0]))
print('PEDRA {}QUEBRA{} TESOURA\n'.format(cor['amarelo'], cor['fim']))
else:
print('{}EU{}: {}\t\t{}PC{}: {}'.format(cor['vermelho'], cor['fim'], me, cor['vermelho'], cor['fim'], pc[0]))
print('TESOURA {}CORTA{} PAPEL\n'.format(cor['amarelo'], cor['fim']))
| 42.084746 | 114 | 0.515103 |
f01e8e597dc20bba7caf3b9b0fddc57695c216de | 5,316 | py | Python | train.py | ThiruRJST/Deformed-Yolo | c9eb4e8c090dff0e9fc4f8652897ff2c59dce889 | [
"MIT"
] | 1 | 2021-09-10T17:20:09.000Z | 2021-09-10T17:20:09.000Z | train.py | ThiruRJST/Deformed-Yolo | c9eb4e8c090dff0e9fc4f8652897ff2c59dce889 | [
"MIT"
] | 1 | 2021-09-10T17:19:54.000Z | 2021-09-11T08:17:14.000Z | wandb/run-20210904_163431-3lkn6hoe/files/code/train.py | ThiruRJST/Deformed-Yolo | c9eb4e8c090dff0e9fc4f8652897ff2c59dce889 | [
"MIT"
] | null | null | null | from pandas.core.algorithms import mode
import torch
import torch.nn as nn
from albumentations import Compose,Resize,Normalize
from albumentations.pytorch import ToTensorV2
import wandb
import time
import torchvision
import torch.nn.functional as F
import torch.optim as optim
from torch.cuda.amp import autocast,GradScaler
import os
import numpy as np
from tqdm import tqdm
from callbacks import EarlyStopping
import pandas as pd
from torch.utils.data import Dataset, DataLoader
import cv2
import torch.nn.functional as F
import random
from build_model import Deformed_Darknet53
torch.manual_seed(2021)
np.random.seed(2021)
random.seed(2021)
torch.backends.cudnn.benchmark = True
torch.backends.cudnn.deterministic = True
DEVICE = "cuda:0" if torch.cuda.is_available() else "cpu"
TOTAL_EPOCHS = 100
scaler = GradScaler()
early_stop = EarlyStopping()
wandb.init(project='deformed-darknet',entity='tensorthug',name='new-darknet-256x256_32')
print("***** Loading the Model in {} *****".format(DEVICE))
Model = Deformed_Darknet53().to(DEVICE)
print("Model Shipped to {}".format(DEVICE))
data = pd.read_csv("data.csv")
train_loss_fn = nn.BCEWithLogitsLoss()
val_loss_fn = nn.BCEWithLogitsLoss()
optim = torch.optim.Adam(Model.parameters())
wandb.watch(Model)
if __name__ == "__main__":
train_per_epoch_loss,train_per_epoch_acc = [],[]
val_per_epoch_loss,val_per_epoch_acc = [],[]
train = dog_cat(data,transforms=Compose([Resize(256,256),Normalize(),ToTensorV2()]))
val = dog_cat(data,mode='val',transforms=Compose([Resize(256,256),Normalize(),ToTensorV2()]))
train_load = DataLoader(train,batch_size=32,shuffle=True,num_workers=4)
val_load = DataLoader(val,batch_size=32,num_workers=4)
for e in range(TOTAL_EPOCHS):
train_loss,train_acc = train_loop(e,train_load,Model,train_loss_fn,optim)
val_loss,val_acc = val_loop(e,val_load,Model,val_loss_fn)
train_per_epoch_loss.append(train_loss)
train_per_epoch_acc.append(train_acc)
val_per_epoch_loss.append(val_loss)
val_per_epoch_acc.append(val_acc)
print(f"TrainLoss:{train_loss:.4f} TrainAcc:{train_acc:.4f}")
print(f"ValLoss:{val_loss:.4f} ValAcc:{val_acc:.4f}")
early_stop(Model,val_loss)
if early_stop.early_stop:
break
| 29.04918 | 133 | 0.659518 |
f01e97fde7da87878e9d54736f7cb227db681497 | 257 | py | Python | test/test_encoder.py | mickey9910326/py-asa-loader | 75852a4c633f34a67f5de2b2a807d2d40ce423bf | [
"MIT"
] | null | null | null | test/test_encoder.py | mickey9910326/py-asa-loader | 75852a4c633f34a67f5de2b2a807d2d40ce423bf | [
"MIT"
] | null | null | null | test/test_encoder.py | mickey9910326/py-asa-loader | 75852a4c633f34a67f5de2b2a807d2d40ce423bf | [
"MIT"
] | null | null | null | import conftest
from asaprog import pac_encode
from asaprog.util import *
if __name__ == "__main__":
pac = {
'command': asaProgCommand.CHK_DEVICE.value,
'data': b'test'
}
res = pac_encode(pac)
print(res)
print(res[-1])
| 18.357143 | 51 | 0.626459 |
f01f136d0d4a9137fd6a7ceea105c26d2d1478ac | 1,098 | py | Python | tests/controllers/controller_with_throttling.py | DmitryKhursevich/winter | 9f3bf462f963059bab1f1bbb309ca57f8a43b46f | [
"MIT"
] | 1 | 2020-10-26T09:48:05.000Z | 2020-10-26T09:48:05.000Z | tests/controllers/controller_with_throttling.py | mikhaillazko/winter | cd4f11aaf28d500aabb59cec369817bfdb5c2fc1 | [
"MIT"
] | null | null | null | tests/controllers/controller_with_throttling.py | mikhaillazko/winter | cd4f11aaf28d500aabb59cec369817bfdb5c2fc1 | [
"MIT"
] | null | null | null | from http import HTTPStatus
import winter.web
from winter.web import ExceptionHandler
from winter.web.exceptions import ThrottleException
| 26.780488 | 69 | 0.721311 |
f020207356e26d12c8db3a4bedd4f52a81d8f981 | 269 | py | Python | appwebshare/files.py | cvakiitho/Webshare-download-manager | 4c79242d6a8562b269ee69a9096b7158e9f6c3c0 | [
"MIT"
] | 3 | 2015-02-06T11:22:58.000Z | 2019-08-14T21:25:29.000Z | appwebshare/files.py | cvakiitho/Webshare-download-manager | 4c79242d6a8562b269ee69a9096b7158e9f6c3c0 | [
"MIT"
] | 2 | 2015-02-04T11:45:51.000Z | 2015-03-04T22:01:11.000Z | appwebshare/files.py | cvakiitho/Webshare-download-manager | 4c79242d6a8562b269ee69a9096b7158e9f6c3c0 | [
"MIT"
] | null | null | null | # -*- coding: UTF=8 -*-
__author__ = 'Tomas Hartmann'
import glob
from appwebshare.scripts import config | 26.9 | 53 | 0.66171 |
f0229b401abe3feee370d9a51bbc8c817449f9e9 | 1,132 | py | Python | tests/checkout_four_sdk_test.py | riaz-bordie-cko/checkout-sdk-python | d9bc073306c1a98544c326be693ed722576ea895 | [
"MIT"
] | null | null | null | tests/checkout_four_sdk_test.py | riaz-bordie-cko/checkout-sdk-python | d9bc073306c1a98544c326be693ed722576ea895 | [
"MIT"
] | null | null | null | tests/checkout_four_sdk_test.py | riaz-bordie-cko/checkout-sdk-python | d9bc073306c1a98544c326be693ed722576ea895 | [
"MIT"
] | null | null | null | import pytest
import checkout_sdk
from checkout_sdk.environment import Environment
from checkout_sdk.exception import CheckoutArgumentException
| 30.594595 | 65 | 0.682862 |
f022af95545ca83849a19b9cfbeb75f2ed9c4fd0 | 181 | py | Python | transitfeed_web/__init__.py | ed-g/transitfeed_web | 1e9be7152823641c450612b27cace99a1efe0b4f | [
"Apache-2.0"
] | null | null | null | transitfeed_web/__init__.py | ed-g/transitfeed_web | 1e9be7152823641c450612b27cace99a1efe0b4f | [
"Apache-2.0"
] | null | null | null | transitfeed_web/__init__.py | ed-g/transitfeed_web | 1e9be7152823641c450612b27cace99a1efe0b4f | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python2
import sys
import transitfeed
import run_transitfeed_web_server
import util
if __name__ == '__main__':
main()
| 12.928571 | 33 | 0.707182 |
f0238d97d920682e53df77bf6d0427a081fe7819 | 7,980 | py | Python | untiler/__init__.py | waissbluth/untiler | 866b3096196ac340597f77fbf5f2ce899e58238e | [
"MIT"
] | 37 | 2015-10-06T16:41:18.000Z | 2022-03-22T14:52:13.000Z | untiler/__init__.py | waissbluth/untiler | 866b3096196ac340597f77fbf5f2ce899e58238e | [
"MIT"
] | 18 | 2015-09-02T21:13:44.000Z | 2021-01-04T15:46:04.000Z | untiler/__init__.py | waissbluth/untiler | 866b3096196ac340597f77fbf5f2ce899e58238e | [
"MIT"
] | 8 | 2017-04-12T01:22:36.000Z | 2021-08-17T04:10:46.000Z | #!/usr/bin/env python
from __future__ import with_statement
from __future__ import print_function
from __future__ import division
import os
from multiprocessing import Pool
import click
import mercantile as merc
import numpy as np
import rasterio
from rasterio import Affine
from rasterio.warp import reproject
try:
from rasterio.warp import RESAMPLING as Resampling # pre-1.0
except ImportError:
from rasterio.warp import Resampling
import untiler.scripts.tile_utils as tile_utils
def make_affine(height, width, ul, lr):
    """
    Build the affine transform for a tile of `width` x `height` pixels whose
    upper-left corner is `ul` and lower-right corner is `lr` (both projected
    (x, y) coordinate pairs).
    """
    pixel_width = (lr[0] - ul[0]) / width
    pixel_height = (lr[1] - ul[1]) / height
    return Affine(pixel_width, 0.0, ul[0],
                  0.0, pixel_height, ul[1])
def make_src_meta(bounds, size, creation_opts=None):
    """
    Create rasterio creation metadata for an output tile.

    Parameters
    ----------
    bounds : object with ``west``/``north``/``east``/``south`` attributes
        Geographic bounds of the tile (e.g. a mercantile bounding box).
    size : int
        Width and height of the tile in pixels.
    creation_opts : dict, optional
        Overrides merged on top of the default metadata. Defaults to no
        overrides. (Changed from a mutable ``{}`` default to ``None`` to
        avoid the shared-mutable-default pitfall; behavior is unchanged.)

    Returns
    -------
    dict
        Keyword arguments suitable for ``rasterio.open(..., 'w', **meta)``.
    """
    # Project the tile corners to web mercator and derive the affine.
    ul = merc.xy(bounds.west, bounds.north)
    lr = merc.xy(bounds.east, bounds.south)
    aff = make_affine(size, size, ul, lr)
    ## default values: JPEG-compressed, internally tiled, 4-band uint8 GTiff
    src_meta = {
        'driver': 'GTiff',
        'height': size,
        'width': size,
        'count': 4,
        'dtype': np.uint8,
        'affine': aff,
        "crs": 'EPSG:3857',
        'compress': 'JPEG',
        'tiled': True,
        'blockxsize': 256,
        'blockysize': 256
    }
    # Apply caller overrides (replaces the manual per-key copy loop).
    if creation_opts:
        src_meta.update(creation_opts)
    return src_meta
def make_window(x, y, xmin, ymin, windowsize):
    """
    Return the ((row_start, row_stop), (col_start, col_stop)) pixel window
    for child tile (x, y) inside a parent raster whose origin tile is
    (xmin, ymin) and whose tiles are `windowsize` pixels on a side.

    Raises
    ------
    ValueError
        If (x, y) lies left of or above the origin tile.
    """
    if x < xmin or y < ymin:
        raise ValueError("Indices can't be smaller than origin")
    top = (y - ymin) * windowsize
    left = (x - xmin) * windowsize
    return (top, top + windowsize), (left, left + windowsize)
# Placeholder for per-process arguments shared with Pool workers.
globalArgs = None
if __name__ == "__main__":
    # NOTE(review): stream_dir and inspect_dir are defined elsewhere in this
    # module (not visible here); this guard simply runs both entry points.
    stream_dir()
    inspect_dir()
| 30.113208 | 153 | 0.590977 |
f023dd97d1d559d5d0d17b6855fef5c568625d43 | 236 | py | Python | loadenv.py | Natsu-dev/otenki | d962d44737a68a4751fd58051a670be4ecf852ce | [
"MIT"
] | null | null | null | loadenv.py | Natsu-dev/otenki | d962d44737a68a4751fd58051a670be4ecf852ce | [
"MIT"
] | null | null | null | loadenv.py | Natsu-dev/otenki | d962d44737a68a4751fd58051a670be4ecf852ce | [
"MIT"
] | null | null | null | import os
from os.path import join, dirname
from dotenv import load_dotenv
load_dotenv(verbose=True)
dotenv_path = join(dirname(__file__), '.env')
load_dotenv(verbose=True, dotenv_path=dotenv_path)
TOKEN = os.getenv('DISCORD_TOKEN')
| 21.454545 | 50 | 0.792373 |
f024f2d1468cd63a89d1e5336dc2508a4542b04f | 1,476 | py | Python | Stack/10-stack-special-design-and-implement.py | mahmutcankurt/DataStructures_Python | bfb81e3530b535c4e48c07548dc4a4f9a648bab2 | [
"MIT"
] | 1 | 2022-01-25T22:17:55.000Z | 2022-01-25T22:17:55.000Z | Stack/10-stack-special-design-and-implement.py | mahmutcankurt/DataStructures_Python | bfb81e3530b535c4e48c07548dc4a4f9a648bab2 | [
"MIT"
] | null | null | null | Stack/10-stack-special-design-and-implement.py | mahmutcankurt/DataStructures_Python | bfb81e3530b535c4e48c07548dc4a4f9a648bab2 | [
"MIT"
] | null | null | null |
if __name__ == "__main__":
    # Demo driver. NOTE(review): SpecialStack is defined elsewhere in this
    # module — assumed to be a stack exposing push() and getMin(); confirm.
    s = SpecialStack()
    s.push(10)
    s.push(20)
    s.push(30)
    print(s.getMin())  # minimum of the values pushed so far (10, 20, 30)
    s.push(5)
    print(s.getMin())  # minimum after pushing 5
f02506946a855a60b83d59b8fe69069f7a64c710 | 1,316 | py | Python | fork_process/dataPreprocess/data_extraction_2.py | JianboTang/modified_GroundHog | cc511a146a51b42fdfb2b2c045205cca6ab306b7 | [
"BSD-3-Clause"
] | null | null | null | fork_process/dataPreprocess/data_extraction_2.py | JianboTang/modified_GroundHog | cc511a146a51b42fdfb2b2c045205cca6ab306b7 | [
"BSD-3-Clause"
] | null | null | null | fork_process/dataPreprocess/data_extraction_2.py | JianboTang/modified_GroundHog | cc511a146a51b42fdfb2b2c045205cca6ab306b7 | [
"BSD-3-Clause"
] | null | null | null | import numpy
import pickle
readfile1 = open('intermediate_data/post_1.txt','r');
readfile2 = open('intermediate_data/cmnt_1.txt','r');
writefile = open('intermediate_data/dictionary.pkl','w');
#writefile1 = open('intermediate_data/post_2.txt','w');
#writefile2 = open('intermediate_data/cmnt_2.txt','w');
if __name__ == '__main__':
main(1000000);
| 25.803922 | 57 | 0.660334 |
f027e6207f84d89378cfacc9c580753614b7155a | 4,245 | py | Python | visualization.py | Tommy-Johannessen/MovementRecognition | be84d7d014a272987dd20d03194336a9244eb900 | [
"MIT"
] | null | null | null | visualization.py | Tommy-Johannessen/MovementRecognition | be84d7d014a272987dd20d03194336a9244eb900 | [
"MIT"
] | null | null | null | visualization.py | Tommy-Johannessen/MovementRecognition | be84d7d014a272987dd20d03194336a9244eb900 | [
"MIT"
] | 1 | 2019-02-13T12:42:39.000Z | 2019-02-13T12:42:39.000Z | import itertools
import os
from collections import defaultdict
import matplotlib.pyplot as plt
#plt.style.use('ggplot')
from matplotlib.ticker import FuncFormatter
import pickle
import os
import numpy as np
def calculate_cm(pred_vals, true_vals, classes):
    """
    Compute per-class recall, precision, F1 and the confusion matrix.

    Parameters
    ----------
    pred_vals : sequence of int
        Predicted class labels; assumed to index into ``range(len(classes))``.
    true_vals : sequence of int
        Ground-truth class labels, same length and label range as pred_vals.
    classes : sequence
        Class identifiers; only its length is used here.

    Returns
    -------
    (recall, precision, f1, cm)
        Three per-class lists and the confusion matrix as a numpy array with
        rows = true labels and columns = predicted labels.

    Raises
    ------
    ValueError
        If pred_vals and true_vals differ in length.
    """
    if len(pred_vals) != len(true_vals):
        raise ValueError("Dimensions do not match")
    n_classes = len(classes)
    cm = np.zeros((n_classes, n_classes), dtype=int)
    for guess, ground_truth in zip(pred_vals, true_vals):
        cm[ground_truth][guess] += 1
    recall = []
    precision = []
    f1 = []
    for index in range(n_classes):
        row_total = cm[index].sum()       # all samples truly in this class
        col_total = cm[:, index].sum()    # all samples predicted as this class
        r = 0 if row_total == 0 else cm[index, index] / row_total
        p = 0 if col_total == 0 else cm[index, index] / col_total
        recall.append(r)
        precision.append(p)
        # BUG FIX: F1 is the harmonic mean 2*p*r/(p+r); the previous version
        # returned the arithmetic mean (p + r) / 2, which overstates the
        # score whenever precision and recall are unbalanced.
        f1.append(0 if (r + p) == 0 else 2 * r * p / (r + p))
    return recall, precision, f1, cm
def plot_confusion_matrix(cm, classes, path, name, normalize=False, title='Confusion matrix', cmap=plt.cm.Blues):
    """
    Render a confusion matrix as an image and save it to ``path + name``.

    Parameters
    ----------
    cm : numpy.ndarray
        Square confusion matrix (rows = true labels, cols = predictions).
    classes : sequence
        Tick labels for both axes; length must match cm's dimensions.
    path : str
        Output directory, created if missing. NOTE(review): joined with
        ``name`` by plain string concatenation, so it must end with a path
        separator — confirm against callers.
    name : str
        Output file name.
    normalize : bool
        If True, normalize each row to sum to 1 before plotting.
    title : str
        Plot title.
    cmap : matplotlib colormap
        Color map used for the cells.
    """
    if not os.path.exists(path):
        os.makedirs(path)
    if normalize:
        # Row-normalize so each true-label row is a probability distribution.
        cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
        print("Normalized confusion matrix")
    else:
        print('Confusion matrix, without normalization')
    plt.figure(figsize=(12, 6))
    plt.imshow(cm, interpolation='nearest', cmap=cmap)
    plt.title(title)
    plt.colorbar()
    tick_marks = np.arange(len(classes))
    plt.xticks(tick_marks, classes, rotation=45)
    plt.yticks(tick_marks, classes)
    # Annotate every cell; white text on dark cells for contrast.
    fmt = '.2f' if normalize else 'd'
    thresh = cm.max() / 2.
    for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
        plt.text(j, i, format(cm[i, j], fmt),
                 horizontalalignment="center",
                 color="white" if cm[i, j] > thresh else "black")
    plt.ylabel('True label')
    plt.xlabel('Predicted label')
    plt.tight_layout()
    plt.savefig(path + name)
    # Clear and close the figure so repeated calls don't accumulate figures.
    plt.clf()
    plt.close()
if __name__ == '__main__':
    # Walk the processed-data folders and (re)generate distribution plots
    # only for the 'custom_movement' set.
    search_folder = 'data/processed_data'
    for folder in os.listdir(search_folder):
        if folder == 'custom_movement':
            for file in os.listdir(os.path.join(search_folder, folder)):
                # NOTE(review): plot_data_distribution is defined elsewhere in
                # this module; called with (record name without extension,
                # folder prefix, sliding-window flag) — confirm signature.
                plot_data_distribution(file.split('.')[0],
                                       folder.split('_')[0],
                                       True if 'sliding_window' in file else False)
        else:
            print(f'Image created for {folder} at an earlier stage')
| 30.321429 | 113 | 0.640047 |
f028c9418a1b88c255939fa631a2c379765ba1a6 | 7,188 | py | Python | raw-myo-plot/Extract_Features.py | rjweld21/prostheticClinic | 1e1ab314fc31d85f455bd7a7868e1269f2808b50 | [
"MIT"
] | null | null | null | raw-myo-plot/Extract_Features.py | rjweld21/prostheticClinic | 1e1ab314fc31d85f455bd7a7868e1269f2808b50 | [
"MIT"
] | null | null | null | raw-myo-plot/Extract_Features.py | rjweld21/prostheticClinic | 1e1ab314fc31d85f455bd7a7868e1269f2808b50 | [
"MIT"
] | 1 | 2018-12-13T22:19:55.000Z | 2018-12-13T22:19:55.000Z | # -*- coding: utf-8 -*-
"""
Created on Thu Nov 1 12:54:07 2018
@author: bsala_000
"""
import os
import numpy as np
import pandas as pd
def getFilename(RECORDS_DIR='myo_data'):
    """
    Interactively pick a CSV file from the input records directory.

    Every file in ``RECORDS_DIR`` is listed with a numeric index; the user
    is prompted for an index until a valid one is entered.

    Parameters
    ----------
    RECORDS_DIR : str
        Directory containing the recorded CSV files.

    Returns
    -------
    str
        Path to the chosen file, or '' if the directory is empty.
    """
    records = sorted(set(os.listdir(RECORDS_DIR)))
    if not records:
        print('No records found.')
        return ''
    print('Records found:')
    for i, f in enumerate(records):
        print(i, ':', f)
    while True:
        # Bug fix: the original read input() once and then looped forever
        # re-parsing the same bad value; re-prompt on every failure instead.
        choice = input('Enter number of file to load: ')
        try:
            return os.path.join(RECORDS_DIR, records[int(choice)])
        except (ValueError, IndexError):
            # Narrowed from a bare 'except' that also hid real errors.
            print('Incorrect input... Must be number listed above.')
def print_record_features(data):
    """
    Print the result of every feature-extraction function for the input.

    :data: - Dataframe of loaded EMG data from CSV
    """
    for feature in (get_RMS(data), get_Var(data), get_MAV(data)):
        print(feature)
    print('Zero Crossings:\n', get_zero_crossings(data))
    print('Waveform Lengths:\n', get_waveform_length(data))
def get_record_features(data, savefile=False):
    """
    Compute every extracted feature for the input recording.

    INPUT
        :data: - Dataframe of loaded EMG data from CSV
        :savefile: - optional filename; when truthy the features are saved
    OUTPUT
        :features: - Dictionary of dataframes for extracted features
    """
    extractors = {
        'rms': get_RMS,
        'var': get_Var,
        'mav': get_MAV,
        'zc': get_zero_crossings,
        'wfl': get_waveform_length,
    }
    features = {key: extract(data) for key, extract in extractors.items()}
    if savefile:
        save_features(features, savefile)
    return features
def save_features(data_dict, f):
    """
    Save extracted features to a CSV file, one row per feature.

    Bug fixes relative to the original:
    - ``data_dict[0]`` raised ``KeyError`` (keys are feature names, not ints)
    - the 'feature' column was assigned the builtin ``id`` instead of the key
    - ``pd.concat`` was called with the non-existent ``ignore_axis`` keyword

    INPUT
        :data_dict: - Dictionary of dataframes/series like the one output
                      from get_record_features()
        :f: - CSV filename (or buffer) to save features to
    """
    rows = []
    for feature_name, values in data_dict.items():
        # Per-column features come back as a Series; turn them into a
        # one-row frame so every feature contributes exactly one CSV row.
        frame = values.to_frame().T if isinstance(values, pd.Series) else values.copy()
        frame['feature'] = feature_name
        rows.append(frame)
    if not rows:
        # Nothing to save: still emit a well-formed (header-only) CSV.
        pd.DataFrame(columns=['feature']).to_csv(f, index=False)
        return
    out = pd.concat(rows, ignore_index=True)
    out.to_csv(f, index=False)
def get_RMS(data):
    """
    Compute the root-mean-square of every column.

    INPUT
        :data: - Dataframe of loaded EMG data from CSV
    OUTPUT
        :RMS: - per-column RMS values (sqrt of mean of squares)
    """
    n_samples = data.shape[0]
    # Square element-wise, sum each column, then take sqrt(sum / n).
    squared_sums = (data ** 2).sum(axis=0)
    return (squared_sums / n_samples) ** 0.5
def get_Var(data):
    """
    Compute the variance of every column.

    INPUT
        :data: - Dataframe of loaded EMG data from CSV
    OUTPUT
        per-column population variance (square of np.std with ddof=0)
    """
    per_column_std = np.std(data, axis=0)
    return per_column_std ** 2
def get_MAV(data):
    """
    Compute the mean absolute value of every column.

    INPUT
        :data: - Dataframe of loaded EMG data from CSV
    OUTPUT
        per-column mean of the absolute values
    """
    return np.abs(data).mean(axis=0)
def get_zero_crossing_matlab():
    """
    DEPRECATED - See get_zero_crossings() function below.

    Delegates the zero-crossing computation to a MATLAB script through the
    (module-level) MATLAB engine handle ``eng``.
    """
    matlab_counts = eng.Zero_Crossing()
    return np.array(matlab_counts).astype(int)
def get_zero_crossings(data):
    """
    Count the sign changes (zero crossings) in every column.

    INPUT
        :data: - Dataframe of loaded EMG data from CSV
    OUTPUT
        one-row dataframe with the zero-crossing count of each column
    """
    # A nonzero diff of the sign sequence marks a crossing.
    counts = {
        col: len(np.where(np.diff(np.sign(data[col])))[0])
        for col in data
    }
    return pd.DataFrame(counts, index=[0])
def get_waveform_length_matlab():
    """
    DEPRECATED - use get_waveform_length() below instead.

    Delegates the waveform-length computation to a MATLAB script through
    the (module-level) MATLAB engine handle ``eng``.
    """
    matlab_lengths = eng.Waveform_Length()
    return np.array(matlab_lengths).astype(int)
def get_waveform_length(data):
    """
    Compute the waveform length of every column: the sum of the absolute
    sample-to-sample differences.

    INPUT
        :data: - Dataframe of loaded EMG data from CSV
    OUTPUT
        per-column waveform-length sums
    """
    step_sizes = data.diff().abs()
    return step_sizes.sum(axis=0)
if __name__ == '__main__':
    mode = input('Batch or test (b/t): ')
    if mode == 'b':
        selection = input('Select specific files (enter 0) or select based on regex (enter 1)? ')
        if selection == '0':
            select_batch('myo_data')
        elif selection == '1':
            regex_batch('myo_data')
    elif mode == 't':
        # Quick smoke run on a single known recording.
        sample_path = os.path.join('myo_data', 'myo_record_0.csv')
        get_record_features(pd.read_csv(sample_path))
f029112ff9d652c6d8e36f9059cb703264d4ebbd | 739 | py | Python | Hartree-Fock_H2/utils.py | WonhoZhung/CH502 | c64a174fe7218e6e86c84c73e6df441fb5074211 | [
"MIT"
] | null | null | null | Hartree-Fock_H2/utils.py | WonhoZhung/CH502 | c64a174fe7218e6e86c84c73e6df441fb5074211 | [
"MIT"
] | null | null | null | Hartree-Fock_H2/utils.py | WonhoZhung/CH502 | c64a174fe7218e6e86c84c73e6df441fb5074211 | [
"MIT"
] | null | null | null | #----------------------------------------------------------------------
# Basis Set Exchange
# Version v0.8.13
# https://www.basissetexchange.org
#----------------------------------------------------------------------
# Basis set: STO-3G
# Description: STO-3G Minimal Basis (3 functions/AO)
# Role: orbital
# Version: 1 (Data from Gaussian09)
#----------------------------------------------------------------------
# BASIS "ao basis" PRINT
# #BASIS SET: (3s) -> [1s]
# H S
# 0.3425250914E+01 0.1543289673E+00
# 0.6239137298E+00 0.5353281423E+00
# 0.1688554040E+00 0.4446345422E+00
# END
# Gaussian exponents of the three STO-3G primitives for the hydrogen 1s
# orbital (values copied from the Basis Set Exchange data above).
A_LIST = [3.425250914 , 0.6239137298, 0.1688554040]
# Matching contraction coefficients for the same three primitives.
D_LIST = [0.1543289673, 0.5353281423, 0.4446345422]
| 35.190476 | 71 | 0.460081 |
f02ceba7181acc45bf9bae1d138dd71123a318a6 | 422 | py | Python | my_wallet/apiv1/permissions.py | ibolorino/wallet_backend | 20c80e419eaef6b0577ca45ff35bf4eb9501e3a3 | [
"MIT"
] | null | null | null | my_wallet/apiv1/permissions.py | ibolorino/wallet_backend | 20c80e419eaef6b0577ca45ff35bf4eb9501e3a3 | [
"MIT"
] | null | null | null | my_wallet/apiv1/permissions.py | ibolorino/wallet_backend | 20c80e419eaef6b0577ca45ff35bf4eb9501e3a3 | [
"MIT"
] | null | null | null | from rest_framework import permissions | 35.166667 | 110 | 0.699052 |
f02d76a5fd8b5ecfd2e0de43f20b301ddaf039ba | 2,294 | py | Python | automate_insurance_pricing/preprocessing/descriptive_functions.py | nassmim/automate-insurance-pricing-nezz | 7a1cc48be9fb78bdadbbf7616fb01d4d6429e06c | [
"MIT"
] | 2 | 2021-11-09T15:47:22.000Z | 2021-11-14T13:54:56.000Z | automate_insurance_pricing/preprocessing/descriptive_functions.py | nassmim/automate-insurance-pricing-nezz | 7a1cc48be9fb78bdadbbf7616fb01d4d6429e06c | [
"MIT"
] | null | null | null | automate_insurance_pricing/preprocessing/descriptive_functions.py | nassmim/automate-insurance-pricing-nezz | 7a1cc48be9fb78bdadbbf7616fb01d4d6429e06c | [
"MIT"
] | 1 | 2021-07-09T04:12:57.000Z | 2021-07-09T04:12:57.000Z | import pandas as pd
def derive_termination_rate_year(df, start_business_year, extraction_year, main_column_contract_date, policy_id_column_name, column_to_sum_name):
    """Derives the contracts termination rates per year.

    Arguments --> the dataframe, the business starting year, the extraction
        year, the contracts start date column name, the policy id column
        name and the column to sum (used as the weighting, e.g. gwp)
    Returns --> a dictionary with the termination rates per year and a
        'weighted_average' entry (rates weighted by each year's summed
        column)
    """
    # Cohort of policies seen in the first business year (one row per policy).
    df_previous_year = df[df[main_column_contract_date].dt.year == start_business_year].drop_duplicates(subset=policy_id_column_name, keep='first')
    policies_previous_year = df_previous_year[policy_id_column_name]
    termination_rates = {}
    # Summed weight (named gwp here) for the current reference year.
    gwp_year = df_previous_year[column_to_sum_name].sum()
    total_gwp = gwp_year
    weighted_rates = 0
    for year in range(start_business_year+1, extraction_year+1):
        df_next_year = df[df[main_column_contract_date].dt.year == year].drop_duplicates(subset=policy_id_column_name, keep='first')
        policies_next_year = df_next_year[policy_id_column_name]
        # Policies from last year's cohort that reappear this year.
        policies_from_previous_year = df_next_year[df_next_year[policy_id_column_name].isin(policies_previous_year)]
        # Fraction of last year's policies that did NOT reappear.
        termination_rate = (len(policies_previous_year) - len(policies_from_previous_year)) / len(policies_previous_year)
        termination_rates[year-1] = termination_rate
        weighted_rates += termination_rate * gwp_year
        # Roll the reference forward: this year becomes the new cohort.
        gwp_year = df_next_year[column_to_sum_name].sum()
        total_gwp += gwp_year
        policies_previous_year = policies_next_year
    # NOTE(review): total_gwp includes the extraction year's sum even though
    # no termination rate is computed for that final year (weight 0 in the
    # numerator) -- confirm this denominator is intended.
    termination_rates['weighted_average'] = weighted_rates / total_gwp
    return termination_rates
def create_df_unique_values(df, features):
    """
    Build a summary table of the number of unique values per feature
    (mainly useful for categorical features).

    Arguments --> the dataframe and the list of feature column names
    Returns --> a new dataframe with one row per feature and its count of
        unique values (the positional index is kept as an 'index' column)
    """
    unique_counts = df[features].nunique()
    summary = pd.DataFrame({'feature': features,
                            'number_of_uniques': unique_counts.values})
    return summary.reset_index()
| 46.816327 | 148 | 0.735397 |
f02d80a4afeebaf1a2e3f75631b09c3fc74059e3 | 2,538 | py | Python | src/flask_easy/auth.py | Josephmaclean/flask-easy | 64cb647b0dbcd031cb8d27cc60889e50c959e1ca | [
"MIT"
] | 1 | 2021-12-30T12:25:05.000Z | 2021-12-30T12:25:05.000Z | src/flask_easy/auth.py | Josephmaclean/flask-easy | 64cb647b0dbcd031cb8d27cc60889e50c959e1ca | [
"MIT"
] | null | null | null | src/flask_easy/auth.py | Josephmaclean/flask-easy | 64cb647b0dbcd031cb8d27cc60889e50c959e1ca | [
"MIT"
] | null | null | null | """
auth.py
Author: Joseph Maclean Arhin
"""
import os
import inspect
from functools import wraps
import jwt
from flask import request
from jwt.exceptions import ExpiredSignatureError, InvalidTokenError, PyJWTError
from .exc import Unauthorized, ExpiredTokenException, OperationError
def auth_required(other_roles=None):
    """Decorator factory guarding a view behind authentication.

    NOTE(review): the wrapper body appears stripped in this excerpt --
    ``view_wrapper`` is returned but never defined here; presumably the
    full source defines it (wrapping ``func``) before the return. Confirm
    against the complete file.

    :param other_roles: optional extra roles granted access (usage not
        visible in this excerpt)
    """
    def authorize_user(func):
        """
        A wrapper to authorize an action using
        :param func: {function}` the function to wrap around
        :return:
        """
        return view_wrapper
    return authorize_user
def is_authorized(access_roles, available_roles):
    """Check if any access role is in the available roles.

    :param access_roles: iterable of role names required for access
    :param available_roles: collection of role names actually held
    :return: True if at least one access role is available, else False
    """
    # any() replaces the original manual loop with early return; behavior
    # (including short-circuiting) is identical.
    return any(role in available_roles for role in access_roles)
| 32.126582 | 79 | 0.593775 |
f02f263b4792b69303bcdec39c484284dc805802 | 1,221 | py | Python | src/prefect/engine/result_handlers/secret_result_handler.py | trapped/prefect | 128f11570c35e7156d65ba65fdcbc1f4ccd7c2b7 | [
"Apache-2.0"
] | 1 | 2019-12-20T07:43:55.000Z | 2019-12-20T07:43:55.000Z | src/prefect/engine/result_handlers/secret_result_handler.py | trapped/prefect | 128f11570c35e7156d65ba65fdcbc1f4ccd7c2b7 | [
"Apache-2.0"
] | null | null | null | src/prefect/engine/result_handlers/secret_result_handler.py | trapped/prefect | 128f11570c35e7156d65ba65fdcbc1f4ccd7c2b7 | [
"Apache-2.0"
] | null | null | null | import json
from typing import Any
import prefect
from prefect.engine.result_handlers import ResultHandler
| 27.133333 | 104 | 0.626536 |
f02f4e1f7df53040bb2247eb8bc8db48f7b3454e | 9,283 | py | Python | hnn_core/tests/test_dipole.py | mkhalil8/hnn-core | a761e248ddf360710dd60638269f70361f5d6cb3 | [
"BSD-3-Clause"
] | null | null | null | hnn_core/tests/test_dipole.py | mkhalil8/hnn-core | a761e248ddf360710dd60638269f70361f5d6cb3 | [
"BSD-3-Clause"
] | null | null | null | hnn_core/tests/test_dipole.py | mkhalil8/hnn-core | a761e248ddf360710dd60638269f70361f5d6cb3 | [
"BSD-3-Clause"
] | null | null | null | import os.path as op
from urllib.request import urlretrieve
import matplotlib
import numpy as np
from numpy.testing import assert_allclose
import pytest
import hnn_core
from hnn_core import read_params, read_dipole, average_dipoles
from hnn_core import Network, jones_2009_model
from hnn_core.viz import plot_dipole
from hnn_core.dipole import Dipole, simulate_dipole, _rmse
from hnn_core.parallel_backends import requires_mpi4py, requires_psutil
matplotlib.use('agg')  # non-interactive backend so plotting tests run headless
def test_dipole(tmpdir, run_hnn_core_fixture):
    """Test dipole object: smoothing/scaling, file IO, averaging, plotting.

    Builds a Dipole from random data, round-trips it through write/read,
    averages several combinations of dipoles, and finally exercises the
    deprecated postproc path of the simulation fixture.
    """
    hnn_core_root = op.dirname(hnn_core.__file__)
    params_fname = op.join(hnn_core_root, 'param', 'default.json')
    dpl_out_fname = tmpdir.join('dpl1.txt')
    params = read_params(params_fname)
    times = np.arange(0, 6000 * params['dt'], params['dt'])
    data = np.random.random((6000, 3))
    dipole = Dipole(times, data)
    dipole._baseline_renormalize(params['N_pyr_x'], params['N_pyr_y'])
    dipole._convert_fAm_to_nAm()
    # test smoothing and scaling
    dipole_raw = dipole.copy()
    dipole.scale(params['dipole_scalefctr'])
    dipole.smooth(window_len=params['dipole_smooth_win'])
    # scaled+smoothed data must differ from the raw copy...
    with pytest.raises(AssertionError):
        assert_allclose(dipole.data['agg'], dipole_raw.data['agg'])
    # ...but equal the raw copy put through the same smooth+scale.
    assert_allclose(dipole.data['agg'],
                    (params['dipole_scalefctr'] * dipole_raw.smooth(
                        params['dipole_smooth_win']).data['agg']))
    dipole.plot(show=False)
    plot_dipole([dipole, dipole], show=False)
    # Test IO
    dipole.write(dpl_out_fname)
    dipole_read = read_dipole(dpl_out_fname)
    assert_allclose(dipole_read.times, dipole.times, rtol=0, atol=0.00051)
    for dpl_key in dipole.data.keys():
        assert_allclose(dipole_read.data[dpl_key],
                        dipole.data[dpl_key], rtol=0, atol=0.000051)
    # average two identical dipole objects
    dipole_avg = average_dipoles([dipole, dipole_read])
    for dpl_key in dipole_avg.data.keys():
        assert_allclose(dipole_read.data[dpl_key],
                        dipole_avg.data[dpl_key], rtol=0, atol=0.000051)
    # re-averaging an already-averaged dipole must be rejected
    with pytest.raises(ValueError, match="Dipole at index 0 was already an "
                       "average of 2 trials"):
        dipole_avg = average_dipoles([dipole_avg, dipole_read])
    # average an n_of_1 dipole list
    single_dpl_avg = average_dipoles([dipole])
    for dpl_key in single_dpl_avg.data.keys():
        assert_allclose(
            dipole_read.data[dpl_key],
            single_dpl_avg.data[dpl_key],
            rtol=0,
            atol=0.000051)
    # average dipole list with one dipole object and a zero dipole object
    n_times = len(dipole_read.data['agg'])
    dpl_null = Dipole(np.zeros(n_times, ), np.zeros((n_times, 3)))
    dpl_1 = [dipole, dpl_null]
    dpl_avg = average_dipoles(dpl_1)
    for dpl_key in dpl_avg.data.keys():
        # averaging with a zero dipole halves the original values
        assert_allclose(dpl_1[0].data[dpl_key] / 2., dpl_avg.data[dpl_key])
    # Test experimental dipole
    dipole_exp = Dipole(times, data[:, 1])
    dipole_exp.write(dpl_out_fname)
    dipole_exp_read = read_dipole(dpl_out_fname)
    assert_allclose(dipole_exp.data['agg'], dipole_exp_read.data['agg'],
                    rtol=1e-2)
    dipole_exp_avg = average_dipoles([dipole_exp, dipole_exp])
    assert_allclose(dipole_exp.data['agg'], dipole_exp_avg.data['agg'])
    # XXX all below to be deprecated in 0.3
    dpls_raw, net = run_hnn_core_fixture(backend='joblib', n_jobs=1,
                                         reduced=True, record_isoma=True,
                                         record_vsoma=True)
    # test deprecation of postproc
    with pytest.warns(DeprecationWarning,
                      match='The postproc-argument is deprecated'):
        dpls, _ = run_hnn_core_fixture(backend='joblib', n_jobs=1,
                                       reduced=True, record_isoma=True,
                                       record_vsoma=True, postproc=True)
    # raw and post-processed dipoles must differ until _post_proc is applied
    with pytest.raises(AssertionError):
        assert_allclose(dpls[0].data['agg'], dpls_raw[0].data['agg'])
    dpls_raw[0]._post_proc(net._params['dipole_smooth_win'],
                           net._params['dipole_scalefctr'])
    assert_allclose(dpls_raw[0].data['agg'], dpls[0].data['agg'])
def test_dipole_simulation():
    """Test data produced from simulate_dipole() call.

    Validates argument checking (n_trials, record_vsoma/record_isoma types),
    that Network.copy() drops simulation events, and the no-connections
    warning path.
    """
    hnn_core_root = op.dirname(hnn_core.__file__)
    params_fname = op.join(hnn_core_root, 'param', 'default.json')
    params = read_params(params_fname)
    # shrink the network and drive times so the simulation stays fast
    params.update({'N_pyr_x': 3,
                   'N_pyr_y': 3,
                   'dipole_smooth_win': 5,
                   't_evprox_1': 5,
                   't_evdist_1': 10,
                   't_evprox_2': 20})
    net = jones_2009_model(params, add_drives_from_params=True)
    with pytest.raises(ValueError, match="Invalid number of simulations: 0"):
        simulate_dipole(net, tstop=25., n_trials=0)
    with pytest.raises(TypeError, match="record_vsoma must be bool, got int"):
        simulate_dipole(net, tstop=25., n_trials=1, record_vsoma=0)
    with pytest.raises(TypeError, match="record_isoma must be bool, got int"):
        simulate_dipole(net, tstop=25., n_trials=1, record_vsoma=False,
                        record_isoma=0)
    # test Network.copy() returns 'bare' network after simulating
    dpl = simulate_dipole(net, tstop=25., n_trials=1)[0]
    net_copy = net.copy()
    assert len(net_copy.external_drives['evprox1']['events']) == 0
    # test that Dipole.copy() returns the expected exact copy
    assert_allclose(dpl.data['agg'], dpl.copy().data['agg'])
    with pytest.warns(UserWarning, match='No connections'):
        net = Network(params)
        # warning triggered on simulate_dipole()
        simulate_dipole(net, tstop=0.1, n_trials=1)
        # Smoke test for raster plot with no spikes
        net.cell_response.plot_spikes_raster()
def test_rmse():
    """Test to check RMSE calculation.

    Shifts a real averaged-ERP trace by a constant offset and checks that
    _rmse against the unshifted trace recovers exactly that offset.

    NOTE(review): this test downloads the reference data over the network
    on first run (urlretrieve) -- it will fail offline; consider shipping
    the fixture locally.
    """
    data_url = ('https://raw.githubusercontent.com/jonescompneurolab/hnn/'
                'master/data/MEG_detection_data/yes_trial_S1_ERP_all_avg.txt')
    if not op.exists('yes_trial_S1_ERP_all_avg.txt'):
        urlretrieve(data_url, 'yes_trial_S1_ERP_all_avg.txt')
    extdata = np.loadtxt('yes_trial_S1_ERP_all_avg.txt')
    exp_dpl = Dipole(times=extdata[:, 0],
                     data=np.c_[extdata[:, 1], extdata[:, 1], extdata[:, 1]])
    hnn_core_root = op.join(op.dirname(hnn_core.__file__))
    params_fname = op.join(hnn_core_root, 'param', 'default.json')
    params = read_params(params_fname)
    # constant offset => RMSE equals the offset exactly
    expected_rmse = 0.1
    test_dpl = Dipole(times=extdata[:, 0],
                      data=np.c_[extdata[:, 1] + expected_rmse,
                                 extdata[:, 1] + expected_rmse,
                                 extdata[:, 1] + expected_rmse])
    avg_rmse = _rmse(test_dpl, exp_dpl, tstop=params['tstop'])
    assert_allclose(avg_rmse, expected_rmse)
| 43.378505 | 78 | 0.660885 |
f02fc9e2410362e641030d8eb9da915829910a4c | 1,280 | py | Python | setup.py | creeston/chinese | 44317b8aa9b909eda9cf3008f6bd0cf4d92f228c | [
"MIT"
] | 15 | 2018-11-15T16:54:41.000Z | 2022-01-12T00:53:10.000Z | setup.py | creeston/chinese | 44317b8aa9b909eda9cf3008f6bd0cf4d92f228c | [
"MIT"
] | 1 | 2021-05-19T04:01:21.000Z | 2021-05-19T04:01:21.000Z | setup.py | creeston/chinese | 44317b8aa9b909eda9cf3008f6bd0cf4d92f228c | [
"MIT"
] | 5 | 2019-03-01T09:30:34.000Z | 2022-03-07T19:25:40.000Z | from setuptools import setup, find_packages
# Long description for PyPI is kept in a reStructuredText file.
with open('docs/README-rst') as f:
    desc = f.read()

setup(
    name='chinese',
    version='0.2.1',
    license='MIT',
    url='https://github.com/morinokami/chinese',
    keywords=['Chinese', 'text analysis'],
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Environment :: Other Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: MacOS :: MacOS X',
        'Operating System :: Microsoft :: Windows',
        'Operating System :: POSIX',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3 :: Only',
        'Topic :: Education',
        'Topic :: Text Processing :: Linguistic',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
    description='Chinese text analyzer',
    long_description = desc,
    author='Shinya Fujino',
    author_email='shf0811@gmail.com',
    # NOTE(review): find_packages(where='src') combined with
    # package_dir={'chinese': 'src/chinese'} is unusual -- the conventional
    # form is package_dir={'': 'src'}; confirm subpackages are picked up.
    packages=find_packages(where='src'),
    package_dir={'chinese': 'src/chinese'},
    # Bundled dictionary data shipped inside the package.
    package_data={'chinese': ['data/cedict.pickle', 'data/dict.txt.big']},
    include_package_data=True,
    install_requires=['jieba', 'pynlpir'],
)
| 32 | 74 | 0.613281 |
f03149894a1a1db841d1f4b4176a844bc1ba3dd2 | 2,880 | py | Python | glacis_core/api/get_keys.py | ImperiumSec/glacis_core | b9dd0ad0f92dfd89c8ee1791c03ee1a8c6e93500 | [
"MIT"
] | null | null | null | glacis_core/api/get_keys.py | ImperiumSec/glacis_core | b9dd0ad0f92dfd89c8ee1791c03ee1a8c6e93500 | [
"MIT"
] | null | null | null | glacis_core/api/get_keys.py | ImperiumSec/glacis_core | b9dd0ad0f92dfd89c8ee1791c03ee1a8c6e93500 | [
"MIT"
] | null | null | null | from ..models import EntityOnServer, AccessToken, Organisation, Server, ServerUser, Key, KeyFetchEvent, AuditNote, AuditEvent, LoginAttempt
from django.template import Context, Template
from django.views.decorators.csrf import csrf_exempt
from django.http import HttpResponse
from uuid import uuid4
from datetime import datetime
import json | 27.692308 | 139 | 0.613194 |
f031c64cd48b598cd3b616708c05819e454b8bc1 | 2,870 | py | Python | core/translator.py | bfu4/mdis | fac5ec078ffeaa9339df4b31b9b71140563f4f14 | [
"MIT"
] | 13 | 2021-05-17T06:38:50.000Z | 2022-03-27T15:39:57.000Z | core/translator.py | bfu4/mdis | fac5ec078ffeaa9339df4b31b9b71140563f4f14 | [
"MIT"
] | null | null | null | core/translator.py | bfu4/mdis | fac5ec078ffeaa9339df4b31b9b71140563f4f14 | [
"MIT"
] | null | null | null | from typing import List
from parser import parse_bytes, split_bytes_from_lines, get_bytes, parse_instruction_set, wrap_parsed_set
from reader import dump_file_hex_with_locs
| 28.7 | 105 | 0.591289 |
f0339846cad63a7692947f289af6990dc4271899 | 3,987 | py | Python | easyp2p/p2p_signals.py | Ceystyle/easyp2p | 99c32e3ec0ff5a34733f157dd1b53d1aa9bc9edc | [
"MIT"
] | 4 | 2019-07-18T10:58:28.000Z | 2021-11-18T16:57:45.000Z | easyp2p/p2p_signals.py | Ceystyle/easyp2p | 99c32e3ec0ff5a34733f157dd1b53d1aa9bc9edc | [
"MIT"
] | 1 | 2019-07-05T09:21:47.000Z | 2019-07-05T09:21:47.000Z | easyp2p/p2p_signals.py | Ceystyle/easyp2p | 99c32e3ec0ff5a34733f157dd1b53d1aa9bc9edc | [
"MIT"
] | 2 | 2019-07-05T08:56:34.000Z | 2020-06-09T10:03:42.000Z | # -*- coding: utf-8 -*-
# Copyright (c) 2018-2020 Niko Sandschneider
"""Module implementing Signals for communicating with the GUI."""
from functools import wraps
import logging
from PyQt5.QtCore import QObject, pyqtSignal
| 34.37069 | 75 | 0.605719 |
f033f0846a998f9a5ac92cbb40712c19a572ab8c | 623 | py | Python | extra_tests/ctypes_tests/test_unions.py | nanjekyejoannah/pypy | e80079fe13c29eda7b2a6b4cd4557051f975a2d9 | [
"Apache-2.0",
"OpenSSL"
] | 333 | 2015-08-08T18:03:38.000Z | 2022-03-22T18:13:12.000Z | extra_tests/ctypes_tests/test_unions.py | nanjekyejoannah/pypy | e80079fe13c29eda7b2a6b4cd4557051f975a2d9 | [
"Apache-2.0",
"OpenSSL"
] | 7 | 2020-02-16T16:49:05.000Z | 2021-11-26T09:00:56.000Z | extra_tests/ctypes_tests/test_unions.py | nanjekyejoannah/pypy | e80079fe13c29eda7b2a6b4cd4557051f975a2d9 | [
"Apache-2.0",
"OpenSSL"
] | 55 | 2015-08-16T02:41:30.000Z | 2022-03-20T20:33:35.000Z | import sys
from ctypes import *
| 21.482759 | 48 | 0.523274 |
f0348185cb88efdb34b5de39fe352d2ee65ecef9 | 13,977 | py | Python | nssrc/com/citrix/netscaler/nitro/resource/config/snmp/snmpmib.py | guardicore/nitro-python | 5346a5086134aead80968f15a41ff527adaa0ec1 | [
"Apache-2.0"
] | null | null | null | nssrc/com/citrix/netscaler/nitro/resource/config/snmp/snmpmib.py | guardicore/nitro-python | 5346a5086134aead80968f15a41ff527adaa0ec1 | [
"Apache-2.0"
] | null | null | null | nssrc/com/citrix/netscaler/nitro/resource/config/snmp/snmpmib.py | guardicore/nitro-python | 5346a5086134aead80968f15a41ff527adaa0ec1 | [
"Apache-2.0"
] | null | null | null | #
# Copyright (c) 2021 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
def _get_object_name(self) :
r""" Returns the value of object identifier argument
"""
try :
if self.ownernode is not None :
return str(self.ownernode)
return None
except Exception as e :
raise e
class snmpmib_response(base_response) :
| 37.980978 | 387 | 0.701867 |
f034b8b6b6d0852450c50577d53070c406d80750 | 770 | py | Python | lambda-archive/lambda-functions/codebreaker-update-testcaseCount/lambda_function.py | singaporezoo/codebreaker-official | 1fe5792f1c36f922abd0836d8dcb42d271a9323d | [
"MIT"
] | 11 | 2021-09-19T06:32:44.000Z | 2022-03-14T19:09:46.000Z | lambda-archive/lambda-functions/codebreaker-update-testcaseCount/lambda_function.py | singaporezoo/codebreaker-official | 1fe5792f1c36f922abd0836d8dcb42d271a9323d | [
"MIT"
] | null | null | null | lambda-archive/lambda-functions/codebreaker-update-testcaseCount/lambda_function.py | singaporezoo/codebreaker-official | 1fe5792f1c36f922abd0836d8dcb42d271a9323d | [
"MIT"
] | 1 | 2022-03-02T13:27:27.000Z | 2022-03-02T13:27:27.000Z | import json
import boto3 # Amazon S3 client library
# AWS handles created at module level: initialised once per Lambda
# container and reused across invocations.
s3 = boto3.resource('s3')
dynamodb = boto3.resource('dynamodb')
# DynamoDB table of problems -- presumably holds the testcaseCount this
# Lambda updates; confirm against the handler code.
problems_table = dynamodb.Table('codebreaker-problems')
# S3 bucket containing the uploaded test data.
bucket = s3.Bucket('codebreaker-testdata')
| 28.518519 | 72 | 0.672727 |