Dataset schema (column name: type):

- blob_id: string (length 40)
- directory_id: string (length 40)
- path: string (length 2 to 616)
- content_id: string (length 40)
- detected_licenses: list (length 0 to 69)
- license_type: string (2 classes)
- repo_name: string (length 5 to 118)
- snapshot_id: string (length 40)
- revision_id: string (length 40)
- branch_name: string (length 4 to 63)
- visit_date: timestamp[us]
- revision_date: timestamp[us]
- committer_date: timestamp[us]
- github_id: int64 (2.91k to 686M, nullable)
- star_events_count: int64 (0 to 209k)
- fork_events_count: int64 (0 to 110k)
- gha_license_id: string (23 classes, nullable)
- gha_event_created_at: timestamp[us]
- gha_created_at: timestamp[us]
- gha_language: string (213 classes, nullable)
- src_encoding: string (30 classes)
- language: string (1 class)
- is_vendor: bool
- is_generated: bool
- length_bytes: int64 (2 to 10.3M)
- extension: string (246 classes)
- content: string (length 2 to 10.3M)
- authors: list (length 1)
- author_id: string (length 0 to 212)
7a473b1bddcf04174e18dfc2f4007503fea18978
|
90d24e35dd9122615759ac3c94cc72710296d51e
|
/input/process_requests.py
|
d6783d47807e47dce4c94d2215f01bdc4939bb52
|
[
"Apache-2.0"
] |
permissive
|
CecilePereiraTotal/ChemOS
|
8f827f6295fe39368fb16b16d9f95b4c9525e9e1
|
50117f572e95e68dc4dccb624cedb28dbfc6e419
|
refs/heads/master
| 2021-02-17T22:09:16.830417
| 2020-03-01T19:16:27
| 2020-03-01T19:16:27
| 245,130,647
| 0
| 1
|
NOASSERTION
| 2020-03-05T10:16:18
| 2020-03-05T10:16:17
| null |
UTF-8
|
Python
| false
| false
| 941
|
py
|
#!/usr/bin/env python
import os
import time
import shutil
import glob
import pickle
import numpy as np
#===================================
def run_experiments():
    while True:
        file_names = glob.glob('*rep*pkl')
        for file_name in file_names:
            exp_dict = pickle.load(open(file_name, 'rb'))
            params = exp_dict['parameters']
            objs = {}
            exp_dict['obj_0'] = np.random.uniform(low=0, high=1)
            exp_dict['obj_1'] = np.random.uniform(low=0, high=1)
            exp_dict['obj_2'] = np.random.uniform(low=0, high=1)
            # exp_dict['obj_0'] = np.sum(np.square(params))
            # exp_dict['obj_1'] = np.sum(np.square(params))
            # exp_dict['obj_2'] = np.sum(np.square(params))
            pickle.dump(exp_dict, open('../output/%s' % file_name, 'wb'))
            print(exp_dict)
            print('========')
            os.remove(file_name)
            break
        time.sleep(2)
#===================================
if __name__ == '__main__':
    run_experiments()
|
[
"flo@FloriansMacBook.local"
] |
flo@FloriansMacBook.local
|
8ec53ea01e0cd5ce414999831dfcf179cd3c7434
|
e25b4bedc2d8f7abf93be9242b309265d9141012
|
/testing/helper.py
|
a15e558319d819a10bd08408cc1e1c5fcd709ff8
|
[] |
no_license
|
wware/postgres-md
|
6f40cc497d8665c3eb7047b097c0fda8cd312ff5
|
872639c799269c7233607c1039dd9ceb480bc2c9
|
refs/heads/master
| 2020-05-24T12:41:26.024380
| 2015-08-03T15:16:36
| 2015-08-03T15:16:36
| 39,483,236
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,404
|
py
|
from docker import Client
import os
import pytest
import time
from sqlalchemy import Table, MetaData, create_engine
from sqlalchemy_utils.functions import (
    database_exists,
    create_database,
    drop_database
)

cli = Client(base_url='unix://var/run/docker.sock')
container = cli.create_container(image='postgres')
cli.start(container=container)
ip_addr = cli.inspect_container(
    container
)['NetworkSettings']['IPAddress']
url = 'postgresql://postgres:@{0}:5432/test'.format(ip_addr)

# It takes several seconds for Postgres to warm up.
while True:
    if "5432/tcp open" in os.popen("nmap -p 5432 " + ip_addr).read():
        break
    time.sleep(1)

_schema = None

def set_schema(fname):
    global _schema
    _schema = fname

def get_table(tname, columns=None):
    "import table info from the database"
    db = create_engine(url)
    metadata = MetaData(db)
    table = Table(tname, metadata, autoload=True, autoload_with=db)
    if columns is not None:
        assert [c.name for c in table.columns] == columns
    return table

@pytest.fixture
def engine(request, url=url):
    assert _schema is not None, "Please call set_schema(filename) first"
    def fin():
        drop_database(url)
    request.addfinalizer(fin)
    create_database(url)
    cmd = "cat {0} | psql -h {1} -U postgres test".format(_schema, ip_addr)
    os.system(cmd)
    return create_engine(url)
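
# Usage sketch (not part of the original file; the test, table, and schema
# file names are made up): import this module in a test file, point it at a
# schema, and take the 'engine' fixture.
#
#     import helper
#     helper.set_schema('schema.sql')    # must run before any test uses 'engine'
#
#     def test_users_table(engine):      # fixture creates the DB, loads the
#         t = helper.get_table('users')  # schema, and drops the DB afterwards
#         assert t.name == 'users'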
|
[
"wware@veracode.com"
] |
wware@veracode.com
|
971042572faaf4b63994e034f4bde93444f40cdc
|
e5023be9805b83eb16297ea5f9e27b8b12da7db1
|
/apps/nick/migrations/0003_auto_20181121_2032.py
|
619ef33203a2211ec21366cd5315f969134bada2
|
[] |
no_license
|
rongDang/Django-Demo
|
dbd0a8de7b6a6ce692aadf9518106bdede027526
|
b6280cdbad5682bf0f09abd381e2e6b75772d8e2
|
refs/heads/master
| 2020-04-04T18:40:23.991318
| 2018-12-27T13:44:19
| 2018-12-27T13:44:19
| 156,173,360
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 889
|
py
|
# Generated by Django 2.1.2 on 2018-11-21 20:32
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('nick', '0002_auto_20181119_1854'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='blog',
            options={'verbose_name': '博客', 'verbose_name_plural': '博客'},
        ),
        migrations.AddField(
            model_name='category',
            name='number',
            field=models.IntegerField(default=2),
        ),
        migrations.AddField(
            model_name='tag',
            name='number',
            field=models.IntegerField(default=1),
        ),
        migrations.AlterField(
            model_name='blog',
            name='title',
            field=models.CharField(max_length=50, verbose_name='标题'),
        ),
    ]
|
[
"2801293031@qq.com"
] |
2801293031@qq.com
|
c09f20d26fdaea17fc361a7d9af2ace3c46a9ec5
|
992fd9c34ec70887dddf7d6ea3b5a095a1293a77
|
/full-width-digit/plugin.py
|
5be9f7adeb2efaf41e89a9b47fe2973342bc212e
|
[
"LicenseRef-scancode-warranty-disclaimer",
"Unlicense"
] |
permissive
|
Kennyl/Sigil-Ebook-Plugins
|
70dfd3259816119a4e18a1e685788c4b422756a7
|
67978f343c69b3020ca3b2b9a38850eb2e6eed27
|
refs/heads/master
| 2021-01-18T20:01:16.992849
| 2017-11-05T05:41:40
| 2017-11-05T05:41:40
| 86,926,203
| 13
| 3
|
Unlicense
| 2018-04-20T15:53:18
| 2017-04-01T16:19:17
|
Python
|
UTF-8
|
Python
| false
| false
| 1,643
|
py
|
#!/usr/bin/env python
#-*- coding: utf-8 -*-
import re
import sys
import sigil_bs4

conversionDict = {"0": "\N{FULLWIDTH DIGIT ZERO}",
                  "1": "\N{FULLWIDTH DIGIT ONE}",
                  "2": "\N{FULLWIDTH DIGIT TWO}",
                  "3": "\N{FULLWIDTH DIGIT THREE}",
                  "4": "\N{FULLWIDTH DIGIT FOUR}",
                  "5": "\N{FULLWIDTH DIGIT FIVE}",
                  "6": "\N{FULLWIDTH DIGIT SIX}",
                  "7": "\N{FULLWIDTH DIGIT SEVEN}",
                  "8": "\N{FULLWIDTH DIGIT EIGHT}",
                  "9": "\N{FULLWIDTH DIGIT NINE}"}

def fixSelfCloseTags(html):
    return html.replace("></input>", " />").replace("></img>", " />").replace("></meta>", " />").replace("></link>", " />").replace("<br></br>", "<br />")

def run(bk):
    print('start')
    for (file_id, _) in bk.text_iter():
        modified = False
        html = bk.readfile(file_id)
        soup = sigil_bs4.BeautifulSoup(html)
        # a <br> tag prevents the enclosing <p> tag from being matched
        for elem in soup.findAll(['p', 'div', 'span'], text=re.compile(r'(\d+)')):
            modified = True
            text = elem.string
            for key in conversionDict:
                text = re.sub(key, conversionDict[key], text)
            elem.string.replace_with(text)
            # print(elem.string)
        if modified:
            print("Modified file -> ", file_id)
            bk.writefile(file_id, fixSelfCloseTags(str(soup)))
    return 0

def main():
    print("I reached main when I should not have\n")
    return -1

if __name__ == "__main__":
    sys.exit(main())
|
[
"apple@apples-MacBook-Pro.local"
] |
apple@apples-MacBook-Pro.local
|
0628d28942b07798a3581b0c726246718d0103bf
|
6ed233ec80984cd8d6eb5b8f2efde1ac5feadc4b
|
/ebc/nbr2018/tests/base.py
|
d8f1b8dc478a579559e54fd5ce377970766ca953
|
[
"Unlicense"
] |
permissive
|
lflrocha/ebc.nbr2018
|
ce03abd238dca532d8adedaae0778b519b334852
|
0259390fecda065bf040b08e5ae3050ba96b1c4e
|
refs/heads/master
| 2020-04-08T12:44:17.247750
| 2019-08-08T18:13:57
| 2019-08-08T18:13:57
| 159,359,454
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,962
|
py
|
"""Test setup for integration and functional tests.
When we import PloneTestCase and then call setupPloneSite(), all of
Plone's products are loaded, and a Plone site will be created. This
happens at module level, which makes it faster to run each test, but
slows down test runner startup.
"""
from Products.Five import zcml
from Products.Five import fiveconfigure
from Testing import ZopeTestCase as ztc
from Products.PloneTestCase import PloneTestCase as ptc
from Products.PloneTestCase.layer import onsetup
# When ZopeTestCase configures Zope, it will *not* auto-load products
# in Products/. Instead, we have to use a statement such as:
# ztc.installProduct('SimpleAttachment')
# This does *not* apply to products in eggs and Python packages (i.e.
# not in the Products.*) namespace. For that, see below.
# All of Plone's products are already set up by PloneTestCase.
@onsetup
def setup_product():
"""Set up the package and its dependencies.
The @onsetup decorator causes the execution of this body to be
deferred until the setup of the Plone site testing layer. We could
have created our own layer, but this is the easiest way for Plone
integration tests.
"""
# Load the ZCML configuration for the example.tests package.
# This can of course use <include /> to include other packages.
fiveconfigure.debug_mode = True
import ebc.nbr2018
zcml.load_config('configure.zcml', ebc.nbr2018)
fiveconfigure.debug_mode = False
# We need to tell the testing framework that these products
# should be available. This can't happen until after we have loaded
# the ZCML. Thus, we do it here. Note the use of installPackage()
# instead of installProduct().
# This is *only* necessary for packages outside the Products.*
# namespace which are also declared as Zope 2 products, using
# <five:registerPackage /> in ZCML.
# We may also need to load dependencies, e.g.:
# ztc.installPackage('borg.localrole')
ztc.installPackage('ebc.nbr2018')
# The order here is important: We first call the (deferred) function
# which installs the products we need for this product. Then, we let
# PloneTestCase set up this product on installation.
setup_product()
ptc.setupPloneSite(products=['ebc.nbr2018'])
class TestCase(ptc.PloneTestCase):
"""We use this base class for all the tests in this package. If
necessary, we can put common utility or setup code in here. This
applies to unit test cases.
"""
class FunctionalTestCase(ptc.FunctionalTestCase):
"""We use this class for functional integration tests that use
doctest syntax. Again, we can put basic common utility or setup
code in here.
"""
def afterSetUp(self):
roles = ('Member', 'Contributor')
self.portal.portal_membership.addMember('contributor',
'secret',
roles, [])
|
[
"lflrocha@gmail.com"
] |
lflrocha@gmail.com
|
04e1da397bcab72682e32c998aaaf2afbacae741
|
a3e42fad5891c6170d3639254710068642b7e887
|
/fitparse/usageExample.py
|
e84b1e27774c842db6ad3d59d9154450bbc56d71
|
[] |
no_license
|
Marco5de/fitAnalysis
|
7a6ed1e6caf3b0660a88351b3b18baee86621abd
|
2063d9c4f8255fb94027a92f2ede5eff5ed33c81
|
refs/heads/master
| 2020-11-25T02:37:47.906157
| 2020-06-30T12:03:32
| 2020-06-30T12:03:32
| 228,456,453
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 963
|
py
|
from fitparse import FitFile, FitParseError
import sys

file = open("ParsedFitOut.txt", "w")
try:
    fitfile = FitFile("file.fit")
    fitfile.parse()
except FitParseError as e:
    print("Error parsing .FIT file")
    sys.exit(1)

# Get all data messages that are of type record
# fitfile.get_messages('record') returns record info!
for record in fitfile.get_messages():
    # Go through all the data entries in this record
    for record_data in record:
        # Print the record's name and value (and units if it has any)
        if record_data.units:
            print(" * %s: %s %s" % (record_data.name, record_data.value, record_data.units))
            file.write(" * %s: %s %s\n" % (record_data.name, record_data.value, record_data.units))
        else:
            print(" * %s: %s" % (record_data.name, record_data.value))
            file.write(" * %s: %s\n" % (record_data.name, record_data.value))
    print("\n")
    file.write("\n")
file.close()
|
[
"marco.deuscher@t-online.de"
] |
marco.deuscher@t-online.de
|
2ec5a1156d06c902673f739affb49f1533f4092d
|
24bc4990e9d0bef6a42a6f86dc783785b10dbd42
|
/build/fuchsia/PRESUBMIT.py
|
f8c7df28fc5fd1397f1569b6b65e371324b3fa65
|
[
"BSD-3-Clause"
] |
permissive
|
nwjs/chromium.src
|
7736ce86a9a0b810449a3b80a4af15de9ef9115d
|
454f26d09b2f6204c096b47f778705eab1e3ba46
|
refs/heads/nw75
| 2023-08-31T08:01:39.796085
| 2023-04-19T17:25:53
| 2023-04-19T17:25:53
| 50,512,158
| 161
| 201
|
BSD-3-Clause
| 2023-05-08T03:19:09
| 2016-01-27T14:17:03
| null |
UTF-8
|
Python
| false
| false
| 1,591
|
py
|
# Copyright 2021 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Presubmit script for Fuchsia.

See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
details on the presubmit API built into depot_tools.
"""

USE_PYTHON3 = True

import os


def CommonChecks(input_api, output_api):
  build_fuchsia_dir = input_api.PresubmitLocalPath()

  def J(*dirs):
    """Returns a path relative to presubmit directory."""
    return input_api.os_path.join(build_fuchsia_dir, *dirs)

  tests = []
  unit_tests = [
      J('binary_sizes_test.py'),
      J('binary_size_differ_test.py'),
      J('device_target_test.py'),
      J('gcs_download_test.py'),
      J('update_images_test.py'),
      J('update_product_bundles_test.py'),
      J('update_sdk_test.py'),
  ]
  # TODO(1309977): enable on Windows when fixed.
  if os.name != 'nt':
    unit_tests.extend([J('fvdl_target_test.py')])
  tests.extend(
      input_api.canned_checks.GetUnitTests(input_api,
                                           output_api,
                                           unit_tests=unit_tests,
                                           run_on_python2=False,
                                           run_on_python3=True,
                                           skip_shebang_check=True))
  return input_api.RunTests(tests)


def CheckChangeOnUpload(input_api, output_api):
  return CommonChecks(input_api, output_api)


def CheckChangeOnCommit(input_api, output_api):
  return CommonChecks(input_api, output_api)
|
[
"roger@nwjs.io"
] |
roger@nwjs.io
|
635734801373905925dc4741bdc7e1cb867d19fa
|
85790a5af1f58b81e6b94b3a2b53035a17b0edf3
|
/10-day/2-列表初识.py
|
ad20ef41614f731643bae4d8605902c0325f6005
|
[] |
no_license
|
liuruitao/Python-liuruitao
|
681734577484f803e2be14260ae4d0fc1505a762
|
1c028c4bfeb4e89cb5120af1eadfa3f7ad34b569
|
refs/heads/master
| 2021-04-18T22:34:26.479834
| 2018-04-18T06:45:35
| 2018-04-18T06:45:35
| 126,920,688
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 128
|
py
|
aa='人生苦短,我用python,life is short'
print(aa.count('p'))
print(aa.count('l'))
print(aa.rfind('s'))
print(aa.upper())
|
[
"2590011467@qq.com"
] |
2590011467@qq.com
|
1f718453299767f575260737c9ff1399680d531c
|
835a3928689f605493697605de4a6b16b06b54a0
|
/venv/Scripts/pip-script.py
|
094899f834171a2f0e3a873bf17b886ee975cb27
|
[] |
no_license
|
MockingLee/WebService-no-db
|
b21926b594d02ba667bb4e9ae055c7a463450c57
|
7734d8e3adeb4c5387b69515f15943484ad5af14
|
refs/heads/master
| 2020-04-10T22:28:04.101235
| 2018-12-11T11:43:05
| 2018-12-11T11:43:05
| 161,325,450
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 407
|
py
|
#!"D:\py workspace\WebService no db\venv\Scripts\python.exe"
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==10.0.1','console_scripts','pip'
__requires__ = 'pip==10.0.1'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('pip==10.0.1', 'console_scripts', 'pip')()
    )
|
[
"38075220+MockingLee@users.noreply.github.com"
] |
38075220+MockingLee@users.noreply.github.com
|
32ccc78930a5568bdf8f8aaf64c455ba4e3ae2b3
|
91a49713075a745824e18217d2dd99f2b9e90906
|
/nucleo/models/usuario_abstract.py
|
c59e11abaf7a69f4954837469fab9a2156f038f9
|
[] |
no_license
|
JMVasquezR/AdminECP
|
23c1426b4af07c223b81fd7d035a71ddafe38316
|
29f909c56d3c34724e10b9b121727fd2ee2d57db
|
refs/heads/master
| 2020-03-29T15:07:52.365804
| 2018-11-09T03:47:28
| 2018-11-09T03:47:28
| 150,045,785
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,721
|
py
|
from abc import abstractmethod

from django.contrib.auth import get_user_model
from django.core.exceptions import ValidationError
from django.db import models, transaction
from django.utils.translation import gettext_lazy as _

from nucleo.models.tipo_documento import TipoDocumento
from nucleo.utilitarios.genericos import CleanCharField


def solo_texto(value):
    if not value.isalpha():
        raise ValidationError(
            _('%(value)s debe contener solo letras'),
            params={'value': value},
        )


GENERO = (
    ('m', 'Masculino'),
    ('f', 'Femenino'),
)


class UsuarioAbstract(models.Model):
    class Meta:
        abstract = True

    cuenta_de_usuario = models.ForeignKey(get_user_model(), blank=True, on_delete=models.CASCADE)
    nombre = CleanCharField(blank=False, null=False, max_length=200)
    apellido_paterno = CleanCharField(blank=False, null=False, max_length=100)
    apellido_materno = CleanCharField(blank=True, null=False, max_length=100)
    fecha_de_nacimiento = models.DateField(blank=False, null=True)
    # on_delete is required from Django 2.0 onward; CASCADE is an assumption here
    tipo_documento = models.ForeignKey(TipoDocumento, null=False, blank=False, on_delete=models.CASCADE)
    numero_de_documento = CleanCharField(max_length=25, unique=True)
    genero = CleanCharField(blank=False, null=False, choices=GENERO, max_length=1)
    telefono_o_celular = CleanCharField(max_length=15)
    correo = models.EmailField(blank=False, null=False, unique=True)

    def __str__(self):
        return '%s, %s' % (self.nombre, self.apellido_paterno)

    @transaction.atomic()
    def save(self, **kwargs):
        if self.pk:
            self.update(**kwargs)
        else:
            self.create(**kwargs)

    @transaction.atomic()
    def create(self, **kwargs):
        extra_data_from_here = {
            'first_name': self.nombre,
            'last_name': '%s %s' % (self.apellido_paterno, self.apellido_materno),
        }
        extra_data_from_son = self.get_extradata()
        usuario = get_user_model().objects.create_user(
            self.correo,
            password=None,
            **({**extra_data_from_here, **extra_data_from_son})
        )
        self.cuenta_de_usuario = usuario
        super(UsuarioAbstract, self).save(**kwargs)
        self.after_user_create()

    @transaction.atomic()
    def update(self, **kwargs):
        self.cuenta_de_usuario.first_name = self.nombre
        self.cuenta_de_usuario.last_name = '%s %s' % (self.apellido_paterno, self.apellido_materno)
        self.cuenta_de_usuario.save()
        super(UsuarioAbstract, self).save(**kwargs)

    @abstractmethod
    def get_extradata(self):
        '''
        :return: (dict) extra kwargs passed through to create_user()
        '''
        pass

    @abstractmethod
    def after_user_create(self):
        pass
|
[
"josemartivr@gmail.com"
] |
josemartivr@gmail.com
|
5209cd9bbdb2b2a405122ea906618c4445928d7d
|
53ef0a2f7a3cebcac87f00e5e9edcebe69351978
|
/playlist_get.py
|
ff1bafb90626a28c91c7a7aa888a5577fa31b6ae
|
[] |
no_license
|
Cha-Euy-Sung/FLASKAPP
|
f68cde42ab9bca31f5af1deb7032b11520298884
|
51e92ea7f7100a646a936d644a07f97caa8dbfd5
|
refs/heads/master
| 2022-12-26T15:09:06.677697
| 2020-09-23T02:21:22
| 2020-09-23T02:21:22
| 297,815,455
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,178
|
py
|
def playlist_get(tag):
    playlist_dic = {
0: ['bhDOqCR61jU', 'leIeBhYYJP0', 'CIvrCQ6alyM', 'lDp-XgVTuqA', 'K-jaMXMYTkk', '-6RqR2PCHQs', 'cLpO0U3p97c', 'tKdJHWo-kLI', 'ecKEnrWIh7Q', 'UAHP1VgljnE', '5f2AFS6PgEs', 'rhrFqwPYwig', 'scXuReCe2Co', '_ThBN0zMM9g', 'B48V42s3dnc'],
1: ['GRIMTo9Kyrk', 'dOQhvvNHAFk', 'hm33x3jVO70'],
2: ['6UxLDsT2VTk', 'L3Zyj2HU9tI', 'kmLmsXqL6BI', '8L-MINLlMIc', '39NWW8Q3mIk', 'CTYd-2pWXpM'],
3: ['F_wfPMaLeXs', 'RaNiHfXCp8A', '36S6frKMXX8', 'vhvU0-TU6Zc', 'eqfFj7OJ1u4'],
4: ['vQHqAfc6Mjk', '75tay629QyQ', '0189i1ripvE'],
5: ['SJAjezgTBZw'],
6: ['53Icr-FluPs', 'BlZ2kw1_xfw', 'v-TBS3qrQhI', '_kcw0ITkOKA', 'xMgicLWUebg', '0N2qS-O-uxo', 'EvW2Rjjyxn0', 'JX_oH8amzOI', 'b7fkx4VZuXg', 'l8nngQsCOc0', 'BHb-B0XEF14', 'hYuDceLh69g', '-d5xM1C1tlo', 'KnsIQFrPWHw', 'LNhB9uZnIBw', 'J2fclJEcGXQ', 'C_q2duwuPFw', '-Rc-6VMKydA', '5i19X5bPHNI', 'jyzo8G6yMAc', 'gHcCZzk4pxk', 'uR2h1hCJ-zg', 'hWCQcQqTLhg', 'Smzcq1E6bDU'],
7: ['gRAjH-FSofA', 'K4tgU1JN6P8', 'fhcY68lCQNU', '9zX6fljTfEw'],
8: ['YD5QQLhhCyQ', '8iaJA_Fs_w8', 'cFc99sHqBTU', 'jF7tuqxE0Gs', 'dthmyYUi7Es', 'z32h2JUiGzA', 'dVUJN3n4ePk', 'v5Xff5p4VcM', 'yMvy2JGGvxw', 'ed0CcFcBBMI', '70HO9Ga6IAY', '75tay629QyQ', 'hAopzWKVdX4', 'EYQJ1jnP-I8', 'css1gXopHfg', 'xkytDbjvi0c', '7Vwc2o6is6o', 'W50T9G-O7gU', 'JRgUXhQqO3w', 'nj9xmK11vPs', '8M80_logcOQ', 'RYK2Rrysqeg'],
9: ['Le85YH7K_mE', '81kzxCNBKM4', '_EfRa_ywkEw', 'TFRPcnw_oqY', '6FyN34oVD-w', '2zHjtH-6Oho', 'KIVP4USaVJk', 'jXaHDtZSR70', 'ZSat_xY9pcU', 'zV8HByBA3_8', '0mCQxNXT8A4', 'krIHTd-7PGY', 'ZvtzcQJaYJQ', '5u84jGWJxj0', 'VxwprP-O0So', '3OJicL9tdOU', 'huWdaVJS8yY', 'pnlZjGlCchA', 'nvhC-sfNqL4', 'ldY9mGCcql4', 'mC57r3dronE', 'VHDvG3bvkW4', 'wcO3ZXm3oSc', 'dOQhvvNHAFk', 'BMlmUnXSDy8', 'MFj42lBdJXU', 'PZynBaYrdpU', 'Gg3EHytbROU', 'JbmxSxxRl9Y', 'abmuyFkCka0', 'y0vcpL6GnyE'],
10: ['IDD5_z3kKCU'],
11: ['8ysfAJZlqWE', 'jukpx3vpRG4', '-K8sOQMbt2Y', 'WOGAwenS9oQ', 'fi7sBXorcC4', 'FMO377gw8F8', 'Ap7IAeNC4Ws', 'RlcShLzSLxc', 'MaQiJRMwJEw', 'm0HS45AygzM', 'Q1hOBbSdIYo', 'ATdl1_lyAbI', 'SR-12BXlt88', 'qVy8nB_hT18', 'Ge2oip3tmQA', 'g4YccawSQ1g', 'EXyeamh3Mbw', 'g_8062N5zQs', 'u4wOdsbgbMs', '91pQYxPqK2Q', 'XHHylBt_J74', '4jVJxPLo4Jg', 'LSpINqHE66g', 'oLR7OAaxKaY', 'QKm99bWL3TQ', 'L4TtasY-QTo', 'l6NxV5fcdkg', 'SmTRaSg2fTQ', 'mazPt-kEoUE', 'V1AbF_fnTq0', 'tr-GNtoT810', 'prflhTQah9w', 'qMQMqDp0WRM', 'bIoZgT3z1d0', '_Xfw7J9Hv7E', 'n9svf5QwfE0', 'sLl2Bx-aerA'],
12: ['aFLVzk-12Zk', '5yIFrqY8JHk', 'GjgZkEBZ-LE', 'OaOeMJnFTPU', '2n38FXtnv7Q', '3L23oeGRWgQ', 'CDgxtFjjI2w', '0KvNmjYU7fY', 'UOHy5Sc0gTs', 'OzSs_Zj3VrM', 'Wtuxke576mo', '8hGp4HRS-i8', 'djQRh-RjILo', 'bhT59fGmjms', 'K8DFpdCkdns', 'VMJS2aIpFtQ', '6gb9aV7nG_g', 'vrybNsmdg2Q', 'jM6WLmFNlYg', 'h1tTGsPp7kY', '3UlDGZAHU4Y', 'QkAyVvtApKs'],
13: ['ta_4hBWwlXU', 'teXv84f56TI', 'XJF5GaZ_GBU', '7_lopC6G_yk', '3EFUbX0RKY4', 'j5ZUbKz7cPQ'],
14: ['gR37CrZT-kY', 'JkXu2yE199g', '5U3gX9Qk4yk', '1pi92Bbi4DQ', 'nddpbzfX8CQ', 'w5cbKQdFY44', 'SiD0WinCeIo', '8AGToyVpTXA'],
15: ['jbK68mHsLfg', '98jt_U0jJ7E', 'g5tA599ejgI', 'vdZ6SW6CXCQ', 'zZN7Q9wg7-E', '92f_BNFNHNw', '3f5NfQV8s7o', 'USdGpAfazcg', 'p735iJXDVTI', '40C_kbazS4k', '0mCQxNXT8A4', 'hqvE4Xwc3ts', 'GqNmImL-dgw', '2J8IUUtmLE0', 'E2k-at-JWHw', 'OJFu4STrVH0', 'jQW3StI6fmQ', 'zPVA2J_8Rp0', 'rdLk2QBzvtc', '4UHcxwgifkA', 'DKNXjcJ9k6A', 'eAngyUowXvg'],
16: ['FhbCZ51HM8E', 'NAKsWDdxo5k', '1gABYdLOQpY', 'kFkp8JrkPsQ', 'nAtTxQTMHAo', 'A3ETK4WJvVo', 'rnidk5D8zXg', 'nt8kA3_5Igc', '32PKh_5GPnI', 'W5dzCop4tzU'],
17: ['hEWfJLMvx1A'],
18: ['TT9JkgZoiuY', 'nx59bl-BB1o', 'ADi66Vt5jK8', 'NEpZoGoQEUk', 'xxFK3evO7p8', 'l2bLKltY8Lo', '4uq8r6Czxl0', 'cj4AX1KLHVg', 'oCqpdihI0_4', 'kp573-v625Y', '1tJhYWaHyhE', 'lVdi1jaTxlc', 'H52OZEXtEnQ', 'pt_rU-ZC_7o', 'ecgp5SZgKHU', 'm32duJnYsvI', 'u-CCHDQVzr4', '34FubFtNC5g', 'tXWVFXkMhcE', 'n8n1YUVF7IA', 'vrybNsmdg2Q', 'mdskEOdONAg', 'LZ_WlubEQc0', 'zPVA2J_8Rp0', 'ZuGf78HgZRE', 'Hc4utd--utk'],
19: ['2Td_w5WXYnI', 'ZRUiNvCMgc4', 'yFLA3AotTHk', 'GptkdoVohr4', 'lcFQqLrgPpA', 'UO3RLLoWGLA'],
20: ['4a5yL9Bc4_o', '26V04Q5_jJQ', 'A76AhZxOV_c', 'PW6Gps7J-DM', 'xQSTdR3Po28', '9ygI-ARlYVI', '79y4Ld2_yt8', 'DZ6A7YAzAZA', '1W0OA_l9LiQ', 'HAaTY6BrIGg', 'A0yntW5zRg8', 'dGbV6VUvsFU'],
21: ['uUwiFtXsh7Q', 'CIu2g17Bubk', 'H52OZEXtEnQ', '1Oi12PBSlgk', 'YlKY4EImAWM', 'wD3l5vONVR0', '3OWljFqxfgY', 'pt13BTQ4smE', '9s5s7QuAYsk', 'nDRz01TrjgY', 'TJ5x_tDgrZE', 'lWY-5oeLTL4', 'K5IdTqP70Us', '3auRYESQkGw'],
22: ['XI3O0qO_XyY', 'AAf8cME5STM', 'iITAUEE9Ja0', 'zRc5o3NXHE4', '9VyK8M5gNt0', 'aFLVzk-12Zk', 'EXRjvh3-NFs', 'izunivHWztc', '4L0lylBK8gE', 'fNrF4v9MfIg', '2n38FXtnv7Q', '0kgii7qQwWQ', 'QbjNWULcdwI', 'wlbDK3PHcLI', 'u4wOdsbgbMs', 'QK3Q6igbvhA', '3sqRS_wqwBQ', 'vYRCBrY5KNk', 'e9FDun2_j7Y', '0aPgAMC_xLw', '5H6woZUYeHQ', 'ZjqInVMdzQY', '9CZaYE0pBNs', 'duCMwFOKtiE', 'hl16iyDGa6E', 'iONBZvzaFso', 'TcUYr4iokTg', '4UHcxwgifkA', 'TfyUUvhb_vs', 'xz4eOlHJS60', 'UmXwB1jiWD8'],
23: ['Emgq96oOKPw']
    }
    return playlist_dic.get(tag)
|
[
"58425500+Cha-Euy-Sung@users.noreply.github.com"
] |
58425500+Cha-Euy-Sung@users.noreply.github.com
|
ff1f784de6f3ad6715c7e5acb5529db50cd855ba
|
1ada91412841739e2a2b9ac4f5e9954d6a787182
|
/src/spider_match_data/spider_by_pro_user.py
|
ed3698e5105a77a051e13aac70b2ea31e5fb0d51
|
[] |
no_license
|
tobycc1990/dota2_predictor
|
b043a44b0b77436e90196385f5edb0b1c4339329
|
2a55111acc5023bb5d5738e23077040a4eab5315
|
refs/heads/master
| 2021-05-12T17:53:43.911216
| 2018-03-26T12:12:03
| 2018-03-26T12:12:03
| 117,056,790
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 824
|
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
CONFIG_PATH = "./../../conf"
# spider results are saved under DATA_PATH
DATA_PATH = "./../../data"
import sys
import os
import time
import datetime
sys.path.append(CONFIG_PATH)
import requests
import spider_config

if __name__ == "__main__":
    pro_players = requests.get("https://api.opendota.com/api/proPlayers", verify=False).json()
    print len(pro_players)
    print pro_players[0]
    for each_player in pro_players:
        account_id = each_player["account_id"]
        name = each_player["name"]
        player_name = each_player["personaname"]
        if name is not None:
            name = name.encode('utf8')
        if player_name is not None:
            player_name = player_name.encode('utf8')
        print "%s\t\t\t%s\t\t\t%s" % (account_id, name, player_name)
|
[
"tobycc1990@gmail.com"
] |
tobycc1990@gmail.com
|
0bccda679a470479ed2e699aaf932957507e734f
|
5c0c0176db0ccf2c24b6b5ed459a8dc144518b13
|
/nni/nas/benchmarks/nasbench101/graph_util.py
|
10805685fec3ff7359ec39dc0ae1c019e67950ae
|
[
"MIT"
] |
permissive
|
petuum/nni
|
ac4f4a1c4d6df71684eeffa127b7c4858fd29e97
|
8134be6269902939232482d63649c06f9864be6d
|
refs/heads/master
| 2023-02-18T11:21:41.078889
| 2021-01-20T03:21:50
| 2021-01-20T03:21:50
| 302,736,456
| 4
| 3
|
MIT
| 2020-11-20T20:21:15
| 2020-10-09T19:34:11
|
Python
|
UTF-8
|
Python
| false
| false
| 3,790
|
py
|
import hashlib

import numpy as np

from .constants import INPUT, LABEL2ID, OUTPUT


def _labeling_from_architecture(architecture, vertices):
    return [INPUT] + [architecture['op{}'.format(i)] for i in range(1, vertices - 1)] + [OUTPUT]


def _adjacency_matrix_from_architecture(architecture, vertices):
    matrix = np.zeros((vertices, vertices), dtype=bool)
    for i in range(1, vertices):
        for k in architecture['input{}'.format(i)]:
            matrix[k, i] = 1
    return matrix


def nasbench_format_to_architecture_repr(adjacency_matrix, labeling):
    """
    Converts a NAS-Bench-101 adjacency matrix and labeling into an architecture dict.
    Imported from NAS-Bench-101 repo.

    Parameters
    ----------
    adjacency_matrix : np.ndarray
        A 2D array of shape NxN, where N is the number of vertices.
        ``matrix[u][v]`` is 1 if there is a direct edge from `u` to `v`,
        otherwise it will be 0.
    labeling : list of str
        A list of str that starts with input and ends with output. The intermediate
        nodes are chosen from candidate operators.

    Returns
    -------
    tuple of (int, dict)
        Converted number of vertices and architecture.
    """
    num_vertices = adjacency_matrix.shape[0]
    assert len(labeling) == num_vertices
    architecture = {}
    for i in range(1, num_vertices - 1):
        architecture['op{}'.format(i)] = labeling[i]
        assert labeling[i] not in [INPUT, OUTPUT]
    for i in range(1, num_vertices):
        architecture['input{}'.format(i)] = [k for k in range(i) if adjacency_matrix[k, i]]
    return num_vertices, architecture


def infer_num_vertices(architecture):
    """
    Infer number of vertices from an architecture dict.

    Parameters
    ----------
    architecture : dict
        Architecture in NNI format.

    Returns
    -------
    int
        Number of vertices.
    """
    op_keys = set([k for k in architecture.keys() if k.startswith('op')])
    intermediate_vertices = len(op_keys)
    assert op_keys == {'op{}'.format(i) for i in range(1, intermediate_vertices + 1)}
    return intermediate_vertices + 2


def hash_module(architecture, vertices):
    """
    Computes a graph-invariant MD5 hash of the matrix and label pair.
    This snippet is modified from code in NAS-Bench-101 repo.

    Parameters
    ----------
    architecture : dict
        Architecture in NNI format.
    vertices : int
        Number of vertices in the graph.

    Returns
    -------
    str
        MD5 hash of the matrix and labeling.
    """
    labeling = _labeling_from_architecture(architecture, vertices)
    labeling = [LABEL2ID[t] for t in labeling]
    matrix = _adjacency_matrix_from_architecture(architecture, vertices)
    in_edges = np.sum(matrix, axis=0).tolist()
    out_edges = np.sum(matrix, axis=1).tolist()
    assert len(in_edges) == len(out_edges) == len(labeling)
    hashes = list(zip(out_edges, in_edges, labeling))
    hashes = [hashlib.md5(str(h).encode('utf-8')).hexdigest() for h in hashes]
    # Computing this up to the diameter is probably sufficient but since the
    # operation is fast, it is okay to repeat more times.
    for _ in range(vertices):
        new_hashes = []
        for v in range(vertices):
            in_neighbors = [hashes[w] for w in range(vertices) if matrix[w, v]]
            out_neighbors = [hashes[w] for w in range(vertices) if matrix[v, w]]
            new_hashes.append(hashlib.md5(
                (''.join(sorted(in_neighbors)) + '|' +
                 ''.join(sorted(out_neighbors)) + '|' +
                 hashes[v]).encode('utf-8')).hexdigest())
        hashes = new_hashes
    fingerprint = hashlib.md5(str(sorted(hashes)).encode('utf-8')).hexdigest()
    return fingerprint
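
# Usage sketch (not part of the original file). The operator names below are
# assumptions; real labels must be keys of LABEL2ID from .constants (in
# NAS-Bench-101 they are e.g. 'conv3x3-bn-relu', 'maxpool3x3').
#
#     arch = {
#         'op1': 'conv3x3-bn-relu', 'input1': [0],
#         'op2': 'maxpool3x3', 'input2': [0, 1],
#         'input3': [2],  # the output vertex
#     }
#     n = infer_num_vertices(arch)  # 2 intermediate ops + input + output = 4
#     print(hash_module(arch, n))   # same digest for isomorphic graphs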
|
[
"noreply@github.com"
] |
petuum.noreply@github.com
|
50f55133949890fdd9c462e8134746052794d2b5
|
75fa4b5c30c0fa849be537b1bcae5933be02d581
|
/bookings/migrations/0005_auto_20201016_1847.py
|
f3769ff961358078366c2eac0b993af816008b39
|
[] |
no_license
|
bangina/yogayo-django-API
|
8da4375738239e77a67bdff554e3cc6a6e05f9ea
|
8074ae29e579cc2a21e96b6d2ff28caa4d85730b
|
refs/heads/master
| 2023-01-05T23:06:51.838909
| 2020-11-05T07:44:17
| 2020-11-05T07:44:17
| 297,549,879
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 370
|
py
|
# Generated by Django 3.1 on 2020-10-16 09:47
from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('bookings', '0004_auto_20201013_1544'),
    ]

    operations = [
        migrations.RenameField(
            model_name='voucher',
            old_name='duration',
            new_name='voucherCode',
        ),
    ]
|
[
"enfzls1212@gmail.com"
] |
enfzls1212@gmail.com
|
fc04cd7ae737948a601e46cb7bd78245f884c77a
|
43aff5263a9f665d45660ea7659e62cd941f5b3a
|
/log_record_stream_handler.py
|
6c197174d0fc5b63ca83cc86598c86e058885abe
|
[] |
no_license
|
natgaertner/postmashworkers
|
1989ba5fbaf08ec83c01543470005b3eb59d390f
|
8f85ba508438e5ea8d33bdf2499b4f1560dd94d9
|
refs/heads/master
| 2021-01-19T18:52:06.544007
| 2014-03-22T11:14:23
| 2014-03-22T11:14:23
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,993
|
py
|
import pickle
import logging
import logging.handlers
import SocketServer
import struct

POSTMASH_LOG_NAME = 'file_postmash_work_log'

file_handler = logging.handlers.RotatingFileHandler('/var/log/postmash/postmashwork.log')
file_handler.setLevel(logging.DEBUG)
file_handler.setFormatter(logging.Formatter(fmt='%(relativeCreated)5d %(name)-15s %(levelname)-8s %(message)s'))
logging.getLogger(POSTMASH_LOG_NAME).addHandler(file_handler)


class LogRecordStreamHandler(SocketServer.StreamRequestHandler):
    """Handler for a streaming logging request.

    This basically logs the record using whatever logging policy is
    configured locally.
    """

    def handle(self):
        """
        Handle multiple requests - each expected to be a 4-byte length,
        followed by the LogRecord in pickle format. Logs the record
        according to whatever policy is configured locally.
        """
        while True:
            chunk = self.connection.recv(4)
            if len(chunk) < 4:
                break
            slen = struct.unpack('>L', chunk)[0]
            chunk = self.connection.recv(slen)
            while len(chunk) < slen:
                chunk = chunk + self.connection.recv(slen - len(chunk))
            obj = self.unPickle(chunk)
            record = logging.makeLogRecord(obj)
            self.handleLogRecord(record)

    def unPickle(self, data):
        return pickle.loads(data)

    def handleLogRecord(self, record):
        # if a name is specified, we use the named logger rather than the one
        # implied by the record.
        if self.server.logname is not None:
            name = self.server.logname
        else:
            name = record.name
        logger = logging.getLogger(name)
        # N.B. EVERY record gets logged. This is because Logger.handle
        # is normally called AFTER logger-level filtering. If you want
        # to do filtering, do it at the client end to save wasting
        # cycles and network bandwidth!
        logger.handle(record)


class LogRecordSocketReceiver(SocketServer.ThreadingTCPServer):
    """
    Simple TCP socket-based logging receiver suitable for testing.
    """
    allow_reuse_address = 1

    def __init__(self, host='localhost',
                 port=logging.handlers.DEFAULT_TCP_LOGGING_PORT,
                 handler=LogRecordStreamHandler):
        SocketServer.ThreadingTCPServer.__init__(self, (host, port), handler)
        self.abort = 0
        self.timeout = 1
        self.logname = POSTMASH_LOG_NAME

    def serve_until_stopped(self):
        import select
        abort = 0
        while not abort:
            rd, wr, ex = select.select([self.socket.fileno()],
                                       [], [],
                                       self.timeout)
            if rd:
                self.handle_request()
            abort = self.abort


def main():
    tcpserver = LogRecordSocketReceiver()
    tcpserver.serve_until_stopped()

if __name__ == '__main__':
    main()
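
# Client-side sketch (not part of the original file): the stdlib
# SocketHandler emits LogRecords in exactly the length-prefixed pickle
# format that handle() reads above.
#
#     import logging
#     import logging.handlers
#     sock = logging.handlers.SocketHandler(
#         'localhost', logging.handlers.DEFAULT_TCP_LOGGING_PORT)
#     logging.getLogger(POSTMASH_LOG_NAME).addHandler(sock)
#     logging.getLogger(POSTMASH_LOG_NAME).warning('hello receiver')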
|
[
"natgaertner@gmail.com"
] |
natgaertner@gmail.com
|
c534d7c22c8c9cfe1da125036b9b6e7f079298dc
|
6cfa6d84722cf560b9dc144ba826d857e884d8fb
|
/redis/ticker/config/role.sample.py
|
793f9031371e1b17ce60702bbe524190ca842034
|
[] |
no_license
|
chaeplin/dash-ticker
|
b5e3702c87bc351ae40863de8cd8a55dddc74330
|
99e1fdc4e105601bdcfa55e80c524ca48294bee8
|
refs/heads/master
| 2021-01-18T23:07:24.246729
| 2017-11-23T09:39:40
| 2017-11-23T09:39:40
| 72,606,879
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 223
|
py
|
HOST_ROLE = 'MASTER'
#HOST_ROLE = 'SLAVE'
# SENTINEL CHECK
# MASTER
MASTER_SETINEL_HOST = '192.168.10.3'
MASTER_REDIS_MASTER = '192.168.10.2'
#SLAVE
SLAVE_SETINEL_HOST = '192.168.10.4'
SLAVE_REDIS_MASTER = '192.168.10.1'
|
[
"chaeplin@gmail.com"
] |
chaeplin@gmail.com
|
9115b0b53bd43147a63189d7de98f1bddd67d459
|
83da6ba69113b46fd5c00c37b1b61ac82e7559ad
|
/python/2η Εργασία/2a. Sympy_VAS.py
|
3f2942973527fe050f652b53f5babf3e941055b4
|
[] |
no_license
|
gapapado/Garyfallia-Papadopoulou
|
7df5682484876bcbaa721151911f0c2bceefe020
|
fdc46414d653bde8a57d2c80edaf87f08fea1af4
|
refs/heads/master
| 2021-07-19T01:33:07.302225
| 2020-09-23T09:07:59
| 2020-09-23T09:07:59
| 215,116,549
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,309
|
py
|
from sympy import var, ZZ
from sympy import intervals, refine_root, N
from sympy import real_roots, CRootOf, Rational, S
from sympy.polys.rootisolation import *
from sympy.polys.subresultants_qq_zz import *

x = var('x')
f = x**3 - 7*x + 7
print('f as an arithmetic expression:', f, '\n')
print('using the private function dup_... = ',
      dup_isolate_real_roots(f.as_poly().all_coeffs(), ZZ), '\n')
intrvs = intervals(f)
print('using the @public function intervals = ', intrvs, '\n')
print('last root refined with Rational = ',
      refine_root(x**3 - 7*x + 7, Rational(2, 1), Rational(3, 2), eps=1e-16), '\n')
print('last root refined with S = ',
      refine_root(x**3 - 7*x + 7, S(3)/2, S(2)/1, eps=1e-16), '\n')
roots = []
for i in range(len(intrvs)):
    a = intrvs[i][0]
    b = a[0]
    c = a[1]
    d = refine_root(f, b, c, eps=1e-25)
    roots.append(N(d[0], 25))
print('ALL roots refined = ', roots, '\n')
print('ANOTHER way to handle roots with the public function real_roots() :',
      real_roots(f), '\n')
print('Evaluate them WHEN you need them (lazy evaluation) :',
      N(CRootOf(x**3 - 7*x + 7, 1), 50), '\n')
print('f as a Polynomial ... : ', f.as_poly(), '\n')
print('... has the attribute real_roots(); i.e. f.as_poly().real_roots() : ',
      f.as_poly().real_roots(), '\n')
|
[
"noreply@github.com"
] |
gapapado.noreply@github.com
|
0e7db03285c31b8c54d7006c5447105a87ba5eee
|
381dca75c45449fb0d2252fdb0c5b40618a2dcfc
|
/src/aprl/agents/monte_carlo.py
|
b7c48c20b5564ea88da4cc9c1a0d32dd26768d87
|
[
"MIT"
] |
permissive
|
Nayan96/adversarial-policies
|
6b3daee49a954602778b76e5f0f3573718e20240
|
799f9e17db0d69d605c4055164e458705a0b64d8
|
refs/heads/master
| 2020-08-09T03:40:41.458682
| 2019-10-09T15:22:30
| 2019-10-09T15:22:30
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,490
|
py
|
"""Monte Carlo receding horizon control."""
from abc import ABC, abstractmethod
from multiprocessing import Pipe, Process
import gym
from stable_baselines.common.vec_env import CloudpickleWrapper
from aprl.common.mujoco import MujocoState, ResettableEnv
class MujocoResettableWrapper(ResettableEnv, gym.Wrapper):
"""Converts a MujocoEnv into a ResettableEnv.
Note all MuJoCo environments are resettable."""
def __init__(self, env):
"""Wraps a MujocoEnv, adding get_state and set_state methods.
:param env: a MujocoEnv. NOTE: it must not be wrapped in a TimeLimit."""
if hasattr(env, '_max_episode_steps'):
raise TypeError('Environment must not have a time limit '
'(try passing in env.unwrapped instead).')
gym.Wrapper.__init__(self, env)
self.sim = env.unwrapped.sim
def get_state(self):
"""Serializes the qpos and qvel state of the MuJoCo emulator."""
return MujocoState.from_mjdata(self.sim.data).flatten()
def set_state(self, x):
"""Restores qpos and qvel, calling forward() to derive other values."""
state = MujocoState.from_flattened(x, self.sim)
state.set_mjdata(self.sim.data)
self.sim.forward() # put mjData in consistent state
def reset(self):
"""See base class."""
return self.env.reset()
def step(self, a):
"""See base class."""
return self.env.step(a)
class MonteCarlo(ABC):
"""Selects an action for a ResettableEnv by random search. Randomly samples
fixed-length sequences of actions. Evaluates each trajectory in the
environment, resetting the state to the original after each trajectory."""
@abstractmethod
def __init__(self, horizon, trajectories):
"""Constructs a MonteCarlo instance for env.
:param horizon: the length of the trajectories to search over.
:param trajectories: the number of trajectories to evaluate."""
self.horizon = horizon
self.trajectories = trajectories
@abstractmethod
def seed(self, seed):
"""Sets a seed for the PRNG for the action sequences.
:param seed (int): a seed."""
pass
@abstractmethod
def best_action(self, state):
"""Returns the best action out of a random search of action sequences.
Generates self.trajectories action sequences, each of length
self.horizon. The cumulative reward of each action sequence is computed,
starting from state. The function returns the first action and the
cumulative reward of the action sequences with the largest cumulative
reward.
:param state: a value returned by env.get_state().
:return (action, reward): the best action found and associated reward."""
pass
class MonteCarloSingle(MonteCarlo):
"""Selects an action for a ResettableEnv by random search.
See base class for details. This implementation is not parallelized."""
def __init__(self, env, horizon, trajectories):
"""See base class."""
super().__init__(horizon, trajectories)
self.env = env
def seed(self, seed):
"""Sets a seed for the PRNG for the action sequences.
:param seed (int): a seed."""
self.env.action_space.np_random.seed(seed)
def best_action(self, state):
"""Returns the best action out of a random search of action sequences.
See base class for details.
Search takes place in a single environment, which is reset to state
before evaluating each action sequence."""
res = []
for _ in range(self.trajectories):
self.env.set_state(state)
us = [self.env.action_space.sample() for _ in range(self.horizon)]
total_rew = 0
for u in us:
_ob, rew, done, _info = self.env.step(u)
total_rew += rew
if done:
break
res.append((us[0], total_rew))
self.env.set_state(state)
best = max(res, key=lambda x: x[1])
return best
def _worker(remote, parent_remote, dynamic_fn_wrapper, horizon, trajectories):
parent_remote.close()
dynamics = dynamic_fn_wrapper.var()
dynamics.reset()
mc = MonteCarloSingle(dynamics, horizon, trajectories)
try:
while True:
cmd, x = remote.recv()
if cmd == 'seed':
mc.seed(x)
elif cmd == 'search':
best_u, best_r = mc.best_action(x)
remote.send((best_u, best_r))
elif cmd == 'close':
remote.close()
break
else:
raise NotImplementedError
except KeyboardInterrupt:
print('MonteCarloParallel worker: got KeyboardInterrupt')
finally:
dynamics.close()
class MonteCarloParallel(MonteCarlo):
"""Like MonteCarlo, but performs the random search in parallel."""
# This implementation is inspired by Baselines SubprocVecEnv.
def __init__(self, env_fns, horizon, trajectories, seed=0):
"""Launch subprocess workers and store configuration parameters.
:param env_fns (list<()->ResettableEnv>): list of thunks.
:param horizon (int): length of trajectories to search over.
:param trajectories (int): minimum number of trajectories to evaluate.
It will be rounded up to the nearest multiple of len(make_env)."""
super().__init__(horizon, trajectories)
nremotes = len(env_fns)
# Integer ceiling of self.trajectories / nworkers
traj_per_worker = (self.trajectories - 1) // nremotes + 1
pipes = [Pipe() for _ in range(nremotes)]
self.remotes, self.work_remotes = zip(*pipes)
worker_cfgs = zip(self.work_remotes, self.remotes, env_fns)
self.ps = []
for i, (work_remote, remote, dynamic_fn) in enumerate(worker_cfgs):
args = (work_remote, remote, CloudpickleWrapper(dynamic_fn),
horizon, traj_per_worker)
process = Process(target=_worker, args=args)
process.daemon = True
# If the main process crashes, we should not cause things to hang
process.start()
self.ps.append(process)
for remote in self.work_remotes:
remote.close()
def seed(self, seed):
"""See base class."""
for i, remote in enumerate(self.remotes):
remote.send(('seed', seed + i))
def best_action(self, state):
"""Returns the best action out of a random search of action sequences."""
for remote in self.remotes:
remote.send(('search', state))
results = [remote.recv() for remote in self.remotes]
best = max(results, key=lambda x: x[1])
return best
def close(self):
"""Shuts down parallel workers."""
for remote in self.remotes:
remote.send(('close', None))
for p in self.ps:
p.join()
def receding_horizon(monte_carlo, env):
"""Receding horizon control
:param monte_carlo(MonteCarlo): a Monte Carlo controller for env or a clone of env.
:param env(ResettableEnv): a resettable environment."""
while True:
state = env.get_state()
a, _seq_rew = monte_carlo.best_action(state)
ob, rew, done, info = env.step(a)
yield a, ob, rew, done, info
if done:
break
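
# Usage sketch (not from the original file; the env id and the horizon and
# trajectory counts are illustrative assumptions):
#
#     env = MujocoResettableWrapper(gym.make('HalfCheetah-v2').unwrapped)
#     mc = MonteCarloSingle(env, horizon=20, trajectories=100)
#     mc.seed(0)
#     env.reset()
#     for a, ob, rew, done, info in receding_horizon(mc, env):
#         print(rew)  # one closed-loop control step per iteration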
|
[
"adam@gleave.me"
] |
adam@gleave.me
|
7f968e8cacfb323f8bc7d760137b3a93d8bdde22
|
2e5d77779a0292ce8ef5bc0bf328515fed174900
|
/collections/where/processing/__init__.py
|
f9a660d0bb5ca0adf53e15a47f7ea2b68cccc953
|
[] |
no_license
|
craig-mndnr-perreault/qgis_landview
|
d0157f9d756c7adf9084ccf5e132780ae8ddeff3
|
674740411acd8910b35d0375bcc637b8777b6349
|
refs/heads/master
| 2021-05-02T10:26:08.227524
| 2018-02-21T18:31:54
| 2018-02-21T18:31:54
| 120,796,081
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,476
|
py
|
# -*- coding: utf-8 -*-
"""
/***************************************************************************
CraigTest
A QGIS plugin
Click on the map
-------------------
begin : 2017-12-08
copyright : (C) 2017 by Craig
email : craig.perreault@state.mn.us
git sha : $Format:%H$
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
This script initializes the plugin, making it known to QGIS.
"""
# noinspection PyPep8Naming
def classFactory(iface):  # pylint: disable=invalid-name
    """Load CraigTest class from file CraigTest.

    :param iface: A QGIS interface instance.
    :type iface: QgsInterface
    """
    from .craigtest import CraigTest
    return CraigTest(iface)
|
[
"noreply@github.com"
] |
craig-mndnr-perreault.noreply@github.com
|
26fbfefad92c3162a6e54c112bcdb54f23ca1301
|
ea0df539c3b0cd14faf7d4f2bc3b955235fbf611
|
/motion_detection_tracking.py
|
e96b11371e5f409dca815df6284bc2cdbb28317c
|
[] |
no_license
|
aksharjo/opencv-python-examples
|
408a3d8e6e79d7e3eb91f546786f3bbc9a9da09c
|
372b8b44eeed5e198365438693b1265cd28c44bd
|
refs/heads/master
| 2020-06-29T11:03:04.612845
| 2019-08-04T16:54:33
| 2019-08-04T16:54:33
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,078
|
py
|
""" detects the motion of object and draw a contour around the moving object and status is printed """
import cv2
cap = cv2.VideoCapture(0)
ret, frame1= cap.read()
ret, frame2= cap.read()
while cap.isOpened():
diff = cv2.absdiff(frame1, frame2)
gray = cv2.cvtColor(diff, cv2.COLOR_BGR2GRAY)
blur = cv2.GaussianBlur(gray, (5, 5), 0)
_, thresh = cv2.threshold(blur, 20, 255, cv2.THRESH_BINARY)
dilated = cv2.dilate(thresh, None, iterations=3)
contours, _ = cv2.findContours(dilated, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
for contour in contours:
(x, y,w,h) = cv2.boundingRect(contour)
if cv2.contourArea(contour) <700:
continue
cv2.rectangle(frame1, (x,y), (x+w , y+h), (0, 255, 0), 2)
cv2.putText(frame1, "Status: {}".format('movement'),(10, 20), cv2.FONT_HERSHEY_COMPLEX,
1, (0, 0, 255), 3)
#cv2.drawContour( frame1, contours, -1, (0, 255, 0), 2)
if cv2.waitKey(40)== 27:
break
cv2.destroyAllWindows()
cap.release()
|
[
"noreply@github.com"
] |
aksharjo.noreply@github.com
|
2a4dd73d8fa0a792338f16d88cc04cd06a4656f6
|
b88a1deff2507cb003bbc7795138db6215f2e519
|
/py_project/signin.py
|
82905801a6572427eff874087d8541f6387388f6
|
[] |
no_license
|
jonisiaco/python-flask-crud
|
d984d1f5a703224844048719bd7f24f7423c37f3
|
c396376d3b7bcdf6b0d4f612c3773ee98e59ff4a
|
refs/heads/master
| 2020-08-03T21:59:09.631041
| 2019-09-30T21:45:58
| 2019-09-30T21:45:58
| 211,897,813
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,000
|
py
|
from py_project import app, db
from forms import LoginForm
from flask import render_template, redirect, url_for, flash, session


@app.route('/signin', methods=["GET", "POST"])
def login():
    form = LoginForm()
    if form.validate_on_submit():
        user_name = form.username.data
        password = form.password.data
        if authUser(user_name, password):
            session['username'] = user_name
            return redirect(url_for('news_index'))
        # flash('custom message')
    return render_template('signin.html', form=form)


@app.route('/logout')
def logout():
    if 'username' in session:
        session.pop('username', None)
    return redirect(url_for('index'))


def authUser(user_name, password):
    token = 'secret'
    mysql = db._open(prepared=True, dictionary=False)
    q1 = """SELECT id, name, AES_DECRYPT(pass, %s) pass FROM user WHERE name = %s"""
    mysql.execute(q1, (token, user_name))
    data = mysql.fetchone()
    db._close()
    if data is None:
        return False
    if str(data[2]) != str(password):
        return False
    return data
|
[
"jonisiaco@gmail.com"
] |
jonisiaco@gmail.com
|
a7bbc60feece73e88f0a57f6209db2d14d87241c
|
bc441bb06b8948288f110af63feda4e798f30225
|
/user_service_sdk/model/cmdb_extend/subsystem_dependency_pb2.pyi
|
56c626d516302fae9e256521a00d0df10a2ecd97
|
[
"Apache-2.0"
] |
permissive
|
easyopsapis/easyops-api-python
|
23204f8846a332c30f5f3ff627bf220940137b6b
|
adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0
|
refs/heads/master
| 2020-06-26T23:38:27.308803
| 2020-06-16T07:25:41
| 2020-06-16T07:25:41
| 199,773,131
| 5
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,344
|
pyi
|
# @generated by generate_proto_mypy_stubs.py. Do not edit!
import sys
from google.protobuf.descriptor import (
    Descriptor as google___protobuf___descriptor___Descriptor,
)
from google.protobuf.internal.containers import (
    RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer,
)
from google.protobuf.message import (
    Message as google___protobuf___message___Message,
)
from typing import (
    Iterable as typing___Iterable,
    Optional as typing___Optional,
    Text as typing___Text,
    Union as typing___Union,
)
from typing_extensions import (
    Literal as typing_extensions___Literal,
)
from user_service_sdk.model.cmdb_extend.app_dependency_pb2 import (
    AppDependency as user_service_sdk___model___cmdb_extend___app_dependency_pb2___AppDependency,
)

builtin___bool = bool
builtin___bytes = bytes
builtin___float = float
builtin___int = int
if sys.version_info < (3,):
    builtin___buffer = buffer
    builtin___unicode = unicode


class SubsystemDependency(google___protobuf___message___Message):
    DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...

    class ConnectSubsystems(google___protobuf___message___Message):
        DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
        abbreviation = ...  # type: typing___Text
        object_id = ...  # type: typing___Text
        instance_id = ...  # type: typing___Text
        name = ...  # type: typing___Text

        def __init__(self,
            *,
            abbreviation : typing___Optional[typing___Text] = None,
            object_id : typing___Optional[typing___Text] = None,
            instance_id : typing___Optional[typing___Text] = None,
            name : typing___Optional[typing___Text] = None,
            ) -> None: ...

        if sys.version_info >= (3,):
            @classmethod
            def FromString(cls, s: builtin___bytes) -> SubsystemDependency.ConnectSubsystems: ...
        else:
            @classmethod
            def FromString(cls, s: typing___Union[builtin___bytes, builtin___buffer, builtin___unicode]) -> SubsystemDependency.ConnectSubsystems: ...

        def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
        def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
        def ClearField(self, field_name: typing_extensions___Literal[u"abbreviation",b"abbreviation",u"instance_id",b"instance_id",u"name",b"name",u"object_id",b"object_id"]) -> None: ...

    abbreviation = ...  # type: typing___Text
    name = ...  # type: typing___Text
    object_id = ...  # type: typing___Text
    instance_id = ...  # type: typing___Text

    @property
    def components(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[user_service_sdk___model___cmdb_extend___app_dependency_pb2___AppDependency]: ...

    @property
    def connect_subsystems(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[SubsystemDependency.ConnectSubsystems]: ...

    def __init__(self,
        *,
        abbreviation : typing___Optional[typing___Text] = None,
        name : typing___Optional[typing___Text] = None,
        object_id : typing___Optional[typing___Text] = None,
        instance_id : typing___Optional[typing___Text] = None,
        components : typing___Optional[typing___Iterable[user_service_sdk___model___cmdb_extend___app_dependency_pb2___AppDependency]] = None,
        connect_subsystems : typing___Optional[typing___Iterable[SubsystemDependency.ConnectSubsystems]] = None,
        ) -> None: ...

    if sys.version_info >= (3,):
        @classmethod
        def FromString(cls, s: builtin___bytes) -> SubsystemDependency: ...
    else:
        @classmethod
        def FromString(cls, s: typing___Union[builtin___bytes, builtin___buffer, builtin___unicode]) -> SubsystemDependency: ...

    def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
    def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
    def ClearField(self, field_name: typing_extensions___Literal[u"abbreviation",b"abbreviation",u"components",b"components",u"connect_subsystems",b"connect_subsystems",u"instance_id",b"instance_id",u"name",b"name",u"object_id",b"object_id"]) -> None: ...
|
[
"service@easyops.cn"
] |
service@easyops.cn
|
fccee85fefc74d76c93148ccbcea8f7d6cde1535
|
4a3f0fac69ebfd7b6743a6abb6bc14e109f9f726
|
/searchableencryption/fhipe/ipe.py
|
aa439a5c6b226d2d64d61e8f4ed037c4492e072a
|
[
"ISC"
] |
permissive
|
duykienvp/searchableencryption
|
ff87f171fd8ac7269d463c123633c09452c746b6
|
7e6b4754bf3bd8efa1177c70286c6c77322af4fe
|
refs/heads/master
| 2020-04-10T03:43:52.989150
| 2018-12-07T07:21:05
| 2018-12-07T07:21:05
| 160,778,464
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,691
|
py
|
"""
Copyright (c) 2016, Kevin Lewi
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THIS SOFTWARE.
"""
"""
Implementation of function-hiding inner product encryption (FHIPE).
"""
import os # noqa: E402
import math # noqa: E402
from charm.toolbox.pairinggroup import PairingGroup, ZR, G1, G2, pair # noqa: E402
from subprocess import Popen, PIPE # noqa: E402
def setup(n, group_name='MNT159', simulated=False):
"""
Performs the setup algorithm for IPE.
This function samples the generators from the group, specified optionally by
"group_name". This variable must be one of a few set of strings specified by
Charm.
Then, it invokes the C program ./gen_matrices, which samples random matrices
and outputs them back to this function. The dimension n is supplied, and the
prime is chosen as the order of the group. Additionally, /dev/urandom is
sampled for a random seed which is passed to ./gen_matrices.
Finally, the function constructs the matrices that form the secret key and
publishes the public parameters and secret key (pp, sk).
"""
group = PairingGroup(group_name)
g1 = group.random(G1)
g2 = group.random(G2)
assert g1.initPP(), "ERROR: Failed to init pre-computation table for g1."
assert g2.initPP(), "ERROR: Failed to init pre-computation table for g2."
proc = Popen(
[
os.path.dirname(os.path.realpath(__file__)) + '/gen_matrices',
str(n),
str(group.order()),
"1" if simulated else "0",
""
],
stdout=PIPE
)
detB_str = proc.stdout.readline().decode()
B_str = proc.stdout.readline().decode()
Bstar_str = proc.stdout.readline().decode()
detB = int(detB_str)
B = parse_matrix(B_str, group)
Bstar = parse_matrix(Bstar_str, group)
pp = ()
sk = (detB, B, Bstar, group, g1, g2)
return (pp, sk)
def keygen(sk, x):
"""
Performs the keygen algorithm for IPE.
"""
(detB, B, Bstar, group, g1, g2) = sk
n = len(x)
alpha = group.random(ZR)
k1 = (g1 ** alpha) ** detB
k2 = [0] * n
for j in range(n):
sum = 0
for i in range(n):
sum += x[i] * B[i][j]
k2[j] = alpha * sum
for i in range(n):
k2[i] = g1 ** k2[i]
return (k1, k2)
def encrypt(sk, x):
"""
Performs the encrypt algorithm for IPE.
"""
(detB, B, Bstar, group, g1, g2) = sk
n = len(x)
beta = group.random(ZR)
c1 = g2 ** beta
c2 = [0] * n
for j in range(n):
sum = 0
for i in range(n):
sum += x[i] * Bstar[i][j]
c2[j] = beta * sum
for i in range(n):
c2[i] = g2 ** c2[i]
return (c1, c2)
def decrypt(pp, skx, cty, max_innerprod=100):
"""
Performs the decrypt algorithm for IPE on a secret key skx and ciphertext cty.
The output is the inner product <x,y>, so long as it is in the range
[0,max_innerprod].
"""
(k1, k2) = skx
(c1, c2) = cty
d1 = pair(k1, c1)
d2 = innerprod_pair(k2, c2)
# check for unit element
# gt = group.random(GT)
# if(gt == gt * d2):
# print("Unit")
# return True
# return False
return solve_dlog_bsgs(d1, d2, max_innerprod + 1)
def parse_matrix(matrix_str, group):
"""
Parses the matrix as output from the call to ./gen_matrices.
The first number is the number of rows, and the second number is the number
of columns. Then, the entries of the matrix follow. These are stored and
returned as a matrix.
This function also needs the pairing group description to be passed in as a
parameter.
"""
L = matrix_str.split(" ")
rows, cols = int(L[0]), int(L[1])
A = [[0] * cols for _ in range(rows)]
L = L[3:]
assert rows == cols
assert len(L) == rows * cols
for i in range(len(L)):
A[int(i / rows)][i % rows] = group.init(ZR, int(L[i]))
return A
def innerprod_pair(x, y):
"""
Computes the inner product of two vectors x and y "in the exponent", using
pairings.
"""
assert len(x) == len(y)
L = map(lambda i: pair(x[i], y[i]), range(len(x)))
ret = 1
for i in L:
ret *= i
return ret
def solve_dlog_naive(g, h, dlog_max):
"""
Naively attempts to solve for the discrete log x, where g^x = h, via trial and
error. Assumes that x is at most dlog_max.
"""
for j in range(dlog_max):
if g ** j == h:
return j
return -1
def solve_dlog_bsgs(g, h, dlog_max):
"""
Attempts to solve for the discrete log x, where g^x = h, using the Baby-Step
Giant-Step algorithm. Assumes that x is at most dlog_max.
"""
alpha = int(math.ceil(math.sqrt(dlog_max))) + 1
g_inv = g ** -1
tb = {}
for i in range(alpha + 1):
tb[(g ** (i * alpha)).__str__()] = i
for j in range(alpha + 1):
s = (h * (g_inv ** j)).__str__()
if s in tb:
i = tb[s]
return i * alpha + j
return -1
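
# Worked sketch of the search above (the numbers are illustrative): with
# dlog_max = 100, alpha = ceil(sqrt(100)) + 1 = 11, so any exponent x <= 100
# decomposes as x = i*11 + j with small i and j. The first loop tables the
# "giant steps" g^(i*11); the second strips "baby steps" g^-j off h = g^x
# until it hits a tabled giant step, then returns i*11 + j. This costs
# roughly 2*sqrt(dlog_max) group operations, versus up to dlog_max
# exponentiations for solve_dlog_naive.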
|
[
"duykienvp@yahoo.com.vn"
] |
duykienvp@yahoo.com.vn
|
111903ce232a14277a6b8e90da627d4f7fbbecef
|
0fa6389eafa73ad01ae5f28fc0f807360c960b7c
|
/env/bin/easy_install
|
955d233430b719fe14f08855dd78fba7e65bfc6c
|
[] |
no_license
|
littleslayer/bots
|
46c5770e2800d4989547248bb98650b9922f826a
|
f230da15342b685524da54c88a69b98c6e90c0d2
|
refs/heads/master
| 2023-08-11T06:02:00.269799
| 2021-09-21T16:17:23
| 2021-09-21T16:17:23
| 408,888,045
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 251
|
#!/home/georgy/vsd/bots/env/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
|
[
"giorgiy.baranov.2006@mail.ru"
] |
giorgiy.baranov.2006@mail.ru
|
|
2beba6ac551c8929f20422573779f239b13add27
|
519e0431a0d59db5cac646e5f21dd6c207e8c33c
|
/Lista-Simplesmente-Encadeada-Em-Python-master/Main.py
|
f014366c168affbcfe7519822bf926a3a3eb7241
|
[] |
no_license
|
vinny0965/phyton
|
1c79cc394b72202e53117502c3c58054a68541b5
|
5ba3fe7875ba30d052e357426c8869d3d9a6dbf5
|
refs/heads/master
| 2020-07-28T08:54:50.828084
| 2019-10-22T23:29:45
| 2019-10-22T23:29:45
| 209,371,958
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 844
|
py
|
from Lista import Lista


# Helper that prints a menu on the console
def menu():
    print('''
    [1] - Insert at the beginning
    [2] - Insert at the end
    [3] - Remove from the beginning
    [4] - Remove from the end
    [5] - List elements
    [0] - Quit
    ''')


# Instantiate the list
lista = Lista()
dado = 0
opcao = -1
while opcao != 0:
    # Show the menu
    menu()
    dado = int(input('Choose an option: '))
    # Check the chosen option
    if dado == 1:
        dado = input('Enter a value: ')
        lista.inserir_Inicio(dado)
    elif dado == 2:
        dado = input('Enter a value: ')
        lista.inserir_Fim(dado)
    elif dado == 3:
        lista.remover_Primeiro()
    elif dado == 4:
        lista.remover_Ultimo()
    elif dado == 5:
        lista.listar()
    elif dado == 0:
        break
    else:
        print('Please choose an option from the menu!')
|
[
"carvalho.vinicius163@gmail.com"
] |
carvalho.vinicius163@gmail.com
|
496afa8406a6ad5f9584ceddba65ba6374ac3cfb
|
cc44edfa1edbedea3ad044805be7548e0ccba70d
|
/0x0C-python-almost_a_circle/models/square.py
|
dc5e2428176dcbb01162c0529f50870f361569e2
|
[] |
no_license
|
set808/holbertonschool-higher_level_programming
|
421f0da1f91cd56eb2daa4e07a51b4a505d53edc
|
eb276a4e68e5cc43498459eec78fc05f72e2cd48
|
refs/heads/master
| 2020-03-09T13:07:43.824914
| 2018-09-08T00:26:46
| 2018-09-08T00:26:46
| 128,802,718
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,833
|
py
|
#!/usr/bin/python3
'''
Defines the class Square
'''
from models.rectangle import Rectangle
class Square(Rectangle):
'''Square class that inherits from Rectangle
'''
def __init__(self, size, x=0, y=0, id=None):
'''Initializes the Square object
Args:
size (int): size of the square
x (int): position on the x axis
y (int): position on the y axis
id (int): the id of the object
'''
super().__init__(size, size, x, y, id)
def __str__(self):
'''Returns a string representation of a Square object
'''
return ('[Square] ({:d}) {:d}/{:d} - {:d}'.
format(self.id, self.x, self.y, self.width))
@property
def size(self):
'''Returns the size of the Square object
Return:
returns the size
'''
return self.width
@size.setter
def size(self, value):
'''Sets the size of the Square object
Args:
value (int): the new size value
'''
self.width = value
self.height = value
def update(self, *args, **kwargs):
'''Updates the Square instance
Args:
list of new values to update Square values
'''
if args:
keys = ['id', 'size', 'x', 'y']
for key, value in zip(keys, args):
setattr(self, key, value)
return
        else:
            keys = ['id', 'size', 'x', 'y']
            for key, value in kwargs.items():
                if key in keys:
                    setattr(self, key, value)
def to_dictionary(self):
'''Returns a dictionary representation of a Square
Return:
returns the dictionary representation
'''
return {'id': self.id, 'size': self.size, 'x': self.x, 'y': self.y}
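
# Illustrative usage (assumes Rectangle/Base assign sequential ids and
# validate attributes as elsewhere in this project):
#   s = Square(3, 1, 2)
#   print(s)                   # e.g. [Square] (1) 1/2 - 3
#   s.update(89, 5)            # positional: id=89, size=5
#   s.update(x=4, y=6)         # keyword update
#   print(s.to_dictionary())   # {'id': 89, 'size': 5, 'x': 4, 'y': 6}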
|
[
"spencertaylor808@gmail.com"
] |
spencertaylor808@gmail.com
|
a2aad80e1bd8dcac5b76b43c7c1b79f9d346ecb5
|
b501a5eae1018c1c26caa96793c6ee17865ebb2d
|
/data_persistence_and_exchange/sqlite3/sqlite3_iterdump.py
|
b3e3cb6c5fd4e677c2df637f142fcfd822cb06dd
|
[] |
no_license
|
jincurry/standard_Library_Learn
|
12b02f9e86d31ca574bb6863aefc95d63cc558fc
|
6c7197f12747456e0f1f3efd09667682a2d1a567
|
refs/heads/master
| 2022-10-26T07:28:36.545847
| 2018-05-04T12:54:50
| 2018-05-04T12:54:50
| 125,447,397
| 0
| 1
| null | 2022-10-02T17:21:50
| 2018-03-16T01:32:50
|
Python
|
UTF-8
|
Python
| false
| false
| 866
|
py
|
import sqlite3
schema_filename = 'todo_schema.sql'
with sqlite3.connect(':memory:') as conn:
conn.row_factory = sqlite3.Row
print('Creating schema')
with open(schema_filename, 'rt') as f:
schema = f.read()
conn.executescript(schema)
print('Inserting initial data')
conn.execute("""
insert INTO project (name, description, deadline)
    VALUES ('pymotw', 'Python Module of the Week', '2018-12-01')
""")
data = [
('write about select', 'done', '2010-10-03', 'pymotw'),
('write about random', 'waiting', '2010-11-10', 'pymotw'),
('write about sqlite3', 'active', '2010-10-17', 'pymotw'),
]
conn.executemany("""
insert INTO task (details, status, deadline, project)
VALUES (?, ?, ?, ?)
""", data)
print('Dumping:')
for text in conn.iterdump():
print(text)
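
# Note: iterdump() yields complete SQL statements, beginning with
# 'BEGIN TRANSACTION;' and ending with 'COMMIT;', so the printed text can be
# replayed via executescript() to recreate this in-memory database elsewhere.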
|
[
"jintao422516@gmail.com"
] |
jintao422516@gmail.com
|
044aeb311eb74d6a14e920c6ab0dca891776c52f
|
9d07eb049761b7c4b20886eb508b374b4fc1d0ae
|
/app.py
|
2ce9a9127448d1ad1abbf77fd5f7b3e9d8585f4a
|
[] |
no_license
|
constantinadrian/flask-task-manager-project
|
3a38a95ec07b549f34bb92e91c7cd69f2c7e1707
|
16fba5d01848eca067a5591899f0c7b2037b7e03
|
refs/heads/master
| 2023-04-09T21:58:26.495807
| 2021-04-17T23:08:23
| 2021-04-17T23:08:23
| 358,011,741
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,620
|
py
|
import os
from flask import (
Flask, flash, render_template,
redirect, request, session, url_for)
from flask_pymongo import PyMongo
from bson.objectid import ObjectId
from werkzeug.security import generate_password_hash, check_password_hash
if os.path.exists("env.py"):
import env
app = Flask(__name__)
app.config["MONGO_DBNAME"] = os.environ.get("MONGO_DBNAME")
app.config["MONGO_URI"] = os.environ.get("MONGO_URI")
app.secret_key = os.environ.get("SECRET_KEY")
mongo = PyMongo(app)
@app.route("/")
@app.route("/get_tasks")
def get_tasks():
tasks = list(mongo.db.tasks.find())
return render_template("tasks.html", tasks=tasks)
@app.route("/search", methods=["GET", "POST"])
def search():
query = request.form.get("query")
tasks = list(mongo.db.tasks.find({"$text": {"$search": query}}))
return render_template("tasks.html", tasks=tasks)
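
# Note: the $text query above requires a text index on the tasks collection.
# A one-off setup call could look like this (the indexed field names are
# illustrative, not taken from this project):
#   mongo.db.tasks.create_index(
#       [("task_name", "text"), ("task_description", "text")])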
@app.route("/register", methods=["GET", "POST"])
def register():
if request.method == "POST":
# check if username already exists
existing_user = mongo.db.users.find_one(
{"username": request.form.get("username").lower()})
if existing_user:
flash("Username already exists")
return redirect(url_for("register"))
register = {
"username": request.form.get("username").lower(),
"password": generate_password_hash(request.form.get("password")),
}
mongo.db.users.insert_one(register)
        # put the new user into "session" cookie
session["user"] = request.form.get("username")
flash("Registration Successful!")
return redirect(url_for("profile", username=session["user"]))
return render_template("register.html")
@app.route("/login", methods=["GET", "POST"])
def login():
if request.method == "POST":
# check if username already exists
existing_user = mongo.db.users.find_one(
{"username": request.form.get("username").lower()})
if existing_user:
# ensure hashed password matches user input
if check_password_hash(
existing_user["password"], request.form.get("password")):
session["user"] = request.form.get("username").lower()
flash("Welcome, {}".format(request.form.get("username")))
return redirect(url_for(
"profile", username=session["user"]))
else:
flash("Incorrect Username and/or Password")
return redirect(url_for("login"))
else:
flash("Incorrect Username and/or Password")
return redirect(url_for("login"))
return render_template("login.html")
@app.route("/profile/<username>", methods=["GET", "POST"])
def profile(username):
# grab the session user's username from db
username = mongo.db.users.find_one(
{"username": session["user"]})["username"]
if session["user"]:
return render_template("profile.html", username=username)
return redirect(url_for("login"))
@app.route("/logout")
def logout():
# remove user from session cookies
flash("You have been logged out")
session.pop("user")
return redirect(url_for("login"))
@app.route("/add_task", methods=["GET", "POST"])
def add_task():
if request.method == "POST":
is_urgent = "on" if request.form.get("is_urgent") else "off"
task = {
"category_name": request.form.get("category_name"),
"task_name": request.form.get("task_name"),
"task_description": request.form.get("task_description"),
"due_date": request.form.get("due_date"),
"is_urgent": is_urgent,
"created_by": session["user"]
}
mongo.db.tasks.insert_one(task)
flash("Task Successfully Added")
return redirect(url_for("get_tasks"))
categories = mongo.db.categories.find().sort("category_name", 1)
return render_template("add_task.html", categories=categories)
@app.route("/edit_task/<task_id>", methods=["GET", "POST"])
def edit_task(task_id):
if request.method == "POST":
is_urgent = "on" if request.form.get("is_urgent") else "off"
update_task = {
"category_name": request.form.get("category_name"),
"task_name": request.form.get("task_name"),
"task_description": request.form.get("task_description"),
"due_date": request.form.get("due_date"),
"is_urgent": is_urgent,
"created_by": session["user"]
}
mongo.db.tasks.update({"_id": ObjectId(task_id)}, update_task)
flash("Task Successfully Updated")
task = mongo.db.tasks.find_one({"_id": ObjectId(task_id)})
categories = mongo.db.categories.find().sort("category_name", 1)
return render_template("edit_task.html", task=task, categories=categories)
@app.route("/delete_task/<task_id>")
def delete_task(task_id):
mongo.db.tasks.remove({"_id": ObjectId(task_id)})
flash("Task Successfully Deleted")
return redirect(url_for("get_tasks"))
@app.route("/get_categories")
def get_categories():
categories = mongo.db.categories.find().sort("category_name", 1)
return render_template("categories.html", categories=categories)
@app.route("/add_category", methods=["GET", "POST"])
def add_category():
if request.method == "POST":
category = {
"category_name": request.form.get("category_name")
}
mongo.db.categories.insert_one(category)
flash("New Category Added")
return redirect(url_for("get_categories"))
return render_template("add_category.html")
@app.route("/edit_category/<category_id>", methods=["GET", "POST"])
def edit_category(category_id):
if request.method == "POST":
update_category = {
"category_name": request.form.get("category_name")
}
mongo.db.categories.update({"_id": ObjectId(category_id)},
update_category)
flash("Category Successfully Updated")
return redirect(url_for("get_categories"))
category = mongo.db.categories.find_one({"_id": ObjectId(category_id)})
return render_template("edit_category.html", category=category)
@app.route("/delete_category/<category_id>")
def delete_category(category_id):
mongo.db.categories.remove({"_id": ObjectId(category_id)})
flash("Category Successfully Deleted")
return redirect(url_for("get_categories"))
if __name__ == "__main__":
app.run(host=os.environ.get("IP"),
port=int(os.environ.get("PORT")),
debug=True)
|
[
"adrianc.chiriac@yahoo.com"
] |
adrianc.chiriac@yahoo.com
|
12897c4dac1c5c46fdeab4214fcf8afd00824753
|
8aa89201a90616ad85c81c5ddf58340c6e15eb11
|
/koalixcrm/crm/migrations/0065_productattributeassociation.py
|
a28e30086cdc49d60ff4fef8a8221ceca771970d
|
[
"BSD-3-Clause"
] |
permissive
|
bahron83/koalixcrm
|
98d2e26e004f1934a1e51ad102daa927696dedac
|
6c9240f954a25d9177e8a44b26e261aa7f838c71
|
refs/heads/master
| 2021-04-27T07:38:43.429100
| 2019-02-10T18:19:56
| 2019-02-10T18:19:56
| 122,636,738
| 0
| 0
| null | 2018-02-23T15:13:26
| 2018-02-23T15:13:26
| null |
UTF-8
|
Python
| false
| false
| 1,156
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-04-09 18:55
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('crm', '0064_auto_20180408_1929'),
]
operations = [
migrations.CreateModel(
name='ProductAttributeAssociation',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('value', models.CharField(blank=True, max_length=255, null=True, verbose_name='Value')),
('attribute', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='related_attribute', to='crm.Attribute')),
('product', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='related_product', to='crm.Product')),
],
options={
'verbose_name': 'Specific field',
'verbose_name_plural': 'Specific fields',
},
),
]
|
[
"bahron83@hotmail.com"
] |
bahron83@hotmail.com
|
6dc28070a89def01f58833724ccccf168c6e0c38
|
6cb9fb197ad9f97939b126f5996c099e5be1731e
|
/django_pds/core/rest/views.py
|
00e8f2d8cf00b5e7a91429f75a410026cde90bfd
|
[
"MIT"
] |
permissive
|
knroy/django-pds
|
16c396f55177df6c727a4968af62908f9142fe91
|
75f192eb38acf1ae98cba7034e8768750848e639
|
refs/heads/master
| 2022-12-04T14:19:39.408053
| 2020-08-20T15:30:16
| 2020-08-20T15:30:16
| 259,966,535
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 217
|
py
|
from rest_framework.views import APIView
from .exceptions import method_not_allowed
class BaseAPIView(APIView):
def http_method_not_allowed(self, request, *args, **kwargs):
return method_not_allowed()
|
[
"rax.komol@gmail.com"
] |
rax.komol@gmail.com
|
461ae92691a4dc563eec6cbbe87cbff3f32fc321
|
5b68153a670cb834f03bc4b9c14961b02ca1efac
|
/BDA - Assignments/A4/BDA502_A4_P4.py
|
9f287a04358f9d4b09efd4fddac58d74030979a3
|
[] |
no_license
|
cemkilicli/Uda-Machine_Learning
|
4ce6004ea07bd70c4c02c2c2257ddb8a1fa974fd
|
d8de4ce5fc558f81e5a3d6a0fe69f36f18db50b3
|
refs/heads/master
| 2021-01-17T08:45:13.480547
| 2017-04-01T18:01:15
| 2017-04-01T18:01:15
| 83,961,112
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 591
|
py
|
import numpy as np

X = np.random.randint(6, size=(6, 10))  # assume that this is the survey outputs of 10 questions from 6 individuals
y = np.array([0, 1, 2, 3, 4, 1])  # assume that these are the political parties for 6 participants
print(X)
print(X[2:3])
print(y)

from sklearn.naive_bayes import MultinomialNB

clf = MultinomialNB(alpha=1.0, class_prior=None, fit_prior=True)
clf.fit(X, y)
print(clf.predict(X[2:3]))
print(clf.predict_proba(X[2:3]))

# scikit-learn expects a 2-D array: one row per sample
A = np.array([[1, 2, 3, 2, 3, 1, 3, 4, 3, 4]])
pred2 = clf.predict(A)
print(pred2)
print(clf.predict_proba(A))
|
[
"kiliclic@mef.edu.tr"
] |
kiliclic@mef.edu.tr
|
bd32ba3fd62a9486d5b5dbaac375ebf63d3f6370
|
b4871c8dd8ef257d604ac221ecff0c71e14f06cb
|
/pilot_curriculum/django_apps/generic_nasa/nasa/migrations/0001_initial.py
|
99716aced67f5162e56b39feda4fbd930ab74f6c
|
[] |
no_license
|
manzur1990/tracecamp_curriculum
|
b55605b0bbe4b5e3b333ae3fb105141e53f42e39
|
e9c8ee9a3c151a5cd57137f6575d1342a7de83fb
|
refs/heads/master
| 2022-04-13T15:24:54.305552
| 2019-08-12T13:58:40
| 2019-08-12T13:58:40
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 628
|
py
|
# Generated by Django 2.1.4 on 2018-12-21 15:00
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='NasaComments',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date', models.DateField()),
('comment', models.TextField()),
('rating', models.IntegerField()),
('image_url', models.URLField()),
],
),
]
|
[
"bekkblando@gmail.com"
] |
bekkblando@gmail.com
|
efa12a796f713cb561780c00a44c90b86ff8f070
|
76192190ae7e3407c5bdd177db0f9929b6db83f8
|
/xmltodict3/xml_to_dict.py
|
a1d82a1fa3f8f84b41ed65f14913724cc9974bed
|
[
"MIT"
] |
permissive
|
dart-neitro/xmltodict3
|
66780b37576f5535f1450a0e5e586fa0702e40da
|
9424983509b1fa5028627719963f06317c3fd077
|
refs/heads/master
| 2022-12-21T07:08:31.860976
| 2020-09-13T12:51:06
| 2020-09-13T12:51:06
| 254,725,209
| 12
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,173
|
py
|
"""
Classes for transformation from XML files, string with XML and
xml.etree.ElementTree objects to python dictionary
"""
from collections import defaultdict
import re
import xml.etree.ElementTree as ElementTree
from typing import Union, Dict, List
from xmltodict3.transformers import PullTransformers
class XmlToDict:
"""Class to work with xml.etree.ElementTree objects"""
_pull_transformers = None
def __init__(self, node: ElementTree, ignore_namespace: bool = False):
"""
Init instance
:param node: XML object
:param ignore_namespace: removing namespace from tags
"""
self.node = node
self.ignore_namespace = ignore_namespace
self.child_nodes = list()
def get_dict(self) -> Dict:
"""
Extract data from xml.etree.ElementTree object
which has been passed during initialization of an instance
:return: extracted data as a python dict
"""
tag = self.get_tag()
self.child_nodes = self._get_child_nodes()
if self._is_single_node():
value = self._get_dict_from_single_node()
else:
value = self._get_dict_from_node_with_children()
return {tag: value}
def get_tag(self) -> str:
"""
Get a tag of the current node.
If ignore_namespace is True then
namespace will be removed from a tag.
:return: a tag
"""
tag = self.node.tag
if self.ignore_namespace:
tag = re.sub(r'{[^}]+}', '', tag)
return tag
def _get_child_nodes(self) -> List:
"""
Get child nodes of xml.etree.ElementTree object
which has been passed during initialization of an instance
as XmlToDict instances.
All options (ignore_namespace, transformers) of the current class
        will be used for the child nodes
:return: List of XmlToDict instances
"""
child_nodes = []
for child_node in self.node:
xml_to_dict_node = XmlToDict(
child_node, ignore_namespace=self.ignore_namespace)
if self._pull_transformers is not None:
xml_to_dict_node.use_pull_transformers(
self._pull_transformers)
child_nodes.append(xml_to_dict_node)
return child_nodes
def _is_single_node(self) -> bool:
"""
If node has no child nodes, this node is a single node
:return: result of check
"""
        return not self.child_nodes
def _get_dict_from_single_node(self) -> Dict:
"""
Extract data from the current node, ignoring child nodes, and
transform result, using instance transformers
:return: Python dict with data node
"""
data_node = self._get_single_data_node()
transformed_data_node = self._transform_node(data_node)
grouped_data_node = self._group_single_node_data(transformed_data_node)
return grouped_data_node
def _get_single_data_node(self) -> Dict:
"""
Extract value and attributes of the current node
:return: Python dict with data node
"""
attributes = self._get_attributes()
node_value = {'#text': self._get_value()}
data_node = {**attributes, **node_value}
return data_node
def _get_value(self) -> Union[str, None]:
"""
Get node value
:return: node value
"""
value = self.node.text
if value is not None:
value = value.strip()
return value
def _transform_node(self, node_data: Dict) -> Dict:
"""
Transform data node, using instance transformers
:param node_data: data for transformation
:return: transformed data
"""
if self._pull_transformers is not None:
node_data = self._pull_transformers.transform_node(node_data)
return node_data
@staticmethod
def _group_single_node_data(node_data: Dict) -> Dict:
"""
Group node data if node data has just a value
>>> xmltodict3.XmlToDict._group_single_node_data({'#text': '1'})
'1'
:param node_data: node data to group
:return:grouped node data
"""
if tuple(node_data.keys()) == ('#text',):
node_data = node_data['#text']
return node_data
def _get_dict_from_node_with_children(self) -> Dict:
"""
Get node attributes and data from child nodes
:return: node data
"""
attributes = self._get_attributes()
children_data = self._get_children_data()
value = {**children_data, **attributes}
return value
def _get_attributes(self) -> Dict:
"""
Get node attributes.
Attributes are marked with "@" in the attribute name
:return: node attributes as dict
"""
attributes = dict()
for attribute_name in self.node.attrib:
key = '@' + attribute_name
attributes[key] = self.node.attrib[attribute_name]
return attributes
def _get_children_data(self) -> Dict:
"""
Get data from child nodes
:return: nodes data as dict
"""
node_data = defaultdict(list)
for child_node in self.child_nodes:
tag = child_node.get_tag()
node_data[tag].append(child_node.get_dict()[tag])
node_data = self._group_children_data(node_data)
return node_data
@staticmethod
def _group_children_data(children_data: defaultdict) -> Dict:
"""
>>> children_data = defaultdict(list)
>>> children_data['tag1'].append({'#value': None})
>>> children_data['tag2'].append({'#value': None})
>>> children_data['tag2'].append({'#value': '111'})
>>> xmltodict3.XmlToDict._group_children_data(children_data)
{'tag1': {'#value': None},
'tag2': [{'#value': None}, {'#value': '111'}]}
:param children_data: data from child nodes
:return: grouped data
"""
grouped_data = dict()
for tag in children_data:
sub_node_data = children_data[tag]
if len(sub_node_data) == 1:
grouped_data[tag] = sub_node_data[0]
else:
grouped_data[tag] = sub_node_data
return grouped_data
def use_pull_transformers(
self, pull_transformers: PullTransformers) -> None:
"""
Set up pull_transformation for data transformation
:param pull_transformers: PullTransformers instance
"""
if isinstance(pull_transformers, PullTransformers):
self._pull_transformers = pull_transformers
class XmlTextToDict:
"""Class to work with strings which contain XML"""
def __init__(self, xml_text: str, ignore_namespace: bool = False):
"""
Init instance
:param xml_text: string with XML
:param ignore_namespace: removing namespace from tags
"""
self.xml_text = xml_text
self.ignore_namespace = ignore_namespace
self._pull_transformers = None
def get_dict(self) -> Dict:
"""
Extract data which has been passed during initialization of an instance
:return: extracted data as a python dict
"""
xml_to_dict_node = self.get_xml_to_dict_node()
if self._pull_transformers is not None:
xml_to_dict_node.use_pull_transformers(
self._pull_transformers)
return xml_to_dict_node.get_dict()
def get_xml_to_dict_node(self) -> XmlToDict:
"""
Prepare a XmlToDict instance
:return: a XmlToDict instance with data
"""
root_node = ElementTree.fromstring(self.xml_text)
xml_to_dict_node = XmlToDict(
root_node, ignore_namespace=self.ignore_namespace)
return xml_to_dict_node
def use_pull_transformers(
self, pull_transformers: PullTransformers) -> None:
"""
Set up pull_transformation for using into XmlToDict object
:param pull_transformers: PullTransformers instance
"""
if isinstance(pull_transformers, PullTransformers):
self._pull_transformers = pull_transformers
class XmlFileToDict(XmlTextToDict):
"""Class to work with XML files"""
def __init__(self, file_path: str, ignore_namespace: bool = False):
"""
Init instance
:param file_path: path to a XML file
:param ignore_namespace: removing namespace from tags
"""
self.file_path = file_path
self.ignore_namespace = ignore_namespace
self._pull_transformers = None
def get_xml_to_dict_node(self) -> XmlToDict:
"""
Prepare a XmlToDict instance
:return: a XmlToDict instance with data
"""
tree_node = ElementTree.parse(self.file_path)
root_node = tree_node.getroot()
xml_to_dict_node = XmlToDict(
root_node, ignore_namespace=self.ignore_namespace)
return xml_to_dict_node
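

# Illustrative usage (the public import path is assumed):
#   from xmltodict3 import XmlTextToDict
#   XmlTextToDict('<a b="1">hi</a>').get_dict()
#   # -> {'a': {'@b': '1', '#text': 'hi'}}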
|
[
"admin@rnd2.ru"
] |
admin@rnd2.ru
|
83319f6df2ebe125e516bd716b711c051f7dee54
|
01d1ffbffecb2abc4d0e06e11c3fbc4f966c3710
|
/Assignment IV/Exercise_2/Exercise_2/Exercise_2a.py
|
497aa80c56d17463b81b2931a010292f8879b503
|
[] |
no_license
|
rzeg/INFDEV01-1_0920429
|
39796ca1b089cd9e07bf2312ee9b8db4557e7ba7
|
86d16a7354d365f302bfbe81285a34798c6279e5
|
refs/heads/master
| 2021-01-02T09:14:30.678946
| 2015-10-09T10:46:27
| 2015-10-09T10:46:27
| 42,510,595
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 876
|
py
|
user1Input = raw_input("Player 1 - Type R for Rock, P for Paper or S for Scissors to make your choice ").lower()
user2Input = raw_input("Player 2 - Type R for Rock, P for Paper or S for Scissors to make your choice ").lower()
if(user1Input == user2Input):
print("Draw")
if(user1Input == "r" and user2Input == "p"):
print("Paper covers Rock, Player 2 Wins")
elif(user1Input == "p" and user2Input == "s"):
print("Paper gets cut by Scissors, Player 2 Wins")
elif(user1Input == "s" and user2Input == "r"):
print("Scissors gets crushed by Rock, Player 2 Wins")
elif(user1Input == "p" and user2Input == "r"):
print("Paper covers Rock, Player 1 Wins")
elif(user1Input == "s" and user2Input == "p"):
print("Paper gets cut by Scissors, Player 1 Wins")
elif(user1Input == "r" and user2Input == "s"):
print("Scissors gets crushed by Rock, Player 1 Wins")
|
[
"rickzegelaar@gmail.com"
] |
rickzegelaar@gmail.com
|
698e29f11047d2ec058edc49d83078842a204ea8
|
f98ca6e020f21b303f8cc2a8474f71ce436f2d75
|
/tests/test_jsonlib.py
|
a67045757d2f9a40191ad7b77c8194728c2eb43e
|
[
"Apache-2.0",
"CC-BY-4.0"
] |
permissive
|
garnerargoed/clldutils
|
df8afd714ab0ae1004aeb47dc24e1e96bb33323b
|
0d85c0bf46184bb99c6800ecbfa7f5db87cb2e7e
|
refs/heads/master
| 2021-09-07T22:36:17.564100
| 2018-03-02T09:54:44
| 2018-03-02T09:54:44
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 855
|
py
|
# coding: utf8
from __future__ import unicode_literals
from datetime import date
import pytest
from clldutils.jsonlib import dump, load, parse, update, format
def test_parse_json_with_datetime():
assert parse(dict(d='2012-12-12T20:12:12.12'))['d'].year
def test_update(tmppath):
p = tmppath / 'test'
with pytest.raises(ValueError):
with update(p):
pass # pragma: no cover
with update(p, default={}) as obj:
obj['a'] = 1
with update(p) as obj:
assert obj['a'] == 1
obj['a'] = 2
with update(p) as obj:
assert obj['a'] == 2
def test_json(tmppath):
d = {'a': 234, 'ä': 'öäüß'}
p = tmppath / 'test'
dump(d, p, indent=4)
for k, v in load(p).items():
assert d[k] == v
def test_format_json():
format(date.today())
assert format(5) == 5
|
[
"xrotwang@googlemail.com"
] |
xrotwang@googlemail.com
|
fa38a33d18841978abe59755c3e991f60bec244a
|
ef4b01eb3cc09193ca0410d24ae40a8ccfc7fa27
|
/kaldi/egs/aspire/s5/file_availibility_in_api.py
|
8fc42853984798ba665649f532f3900eeb1b1e47
|
[
"Apache-2.0",
"LicenseRef-scancode-public-domain"
] |
permissive
|
NitinShuklaML/ASR-WebAPPLinux
|
67f25c7714dff451fc26a64099760edd2e5d18da
|
1e70ec76cde118e48aa3141e347874716ff1ceeb
|
refs/heads/master
| 2020-12-05T10:28:38.111633
| 2020-01-06T13:54:32
| 2020-01-06T13:54:32
| 232,080,321
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 333
|
py
|
#!/usr/bin/env python
import json
import sys
args=" ".join(sys.argv[1:])
response_json = json.loads(args)
#print("response_json from file in api availibility json : "+ response_json[0]["error_code"])
flag=False
if response_json[0]["error_code"] == "NOT_FOUND":
flag=False
else:
flag=True
print(flag)
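
# Example invocation (the JSON shape is illustrative, matching the indexing
# above):
#   python file_availibility_in_api.py '[{"error_code": "NOT_FOUND"}]'
#   -> prints False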
|
[
"nitin.shukla2014@gmail.com"
] |
nitin.shukla2014@gmail.com
|
082cde2325e0e7425b5bceeddd0cc65adb267ce4
|
dab2e417879aabec935b1ac6b8f40df0b652dbc8
|
/stories/sixth.py
|
1151b79982bfe47c5c4446eb224c345d4d5d87f9
|
[] |
no_license
|
handriss/new_school_system
|
8d328db790d536dc44aa73139975a40701f8612b
|
3583e80adac3c7287aa37954f7c78eb80ef03807
|
refs/heads/master
| 2020-09-27T20:53:19.588247
| 2016-09-15T08:26:05
| 2016-09-15T08:26:05
| 66,295,268
| 0
| 0
| null | 2016-08-22T19:31:18
| 2016-08-22T17:52:24
| null |
UTF-8
|
Python
| false
| false
| 76
|
py
|
class SixthStory():
def __init__(self):
print("Sixth Story: ")
|
[
"andrashinkel@gmail.com"
] |
andrashinkel@gmail.com
|
f91910ccd899ad5814d55a691ce71486e030354d
|
b0794edd7973616367cd7abbd0c87fc202c39fc7
|
/app/__init__.py
|
66b42360a67a66ba525d75a56513958062fe2c5f
|
[] |
no_license
|
OneCalledSyn/Synvitational
|
1b0082c1e03f4f2e18f89aa5651fa22f0a7c053a
|
d5fbe6ad96debfa032d3c603df7b78d347fa6e2b
|
refs/heads/main
| 2023-02-27T03:23:00.947656
| 2021-02-03T21:42:47
| 2021-02-03T21:42:47
| 314,645,502
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,535
|
py
|
# Create the application object as an instance of class 'Flask'
from flask import Flask, request, current_app
from config import Config
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from flask_login import LoginManager
import logging
from logging.handlers import SMTPHandler, RotatingFileHandler
import os
from flask_mail import Mail
from flask_bootstrap import Bootstrap
from flask_moment import Moment
from flask_babel import Babel, lazy_gettext as _l
from elasticsearch import Elasticsearch
from redis import Redis
import rq
db = SQLAlchemy()
migrate = Migrate()
login = LoginManager()
login.login_view = 'auth.login'
login.login_message = _l('Please log in to access this page.')
mail = Mail()
bootstrap = Bootstrap()
moment = Moment()
babel = Babel()
def create_app(config_class=Config):
app = Flask(__name__)
app.config.from_object(config_class)
db.init_app(app)
migrate.init_app(app, db)
login.init_app(app)
mail.init_app(app)
bootstrap.init_app(app)
moment.init_app(app)
babel.init_app(app)
app.redis = Redis.from_url(app.config['REDIS_URL'])
app.task_queue = rq.Queue('synvitational-tasks', connection=app.redis)
app.elasticsearch = Elasticsearch([app.config['ELASTICSEARCH_URL']]) \
if app.config['ELASTICSEARCH_URL'] else None
from app.errors import bp as errors_bp
app.register_blueprint(errors_bp)
from app.auth import bp as auth_bp
app.register_blueprint(auth_bp, url_prefix='/auth')
from app.main import bp as main_bp
app.register_blueprint(main_bp)
if not app.debug and not app.testing:
if app.config['MAIL_SERVER']:
auth = None
if app.config['MAIL_USERNAME'] or app.config['MAIL_PASSWORD']:
auth = (app.config['MAIL_USERNAME'],
app.config['MAIL_PASSWORD'])
secure = None
if app.config['MAIL_USE_TLS']:
secure = ()
mail_handler = SMTPHandler(
mailhost=(app.config['MAIL_SERVER'], app.config['MAIL_PORT']),
fromaddr='no-reply@' + app.config['MAIL_SERVER'],
toaddrs=app.config['ADMINS'], subject='Microblog Failure',
credentials=auth, secure=secure)
mail_handler.setLevel(logging.ERROR)
app.logger.addHandler(mail_handler)
if app.config['LOG_TO_STDOUT']:
stream_handler = logging.StreamHandler()
stream_handler.setLevel(logging.INFO)
app.logger.addHandler(stream_handler)
else:
if not os.path.exists('logs'):
os.mkdir('logs')
file_handler = RotatingFileHandler('logs/synvitational.log',
maxBytes=10240, backupCount=10)
file_handler.setFormatter(logging.Formatter(
'%(asctime)s %(levelname)s: %(message)s '
'[in %(pathname)s:%(lineno)d]'))
file_handler.setLevel(logging.INFO)
app.logger.addHandler(file_handler)
app.logger.setLevel(logging.INFO)
app.logger.info('Microblog startup')
return app
@babel.localeselector
def get_locale():
return request.accept_languages.best_match(current_app.config['LANGUAGES'])
# return 'es'
from app import models
# Importing routes at the bottom of the file instead of at the top avoids
# 'circular imports' issue
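
# Illustrative entry point (an assumption; the project's real run script is
# not shown here):
#   from app import create_app
#   app = create_app()
#   app.run()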
|
[
"noreply@github.com"
] |
OneCalledSyn.noreply@github.com
|
8bbc5995cf8ac1daa24bbf2120f013c8297650ba
|
ed77fd675aa1307c3dcb404eea06b1b6ad0aec03
|
/fullread.py
|
f1ef59da1584285c37e791b263f924da8d965d70
|
[] |
no_license
|
rsnarang/Mars-Rover-Data-Analysis
|
3f7e25004a2a84ec876eb54a23f668387032ab6e
|
f43f8a544303c1f43cc8bf2a4dbdd69a0a2d720e
|
refs/heads/master
| 2022-12-10T21:58:33.936910
| 2022-12-05T00:21:45
| 2022-12-05T00:21:45
| 182,816,584
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 227
|
py
|
import pandas as pd
import glob
flist = (glob.glob('Moc/*.csv'))
pathmoc = '/Users/[user]/desktop/Python/Moc/'
col_list = ['File', 'K2O', 'K2O RMSEP']
df = pd.read_csv(pathmoc + 'UltMocFin.csv')
df2 = df[col_list]
print(df2)
|
[
"noreply@github.com"
] |
rsnarang.noreply@github.com
|
52aeb333ec773168df48f02e2f66a9fb9b84d8a7
|
d0da7ed581bbab646dceccc0eab352b6e3d648e1
|
/main_app/views.py
|
1db4e895090240428e9755cf7a9ad7fd68f57e8b
|
[] |
no_license
|
chauncy-n/webdevskills
|
bb4972b26395ed9e4b53bc8fc425f9e40ed149a0
|
7ef11ff5bbaa7dfca0c71b6577f6e38a567e5011
|
refs/heads/master
| 2020-04-01T02:31:24.941313
| 2018-10-12T21:45:55
| 2018-10-12T21:45:55
| 152,782,583
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,129
|
py
|
from django.shortcuts import render, redirect
from .models import Skill
from django.views.generic.edit import CreateView, UpdateView, DeleteView
from django.http import HttpResponseRedirect
from .forms import LoginForm
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
# Create your views here.
def skills_index(request):
skills = Skill.objects.all()
return render(request, 'skills/index.html', { 'skills': skills} )
def index(request):
return render(request, 'base.html')
def skills_detail(request, skill_id):
skill = Skill.objects.get(id=skill_id)
return render(request, 'skills/detail.html', {'skill': skill})
class SkillCreate(CreateView):
model = Skill
fields = '__all__'
success_url = '/skills'
def form_valid(self, form):
self.object = form.save(commit=False)
self.object.user = self.request.user
self.object.save()
return HttpResponseRedirect('/skills/')
class SkillUpdate(UpdateView):
model = Skill
fields = ['skill', 'description']
def form_valid(self, form):
self.object = form.save(commit=False)
self.object.save()
return HttpResponseRedirect('/skills/' + str(self.object.pk))
class SkillDelete(DeleteView):
model = Skill
success_url = '/skills'
def login_view(request):
if request.method == 'POST':
# if post, then authenticate (user submitted username and password)
form = LoginForm(request.POST)
if form.is_valid():
u = form.cleaned_data['username']
p = form.cleaned_data['password']
user = authenticate(username = u, password = p)
if user is not None:
if user.is_active:
login(request, user)
return HttpResponseRedirect('/')
else:
print("The account has been disabled.")
return HttpResponseRedirect('/')
else:
print("The username and/or password is incorrect.")
return HttpResponseRedirect('/')
else:
form = LoginForm()
return render(request, 'login.html', {'form': form})
def logout_view(request):
logout(request)
return HttpResponseRedirect('/')
def signup(request):
if request.method == 'POST':
form = UserCreationForm(request.POST)
if form.is_valid():
user = form.save()
login(request, user)
return redirect('base')
else:
form = UserCreationForm()
return render(request, 'signup.html', {'form': form})
@login_required
def profile(request, username):
if username == request.user.username:
user = User.objects.get(username=username)
skills = Skill.objects.filter(user=user)
return render(request, 'profile.html', {'username': username, 'skills': skills})
else:
return HttpResponseRedirect('/')
|
[
"eldoon@comcast.net"
] |
eldoon@comcast.net
|
c0b766f87771e2917beaef38f23df52de3d2573b
|
64572e6596a22bd08f5f259ade5b325dac9ba397
|
/survey_sr/views.py
|
2ba05463a4568053db5533a9f4a98cf4983a2c0f
|
[
"MIT"
] |
permissive
|
mgutierrezc/Prueba-GECE
|
236128902b5bd4c05f67ff5ac41697352df32c9e
|
b55cff353745957e4d4ef1d7aa002052ab623e56
|
refs/heads/master
| 2021-06-19T22:36:23.752872
| 2019-06-03T02:50:32
| 2019-06-03T02:50:32
| 154,437,433
| 0
| 0
|
NOASSERTION
| 2021-06-10T20:57:50
| 2018-10-24T04:10:45
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 1,111
|
py
|
from . import models
from ._builtin import Page, WaitPage
from otree.api import Currency as c, currency_range
from .models import Constants
class Demographics(Page):
form_model = models.Player
form_fields = ['gender',
'field_studies',
'escala',
'ciclo',
'age',
'carrera_padres', ]
class CognitiveReflectionTest(Page):
form_model = models.Player
form_fields = ['preg11',
'preg12', ]
class SegundoCaso(Page):
form_model = models.Player
form_fields = ['preg21',
'preg22', ]
class Question(Page):
form_model = models.Player
form_fields = ['preg31',
'preg32',
'preg33',
'preg34']
class gracias(Page):
def vars_for_template(self):
participant = self.participant
return {
'redemption_code': participant.label or participant.code,
}
page_sequence = [
CognitiveReflectionTest,
SegundoCaso,
Question,
Demographics,
gracias,
]
|
[
"GaryTheStTrooper@users.noreply.github.com"
] |
GaryTheStTrooper@users.noreply.github.com
|
f8eccfe1aedbc9b1aa4f0a96d6d6e702357e2324
|
9c69e2fc689194237ef294071a9c14c6dfabe545
|
/src/ultimate/user/urls.py
|
f8a18af2ec2ba1e3f063efd7648d2a5fa4b8f419
|
[] |
permissive
|
a2ultimate/ultimate-league-app
|
69f0331f5efeb2883c00990eb7a59ac346a34c69
|
1b40e8a01950fc526db9b649b78ada71ec567624
|
refs/heads/main
| 2023-04-03T06:43:24.471566
| 2023-03-16T00:55:22
| 2023-03-16T00:55:22
| 8,152,035
| 4
| 2
|
BSD-3-Clause
| 2023-03-16T00:55:24
| 2013-02-12T03:15:25
|
Python
|
UTF-8
|
Python
| false
| false
| 1,599
|
py
|
from django.conf.urls import url, include
from django.contrib.auth.views import login, logout, password_reset, password_reset_done, password_reset_confirm, password_reset_complete
from . import views
urlpatterns = [
url(r'^$', views.index, {}, 'user'),
url(r'^log-in/$', login, {'template_name': 'user/login.html'}, 'auth_log_in'),
url(r'^log-out/$', logout, {'template_name': 'user/logout.html'}, 'auth_log_out'),
url(r'^password/reset/$', password_reset, {'post_reset_redirect': '/user/password/reset/done/', 'template_name': 'user/registration/password_reset_form.html',
'email_template_name': 'user/registration/password_reset_email.html', 'subject_template_name': 'user/registration/password_reset_subject.txt', }, 'password_reset'),
url(r'^password/reset/done/$', password_reset_done,
{'template_name': 'user/registration/password_reset_done.html'}, 'password_reset_done'),
url(r'^password/reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', password_reset_confirm, {
'post_reset_redirect': '/user/password/done/', 'template_name': 'user/registration/password_reset_confirm.html'}, 'password_reset_confirm'),
    url(r'^password/done/$', password_reset_complete,
        {'template_name': 'user/registration/password_reset_complete.html'}, 'password_reset_complete'),
url(r'^sign-up/$', views.signup, {}, 'registration_register'),
url(r'^edit/profile/$', views.editprofile, {}, 'editprofile'),
url(r'^edit/ratings/$', views.editratings, {}, 'editratings'),
]
|
[
"ryanjdonnelly@gmail.com"
] |
ryanjdonnelly@gmail.com
|
4ee1c835bfb47e715ce2c96c65cf218f187bab31
|
282d0a84b45b12359b96bbf0b1d7ca9ee0cb5d19
|
/Malware1/venv/Lib/site-packages/numpy/lib/npyio.py
|
6eb9cbd18235dbb0bd65ee520e9f94852156d02a
|
[] |
no_license
|
sameerakhtar/CyberSecurity
|
9cfe58df98495eac6e4e2708e34e70b7e4c055d3
|
594973df27b4e1a43f8faba0140ce7d6c6618f93
|
refs/heads/master
| 2022-12-11T11:53:40.875462
| 2020-09-07T23:13:22
| 2020-09-07T23:13:22
| 293,598,094
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 130
|
py
|
version https://git-lfs.github.com/spec/v1
oid sha256:40ba76b8ee10e92857396797dd69934bd5b9c1c413138a9ccef88a8b534917a6
size 84853
|
[
"46763165+sameerakhtar@users.noreply.github.com"
] |
46763165+sameerakhtar@users.noreply.github.com
|
bda9e286655709b69005e3804bdc32e23d0e074c
|
1f246ab8f856a7f4cf42ed6fdba0b432be16ed7c
|
/przepisy/views.py
|
377eeb62f533464b365488d6afbc6229fdd6173a
|
[] |
no_license
|
salolg/cooking-recipes
|
7c4adb6c9aa7f7f5055b909ec397beabb7485b2a
|
fec2466c37a45c8d563236d502587b1d167b7e9a
|
refs/heads/master
| 2021-07-08T07:01:12.101245
| 2019-12-22T15:06:17
| 2019-12-22T15:06:17
| 229,552,070
| 0
| 0
| null | 2021-03-20T02:23:50
| 2019-12-22T10:37:59
|
Python
|
UTF-8
|
Python
| false
| false
| 2,380
|
py
|
from flask import Flask, request, session, redirect, url_for, render_template, flash
from models import User, todays_recent_posts, searching_for_posts
app = Flask(__name__)
@app.route("/")
def index():
posts = todays_recent_posts(5)
print(posts)
return render_template("index.html", posts=posts)
@app.route("/register", methods=["GET", "POST"])
def register():
if request.method == "POST":
username = request.form["username"]
password = request.form["password"]
user = User(username)
if not user.register(password):
            flash('A user with that username already exists')
        else:
            flash('You have been registered successfully.')
return redirect(url_for("login"))
return render_template("register.html")
@app.route("/login", methods=["GET", "POST"])
def login():
if request.method == "POST":
username = request.form["username"]
password = request.form["password"]
user = User(username)
if not user.verify_password(password):
flash("Niepoprawne dane logowania")
else:
flash("Zostales pomyslnie zalogowany")
session["username"] = user.username
return redirect(url_for("index"))
return render_template("login.html")
@app.route("/add_post", methods=["POST"])
def add_post():
title = request.form["title"]
tags = request.form["tags"]
text = request.form["text"]
user = User(session["username"])
if not title or not tags or not text:
flash("You must give your post a title, tags, and a text body.")
else:
user.add_post(title, tags, text)
return redirect(url_for("index"))
@app.route("/profile/<username>")
def profile(username):
user1 = User(session.get("username"))
user2 = User(username)
posts = user2.recent_posts()
return render_template("profile.html", username=username, posts=posts)
@app.route("/search_for_recipe")
def search_box():
return render_template("searching.html")
@app.route("/search", methods=["POST"])
def searching():
tag = request.form["tag"]
posts = searching_for_posts(tag)
print(posts)
return render_template("searching.html", posts=posts)
@app.route("/logout")
def logout():
session.pop("username")
flash("Zostales wylogowny")
return redirect(url_for("index"))
|
[
"olga.salagacka@nokia.com"
] |
olga.salagacka@nokia.com
|
5e1ffa3a96834760eec7dd3e8145af3638e64e3e
|
30db5c868ec38f063adebe61435db0dfdcdaf689
|
/src/CIDA/main.py
|
2bab8f394b0c71bf702293c2c3d9a2649dcdfe57
|
[
"MIT"
] |
permissive
|
anshuln/Training-for-the-Future
|
94db402e22b6537f911b809ac3c4993809785319
|
dea30999827ff5882468d318fa536f96455267ca
|
refs/heads/main
| 2023-09-06T01:38:28.552198
| 2021-11-17T08:17:18
| 2021-11-17T08:17:18
| 421,274,612
| 9
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 18,130
|
py
|
from easydict import EasyDict
import argparse
from torch.utils.data.sampler import SubsetRandomSampler
import numpy as np
import os
import random
from tqdm import tqdm
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import torch.optim.lr_scheduler as lr_scheduler
from torchvision import datasets, transforms
from torch.autograd import Variable
import matplotlib.pyplot as plt
from utils import masked_mse, masked_cross_entropy, gaussian_loss, plain_log, write_pickle, read_pickle
from torch.utils import data
from data_loader import classifdata
from sklearn.metrics import accuracy_score, classification_report
label_noise_std = 0.20
use_label_noise = False
use_inverse_weighted = True
discr_thres = 999.999
normalize = True
train_discr_step_tot = 2
train_discr_step_extra = 0
slow_lrD_decay = 1
train_list = [0,1,2,3,4,5,6,7,8,9,10]
mask_list = [1]*11
test_list = [11]
def seed_torch(args, seed):
if args.cuda:
torch.cuda.manual_seed(seed)
random.seed(seed)
os.environ['PYTHONHASHSEED'] = str(seed)
np.random.seed(seed)
torch.manual_seed(seed)
torch.cuda.manual_seed(seed)
torch.cuda.manual_seed_all(seed) # if you are using multi-GPU.
torch.backends.cudnn.benchmark = False
torch.backends.cudnn.deterministic = True
class ClassificationDataSet(torch.utils.data.Dataset):
def __init__(self, indices, transported_samples=None,target_bin=None, **kwargs):
'''
TODO: Handle OT
Pass Transported samples as kwargs?
'''
self.indices = indices # Indices are the indices of the elements from the arrays which are emitted by this data-loader
self.transported_samples = transported_samples # a 2-d array of OT maps
self.root = kwargs['root_dir']
self.device = kwargs['device'] if kwargs.get('device') else 'cpu'
self.transport_idx_func = kwargs['transport_idx_func'] if kwargs.get('transport_idx_func') else lambda x:x%1000
self.num_bins = kwargs['num_bins'] if kwargs.get('num_bins') else 6
        self.base_bin = kwargs['base_bin'] if kwargs.get('base_bin') else 0  # Minimum whole number value of U
#self.num_bins = kwargs['num_bins'] # using this we can get the bin corresponding to a U value
self.target_bin = target_bin
self.X = np.load("{}/X.npy".format(self.root))
self.Y = np.load("{}/Y.npy".format(self.root))
self.A = np.load("{}/A.npy".format(self.root))
self.U = np.load("{}/U.npy".format(self.root))
self.drop_cols = kwargs['drop_cols_classifier'] if kwargs.get('drop_cols_classifier') else None
def __getitem__(self,idx):
index = self.indices[idx]
data = torch.tensor(self.X[index]).float().to(self.device) # Check if we need to reshape
label = torch.tensor(self.Y[index]).long().to(self.device)
auxiliary = torch.tensor(self.A[index]).float().to(self.device).view(-1, 1)
domain = torch.tensor(self.U[index]).float().to(self.device).view(-1, 1)
if self.transported_samples is not None:
source_bin = int(np.round(domain.item() * self.num_bins)) # - self.base_bin
# print(source_bin,self.target_bin)
transported_X = torch.from_numpy(self.transported_samples[source_bin][self.target_bin][self.transport_idx_func(idx)]).float().to(self.device) #This should be similar to index fun, an indexing function which takes the index of the source sample and returns the corresponding index of the target sample.
# print(source_bin,self.target_bin,transported_X.size())
if self.drop_cols is not None:
return data[:self.drop_cols],transported_X[:self.drop_cols], auxiliary, domain, label
return data,transported_X, auxiliary, domain, label
if self.drop_cols is not None:
return data[:self.drop_cols], auxiliary, domain, label
return data, auxiliary, domain, label
def __len__(self):
return len(self.indices)
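
# Illustrative construction (the root_dir path and device are assumptions,
# and X.npy/Y.npy/A.npy/U.npy must exist under root_dir):
#   ds = ClassificationDataSet(list(range(1000)),
#                              root_dir='../../data/Moons/processed',
#                              device='cpu')
#   data, auxiliary, domain, label = ds[0]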
# Training loop
def train(encoder, predictor, discriminator, train_loader, opt_D, opt_non_D, lr_scheduler_D, lr_scheduler_non_D, epoch, args,classification):
models = [encoder, predictor, discriminator]
for model in models:
model.train()
sum_discr_loss = 0
sum_total_loss = 0
sum_pred_loss = 0
for batch_idx, data_tuple in tqdm(enumerate(train_loader)):
# print(batch_idx)
if args.cuda:
data_tuple = tuple(ele.cuda() for ele in data_tuple)
if normalize:
data_raw, target, domain, data, mask = data_tuple
else:
data, target, domain, mask = data_tuple
# FF encoder and predictor
encoding = encoder((data, domain))
prediction = predictor((encoding, domain))
if use_label_noise:
noise = (torch.randn(domain.size()).cuda() * label_noise_std).unsqueeze(1)
# train discriminator
train_discr_step = 0
while args.dis_lambda > 0.0:
train_discr_step += 1
discr_pred_m, discr_pred_s = discriminator((encoding, domain))
discr_loss = gaussian_loss(discr_pred_m, discr_pred_s, domain.unsqueeze(1) / args.norm, np.mean(train_list) / args.norm, args.norm)
for model in models:
model.zero_grad()
discr_loss.backward(retain_graph=True)
opt_D.step()
# handle extra steps to train the discr's variance branch
if train_discr_step_extra > 0:
cur_extra_step = 0
while True:
discr_pred_m, discr_pred_s = discriminator((encoding, domain))
discr_loss = gaussian_loss(discr_pred_m.detach(), discr_pred_s, domain.unsqueeze(1) / args.norm)
for model in models:
model.zero_grad()
discr_loss.backward(retain_graph=True)
opt_D.step()
cur_extra_step += 1
if cur_extra_step > train_discr_step_extra:
break
if discr_loss.item() < 1.1 * discr_thres and train_discr_step >= train_discr_step_tot:
sum_discr_loss += discr_loss.item()
break
# handle wgan
if args.wgan == 'wgan':
for p in discriminator.parameters():
p.data.clamp_(args.clamp_lower, args.clamp_upper)
# train encoder and predictor
if classification:
pred_loss = masked_cross_entropy(prediction, target, mask)
else:
pred_loss = masked_mse(prediction, target, mask)
discr_pred_m, discr_pred_s = discriminator((encoding, domain))
ent_loss = 0
discr_loss = gaussian_loss(discr_pred_m, discr_pred_s, domain.unsqueeze(1) / args.norm)
total_loss = pred_loss - discr_loss * args.dis_lambda
for model in models:
model.zero_grad()
total_loss.backward()
opt_non_D.step()
sum_pred_loss += pred_loss.item()
sum_total_loss += total_loss.item()
lr_scheduler_D.step()
lr_scheduler_non_D.step()
avg_discr_loss = sum_discr_loss / len(train_loader.dataset) * args.bs
avg_pred_loss = sum_pred_loss / len(train_loader.dataset) * args.bs
avg_total_loss = sum_total_loss / len(train_loader.dataset) * args.bs
log_txt = 'Train Epoch {}: avg_discr_loss = {:.5f}, avg_pred_loss = {:.3f}, avg_total_loss = {:.3f}'.format(epoch, avg_discr_loss, avg_pred_loss, avg_total_loss)
print(log_txt)
plain_log(args.log_file,log_txt+'\n')
if epoch % args.save_interval == 0 and epoch != 0:
torch.save(encoder, '%s.model_enc' % args.save_head)
torch.save(predictor, '%s.model_pred' % args.save_head)
torch.save(discriminator, '%s.model_discr' % args.save_head)
# Testing loop
def test_regression(encoder, predictor, discriminator, test_loader, args, classification, log_file=None):
models = [encoder, predictor, discriminator]
for model in models:
model.eval()
test_loss = 0
rmse_loss = 0
correct = 0
l_data = []
l_label = []
l_gt = []
l_encoding = []
l_domain = []
l_prob = []
#for data, target, domain in test_loader:
for data_tuple in test_loader:
if args.cuda:
data_tuple = tuple(ele.cuda() for ele in data_tuple)
if normalize:
data_raw, target, domain, data = data_tuple
else:
data, target, domain = data_tuple
data_raw = data
encoding = encoder((data, domain))
prediction = predictor((encoding, domain))
test_loss += F.mse_loss(prediction.squeeze(1), target, reduction='sum').item() # sum up batch loss
pred = prediction.data.max(1, keepdim=True)[1] # get the index of the max log-probability
correct += F.l1_loss(prediction.squeeze(1), target, reduction='sum').item()
test_loss /= len(test_loader.dataset)
correct /= len(test_loader.dataset)
log_txt = 'Test set: MSE loss: {:.7f}, MAE loss: {:.7f}'.format(
test_loss, correct)
if log_file is None:
print(log_txt)
else:
        print(log_txt, file=log_file)
plain_log(args.log_file,log_txt+'\n')
return test_loss,correct
def test_classification(encoder, predictor, discriminator, test_loader, args, classification, log_file=None):
models = [encoder, predictor, discriminator]
for model in models:
model.eval()
test_loss = 0
rmse_loss = 0
correct = 0
l_data = []
l_label = []
l_gt = []
l_true = []
l_encoding = []
l_domain = []
l_prob = []
#for data, target, domain in test_loader:
for data_tuple in test_loader:
if args.cuda:
data_tuple = tuple(ele.cuda() for ele in data_tuple)
if normalize:
data_raw, target, domain, data = data_tuple
else:
data, target, domain = data_tuple
data_raw = data
encoding = encoder((data, domain))
prediction = predictor((encoding, domain))
preds = torch.argmax(prediction, 1)
l_label += list(preds.detach().cpu().numpy())
l_true += list(target.long().clone().cpu().numpy())
test_loss /= len(test_loader.dataset)
acc = accuracy_score(l_true, l_label)
print('Accuracy: ', acc)
log_txt = 'Test set: Accuracy: {:.7f}'.format(acc)
return test_loss, acc
def visualize_trajectory(encoder,predictor,indices,filename=''):
td = ClassificationDataSet(indices=indices,root_dir=fname,device="cuda:0")
fig, ax = plt.subplots(3, 3)
ds = iter(torch.utils.data.DataLoader(td,1,False))
for i in range(3):
for j in range(3):
x,a,u,y = next(ds)
x_ = []
y_ = []
y__ = []
y___ = []
actual_time = u.view(1).detach().cpu().numpy()
for t in tqdm(np.arange(actual_time-0.2,actual_time+0.2,0.005)):
x_.append(t)
t = torch.tensor([t*12]).float().to(x.device)
t.requires_grad_(True)
delta = (x[0,-1]*12 - t).detach()
encoding = encoder((x, t))
y_pred = predictor((encoding, t))
# y_pred = .classifier(torch.cat([x[:,:-2],x[:,-2].view(-1,1)-delta.view(-1,1), t.view(-1,1)],dim=1), t.view(-1,1)) # TODO change the second last feature also
partial_Y_pred_t = torch.autograd.grad(y_pred, t, grad_outputs=torch.ones_like(y_pred))[0]
y_.append(y_pred.item())
y__.append(partial_Y_pred_t.item())
y___.append((-partial_Y_pred_t*delta + y_pred).item())
# TODO gradient addition business
ax[i,j].plot(x_,y_)
ax[i,j].plot(x_,y__)
# ax[i,j].plot(x_,y___)
ax[i,j].set_title("time-{}".format(actual_time))
# print(x_,y_)
ax[i,j].scatter(u.view(-1,1).detach().cpu().numpy(),y.view(-1,1).detach().cpu().numpy())
plt.savefig('traj_{}.png'.format(filename))
plt.close()
def main(args):
seed_torch(args, args.seed)
kwargs = {'num_workers': 1, 'pin_memory': True} if args.cuda else {}
'''
args.dropout=0.0
args.lr=5e-4
args.gamma_exp=1000
args.hidden=800
args.ratio=1
args.dis_lambda=1.0
args.lambda_m=0.0
args.wgan='wgan'
args.clamp_lower=-0.15
args.clamp_upper=0.15
args.bs=200
args.num_train=100
args.loss='default'
args.evaluate=False
args.checkpoint='none'
args.save_head='tmp'
args.save_interval=20
args.log_interval=20
args.log_file='tmp_mlp'
seed=10
args.cuda=True
'''
global train_list, test_list, mask_list
args.wgan = 'wgan'
args.save_head='tmp'
args.save_interval=20
args.log_interval=20
args.log_file='tmp_mlp'
args.gamma_exp=1000
args.dis_lambda=1.0
args.lambda_m=0.0
args.clamp_lower=-0.15
args.clamp_upper=0.15
if args.data == "moons":
from moon_models import DomainEnc, DomainPred, DomainDDisc
train_list = [0, 1, 2, 3, 4, 5, 6, 7, 8]
test_list = [9]
mask_list = [1]*11
fname = '../../data/Moons/processed/'
args.lr = 1e-3
args.norm = 12
classification = True
elif args.data == "house":
from house_models import DomainEnc, DomainPred, DomainDDisc
train_list = [0, 1, 2, 3, 4, 5, 6, 7, 8]
# 25 epochs
test_list = [9]
mask_list = [1]*11
fname = '../../data/HousePriceTime/'
args.lr = 1e-3
args.norm = 1
classification = False
elif args.data == "m5":
from m5_models import DomainEnc, DomainPred, DomainDDisc
train_list = [0, 1, 2]
test_list = [3]
mask_list = [1]*4
fname = '../../data/M5/processed/'
args.lr = 1e-3
args.norm = 5
classification = False
elif args.data == "m5_household":
from m5_models import DomainEnc, DomainPred, DomainDDisc
train_list = [0, 1, 2]
test_list = [3]
mask_list = [1]*4
fname = '../../data/M5/processed_household/'
args.lr = 1e-3
args.norm = 5
classification = False
elif args.data == "onp":
from onp_models import DomainEnc, DomainPred, DomainDDisc
train_list = [0, 1, 2, 3, 4]
test_list = [5]
mask_list = [1]*6
args.norm = 7
fname = '../../data/ONP/processed/'
args.lr = 1e-3
classification = True
elif args.data == "elec":
from elec_models import DomainEnc, DomainPred, DomainDDisc
train_list = [x for x in range(29)]
# 12 epochs
test_list = [29]
mask_list = [1]*29
args.norm = 1
fname = '../../data/Elec2/'
args.lr = 1e-3
classification = True
train_loader = torch.utils.data.DataLoader(
classifdata(fname, train_list, normalize, mask_list),
shuffle=True,
batch_size=args.bs, **kwargs)
test_loader = torch.utils.data.DataLoader(
classifdata(fname, test_list, normalize),
batch_size=args.bs, **kwargs)
encoder = DomainEnc()
predictor = DomainPred()
discriminator = DomainDDisc()
models = [encoder, predictor, discriminator]
if args.cuda:
for model in models:
model.cuda()
torch.autograd.set_detect_anomaly(True)
# Set up optimizer
opt_D = optim.Adam(discriminator.parameters(), lr = args.lr) # lr
opt_non_D = optim.Adam(list(encoder.parameters()) + list(predictor.parameters()), lr = args.lr) # lr
lr_scheduler_D = lr_scheduler.ExponentialLR(optimizer=opt_D, gamma=0.5 ** (1/(args.gamma_exp*(train_discr_step_extra+1)) * slow_lrD_decay))
lr_scheduler_non_D = lr_scheduler.ExponentialLR(optimizer=opt_non_D, gamma=0.5 ** (1/args.gamma_exp))
ind = list(range(args.bs))
ind_test = list(range(1000))
#bce = nn.BCELoss()
#mse = nn.MSELoss()
#if classification:
if classification:
best_acc = 0
for ep in range(args.epochs):
train(encoder, predictor, discriminator, train_loader, opt_D, opt_non_D, lr_scheduler_D, lr_scheduler_non_D, ep, args, classification)
if ep % 10 == 0:
loss, acc = test_classification(encoder, predictor, discriminator, test_loader, args, classification)
if acc > best_acc: best_acc = acc
log_file = open("cida_%s" %(args.data), "a+")
print("Seed - {}".format(args.seed),file=log_file)
print("Accuracy: {}".format(best_acc),file=log_file)
log_file.close()
else:
best_mse,best_mae = 1000000,1000000
for ep in range(args.epochs):
train(encoder, predictor, discriminator, train_loader, opt_D, opt_non_D, lr_scheduler_D, lr_scheduler_non_D, ep, args, classification)
if ep % 5 == 0:
mse,mae = test_regression(encoder, predictor, discriminator, test_loader, args, classification)
if mse < best_mse and mae < best_mae:
best_mse,best_mae = mse,mae
models = [encoder,predictor,discriminator]
log_file = open("cida_%s" %(args.data), "a+")
print("Seed - {}".format(args.seed),file=log_file)
# test(log_file)
print("MSE: {}".format(best_mse),file=log_file)
print("MAE: {}".format(best_mae),file=log_file)
log_file.close()
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--data', help="String, needs to be one of mnist, sleep, moons, m5, elec, m5_household, house")
parser.add_argument('--epochs',default=10, help="Needs to be int, number of epochs",type=int)
parser.add_argument('--bs', default=100, help="Batch size",type=int)
parser.add_argument('--cuda',action='store_true',help="Should we use a GPU")
parser.add_argument('--seed',default=0,type=int)
args = parser.parse_args()
main(args)
|
[
"anshulnasery@gmail.com"
] |
anshulnasery@gmail.com
|
2ca3886162c8a7299d552d659c963526470fe2f9
|
92c2a6ffb602df2d43274bc42d3452166909b1af
|
/2019/11/day11.py
|
87b9277dc33dd332615bba1fef85f905b313bf51
|
[] |
no_license
|
tonyaiuto/aoc
|
97c123a8b2e3326ae517d6f03ad00e63846e0432
|
c24b41cd439511f08e3aa352ea0e0a64c80f5cd6
|
refs/heads/main
| 2023-01-10T23:27:50.912271
| 2023-01-04T13:20:01
| 2023-01-05T03:44:26
| 160,863,158
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,795
|
py
|
#!/usr/bin/env python3
from elf_image import ElfImage
import intcode
class Robot(object):
def __init__(self):
self.x = 0
self.y = 0
    self.dir = 0  # 0 up, 1 right, 2 down, 3 left
self.painted = {}
def turn(self, dir):
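    # dir 0 turns left (counter-clockwise); anything else turns right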
if dir == 0:
self.dir -= 1
if self.dir < 0:
self.dir += 4
else:
self.dir = (self.dir + 1) % 4
def move(self, dist=1):
if self.dir == 0:
self.y -= dist
elif self.dir == 1:
self.x += dist
elif self.dir == 2:
self.y += dist
elif self.dir == 3:
self.x -= dist
else:
raise Exception('bad dir: %d' % self.dir)
def paint(self, computer):
while True:
computer.push_input(self.painted.get((self.x, self.y), 0))
color = computer.run_until_output()
dir = computer.run_until_output()
# print('paint %d,%d %s' % (self.x, self.y, 'black' if color == 0 else 'white'))
if computer.is_halted:
break
self.painted[(self.x, self.y)] = color
self.turn(dir)
self.move()
def part1():
mem = intcode.load_intcode('input_11.txt')
ic = intcode.IntCode(list(mem))
robot = Robot()
robot.paint(ic)
print('part1:', len(robot.painted))
def part2():
mem = intcode.load_intcode('input_11.txt')
ic = intcode.IntCode(list(mem))
robot = Robot()
robot.painted[(0, 0)] = 1
robot.paint(ic)
image = ElfImage.fromPoints(robot.painted)
image.print()
"""
# # #### ## #### # # ### # ###
# # # # # # # # # # # # #
# # # # # ### ## ### # # #
# # # #### # # # # # # ###
# # # # # # # # # # # #
## #### # # #### # # ### #### #
"""
image.toPng('out_11.png')
if __name__ == '__main__':
part1()
part2()
|
[
"aiuto@google.com"
] |
aiuto@google.com
|
3215f4395ddfe3f66ca86b29a70209aa7b2a2b1b
|
f576f0ea3725d54bd2551883901b25b863fe6688
|
/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/__init__.py
|
01935d3cb4901323838878ded3e2783723747739
|
[
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] |
permissive
|
Azure/azure-sdk-for-python
|
02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c
|
c2ca191e736bb06bfbbbc9493e8325763ba990bb
|
refs/heads/main
| 2023-09-06T09:30:13.135012
| 2023-09-06T01:08:06
| 2023-09-06T01:08:06
| 4,127,088
| 4,046
| 2,755
|
MIT
| 2023-09-14T21:48:49
| 2012-04-24T16:46:12
|
Python
|
UTF-8
|
Python
| false
| false
| 726
|
py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from ._data_box_management_client import DataBoxManagementClient
__all__ = ['DataBoxManagementClient']
try:
from ._patch import patch_sdk # type: ignore
patch_sdk()
except ImportError:
pass
from ._version import VERSION
__version__ = VERSION
|
[
"noreply@github.com"
] |
Azure.noreply@github.com
|
7fc3421e8ac94d7bcef61a7fd298c65c3e30f4f2
|
6947ea29250a8ee2d308b3bfcc4311dc9ecec17b
|
/spim_registration/timelapse/KLB_2_BDVXML.py
|
17c81a728cd1fa2d88344618d54dbeae4231bfcf
|
[
"BSD-3-Clause"
] |
permissive
|
Xqua/snakemake-workflows
|
86361beb61021b0f9f23c7e213a2b27ff6e473da
|
28980575bedeb1452ae54bb5771195080907de66
|
refs/heads/master
| 2020-05-18T02:34:21.676562
| 2019-05-16T20:18:26
| 2019-05-16T20:18:26
| 184,119,453
| 0
| 0
|
BSD-3-Clause
| 2019-04-29T18:04:34
| 2019-04-29T18:04:33
| null |
UTF-8
|
Python
| false
| false
| 6,255
|
py
|
#!/usr/bin/env python3
import xmltodict
import sys
from optparse import OptionParser
import os
from collections import OrderedDict
import pyklb
parser = OptionParser()
parser.add_option("-b", "--basedir", dest="basedir", type="string",
help="[REQUIRED] KLB file root folder")
parser.add_option("-r", "--res", dest="res", type="string",
help="[OPTIONAL] pixel resolution of axes in um comma separated: x_res,y_res,z_res")
parser.add_option("-o", "--output", dest="outpath", type="string",
help="[REQUIRED] mamut XML output file path")
(options, args) = parser.parse_args()
if options.res:
res = [float(r) for r in options.res.split(',')]
else:
res = [1.0, 1.0, 1.0]
# First we need to get the files and organize them by ViewSetup
root = os.listdir(options.basedir)
spms = [spm for spm in root if 'SPM' in spm]
tms = [tm for tm in os.listdir(os.path.join(options.basedir, spms[0])) if 'TM' in tm]
chms = [chm for chm in os.listdir(os.path.join(options.basedir, spms[0], tms[0])) if 'CM' in chm and '.klb' in chm]
cms = []
chs = []
for el in chms:
s = el.strip().split('_')
cm = [i for i in s if 'CM' in i][0]
ch = [i for i in s if 'CH' in i][0].split('.')[0]
if cm not in cms:
cms.append(cm)
if ch not in chs:
chs.append(ch)
# Checking that all the files are here !
error = False
for spm in spms:
for tm in tms:
for cm in cms:
for ch in chs:
path = os.path.join(options.basedir, spm, tm, "{}_{}_{}_{}.klb".format(spm, tm, cm, ch))
if not os.path.isfile(path):
print("File is missing:", path)
                    error = True
if error:
print("Error during file checking... Some files are missing... Exiting")
sys.exit(1)
last_tm = sorted(tms)[-1]
# Define the XML
templates = []
viewsetups = []
id = 0
s = 0
for spm in spms:
c = 0
for cm in cms:
h = 0
for ch in chs:
path = os.path.join(options.basedir, spm, last_tm, "{}_{}_{}_{}.klb".format(spm, last_tm, cm, ch))
template = OrderedDict([('template',
path),
('timeTag', 'TM')])
headers = pyklb.readheader(path)
dims = headers['imagesize_tczyx'][2:]
viewsetup = OrderedDict([('id', str(id)),
('name', str(id)),
('size', '{} {} {}'.format(dims[2], dims[1], dims[0])),
('voxelSize',
OrderedDict([('unit', 'µm'), ('size', '{} {} {}'.format(res[0], res[1], res[2]))])),
('attributes',
OrderedDict([('illumination', '0'),
('channel', str(h)),
('tile', str(s)),
('angle', str(c))]))])
templates.append(template)
viewsetups.append(viewsetup)
h += 1
id += 1
c += 1
s += 1
# Making the attributes part
attr = []
attr.append(OrderedDict([('@name', 'illumination'),
('Illumination', OrderedDict([('id', '0'), ('name', '0')]))]))
channels = OrderedDict([('@name', 'channel'),
('Channel', [])])
for ch in range(len(chs)):
channels['Channel'].append(OrderedDict([('id', '{}'.format(ch)), ('name', '{}'.format(ch))]))
attr.append(channels)
tiles = OrderedDict([('@name', 'tile'),
('Tile',
[])])
for spm in range(len(spms)):
tiles['Tile'].append(OrderedDict([('id', '{}'.format(spm)), ('name', '{}'.format(spm))]))
attr.append(tiles)
angles = OrderedDict([('@name', 'angle'),
('Angle',[])])
for cm in range(len(cms)):
angles['Angle'].append(OrderedDict([('id', '{}'.format(cm)), ('name', '{}'.format(cm))]))
attr.append(angles)
registrations = []
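# one affine transform per (timepoint, setup); identity except for the z term,
# which rescales z relative to x to compensate for anisotropic voxels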
for tm in range(len(tms)):
for s in range(len(viewsetups)):
registration = OrderedDict([('@timepoint', str(tm)),
('@setup', str(s)),
('ViewTransform',
OrderedDict([('@type',
'affine'),
('affine',
'1.0 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 0.0 {} 0.0'.format(res[2]/res[0]))]))])
registrations.append(registration)
XML = OrderedDict([('SpimData',
OrderedDict([('@version', '0.2'),
('BasePath',
OrderedDict([('@type', 'relative'),
('#text', '.')])),
('SequenceDescription',
OrderedDict([('ImageLoader',
OrderedDict([('@format', 'klb'),
('Resolver',
OrderedDict([('@type',
'org.janelia.simview.klb.bdv.KlbPartitionResolver'),
('ViewSetupTemplate', templates
)]))])),
('ViewSetups',
OrderedDict([('ViewSetup', viewsetups ),
('Attributes', attr
)])),
('Timepoints',
OrderedDict([('@type', 'range'),
('first', '0'),
('last', '{}'.format(len(tms)))]))])),
('ViewRegistrations',
OrderedDict([('ViewRegistration', registrations)]))
])
)])
out = xmltodict.unparse(XML, pretty=True)
f = open(options.outpath, 'w')
f.write(out)
f.close()
|
[
"xqua@free.fr"
] |
xqua@free.fr
|
4e09941d8526d97efadcbe72fd89245084658d3f
|
3d230fb3b1922ad96d2f804d0a4edaf70c9a6688
|
/swe0/ext/portal/views.py
|
9e83a4c369659cab6a5cd4585a4724b1372b62b1
|
[] |
no_license
|
ubacm/swe0-old
|
d24a89b2801b0aac31bb016dc2f6050dca673717
|
233adb8af49d6e18e4e4d55f1c95a53b9f0bdcca
|
refs/heads/master
| 2020-03-18T17:17:35.066673
| 2018-11-18T19:07:47
| 2018-11-18T19:07:51
| 135,019,130
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 224
|
py
|
from flask import render_template
from swe0 import enabled_extensions
from . import portal_blueprint
@portal_blueprint.route('')
def index():
return render_template('portal/index.html', extensions=enabled_extensions)
|
[
"angushtlam@gmail.com"
] |
angushtlam@gmail.com
|
9afdb639ecdcd91cc372fddec76524fc52b25c3c
|
4252d5f3d4e34124259fd1985e736b47842e9a5a
|
/Tkinter - 10 - Creating a Toolbar/Tkinter - 10 - Creating a Toolbar.py
|
e97bc464f66a329fb1255b0e24d253d793cb2da7
|
[] |
no_license
|
judas79/TKinter-git-theNewBoston
|
2c7643b46e4b82d6f0f4b8527228f5f5b777ba3f
|
01f943bdd25f771787fb22b35a976fa915a9c933
|
refs/heads/master
| 2020-12-21T23:58:30.597985
| 2020-01-30T21:45:28
| 2020-01-30T21:45:28
| 236,606,985
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,947
|
py
|
from tkinter import *
def do_nothing(): # notice this function comes before the main window, i.e. root, is built
print('this does nothing')
root = Tk() # creates a blank window named root
root.geometry("500x300")
root.title('Roddy') # renames title to Roddy
#*************************** Main Menu ***************************************
menu1 = Menu(root) # building object menu from the Menu class inside main window root
root.config(menu=menu1) # configures the menu object so tkinter recognizes it as a Menu object; the menu parameter receives the menu1 object
# tkinter now knows this is a menu that goes at the top, what items it accepts etc., and prepares it for more items to be built upon it
sub_menu = Menu(menu1) # sub_menu, being a menu item, uses the Menu class and is built within menu1; this makes a blank menu
menu1.add_cascade(label='File', menu=sub_menu) # to add drop-down functionality to the empty menu window, add_cascade adds a dropdown item/button, then parameters:
# a label, and what appears in it, which is the sub menu
sub_menu.add_command(label='New Project...', command=do_nothing) # creates a item, New Project... in the sub menu that points to function do_nothing
sub_menu.add_command(label='New', command=do_nothing) # creates a item, New in the sub menu that points to function do_nothing
sub_menu.add_separator() # adds a separator to the sub menu to separate different groups within the File cascade
sub_menu.add_command(label='Exit', command=do_nothing)
edit_menu = Menu(menu1)
menu1.add_cascade(label='Edit', menu=edit_menu) # Be careful to change the menu='' parameter when copy pasting
edit_menu.add_command(label='Undo Typing', command=do_nothing)
#******************************** Toolbar **************************************
toolbar = Frame(root, bg='blue') # create the toolbar in the Frame and make the background blue
image_button = Button(toolbar, text='Create Image', command=do_nothing) # create a Button in the toolbar with text create image on it, that points to def do_nothing
image_button.pack(side=LEFT, padx=2, pady=2) # puts the button on the left-hand side of the frame and displays it; padx is padding on the sides, pady on top and bottom
print_button = Button(toolbar, text='Print Image', command=do_nothing)
print_button.pack(side=LEFT, padx=2, pady=2)
toolbar.pack(side=TOP, fill=X) # puts the toolbar in the top below the menu window, fills the sides of the buttons, when frame is resized
root.mainloop() # mainloop keeps the root looping infinitely or until closed, so the window remains visible on the screen.
|
[
"robotrods@gmail.com"
] |
robotrods@gmail.com
|
8710c34e56994907a5c2a8375d3db551668509d1
|
5e5b3776acc441b77a970db29d99e850b79be65e
|
/gist/define.py
|
3f301f552c4ea79b674caf06a826dd54b686c3f2
|
[
"Apache-2.0"
] |
permissive
|
eightnoteight/x-gist
|
f153ae7c5ae5d9335af23ba54c0c668b71a5c157
|
ec65e5193f989238a026a8b239eabf61c5ec7a8d
|
refs/heads/master
| 2020-12-24T16:23:40.157360
| 2014-08-15T04:54:14
| 2014-08-15T04:54:14
| 22,781,473
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 244
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
class struct(dict):
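    # a dict whose keys double as attributes: binding self.__dict__ to the dict
    # itself makes struct(name='x').name equivalent to struct(name='x')['name']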
def __init__(self, **kwargs):
super(struct, self).__init__(**kwargs)
self.__dict__ = self
client = struct(name="x-gist",
url ="https://github.com/eightnoteight/x-gist")
|
[
"mr.eightnoteight@gmail.com"
] |
mr.eightnoteight@gmail.com
|
bb42c4ef5ed1b734e189b8498c3f6455c4e3163f
|
d68048667b6bfbe83319db3662e2bae375c88502
|
/Inventory.py
|
1ff792023f088cc9d5bd8d1c024c1f7c68d2f8b9
|
[] |
no_license
|
CherepoGG/ArenaRPG_on_python
|
534e2ac355a2c72d4f8b09175a086a0678e9163d
|
260bd567f85b227b756c40dab7b3efa429463228
|
refs/heads/master
| 2022-11-09T18:51:34.035233
| 2020-06-23T06:49:37
| 2020-06-23T06:49:37
| 264,398,814
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 578
|
py
|
class Inventory:
def __init__(self):
self.inventory = []
self.limit = 10
self.current_limit = 0
    def add_item(self, item):
        if self.current_limit < self.limit:
            self.current_limit += 1
            self.inventory.append(item)
        else:
            print('Your inventory is full')
    def delete_item(self, item):
        if item in self.inventory:
            self.inventory.remove(item)
            self.current_limit -= 1
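# Minimal usage sketch (item names here are placeholders):
# inventory = Inventory()
# inventory.add_item('sword')
# inventory.delete_item('sword')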
|
[
"noreply@github.com"
] |
CherepoGG.noreply@github.com
|
ac2dea16ccd2b71756d09ad35c1724375eada021
|
5c2f520dde0cf8077facc0fcd9a92bc1a96d168b
|
/microbenchmarks/exceptions_ubench.py
|
8260f1764de23db08de7146823a2733eba4417dc
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"Python-2.0"
] |
permissive
|
nagyist/pyston
|
b613337a030ef21a3f03708febebe76cedf34c61
|
14ba2e6e6fb5c7316f66ccca86e6c6a836d96cab
|
refs/heads/master
| 2022-12-24T03:56:12.885732
| 2015-02-25T11:11:08
| 2015-02-25T11:28:13
| 31,314,596
| 0
| 0
|
NOASSERTION
| 2022-12-17T08:15:11
| 2015-02-25T13:24:41
|
Python
|
UTF-8
|
Python
| false
| false
| 241
|
py
|
def f():
# Try to eliminate as much non-exception stuff as possible:
from __builtin__ import Exception
e = Exception()
for i in xrange(100000):
try:
raise e
except Exception:
pass
f()
|
[
"kmod@dropbox.com"
] |
kmod@dropbox.com
|
b37361e125867ab72a8e5057b71914f5bfc99446
|
2ed0242e9ec6b8fb4fda745fafe715e787ebe8bd
|
/blog/migrations/0008_post_update_date.py
|
77bd27e35b841c8a5f2b79d22ae64156b6ab8d1d
|
[] |
no_license
|
mugan86/camperp
|
95793ae7d7e644dcdb1fab425f71315d66feb4ff
|
597573c4d82d9963acc2ef4dc56ae4cc08ed7667
|
refs/heads/master
| 2021-01-12T12:51:54.094600
| 2016-10-20T18:35:18
| 2016-10-20T18:35:18
| 69,386,214
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 498
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-14 18:10
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('blog', '0007_auto_20161008_0852'),
]
operations = [
migrations.AddField(
model_name='post',
name='update_date',
field=models.DateField(default=django.utils.timezone.now),
),
]
|
[
"mugan86@gmail.com"
] |
mugan86@gmail.com
|
91d66a042dd155e0b4c535b6fd0d0b0c5f21e6bc
|
5c8139f1e57e06c7eaf603bd8fe74d9f22620513
|
/PartB/Py括号的配对问题.py
|
07341985e258c672ef6b2f14eda73e2fd3628ce7
|
[] |
no_license
|
madeibao/PythonAlgorithm
|
c8a11d298617d1abb12a72461665583c6a44f9d2
|
b4c8a75e724a674812b8a38c0202485776445d89
|
refs/heads/master
| 2023-04-03T07:18:49.842063
| 2021-04-11T12:02:40
| 2021-04-11T12:02:40
| 325,269,130
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 507
|
py
|
lst = input().strip()
stack = []
flag = True
for i in lst:
if i == '(':
stack.append('(')
elif i == '[':
stack.append('[')
elif i == ')':
if len(stack) > 0 and stack[-1] == '(':
stack.pop(-1)
else:
flag = False
break
elif i == ']':
if len(stack) > 0 and stack[-1] == '[':
stack.pop(-1)
else:
flag = False
break
if flag:
print('true')
else:
print('false')
|
[
"2901429479@qq.com"
] |
2901429479@qq.com
|
57362ba0e53034e56e33d83ce38fe14c64c59506
|
79fb5c5552645e3b2d4428a857ee46d448183eac
|
/tools/mvTrainer.py
|
ac2ecf7f0d4cb405ad41411c91697ecdebe4fd64
|
[] |
no_license
|
ChengChen2020/mvter
|
cac2e582880b1bb500080c8ecd437db9e4507882
|
aafe576b1a4d9b6233eec714e81763c26e84978e
|
refs/heads/main
| 2023-06-17T02:22:49.371659
| 2021-06-25T03:24:28
| 2021-06-25T03:24:28
| 372,432,718
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,879
|
py
|
from tqdm import tqdm
import pandas as pd
import torch
import torch.nn as nn
import torch.nn.functional as F
class mvterTrainer():
def __init__(self, log_dir, model, train_loader, test_loader, optimizer, scheduler, num_views=12, w=1.0):
self.log_dir = log_dir
self.model = model
self.train_loader = train_loader
self.test_loader = test_loader
self.optimizer = optimizer
self.scheduler = scheduler
self.num_views = num_views
self.w = w
def train(self, resume, epoch_start, epochs):
# print(self.train_loader)
results = {'train_loss': [], 'test_acc@1': []}
model = nn.DataParallel(self.model).cuda()
        if resume != '':
checkpoint = torch.load(resume)
model.load_state_dict(checkpoint['state_dict'])
self.optimizer.load_state_dict(checkpoint['optimizer'])
self.scheduler.load_state_dict(checkpoint['scheduler'])
epoch_start = checkpoint['epoch'] + 1
print('Loaded from: {}'.format(resume))
for epoch in range(epoch_start, epochs + 1):
model.train()
total_loss, total_num, train_bar = 0.0, 0, tqdm(self.train_loader)
for label, euler, origin, rotate, _, _ in train_bar:
origin, rotate = origin.cuda(), rotate.cuda()
label, euler = label.cuda(), euler.cuda()
pred_labels, pred_eulers = model(origin, rotate)
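                # joint objective: cross-entropy on the class labels plus a
                # weighted MSE on the predicted Euler angles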
loss_m = nn.MSELoss()
loss_task = nn.CrossEntropyLoss()
loss = loss_task(pred_labels.float(), label.long()) + self.w * loss_m(pred_eulers.float(), euler.float())
self.optimizer.zero_grad()
loss.backward()
self.optimizer.step()
total_num += self.train_loader.batch_size
total_loss += loss.item() * self.train_loader.batch_size
train_bar.set_description('Train Epoch: [{}/{}], lr: {:.6f}, Loss: {:.4f}'.format(epoch, epochs, self.optimizer.param_groups[0]['lr'], total_loss / total_num))
results['train_loss'].append(total_loss / total_num)
model.eval()
total_top1, total_num = 0.0, 0
with torch.no_grad():
test_bar = tqdm(self.test_loader)
for label, euler, origin, rotate, _, _ in test_bar:
origin, rotate = origin.cuda(), rotate.cuda()
label, euler = label.cuda(), euler.cuda()
pred_labels, _ = model(origin, rotate)
total_num += origin.size(0)
_, predicted = torch.max(pred_labels, dim=1)
total_top1 += (predicted == label).float().sum().item()
test_bar.set_description('Test Epoch: [{}/{}] Acc@1:{:.2f}%'.format(epoch, epochs, total_top1 / total_num * 100))
results['test_acc@1'].append(total_top1 / total_num * 100)
self.scheduler.step()
data_frame = pd.DataFrame(data=results, index=range(epoch_start, epoch + 1))
data_frame.to_csv(self.log_dir + '/log.csv', index_label='epoch')
torch.save({'epoch': epoch, 'state_dict': model.state_dict(), 'optimizer': self.optimizer.state_dict(), 'scheduler': self.scheduler.state_dict()}, self.log_dir + '/model_last.pth')
|
[
"cc6858@nyu.edu"
] |
cc6858@nyu.edu
|
1d5bd8721529674e5f6853512782765366b3340d
|
9361fe5fbb567e23f5ce3315c2aaa7675411fa02
|
/hardware_interface/scripts/serial_node_mega.py
|
9c77bb349a8d0e3ff2b276d461aa0a3af5aeac3a
|
[] |
no_license
|
dannypadilla/Robosub2018
|
d2fb1c546575efb1579c260e50decc16bfcf7a22
|
509cb13b5d297418cc86e45e1ac35354b97a569e
|
refs/heads/master
| 2020-03-25T19:07:08.888322
| 2018-07-28T23:28:15
| 2018-07-28T23:28:15
| 144,065,609
| 1
| 0
| null | 2018-08-08T20:44:17
| 2018-08-08T20:44:17
| null |
UTF-8
|
Python
| false
| false
| 4,100
|
py
|
#!/usr/bin/env python
#####################################################################
# Software License Agreement (BSD License)
#
# Copyright (c) 2011, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
__author__ = "mferguson@willowgarage.com (Michael Ferguson)"
import rospy
from rosserial_python import SerialClient, RosSerialServer
from serial import SerialException
from time import sleep
import multiprocessing
import sys
if __name__=="__main__":
rospy.init_node("serial_node_motors")
rospy.loginfo("ROS Serial Python Node - Motors")
port_name = rospy.get_param('~port','/dev/serial/by-id/usb-Arduino_Srl_Arduino_Mega_854393131303519151E1-if00')
# baud = int(rospy.get_param('~baud','57600'))
baud = int(rospy.get_param('~baud','115200'))
# for systems where pyserial yields errors in the fcntl.ioctl(self.fd, TIOCMBIS, \
# TIOCM_DTR_str) line, which causes an IOError, when using simulated port
fix_pyserial_for_test = rospy.get_param('~fix_pyserial_for_test', False)
# TODO: should these really be global?
tcp_portnum = int(rospy.get_param('/rosserial_embeddedlinux/tcp_port', '11411'))
fork_server = rospy.get_param('/rosserial_embeddedlinux/fork_server', False)
# TODO: do we really want command line params in addition to parameter server params?
sys.argv = rospy.myargv(argv=sys.argv)
if len(sys.argv) >= 2 :
port_name = sys.argv[1]
if len(sys.argv) == 3 :
tcp_portnum = int(sys.argv[2])
if port_name == "tcp" :
server = RosSerialServer(tcp_portnum, fork_server)
rospy.loginfo("Waiting for socket connections on port %d" % tcp_portnum)
try:
server.listen()
except KeyboardInterrupt:
rospy.loginfo("got keyboard interrupt")
finally:
rospy.loginfo("Shutting down")
for process in multiprocessing.active_children():
rospy.loginfo("Shutting down process %r", process)
process.terminate()
process.join()
rospy.loginfo("All done")
else : # Use serial port
while not rospy.is_shutdown():
rospy.loginfo("Connecting to %s at %d baud" % (port_name,baud) )
try:
client = SerialClient(port_name, baud, fix_pyserial_for_test=fix_pyserial_for_test)
client.run()
except KeyboardInterrupt:
break
except SerialException:
sleep(1.0)
continue
except OSError:
sleep(1.0)
continue
|
[
"synergy1095@gmail.com"
] |
synergy1095@gmail.com
|
7d28daded65b8c89fa59a9043486597527a861fd
|
a6f64fbc3e97962f8443cba0b499570268f85697
|
/book/templatetags/markdown.py
|
4d47d4e036bd2d7f28ff3812c5d61d8eb5750d01
|
[] |
no_license
|
xggrnx/translate_problem_solving
|
f8b55c4aadd893c6685bbf1bfa295531fee23d10
|
b8c19da196abc0ddb3ad3ebf62c252d1c70100dc
|
refs/heads/master
| 2021-01-15T20:44:07.994835
| 2014-06-01T08:15:15
| 2014-06-01T08:15:15
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 442
|
py
|
from markdown2 import markdown
from django import template
from django.template.defaultfilters import stringfilter
from django.utils.safestring import mark_safe
from django.utils.encoding import force_unicode
register = template.Library()
@register.filter(is_safe=True)
@stringfilter
def markdown2(value):
return markdown(force_unicode(value),
extras=['wiki-tables',
'fenced-code-blocks'])
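# Template usage (assuming this templatetag module is loaded with {% load %}):
# {{ value|markdown2 }}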
|
[
"shamkir@gmail.com"
] |
shamkir@gmail.com
|
67f7daaefaae8d776a203ce1eb65de7d4fc4810a
|
cb57a9ea4622b94207d12ea90eab9dd5b13e9e29
|
/lc/python/0339_nested_list_weight_sum.py
|
4db7961460d959975511fefeea17b42164eec24f
|
[] |
no_license
|
boknowswiki/mytraning
|
b59585e1e255a7a47c2b28bf2e591aef4af2f09a
|
5e2f6ceacf5dec8260ce87e9a5f4e28e86ceba7a
|
refs/heads/master
| 2023-08-16T03:28:51.881848
| 2023-08-10T04:28:54
| 2023-08-10T04:28:54
| 124,834,433
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,821
|
py
|
# dfs
# time O(n)
# space O(D), where D is the maximum nesting depth (recursion stack)
# """
# This is the interface that allows for creating nested lists.
# You should not implement it, or speculate about its implementation
# """
#class NestedInteger:
# def __init__(self, value=None):
# """
# If value is not specified, initializes an empty list.
# Otherwise initializes a single integer equal to value.
# """
#
# def isInteger(self):
# """
# @return True if this NestedInteger holds a single integer, rather than a nested list.
# :rtype bool
# """
#
# def add(self, elem):
# """
# Set this NestedInteger to hold a nested list and adds a nested integer elem to it.
# :rtype void
# """
#
# def setInteger(self, value):
# """
# Set this NestedInteger to hold a single integer equal to value.
# :rtype void
# """
#
# def getInteger(self):
# """
# @return the single integer that this NestedInteger holds, if it holds a single integer
# Return None if this NestedInteger holds a nested list
# :rtype int
# """
#
# def getList(self):
# """
# @return the nested list that this NestedInteger holds, if it holds a nested list
# Return None if this NestedInteger holds a single integer
# :rtype List[NestedInteger]
# """
class Solution:
def depthSum(self, nestedList: List[NestedInteger]) -> int:
if not nestedList:
return 0
ret = 0
def dfs(nl, level):
nonlocal ret
for nnl in nl:
if nnl.isInteger():
ret += nnl.getInteger()*level
else:
dfs(nnl.getList(), level+1)
return
dfs(nestedList, 1)
return ret
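# Worked example: depthSum on [[1,1],2,[1,1]] gives 1*2 + 1*2 + 2*1 + 1*2 + 1*2 = 10,
# since each integer is weighted by its nesting depth.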
|
[
"noreply@github.com"
] |
boknowswiki.noreply@github.com
|
2400ef6b3a04d86bc85202a882efac402f7cff96
|
4283b47dda370393f12bff63e8528c90ed3bff2e
|
/odoo_configuracion/modules/modulo_prueba/models/models.py
|
47c7d8f943137b0fe276d7a5b0e0960bbddfbbdf
|
[
"MIT"
] |
permissive
|
jaqconsultor/odoo_informacion
|
11acb0a338afcda34f81d3137c4e10a42bf7d306
|
9e33a758e086fccccb4c1cd3420c3891efb5de56
|
refs/heads/main
| 2023-04-17T03:23:18.599794
| 2021-05-03T23:55:20
| 2021-05-03T23:55:20
| 364,088,657
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 495
|
py
|
# -*- coding: utf-8 -*-
# from odoo import models, fields, api
# class modulo_prueba(models.Model):
# _name = 'modulo_prueba.modulo_prueba'
# _description = 'modulo_prueba.modulo_prueba'
# name = fields.Char()
# value = fields.Integer()
# value2 = fields.Float(compute="_value_pc", store=True)
# description = fields.Text()
#
# @api.depends('value')
# def _value_pc(self):
# for record in self:
# record.value2 = float(record.value) / 100
|
[
"soluciones@jaqconsultor.com"
] |
soluciones@jaqconsultor.com
|
4d1022dc40f697e79fed4872a64c1e1fada292a0
|
89b7056826e2df8525ebdb279491cf660493893b
|
/flowalyzer.py
|
40be1741b1bf7dd1eda979be7b03fd6c712a3b0f
|
[] |
no_license
|
constcast/flowalyzer
|
25ae94e6be415c47bfd1ccfb856c82e38a5d4a3e
|
f17c14384ced189c7082824bee68422a076f5ade
|
refs/heads/master
| 2020-05-17T03:44:52.695367
| 2012-03-14T16:44:50
| 2012-03-14T16:44:50
| 2,650,266
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 820
|
py
|
#!/usr/bin/env python
from optparse import OptionParser
from mainmodule import getMainModule
import sys, yaml
if __name__ == "__main__":
parser = OptionParser("usage: %prog [options]")
parser.add_option('-c', '--config', dest="configfile",
help = "config file in yml format (required)")
(options, args) = parser.parse_args()
if options.configfile != None:
try:
cf = file(options.configfile, 'r')
config = yaml.load(cf)
except Exception, e:
print "Could not open config file \"%s\": %s" % (options.configfile, e)
sys.exit(-1)
else:
# try to read default config file config.yml if no other config has been read
try:
cf = file('config.yml', 'r')
config = yaml.load(cf)
except Exception, e:
parser.print_help()
sys.exit(-1)
main = getMainModule(config)
main.run()
|
[
"braun@net.in.tum.de"
] |
braun@net.in.tum.de
|
72e463abce2ec31cbab727c3ad66951dd1293acc
|
bde81bb9686a863e0010607e09ffcf2b80a15ee0
|
/Keras_Version/model2.py
|
65c8623146f6489984ca9ded51cb9b54a714c0d1
|
[] |
no_license
|
qss2012/DeepGraphCNNforTexts
|
a813117275d3b3c6211b7694c79093e79b543732
|
bf0bb5441ecea58c5556a9969064bec074325c7a
|
refs/heads/master
| 2020-04-25T14:59:42.227391
| 2018-08-10T06:50:26
| 2018-08-10T06:50:26
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,254
|
py
|
# -*- coding: utf-8 -*-
from keras.layers import (
Input,
Activation,
Dropout,
Flatten,
Dense,
Reshape)
from keras.layers.convolutional import Convolution2D
from keras.layers.normalization import BatchNormalization
from keras.models import Model
from keras.layers.pooling import MaxPool2D, AveragePooling2D
import time
def mpool(type, input, size, stride):
if type == "max":
return MaxPool2D(pool_size=(size, size), strides=stride, padding='same')(input)
elif type == "avg":
return AveragePooling2D(pool_size=(size, size), strides=stride, padding='same')(input)
else:
raise ValueError("pooling type invalid")
def active(type, input):
if type == "relu":
return Activation("relu")(input)
elif type == "sigmoid":
return Activation("sigmoid")(input)
elif type == "tanh":
return Activation("tanh")(input)
elif type == "softmax":
return Activation("softmax")(input)
else:
raise ValueError("activation type invalid")
def gcnn(depth=4, mkenerls=[64, 64, 64, 32], conv_conf=[2, 1], pooling_conf=["max", 2, 2], bn=False, dropout=True,
rate=0.8, activation="relu", conf=[50, 300, 10], output_dim=20):
assert depth == len(mkenerls)
mchannel, mheight, mwidth = conf
conv_size, conv_stride = conv_conf
pooling_type, pooling_size, pooling_stride = pooling_conf
input = Input(shape=(mchannel, mheight, mwidth))
conv1 = Convolution2D(filters=mkenerls[0], kernel_size=(1, mwidth), strides=(1, 1), padding="valid")(input)
# bn1 = BatchNormalization(axis=1)(conv1)
activation1 = Activation("relu")(conv1)
pool1 = MaxPool2D(pool_size=(2, 1), strides=(2, 1), padding='same')(activation1)
_k1, _n1 = map(int, pool1.shape[1:3])
reshape_pool1 = Reshape((1, _k1, _n1))(pool1)
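    # the 1 x width convolution collapses the feature axis, so reshape the pooled
    # output back into a single-channel map before the next convolution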
conv2 = Convolution2D(filters=mkenerls[1], kernel_size=(1, _n1), strides=(1, 1), padding="valid")(reshape_pool1)
# bn2 = BatchNormalization(axis=1)(conv2)
activation2 = Activation("relu")(conv2)
pool2 = MaxPool2D(pool_size=(2, 1), strides=(2, 1), padding='same')(activation2)
_k2, _n2 = map(int, pool2.shape[1:3])
reshape_pool2 = Reshape((1, _k2, _n2))(pool2)
    conv3 = Convolution2D(filters=mkenerls[2], kernel_size=(1, _n2), strides=(1, 1), padding="valid")(reshape_pool2)
# bn2 = BatchNormalization(axis=1)(conv2)
activation3 = Activation("relu")(conv3)
pool3 = MaxPool2D(pool_size=(2, 1), strides=(2, 1), padding='same')(activation3)
    _k3, _n3 = map(int, pool3.shape[1:3])
    reshape_pool3 = Reshape((1, _k3, _n3))(pool3)
    conv4 = Convolution2D(filters=mkenerls[3], kernel_size=(1, _n3), strides=(1, 1), padding="valid")(reshape_pool3)
# bn3 = BatchNormalization(axis=1)(conv3)
activation4 = Activation("relu")(conv4)
pool4 = MaxPool2D(pool_size=(2, 1), strides=(2, 1), padding='same')(activation4)
# step_results = [input]
# for i in range(depth - 1):
# mconv = Convolution2D(
# nb_filter=mkenerls[i], nb_row=conv_size, nb_col=conv_size, strides=(conv_stride, conv_stride),
# border_mode="same")(step_results[-1])
# if bn:
# mbn = BatchNormalization(axis=1)(mconv)
# else:
# mbn = mconv
# mactivation = active(activation, mbn)
# mpooling = mpool(pooling_type, mactivation, pooling_size, pooling_stride)
# if dropout:
# mdropout = Dropout(rate=rate, seed=time.time())(mpooling)
# else:
# mdropout = mpooling
# step_results.append(mdropout)
# last_conv = Convolution2D(
# nb_filter=mkenerls[-1], nb_row=conv_size, nb_col=conv_size, border_mode="same")(step_results[-1])
# last_pooling = mpool(pooling_type, last_conv, pooling_size, pooling_stride)
mFlatten = Flatten()(pool4)
    ms_output = Dense(units=128)(mFlatten)
    msinput = active("sigmoid", ms_output)
    moutput = Dense(units=output_dim)(msinput)
    output = active("softmax", moutput)
    model = Model(inputs=input, outputs=output)
return model
if __name__ == '__main__':
model = gcnn()
model.summary()
|
[
"noreply@github.com"
] |
qss2012.noreply@github.com
|
0193b01dc0137c46fdb0cd40dfa3ad9c171772d2
|
f300699fc84b497f1a2de47a1f7994869116b8d5
|
/venv/bin/pip
|
0f049d8ae325889d243688804b814035376004d7
|
[] |
no_license
|
Maurice-Muthaka/Python-Flask-Blog
|
39a0aad6cb8e16c3e045d9103e9c8ba3fe1e37c7
|
591841c02e89d001e63ca6b6cd23d1f5d5bba21b
|
refs/heads/master
| 2022-12-26T23:48:41.267906
| 2020-10-13T20:30:14
| 2020-10-13T20:30:14
| 301,540,184
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 399
|
#!/Users/mac/PycharmProjects/newblog/venv/bin/python
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip'
__requires__ = 'pip==19.0.3'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==19.0.3', 'console_scripts', 'pip')()
)
|
[
"mauricemuthaka@gmail.com"
] |
mauricemuthaka@gmail.com
|
|
55d22da914c4ade053c762b9f5ccd29547f0584f
|
718fea511bfd7e8c16307909c0662a423738ff83
|
/makeModel.py
|
987f2d96e0023b85be875d167b6b04e9c772eeaf
|
[] |
no_license
|
chudeng/study
|
4123b5108694585822246becbdef76f967f9c56c
|
568fdc79d69afc204af5395d947defa2907a3c25
|
refs/heads/master
| 2023-04-02T07:54:19.812935
| 2021-04-09T08:24:00
| 2021-04-09T08:24:00
| 342,161,625
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,426
|
py
|
# Import packages
# Import the packages needed to load the data and build the model.
from keras.datasets import mnist
from keras.utils import np_utils
from keras.models import Sequential
from keras.layers import Dense, Activation
# Load and split the data
(train_data, train_label), (validation_data, validation_label) = mnist.load_data()
# Check the loaded data
# The target data was loaded above and split into training and validation sets.
for images in train_data[:2]:
for image in images:
for dat in image:
print('{:3}'.format(dat), end='')
print()
print(end="\n\n")
# Data preprocessing
# Preprocess the data into a form the neural network model can train on.
# Reshape the integer image planes into flat 2-D float vectors scaled to [0, 1].
train_data = train_data.reshape(train_data.shape[0], 784).astype('float64') / 255
validation_data = validation_data.reshape(validation_data.shape[0], 784).astype('float64') / 255
# Set the hyperparameters
# Create the model and add layers to it along with their hyperparameter values.
model = Sequential()
model.add(Dense(128, input_dim=784, activation='relu'))
model.add(Dense(10, activation='softmax'))
# Build the model
# Compile the model with a loss function, optimizer, and metrics.
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
# Prepare the labels
# One-hot encode the labels (each class becomes a unique index vector) so they
# match the categorical cross-entropy loss; this must happen before training.
train_label = np_utils.to_categorical(train_label, 10)
validation_label = np_utils.to_categorical(validation_label, 10)
# Run training
# Train the model using the prepared training data.
model.fit(train_data, train_label, validation_data=(validation_data, validation_label), epochs=10, batch_size=200, verbose=0)
# Evaluate the model
# Evaluate the model on the validation data and check the resulting accuracy.
accuracy = model.evaluate(validation_data, validation_label)[1]
print('\nAccuracy: {:.2f}'.format(accuracy))
|
[
"chudeng@hanmail.net"
] |
chudeng@hanmail.net
|
a338badeef81d66bc04c82fec02b2e5181f8c5e2
|
9ce37874388961f28344931f3fb051d7a0dbd4c5
|
/hikerrank/models.py
|
126e2433fc898222141f6d8afddbe95c1ed9de9a
|
[] |
no_license
|
jerrypeng02/Hikerrank
|
8a33968bea3a4a0cca6783ca46f2469a9b36d1ee
|
1ca21e748dc0c3274e48d75b22ce99d3ea270a4c
|
refs/heads/master
| 2023-01-23T17:59:13.879867
| 2020-12-09T05:11:45
| 2020-12-09T05:11:45
| 319,846,080
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,003
|
py
|
from django.db import models
from django.contrib.auth.models import User
from django.conf import settings
from django.db.models.signals import post_save
from django.dispatch import receiver
from rest_framework.authtoken.models import Token
User._meta.get_field('email')._unique = True
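# monkey-patch the built-in User model so that email addresses must be unique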
from datetime import datetime
def profile_picture_upload_path(instance, filename):
return '/'.join([str(instance.user.username), filename])
def profile_album_upload_path(instance, filename):
return '/'.join([str(instance.user.username), filename])
def json_default():
return {'foo': 'bar'}
# id link to user id
class Profile(models.Model):
user = models.OneToOneField(User, primary_key=True, default=None, on_delete=models.PROTECT)
bio = models.TextField(default="Tell me about yourself...", blank=True)
picture = models.FileField(default="default-picture.png", max_length=255, upload_to=profile_picture_upload_path)
class Trail(models.Model):
id = models.PositiveIntegerField(primary_key=True)
tname = models.CharField(max_length=200)
tclass = models.CharField(max_length=100)
surface = models.CharField(max_length=100)
length = models.FloatField()
backpack = models.CharField(max_length=100)
bicycle = models.CharField(max_length=100)
mountainbike = models.CharField(max_length=100)
ski = models.CharField(max_length=100)
width = models.FloatField(default=0)
difficulty = models.CharField(max_length=100)
description = models.TextField(blank=True)
map_info = models.JSONField(default=dict)
class Event(models.Model):
initiator = models.ForeignKey(User, default=None, on_delete=models.PROTECT, related_name="initiator")
name = models.CharField(max_length=200)
description = models.TextField(blank=True)
post_time = models.DateTimeField(auto_now_add=True)
event_time = models.DateTimeField(default=datetime.now)
trail = models.ForeignKey(Trail, on_delete=models.CASCADE)
headcount = models.IntegerField(default=0)
participants = models.ManyToManyField(User, related_name="participants", blank=True)
status = models.CharField(max_length=10, default="normal") # accept denied
class Message(models.Model):
author = models.ForeignKey(User, related_name='author_messages', on_delete=models.CASCADE)
content = models.TextField()
timestamp = models.DateTimeField(auto_now_add=True)
class Chat(models.Model):
event = models.OneToOneField(Event, primary_key=True, default=None, on_delete=models.CASCADE)
messages = models.ManyToManyField(Message, blank=True)
class Photo(models.Model):
picture = models.FileField(upload_to='')
content_type = models.CharField(max_length=50, default='image/jpeg')
Trail = models.ForeignKey(Trail, on_delete=models.CASCADE)
Event = models.ForeignKey(Event, on_delete=models.CASCADE)
class Album(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
picture = models.FileField(default="welcome-image.jpg", max_length=255, upload_to=profile_album_upload_path)
caption = models.TextField(blank=True)
time = models.DateTimeField(auto_now_add=True)
class CheckIn(models.Model):
trail = models.ForeignKey(Trail, on_delete=models.CASCADE)
User = models.ForeignKey(Profile, on_delete=models.CASCADE)
Time = models.DateTimeField(auto_now_add=True)
class Review(models.Model):
poster = models.ForeignKey(Profile, on_delete=models.CASCADE)
trail = models.ForeignKey(Trail, on_delete=models.CASCADE)
time = models.DateTimeField(auto_now_add=True)
rating = models.IntegerField(default=5)
Review_text = models.TextField(blank=True)
class Follow_UnFollow(models.Model):
time = models.DateTimeField(auto_now_add=True)
user = models.ForeignKey(User, on_delete=models.CASCADE, related_name="this_user")
following = models.ForeignKey(User, on_delete=models.CASCADE, related_name="following")
class PendingRequest(models.Model):
event = models.ForeignKey(Event, on_delete=models.CASCADE)
user = models.ForeignKey(User, on_delete=models.CASCADE)
text = models.TextField(blank=True)
time = models.DateTimeField(auto_now_add=True)
class ProcessedRequest(models.Model):
event = models.ForeignKey(Event, on_delete=models.CASCADE)
user = models.ForeignKey(User, on_delete=models.CASCADE)
status = models.CharField(max_length=10) # accept denied
time = models.DateTimeField(auto_now_add=True)
class BroadcastMessage(models.Model):
audience = models.ManyToManyField(User, related_name="audience", blank=True)
message = models.TextField(blank=True)
messageType = models.TextField(blank=True) # cancelevent, acceptrequest, declinerequest
time = models.DateTimeField(auto_now_add=True)
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def create_auth_token(sender, instance=None, created=False, **kwargs):
if created:
Token.objects.create(user=instance)
|
[
"jerrypeng02@gmail.com"
] |
jerrypeng02@gmail.com
|
0646d3e78b4be5c3a140963336474e911eb00d30
|
6518f97e1763fe85cd60242f5b544218b6843104
|
/DaEun/little.py
|
ee4b7d8e2460a34c7222d1c8b694df9f11ac4e0e
|
[] |
no_license
|
KJJH/CapstoneDesignProject
|
0d912ffaeb1e2952e973fc7e99fbc153018b7533
|
79b85b54b5ab290325f144c0ffc2034d0e2468ef
|
refs/heads/master
| 2021-01-19T20:22:46.966276
| 2017-06-19T08:32:01
| 2017-06-19T08:32:01
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,014
|
py
|
# -*- coding: utf-8 -*-
from bs4 import BeautifulSoup
import urllib.request
import re
import pymysql
""" 이미지 다운로드 class """
class crawlerImageDownload:
    def imageDownload(self, imageUrl, name):
image = urllib.request.urlopen(imageUrl)
fileName = 'image/' + name[0] + '.jpg'
imageFile = open(fileName, 'wb')
imageFile.write(image.read())
imageFile.close()
if __name__ == '__main__':
    # Open the MySQL connection
conn = pymysql.connect(host='ec2-13-124-80-232.ap-northeast-2.compute.amazonaws.com', user='root', password='root',
db='forstyle', charset='utf8')
    # Create a cursor from the connection
curs = conn.cursor()
sql = """insert into product(product_brand,product_name,product_cost,product_clothes_label,product_shopping_img_url,product_shopping_url)
values (%s, %s, %s, %s, %s, %s)"""
url = [["http://www.naning9.com/shop/list.php?cate=0S01", "SLEEVELESS"],
["http://www.naning9.com/shop/list.php?cate=0S02", "TEE"],
["http://www.naning9.com/shop/list.php?cate=0S04", "SHIRT&BLOUSE"],
["http://www.naning9.com/shop/list.php?cate=0S05", "KINT"],
["http://www.naning9.com/shop/list.php?cate=0S06", "HOOD"],
["http://www.naning9.com/shop/list.php?cate=0D", "TRAINING SET"],
["http://www.naning9.com/shop/list.php?cate=0T01", "JUMPER"],
["http://www.naning9.com/shop/list.php?cate=0T01", "JACKET"],
["http://www.naning9.com/shop/list.php?cate=0T01", "COAT"],
["http://www.naning9.com/shop/list.php?cate=0T01", "VEST"],
["http://www.naning9.com/shop/list.php?cate=0T01", "CARDIGAN"],
["http://www.naning9.com/shop/list.php?cate=0T01", "ZIPUP"],
["http://www.naning9.com/shop/list.php?cate=0V01", "JEANS"],
["http://www.naning9.com/shop/list.php?cate=0V01", "SHORT"],
["http://www.naning9.com/shop/list.php?cate=0V01", "LONG"],
["http://www.naning9.com/shop/list.php?cate=0V01", "LEGGINGS"],
["http://www.naning9.com/shop/list.php?cate=0U01", "SKIRT"],
["http://www.naning9.com/shop/list.php?cate=0U01", "DRESS"],
["http://www.naning9.com/shop/list.php?cate=0U01", "Flower dress"]]
# Crawling URL
CRAWLING_URL = 'http://www.naning9.com'
    # Open the specified URL and fetch the response
for i in range(0, 19):
source_code_from_URL = urllib.request.urlopen(url[i][0])
soup = BeautifulSoup(source_code_from_URL, 'html.parser')
product_clothes_label = url[i][1]
temp = soup.find("div", {"class", "item-page"})
temp = temp.find_all("a")
page_num = int(temp[-2].get_text())
for j in range(1, page_num + 1):
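            # fetch each listing page of the current category in turn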
            source_url = url[i][0] + "&page=" + str(j)
source_code_from_URL = urllib.request.urlopen(source_url)
soup = BeautifulSoup(source_code_from_URL, 'html.parser')
for goods in soup.find_all("div", {"class", "goods_item"}):
product_shopping_img_url = goods.find("img", {"class", "MS_prod_img_m"}).get("src")
print(product_shopping_img_url)
product_shopping_url = CRAWLING_URL + goods.find("a").get("href")
print(product_shopping_url)
product_cost = goods.find("li", {"class", "price"}).get_text()
product_cost = re.sub("\s", "", str(product_cost))
print(product_cost)
# product_name = goods.find("br").get_text()
product_name = goods.find("li", {"class", "dsc"}).get_text()
product_name = re.sub("\s", "", str(product_name))
print(product_name)
curs.execute(sql, (
"naning9", product_name, product_cost, product_clothes_label, product_shopping_img_url,
product_shopping_url))
    # Commit and close the connection
conn.commit()
conn.close()
|
[
"ekdms717@kookmin.ac.kr"
] |
ekdms717@kookmin.ac.kr
|
c6953a4e5043c527d601564eb5a7b592755e8c6f
|
9db85dc0b9195f99dfef79d79ba8d556649abcf5
|
/Chen_Xueying_HW3_6.py
|
ad1b73d0c9fabf1bfa7faffa5c8441c947fb57e1
|
[] |
no_license
|
alvyxc/DSA4413
|
13b4e0ee774fcb950f8f42e9797f9e6466605d92
|
ae4577a30cd1ff7dca76443616bd39a1aabd68f7
|
refs/heads/master
| 2020-07-24T15:12:02.317982
| 2019-12-09T07:41:31
| 2019-12-09T07:41:31
| 207,965,399
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,706
|
py
|
import math
import random
found = False
threshold = 10
def merge(arr, l, m, r):
L = arr[l:m+1]
R = arr[m+1:r+1]
i = 0 # Initial index of first subarray
j = 0 # Initial index of second subarray
k = l # Initial index of merged subarray
while i < len(L) and j < len(R) :
if L[i] <= R[j]:
arr[k] = L[i]
i += 1
else:
arr[k] = R[j]
j += 1
k += 1
while i < len(L):
arr[k] = L[i]
i += 1
k += 1
while j < len(R):
arr[k] = R[j]
j += 1
k += 1
return arr
def findK(arr, l, m, r, k):
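    # two-pointer scan over the sorted range [l, r]: move inward from both ends,
    # keeping the pair straddling the midpoint m, until a pair sums to k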
low_index = l
high_index = r
while low_index <= m and high_index >= m:
sum = arr[low_index] + arr[high_index]
if sum == k:
return [arr[low_index], arr[high_index]]
elif sum < k:
low_index = low_index+1
else:
high_index = high_index-1
return []
def insertionSort(arr, begin_index, end_index):
for i in range(begin_index + 1, end_index+1):
key = arr[i]
j = i-1
while j >= begin_index and key < arr[j]:
arr[j+1] = arr[j]
j -= 1
arr[j + 1] = key
def mergeSort(arr,l,r):
global found
if (r - l) <= threshold:
insertionSort(arr, l, r)
elif l < r:
m = int(math.floor((l+r)/2))
# Sort first and second halves
mergeSort(arr, l, m)
mergeSort(arr, m+1, r)
merge(arr, l, m, r)
# Driver code to test above
arr = [12, 11, 13, 5, 6, 7]
n = len(arr)
mergeSort(arr,0,n-1)
print(arr)
arr2 = random.sample(range(1, 1000), 500)
mergeSort(arr2,0,len(arr2)-1)
print(arr2)
|
[
"xc573a@us.att.com"
] |
xc573a@us.att.com
|
0d97ba84468527ed14779b4592870c4d6e235c17
|
bf05b11814d13f49afefc5cce95dd4c141383ccb
|
/regular_siamese.py
|
2c6d1366e1f1f13be231765d0d4d8ca3d4cd8f1f
|
[] |
no_license
|
bferrarini/keras-timedistributed-siamese
|
ce7d65880f02e8f22fae1b6036b81f9e47e9e428
|
0036cfe14a340a26598fba07c3563733a727df8f
|
refs/heads/master
| 2023-02-02T13:44:16.550947
| 2020-12-21T22:49:51
| 2020-12-21T22:49:51
| 320,066,464
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,868
|
py
|
"""
Copyright (c) 2020
Author:
Bruno Ferrarini (University of Essex)
This project is licensed under MIT.
The source code is a modified version of the example shared at https://gist.github.com/mmmikael/0a3d4fae965bdbec1f9d to work with Tensorflow 2.0
A siamese network is implemented with the regular twin-branch architecture and trained on CIFAR-10.
The performance metric is the accuracy in matching samples of the same class.
.
"""
import numpy as np
from tensorflow.keras.datasets import mnist
from tensorflow.keras.datasets import cifar10
from tensorflow.keras.models import Model
from tensorflow.keras.layers import Input, Flatten, Dense, Lambda, Conv2D
from tensorflow.keras.layers import MaxPooling2D, BatchNormalization
from tensorflow.keras.layers import Activation
from tensorflow.keras.optimizers import RMSprop
from tensorflow.keras.utils import plot_model, to_categorical
import tensorflow.keras.backend as K
from matplotlib import pyplot as plt
import matplotlib.image as mpimg
import random
# this should make the process repeatable
random.seed(10)
num_classes = 10
epochs = 1
show = False #displays the model diagram
def euclidean_distance(vects):
x,y = vects
sum_square = K.sum(K.square(x-y), axis=1, keepdims=True)
return K.sqrt(K.maximum(sum_square, K.epsilon()))
def eucl_dist_output_shape(shapes):
shape1, shape2 = shapes
return (shape1[0], 1)
def contrastive_loss(y_true, y_pred):
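    # contrastive loss: similar pairs (y_true = 1) are pulled together, while
    # dissimilar pairs (y_true = 0) are penalized only when closer than the margin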
margin = 1
square_pred = K.square(y_pred)
margin_square = K.square(K.maximum(margin - y_pred, 0))
return K.mean(y_true * square_pred + (1-y_true) * margin_square)
def create_pairs(x, digit_indices):
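    # build alternating positive/negative pairs: two consecutive samples of the
    # same class, then one sample paired with a sample from a random other class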
pairs = []
labels = []
n = min([len(digit_indices[d]) for d in range(num_classes)]) - 1
for d in range(num_classes):
for i in range(n):
z1, z2 = digit_indices[d][i], digit_indices[d][i + 1]
pairs += [[x[z1], x[z2]]] # positive sample
inc = random.randrange(1, num_classes)
dn = (d + inc) % num_classes
z1, z2 = digit_indices[d][i], digit_indices[dn][i] # negative sample
pairs += [[x[z1], x[z2]]]
labels += [1.0, 0.0]
return np.array(pairs), np.array(labels)
def create_base_model(input_shape):
model_input = Input(shape=input_shape)
embedding = Conv2D(32, kernel_size=(3, 3), input_shape=input_shape)(model_input)
embedding = BatchNormalization()(embedding)
embedding = Activation(activation='relu')(embedding)
embedding = MaxPooling2D(pool_size=(2, 2))(embedding)
embedding = Conv2D(64, kernel_size=(3, 3))(embedding)
embedding = BatchNormalization()(embedding)
embedding = Activation(activation='relu')(embedding)
embedding = MaxPooling2D(pool_size=(2, 2))(embedding)
embedding = Flatten()(embedding)
embedding = Dense(128)(embedding)
embedding = BatchNormalization()(embedding)
embedding = Dense(10)(embedding)
embedding = Activation(activation='softmax')(embedding)
return Model(model_input, embedding, name="inner")
def compute_accuracy(y_true, y_pred):
'''Compute classification accuracy with a fixed threshold on distances.
'''
pred = y_pred.ravel() < 0.5
return np.mean(pred == y_true)
def accuracy(y_true, y_pred):
'''Compute classification accuracy with a fixed threshold on distances.
'''
return K.mean(K.equal(y_true, K.cast(y_pred < 0.5, y_true.dtype)))
(x_train, y_train), (x_test, y_test) = cifar10.load_data()
x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
x_train /= 255
x_test /= 255
y_train = y_train.astype('float32')
y_test = y_test.astype('float32')
input_shape = x_train.shape[1:]
print("Base model input shape: {}".format(input_shape))
time_input_shape = (2, input_shape[0], input_shape[1], input_shape[2])
print("T model input shape: {}".format(time_input_shape))
digit_indices = [np.where(y_train == i)[0] for i in range(num_classes)]
tr_pairs, tr_y = create_pairs(x_train, digit_indices)
digit_indices = [np.where(y_test == i)[0] for i in range(num_classes)]
te_pairs, te_y = create_pairs(x_test, digit_indices)
print("Shape of training pairs", tr_pairs.shape)
print("Shape of training labels", tr_y.shape)
# network definition
base_network = create_base_model(input_shape)
base_network.summary()
input_a = Input(shape=input_shape, name='input_a')
input_b = Input(shape=input_shape, name='input_b')
# because we re-use the same instance `base_network`,
# the weights of the network
# will be shared across the two branches
processed_a = base_network(input_a)
processed_b = base_network(input_b)
distance = Lambda(euclidean_distance,
output_shape=eucl_dist_output_shape)([processed_a, processed_b])
model = Model([input_a, input_b], distance)
model.summary()
if show:
# you need graphviz
plot_model(model, to_file="s-model.png", show_shapes=True, expand_nested=True)
img = mpimg.imread('s-model.png')
imgplot = plt.imshow(img)
plt.show()
rms = RMSprop()
model.compile(loss=contrastive_loss, optimizer=rms, metrics=[accuracy])
history = model.fit([tr_pairs[:, 0], tr_pairs[:, 1]], tr_y,
batch_size=128,
epochs=epochs,
validation_data=([te_pairs[:, 0], te_pairs[:, 1]], te_y)
)
# Plot training & validation accuracy values
plt.plot(history.history['accuracy'])
plt.plot(history.history['val_accuracy'])
plt.title('Model accuracy')
plt.ylabel('Accuracy')
plt.xlabel('Epoch')
plt.legend(['Train', 'Test'], loc='upper left')
plt.show()
# Plot training & validation loss values
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('Model loss')
plt.ylabel('Loss')
plt.xlabel('Epoch')
plt.legend(['Train', 'Test'], loc='upper left')
plt.show()
# Accessing and testing the model.
# You might need to train a model as a siamese but using it later as a regular single-branch model.
# 'inner' is the given name to the base model. Check the function 'create_base_model'
y_train = to_categorical(y_train, num_classes)
y_test = to_categorical(y_test, num_classes)
inner_model = model.get_layer("inner")
inner_model.summary()
# compiling is required to call evaluate for example, to train the classifier or any added layers to the siamise stage (https://github.com/aspamers/siamese/blob/master/mnist_siamese_example.py)
# in this example, we are only interest in showing the equivalence between the multi-branch training schema and timedistributed approach.
# binary_crossentropy is chosen to have class prediction accuracy as a metric.
# For the evaluation purpose the optimizer does not mind but it is required by the compile method
inner_model.compile(loss='categorical_crossentropy', metrics=['acc',], optimizer=rms)
inner_model.evaluate(x_test, y_test, verbose = True)
|
[
"bferra@essex.ac.uk"
] |
bferra@essex.ac.uk
|
f3d055a7ef6f252097e216f76b6b7183664a0b4d
|
b93dda642d64868d35a4b9875e49ae65b1110183
|
/Area of a Field.py
|
0b9a506fd5350a4681eb1a9a57c7cd5dd97d33a2
|
[] |
no_license
|
OsmiumDust/classwork
|
0e36ece06865f685b076c81d872b41717d4e5cff
|
491da103105ea5d9a15f543b4c8ca2647139cb79
|
refs/heads/master
| 2020-04-21T13:46:41.867882
| 2019-03-26T17:22:12
| 2019-03-26T17:22:12
| 169,611,212
| 0
| 0
| null | 2019-03-01T16:53:50
| 2019-02-07T17:20:57
|
Python
|
UTF-8
|
Python
| false
| false
| 259
|
py
|
print("Calculating area of a field in acres")
length = float(input("What is the length of this field in feet?"))
width = float(input("What is the width of this field in feet?"))
Area = (length * width) / 43560
print("The area of this field is:" , Area, "acres")
|
[
"noreply@github.com"
] |
OsmiumDust.noreply@github.com
|
dc4ee8f6a93c84ad4ec642b105f5776f3d804089
|
fccd8a6ce8e5369a152a1e46008d588c155264ef
|
/Lab8/C Codes/ldsfn.py
|
970ca72aec3f5a1451848bbdeb1fbd09039c5765
|
[] |
no_license
|
jagonzalezvi/AlgorithmsUN2021I
|
ec0f1cdb8d9d566c1c53873ae2bfcdcf6cfc5c0f
|
db0e9efdfaf38a918ee14da642773cf05c99ade7
|
refs/heads/main
| 2023-07-17T19:16:23.755009
| 2021-08-09T14:10:20
| 2021-08-09T14:10:20
| 344,151,366
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 848
|
py
|
# Uses python3
import sys
def fibonacci_sum_naive(n):
if n <= 1:
return n
previous = 0
current = 1
sum = 1
for _ in range(n - 1):
previous, current = current, previous + current
sum += current
return sum % 10
def get_fibonacci(n):
if n == 0:
return 0
if n == 1:
return 1
fib = [x for x in range(n+1)]
fib[0] = 0
fib[1] = 1
for i in range(2, n+1):
fib[i] = fib[i-1]+fib[i-2]
return fib[n]
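# Fibonacci numbers taken mod m repeat with the Pisano period, so F(n) mod m
# only depends on F(n mod period) -- this keeps the computation small for huge n.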
def fibonacci_huge(n, m):
    # find the Pisano period of m by scanning for the recurring prefix 0, 1
    previous, current = 0, 1
    period = 0
    while True:
        previous, current = current, (previous + current) % m
        period += 1
        if previous == 0 and current == 1:
            break
    mod_small = n % period
    fib_small = get_fibonacci(mod_small) - 1
    return fib_small % m
x = int(input())
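# the sum of the first n Fibonacci numbers is F(n+2) - 1, hence the x+2 below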
print(fibonacci_huge(x+2, 10))
|
[
"noreply@github.com"
] |
jagonzalezvi.noreply@github.com
|
66e5798ab2976e00c02d75e5a536974524bdee60
|
18c972a51361e0e1fd7999a6b1d3335352b6cf17
|
/GeneratorSoftware_FLASK/NoiseGenerator.py
|
ca8799e719affcd72bacb1c9e27717d4cbe65548
|
[] |
no_license
|
jeffThompson/WhiteNoiseBoutique
|
f38a7b75307a008e5fddbdd8d3198a33eb3bc92e
|
721bc9b4395253315e73c4f586ba71b2ca2c2553
|
refs/heads/master
| 2021-01-18T15:14:28.119126
| 2015-09-04T14:55:46
| 2015-09-04T14:55:46
| 39,200,520
| 3
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,876
|
py
|
'''
NOISE GENERATOR
Jeff Thompson | 2015 | www.jeffreythompson.org
Interface for generating boutique white noise.
TODO
- web interface with Flask
PROCESS
(many of these items can be skipped if specified)
1. generate noise using either:
1a. built-in RNG, or
1b. input from hardware RNG
2. save to file
3. create random salt or use user-specified
4. hash file with salt
5. make sure doesn't already exist and store hash
6. ent tests
7. dieharder tests
8. write to audio file
9. upload to server
10. email link
11. securely delete file
Generously supported by a commission from Brighton Digital Festival.
'''
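# --- Illustrative sketch (not part of the original pipeline) of steps 3-5 ---
# The idea: salt the raw bytes, hash them, and refuse to reuse a hash seen
# before. The real implementations (hash_file_sha512, check_against_hashes)
# live in Utilities and may differ in detail; this helper is never called.
def _salted_sha512_sketch(path, salt):
    import hashlib
    h = hashlib.sha512()
    h.update(salt if isinstance(salt, bytes) else salt.encode('utf-8'))
    with open(path, 'rb') as f:
        h.update(f.read())
    return h.hexdigest()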
from Generators import * # file with noise generating functions
from Tests import * # file with RNG testing (ent, dieharder)
from AudioOutput import *
from UploadAndEmail import *
from Utilities import * # file with misc functions (hashing, etc)
import uuid # for creating salt
from subprocess import check_call # used below to run srm (may also be re-exported by the wildcard imports)
# user-specified options
generator = 'congruential' # which to use?
email_address = 'mail@jeffreythompson.org' # needed to send download link
pre_chosen_salt = None # user-specified salt
salt_it = True # randomly salt the noise?
email_salt = True
store_hash = True # store the resulting hash?
upload_to_server = False # upload noise for download?
delete_noise_file = True # securely delete noise file when done?
noise_len = 10 * 44100 # duration in sec * sample rate
seed = None # seed value (or None) - doesn't work with all gens
# dieharder_tests = []
dieharder_tests = [ 1, 2, 101, 202, 203 ]
# files created and used
noise_byte_file = 'bytes.dat'
noise_dieharder_file = 'dieharder.dat'
all_hashes_file = 'AllHashes.csv' # file to store previous hashes
max_value = 4294967295 # max value for 32-bit integer
# - - - - - - - - - - - - - - - - - - - -
print '\n' + 'WHITE NOISE GENERATOR'
print ('- ' * 8) + '\n'
print 'generating ' + str(noise_len/44100) + ' seconds of noise...'
# read from hardware RNG
if 'hardware:' in generator:
generator = generator.replace('hardware:', '')
print '- using hardware generator: ' + generator
# or, create noise using algo RNG and write to file
# by default, use dev/random since it is very secure
else:
print '- using software generator: ' + generator
if generator == 'threefish':
noise = threefish(seed, noise_len)
elif generator == 'aes_ofb':
noise = aes_ofb(seed, noise_len)
elif generator == 'congruential':
noise = congruential(seed, noise_len)
    elif generator.startswith('dieharder:'): # e.g. 'dieharder:<generator name>'
generator = generator[10:]
noise = dieharder_generator(seed, noise_len, generator)
else:
noise = dev_random(seed, noise_len)
noise = convert_to_byte_range(noise, max_value)
# add generator to stats
stats = [ [ 'generator', generator, 'pseudo RNG' ] ]
print '- writing to file...'
write_byte_file(noise, noise_byte_file)
write_dieharder_file(noise, noise_dieharder_file, max_value)
# create salt as a "password" to the data
# or use user-specified salt if it exists
if pre_chosen_salt == None:
salt = uuid.uuid4().hex
else:
salt = pre_chosen_salt
# hash file as SHA512 for storage in list
# also create MD5 hash with salt created above for audio
# filename (since most OSs can't take filenames)
# longer than 128-256 characters
print '\n' + 'hashing file...'
if salt_it:
sha512 = hash_file_sha512(noise_byte_file, salt)
else:
sha512 = hash_file_sha512(noise_byte_file, None)
md5 = hash_file_md5(noise_byte_file, salt)
# check against previous hashes and store
if store_hash:
print '- checking against previous hashes...'
if check_against_hashes(sha512, all_hashes_file) == True:
print '- already stored - redo generation!'
exit(1)
else:
print '- not stored, adding to list...'
with open(all_hashes_file, 'a') as out:
out.write(sha512 + '\n')
# run tests on noise file
'''
print '\n' + 'running ENT tests...'
stats.extend( ent_test(noise_byte_file) )
print '\n' + 'running DIEHARDER tests (may take a while)...'
for test in dieharder_tests:
stats.extend( run_dieharder(noise_dieharder_file, test) )
'''
# write wav file!
print '\n' + 'writing audio file...'
audio_filename = 'AudioFiles/' + md5 + '.wav'
write_wav(noise, audio_filename)
# upload to server and email
if upload_to_server:
print '\n' + 'uploading audio file to server...'
upload(audio_filename, 'noise/' + md5 + '.wav')
print '\n' + 'sending email...'
if email_salt:
send_email(email_address, 'http://www.whitenoiseboutique.com/noise/' + md5 + '.wav', stats, salt)
else:
send_email(email_address, 'http://www.whitenoiseboutique.com/noise/' + md5 + '.wav', stats, None)
# delete noise file (we only keep the hash)
if delete_noise_file:
print '\n' + 'securely deleting noise files...'
check_call(['srm', noise_byte_file])
check_call(['srm', noise_dieharder_file])
# all done!
print '\n' + 'DONE!' + '\n\n'
|
[
"mail@jeffreythompson.org"
] |
mail@jeffreythompson.org
|
0f5773e8b31e055609b801c44d7c309044166cf5
|
0bc4731f0b87ebc03afe2b282efeca98f161be6d
|
/data.py
|
997c527ed4f7ee53a1905da6fa2f4dfc780e0346
|
[] |
no_license
|
melrobin/MachineLearning
|
caec26c47dfc3bdaeee70b1434023379673d768e
|
e20a79b79e8af7c35937c7d87bf78da50bf91e74
|
refs/heads/master
| 2020-04-05T12:34:37.348733
| 2017-06-23T19:05:11
| 2017-06-23T19:05:11
| 95,159,257
| 0
| 0
| null | 2017-06-22T21:48:21
| 2017-06-22T21:48:20
| null |
UTF-8
|
Python
| false
| false
| 959
|
py
|
import numpy as np
from sklearn import neighbors, model_selection
raw_data = np.genfromtxt('plrx.txt') #plrx.txt is the dataset txt file
data = raw_data[:,:12] # slicing the feature columns from the raw data
label = raw_data[:,-1]
X = data # feature matrix
y = label
X_train, X_test, y_train, y_test = model_selection.train_test_split(X,y, test_size = 0.1)
k_values = list(range(1,41))
cv_scores =[]
for k in k_values:
knn = neighbors.KNeighborsClassifier(n_neighbors=k)
scores = model_selection.cross_val_score(knn, X_train, y_train,cv=10, scoring = 'accuracy')
cv_scores.append(scores.mean())
error = [1 - x for x in cv_scores]
optimal_k = error.index(min(error))
optimal_k = optimal_k +1
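# error holds misclassification rates indexed from 0 while k_values starts at
# 1, hence the +1 above to map the best index back to a k value.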
print(optimal_k)
print(error)
### use knn
X_train, X_test, y_train, y_test = model_selection.train_test_split(X,y, test_size = 0.1)
knn = neighbors.KNeighborsClassifier(n_neighbors= optimal_k)
knn.fit(X_train, y_train)
accuracy = knn.score(X_test, y_test)
print(accuracy)
|
[
"rojascm02@gmail.com"
] |
rojascm02@gmail.com
|
cee58b171102a5c97d291ca70e27fba7e8d8a050
|
ff8e957db8a7d238db655b0458bf6d389c027a77
|
/src/rna/modules/remote_management/tasks.py
|
7fb30b768ab0bdea1daea3eceb65d70262f30afe
|
[] |
no_license
|
will7200/RNA
|
68d30a1c493a57f69f00ec5d40456fba76d87415
|
0c39aca2ee88c8c76c41c30dce36906e63ef5871
|
refs/heads/main
| 2023-02-03T20:01:05.628350
| 2020-12-23T16:58:03
| 2020-12-23T16:58:03
| 322,120,457
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,534
|
py
|
import os
import subprocess
import tempfile
from rna.extensions import celery, db
from rna.modules.core.remote_management.schemas import ExecuteDetails, AuthenticationMethod
from rna.modules.remote_management.models import HostCommandEvent
def build_ssh_command(details: ExecuteDetails):
host = details.hostname
port = []
command = ['ssh']
temp = None
if details.authentication_method == AuthenticationMethod.password:
command = ['sshpass', '-p', details.password, 'ssh']
elif details.authentication_method == AuthenticationMethod.key_pair:
temp = tempfile.NamedTemporaryFile(prefix="delete_key_", delete=False)
temp.write(details.private_key.encode())
temp.write('\n'.encode())
temp.close()
command.extend(['-i', temp.name])
if details.username:
host = f'{details.username}@{host}'
if details.port:
port = ['-p', f'{details.port}']
if len(port) != 0:
command.extend(port)
command.extend(['-o', 'StrictHostKeyChecking=no'])
command.append(host)
command.append(details.command)
return command, temp
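# For illustration (hypothetical values): password auth for deploy@10.0.0.5:2222
# running 'uptime' assembles roughly
#   ['sshpass', '-p', '<pw>', 'ssh', '-p', '2222',
#    '-o', 'StrictHostKeyChecking=no', 'deploy@10.0.0.5', 'uptime']
# while key-pair auth instead writes the private key to a NamedTemporaryFile
# and passes it via 'ssh -i <tempfile>'.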
def deserialize_into(argument):
def decorator(function):
def wrapper(self, arg, *args, **kwargs):
arg = argument(**arg)
result = function(self, arg, *args, **kwargs)
return result
return wrapper
return decorator
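# Celery serializes task arguments (typically to JSON), so the task body
# receives a plain dict; deserialize_into rebuilds the ExecuteDetails schema
# object from that dict before the task logic runs.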
@celery.task(bind=True, queue='execute_host_command')
@deserialize_into(ExecuteDetails)
def execute_host_command(self, details: ExecuteDetails):
command, temp = build_ssh_command(details)
    try:
        child = subprocess.Popen(command, shell=False,
                                 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        result = child.communicate(timeout=30)
    except (subprocess.TimeoutExpired, FileNotFoundError) as e:
        event = HostCommandEvent(result=str(e), host_command_id=details.command_id,
                                 exit_code=-1, guid=self.request.id)
        db.session.add(event)
        db.session.commit()
        return str(e)
    finally:
        # Delete the temporary private-key file even when the command fails or
        # times out, so key material never lingers on disk
        if temp:
            os.remove(temp.name)
stdout, stderr = result
# noinspection PyTypeChecker
event = HostCommandEvent(result=stdout.decode() + '\n' + stderr.decode(), host_command_id=details.command_id,
exit_code=child.returncode, guid=self.request.id)
db.session.add(event)
db.session.commit()
return stdout.decode() + '\n' + stderr.decode()
|
[
"will7207@gmail.com"
] |
will7207@gmail.com
|
a0501f5fe8d7b2dbd0b347bf845646f7cd23628d
|
34599a66861f7d95a5740eeb5329ea77014e18d4
|
/problems_solving/algospot/firetrucks.py
|
0006d6df8ed42a042757bbebd9b4c40fa0a3cf8c
|
[] |
no_license
|
laolee010126/algorithm-with-python
|
f0f5f1bc3cbe374ccbb59e10ac639674c44ae743
|
89ff0c47a6d8b0cd5b31a25bb3981b8e90971f19
|
refs/heads/master
| 2022-04-01T17:38:36.199309
| 2020-01-14T01:54:22
| 2020-01-14T01:54:22
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,108
|
py
|
"""Get least sum of distances to dispatch firetrucks to houses on fire
:input:
1
8 12 3 2
1 2 3
1 6 9
2 3 6
3 4 4
3 5 2
4 5 7
6 5 5
8 6 5
6 7 3
8 7 3
7 5 1
2 8 3
2 3 5
4 6
:return:
16
"""
from heapq import heappush, heappop
from math import inf
from sys import stdin
get_input = stdin.readline
def min_dist(g, dest, src):
V = len(g)
    # Add a virtual vertex 0 connecting all fire stations into a single source
for s in src:
g[0].append((0, s))
g[s].append((0, 0))
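    # With these zero-weight edges in place, a single Dijkstra run from the
    # virtual vertex 0 computes, for every house, the distance to its nearest
    # fire station: dist[v] = min over stations s of d(s, v).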
# 1. priority queue version
# pq = [(0, 0)]
# dist = [inf] * V
# dist[0] = 0
# while pq:
# cost, here = heappop(pq)
# if cost > dist[here]:
# continue
# for dc, there in g[here]:
# nxt_dist = cost + dc
# if nxt_dist < dist[there]:
# dist[there] = nxt_dist
# heappush(pq, (nxt_dist, there))
# return sum(dist[d] for d in dest)
# 2. Non-priority queue version
dist = [inf] * V
dist[0] = 0
visited = [False] * V
while True:
min_dist = inf
here = None
for v in range(V):
if dist[v] < min_dist and not visited[v]:
min_dist = dist[v]
here = v
if min_dist == inf:
break
visited[here] = True
for dc, there in g[here]:
nxt_dist = dist[here] + dc
if not visited[there] and nxt_dist < dist[there]:
dist[there] = nxt_dist
return sum(dist[d] for d in dest)
if __name__ == '__main__':
C = int(get_input().strip())
ans = []
for _ in range(C):
V, E, DEST, SRC = (int(n) for n in get_input().strip().split())
g = [[] for _ in range(V+1)]
for _ in range(E):
a, b, d = (int(n) for n in get_input().strip().split())
g[a].append((d, b))
g[b].append((d, a))
dest = [int(n) for n in get_input().strip().split()]
src = [int(n) for n in get_input().strip().split()]
ans.append(min_dist(g, dest, src))
for n in ans:
print(n)
|
[
"shoark7@gmail.com"
] |
shoark7@gmail.com
|
0502027a727280b3ed0d2bedef582948f0b0f229
|
a94ff9b8b7ad19047c5acb87d7c867628e2d4f75
|
/CreateDatacards/properties_datacardClass_phase.py
|
d7b41c37280fcbd59ebe2daa35c1e3df3e5ff79c
|
[] |
no_license
|
meng-xiao/HiggsAnalysis-HZZ4l_Combination
|
16ccef3bba6885376213b427d8be4f46f7752262
|
3fc348c32ba0df902f70d38b71ce1837463a9e82
|
refs/heads/master
| 2021-01-17T12:50:28.316409
| 2014-09-15T14:18:32
| 2014-09-15T14:18:32
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 91,698
|
py
|
#! /usr/bin/env python
import sys
import os
import re
import math
from scipy.special import erf
from ROOT import *
import ROOT
from array import array
from systematicsClass import *
from inputReader import *
## ------------------------------------
## card and workspace class
## ------------------------------------
class properties_datacardClass_phase:
def __init__(self):
self.ID_4mu = 1
self.ID_4e = 2
self.ID_2e2mu = 3
self.isFSR = True
def loadIncludes(self):
ROOT.gSystem.AddIncludePath("-I$ROOFITSYS/include/")
ROOT.gSystem.AddIncludePath("-Iinclude/")
ROOT.gROOT.ProcessLine(".L include/tdrstyle.cc")
ROOT.gSystem.Load("libRooFit")
ROOT.gSystem.Load("libHiggsAnalysisCombinedLimit.so")
ROOT.gSystem.Load("include/HiggsCSandWidth_cc.so")
ROOT.gSystem.Load("include/HiggsCSandWidthSM4_cc.so")
    # sigma x BR function: tabulates HiggsCS(mH) * HiggsBR(mH) into a RooDataHist
    # so the cross section times branching ratio can be evaluated as a function of MH
def makeXsBrFunction(self,signalProc,rrvMH):
procName = "ggH"
if(signalProc == 0): procName = "ggH" #dummy, when you sum up all the 5 chans
if(signalProc == 1): procName = "ggH"
if(signalProc == 2): procName = "qqH"
if(signalProc == 3): procName = "WH"
if(signalProc == 4): procName = "ZH"
if(signalProc == 5): procName = "ttH"
channelName = ""
if (self.channel == self.ID_4mu): channelName = "4mu"
elif (self.channel == self.ID_4e): channelName = "4e"
elif (self.channel == self.ID_2e2mu): channelName = "2e2mu"
else: print "Input Error: Unknown channel! (4mu = 1, 4e = 2, 2e2mu = 3)"
myCSWrhf = HiggsCSandWidth()
histXsBr = ROOT.TH1F("hsmxsbr_{0}_{1}".format(procName,channelName),"", 8905, 109.55, 1000.05)
for i in range(1,8906):
mHVal = histXsBr.GetBinCenter(i)
BR = 0.0
if (self.channel == self.ID_2e2mu):
BR = myCSWrhf.HiggsBR(13,mHVal)
else:
BR = myCSWrhf.HiggsBR(12,mHVal)
if (signalProc == 3 or signalProc == 4 or signalProc == 5):
#overwrite BR if VH,ttH sample
#these samples have inclusive Z decay
BR = myCSWrhf.HiggsBR(11,mHVal)
if (signalProc==0):
totXs=0
for ch in range(1,6):
totXs+=myCSWrhf.HiggsCS(ch, mHVal, self.sqrts)
histXsBr.SetBinContent(i, totXs * BR)
else:
histXsBr.SetBinContent(i, myCSWrhf.HiggsCS(signalProc, mHVal, self.sqrts) * BR)
#print '\nmakeXsBrFunction : procName=',procName,' signalProc=',signalProc,' mH (input)=',rrvMH.getVal(),
#print ' CS=',myCSWrhf.HiggsCS(signalProc, mHVal, self.sqrts),' BR=',BR
rdhname = "rdhXsBr_{0}_{1}_{2}".format(procName,self.channel,self.sqrts)
rdhXsBr = RooDataHist(rdhname,rdhname, ROOT.RooArgList(rrvMH), histXsBr)
return rdhXsBr
# return trueVar if testStatement else return falseVar
def getVariable(self,trueVar,falseVar,testStatement):
if (testStatement):
return trueVar
else:
return falseVar
# main datacard and workspace function
def makeCardsWorkspaces(self, theMH, theOutputDir, theInputs,theTemplateDir="templates2D"):
## --------------- SETTINGS AND DECLARATIONS --------------- ##
DEBUG = False
self.mH = theMH
self.SMDsigCut = 1.
self.SMDbkgCut = 1.
self.lumi = theInputs['lumi']
self.sqrts = theInputs['sqrts']
self.channel = theInputs['decayChannel']
self.bkgMorph = theInputs['useCMS_zz4l_zjet']
self.outputDir = theOutputDir
self.templateDir = theTemplateDir
self.ggH_chan = theInputs['ggH']
self.qqH_chan = theInputs['qqH']
self.WH_chan = theInputs['WH']
self.ZH_chan = theInputs['ZH']
self.ttH_chan = theInputs['ttH']
self.qqZZ_chan = theInputs['qqZZ']
self.ggZZ_chan = theInputs['ggZZ']
self.zjets_chan = theInputs['zjets']
## ---------------- SET PLOTTING STYLE ---------------- ##
ROOT.setTDRStyle(True)
ROOT.gStyle.SetPalette(1)
ROOT.gStyle.SetPadLeftMargin(0.16)
## ---------------- VARIABLES FOR LATER --------------- ##
self.bUseCBnoConvolution = False
ForXSxBR = False
myCSW = HiggsCSandWidth()
## ----------------- WIDTH AND RANGES ----------------- ##
self.widthHVal = myCSW.HiggsWidth(0,self.mH)
if(self.widthHVal < 0.12):
self.bUseCBnoConvolution = True
self.isHighMass = False
if self.mH >= 390:
if theInputs['useHighMassReweightedShapes']:
self.isHighMass = True
else: print "useHighMassReweightedShapes set to FALSE, using non-reweighted shapes!"
print "width: ",self.widthHVal
self.windowVal = max( self.widthHVal, 1.0)
lowside = 100.0
highside = 1000.0
if (self.mH >= 275):
lowside = 180.0
highside = 650.0
if (self.mH >= 350):
lowside = 200.0
highside = 900.0
if (self.mH >= 500):
lowside = 250.0
highside = 1000.0
if (self.mH >= 700):
lowside = 350.0
highside = 1400.0
self.low_M = max( (self.mH - 20.*self.windowVal), lowside)
self.high_M = min( (self.mH + 15.*self.windowVal), highside)
if (self.channel == self.ID_4mu): self.appendName = '4mu'
elif (self.channel == self.ID_4e): self.appendName = '4e'
elif (self.channel == self.ID_2e2mu): self.appendName = '2e2mu'
else: print "Input Error: Unknown channel! (4mu = 1, 4e = 2, 2e2mu = 3)"
## ------------------------- SYSTEMATICS CLASSES ----------------------------- ##
systematics = systematicsClass( self.mH, False, self.isFSR, theInputs)
systematics_forXSxBR = systematicsClass( self.mH, True, self.isFSR,theInputs)
## -------------------------- SIGNAL SHAPE ----------------------------------- ##
bins = 1000
if(self.bUseCBnoConvolution): bins = 200
CMS_zz4l_mass_name = "CMS_zz4l_mass"
CMS_zz4l_mass = ROOT.RooRealVar(CMS_zz4l_mass_name,CMS_zz4l_mass_name,self.low_M,self.high_M)
CMS_zz4l_mass.setBins(bins)
x_name = "CMS_zz4l_fg4"
x = ROOT.RooRealVar(x_name,x_name,0.,0.,1.)
x.setBins(bins)
phi_name = "CMS_zz4l_fg4phi"
phi = ROOT.RooRealVar(phi_name,phi_name,-math.pi,math.pi)
phi.setBins(bins)
phi.Print("v")
D1Name = "CMS_zz4l_pseudoKD"
D2Name = "CMS_zz4l_dcp"
D3Name = "CMS_zz4l_smd"
self.LUMI = ROOT.RooRealVar("LUMI_{0:.0f}".format(self.sqrts),"LUMI_{0:.0f}".format(self.sqrts),self.lumi)
self.LUMI.setConstant(True)
self.MH = ROOT.RooRealVar("MH","MH",self.mH)
self.MH.setConstant(True)
# n2, alpha2 are right side parameters of DoubleCB
# n, alpha are left side parameters of DoubleCB
n_CB_d = 0.0
alpha_CB_d = 0.0
n2_CB_d = 0.0
alpha2_CB_d = 0.0
mean_CB_d = 0.0
sigma_CB_d = 0.0
mean_BW_d = self.mH
gamma_BW_d = 0.0
rdhXsBrFuncV_1 = self.makeXsBrFunction(1,self.MH)
rhfname = "rhfXsBr_{0}_{1:.0f}_{2:.0f}".format("ggH",self.channel,self.sqrts)
rhfXsBrFuncV_1 = ROOT.RooHistFunc(rhfname,rhfname, ROOT.RooArgSet(self.MH), rdhXsBrFuncV_1, 1)
rdhXsBrFuncV_2 = self.makeXsBrFunction(2,self.MH)
rhfname = "rhfXsBr_{0}_{1:.0f}_{2:.0f}".format("VBF",self.channel,self.sqrts)
rhfXsBrFuncV_2 = ROOT.RooHistFunc(rhfname,rhfname, ROOT.RooArgSet(self.MH), rdhXsBrFuncV_2, 1)
rdhXsBrFuncV_3 = self.makeXsBrFunction(3,self.MH)
rhfname = "rhfXsBr_{0}_{1:.0f}_{2:.0f}".format("WH",self.channel,self.sqrts)
rhfXsBrFuncV_3 = ROOT.RooHistFunc(rhfname,rhfname, ROOT.RooArgSet(self.MH), rdhXsBrFuncV_3, 1)
rdhXsBrFuncV_4 = self.makeXsBrFunction(4,self.MH)
rhfname = "rhfXsBr_{0}_{1:.0f}_{2:.0f}".format("ZH",self.channel,self.sqrts)
rhfXsBrFuncV_4 = ROOT.RooHistFunc(rhfname,rhfname, ROOT.RooArgSet(self.MH), rdhXsBrFuncV_4, 1)
rdhXsBrFuncV_5 = self.makeXsBrFunction(5,self.MH)
rhfname = "rhfXsBr_{0}_{1:.0f}_{2:.0f}".format("ttH",self.channel,self.sqrts)
rhfXsBrFuncV_5 = ROOT.RooHistFunc(rhfname,rhfname, ROOT.RooArgSet(self.MH), rdhXsBrFuncV_5, 1)
## -------- Variable Definitions -------- ##
name = "CMS_zz4l_mean_e_sig"
CMS_zz4l_mean_e_sig = ROOT.RooRealVar(name,"CMS_zz4l_mean_e_sig",0.0,-10.0,10.0)
name = "CMS_zz4l_mean_e_err_{0}_{1:.0f}".format(self.channel,self.sqrts)
CMS_zz4l_mean_e_err = ROOT.RooRealVar(name,"CMS_zz4l_mean_e_err",float(theInputs['CMS_zz4l_mean_e_sig']),-0.99,0.99)
name = "CMS_zz4l_sigma_e_sig"
CMS_zz4l_sigma_e_sig = ROOT.RooRealVar(name,"CMS_zz4l_sigma_e_sig",3.0,0.0,30.0)
name = "CMS_zz4l_mean_m_sig"
CMS_zz4l_mean_m_sig = ROOT.RooRealVar(name,"CMS_zz4l_mean_sig",0.0,-10.0,10.0)
name = "CMS_zz4l_mean_m_err_{0}_{1:.0f}".format(self.channel,self.sqrts)
CMS_zz4l_mean_m_err = ROOT.RooRealVar(name,"CMS_zz4l_mean_m_err",float(theInputs['CMS_zz4l_mean_m_sig']),-0.99,0.99)
name = "CMS_zz4l_sigma_m_sig"
CMS_zz4l_sigma_m_sig = ROOT.RooRealVar(name,"CMS_zz4l_sigma_sig",3.0,0.0,30.0)
name = "CMS_zz4l_alpha2_{0}_{1:.0f}".format(self.channel,self.sqrts)
CMS_zz4l_alpha2 = ROOT.RooRealVar(name,"CMS_zz4l_alpha2",1.,-10.,10.)
name = "CMS_zz4l_n2_sig_{0}_{1:.0f}".format(self.channel,self.sqrts)
CMS_zz4l_n2 = ROOT.RooRealVar(name,"CMS_zz4l_n2",2.,-10.,10.)
name = "CMS_zz4l_alpha_{0}_{1:.0f}".format(self.channel,self.sqrts)
CMS_zz4l_alpha = ROOT.RooRealVar(name,"CMS_zz4l_alpha",1.,-10.,10.)
name = "CMS_zz4l_n_sig_{0}_{1:.0f}".format(self.channel,self.sqrts)
CMS_zz4l_n = ROOT.RooRealVar(name,"CMS_zz4l_n",2.,-10.,10.)
name = "CMS_zz4l_mean_BW_{0}_{1:.0f}".format(self.channel,self.sqrts)
CMS_zz4l_mean_BW = ROOT.RooRealVar(name,"CMS_zz4l_mean_BW",self.mH,self.low_M,self.high_M)
name = "interf_ggH"
#name = "CMS_zz4l_gamma_sig_{0}_{1:.0f}".format(self.channel,self.sqrts)
CMS_zz4l_gamma = ROOT.RooRealVar(name,"CMS_zz4l_gamma",10.,0.001,1000.)
name = "CMS_zz4l_widthScale_{0}_{1:.0f}".format(self.channel,self.sqrts)
CMS_zz4l_widthScale = ROOT.RooRealVar(name,"CMS_zz4l_widthScale",1.0)
one = ROOT.RooRealVar("one","one",1.0)
one.setConstant(True)
CMS_zz4l_mean_BW.setVal( mean_BW_d )
CMS_zz4l_gamma.setVal(0)
CMS_zz4l_mean_e_sig.setVal(0)
CMS_zz4l_mean_e_err.setConstant(kTRUE)
CMS_zz4l_sigma_e_sig.setVal(0)
CMS_zz4l_mean_m_sig.setVal(0)
CMS_zz4l_mean_m_err.setConstant(kTRUE)
CMS_zz4l_sigma_m_sig.setVal(0)
CMS_zz4l_alpha.setVal(0)
CMS_zz4l_n.setVal(0)
CMS_zz4l_alpha2.setVal(0)
CMS_zz4l_n2.setVal(0)
CMS_zz4l_widthScale.setConstant(True)
CMS_zz4l_mean_BW.setConstant(True)
print "mean_BW ", CMS_zz4l_mean_BW.getVal()
print "gamma_BW ", CMS_zz4l_gamma.getVal()
print "mean_e_sig ", CMS_zz4l_mean_e_sig.getVal()
print "mean_e_err ", CMS_zz4l_mean_e_err.getVal()
print "sigma_e ", CMS_zz4l_sigma_e_sig.getVal()
print "mean_m_sig ",CMS_zz4l_mean_m_sig.getVal()
print "mean_m_err ", CMS_zz4l_mean_m_err.getVal()
print "sigma_m ", CMS_zz4l_sigma_m_sig.getVal()
print "alpha ", CMS_zz4l_alpha.getVal()
print "n ", CMS_zz4l_n.getVal()
print "alpha2 ", CMS_zz4l_alpha2.getVal()
print "n2 ", CMS_zz4l_n2.getVal()
## -------------------- RooFormulaVar's -------------------- ##
rfv_n_CB = ROOT.RooFormulaVar()
rfv_alpha_CB = ROOT.RooFormulaVar()
rfv_n2_CB = ROOT.RooFormulaVar()
rfv_alpha2_CB = ROOT.RooFormulaVar()
rfv_mean_CB = ROOT.RooFormulaVar()
rfv_sigma_CB = ROOT.RooFormulaVar()
name = "CMS_zz4l_n_{0:.0f}_{1:.0f}_centralValue".format(self.channel,self.sqrts)
if self.isHighMass : rfv_n_CB = ROOT.RooFormulaVar(name,"("+theInputs['n_CB_shape_HM']+")"+"*(1+@1)",ROOT.RooArgList(self.MH,CMS_zz4l_n))
else : rfv_n_CB = ROOT.RooFormulaVar(name,"("+theInputs['n_CB_shape']+")"+"*(1+@1)",ROOT.RooArgList(self.MH,CMS_zz4l_n))
name = "CMS_zz4l_alpha_{0:.0f}_centralValue".format(self.channel)
if self.isHighMass : rfv_alpha_CB = ROOT.RooFormulaVar(name,theInputs['alpha_CB_shape_HM'], ROOT.RooArgList(self.MH))
else : rfv_alpha_CB = ROOT.RooFormulaVar(name,theInputs['alpha_CB_shape'], ROOT.RooArgList(self.MH))
name = "CMS_zz4l_n2_{0:.0f}_{1:.0f}_centralValue".format(self.channel,self.sqrts)
#if self.isHighMass : rfv_n2_CB = ROOT.RooFormulaVar(name,"("+theInputs['n2_CB_shape_HM']+")"+"*(1+@1)",ROOT.RooArgList(self.MH,CMS_zz4l_n2))
#else : rfv_n2_CB = ROOT.RooFormulaVar(name,"("+theInputs['n2_CB_shape']+")"+"*(1+@1)",ROOT.RooArgList(self.MH,CMS_zz4l_n2))
if self.isHighMass : rfv_n2_CB = ROOT.RooFormulaVar(name,"("+theInputs['n2_CB_shape_HM']+")",ROOT.RooArgList(self.MH))
else : rfv_n2_CB = ROOT.RooFormulaVar(name,"("+theInputs['n2_CB_shape']+")",ROOT.RooArgList(self.MH))
name = "CMS_zz4l_alpha2_{0:.0f}_centralValue".format(self.channel)
if self.isHighMass : rfv_alpha2_CB = ROOT.RooFormulaVar(name,theInputs['alpha2_CB_shape_HM'], ROOT.RooArgList(self.MH))
else : rfv_alpha2_CB = ROOT.RooFormulaVar(name,theInputs['alpha2_CB_shape'], ROOT.RooArgList(self.MH))
name = "CMS_zz4l_mean_sig_{0:.0f}_{1:.0f}_centralValue".format(self.channel,self.sqrts)
if (self.channel == self.ID_4mu) :
if self.isHighMass : rfv_mean_CB = ROOT.RooFormulaVar(name,"("+theInputs['mean_CB_shape_HM']+")"+"+@0*@1*@2", ROOT.RooArgList(self.MH, CMS_zz4l_mean_m_sig,CMS_zz4l_mean_m_err))
else : rfv_mean_CB = ROOT.RooFormulaVar(name,"("+theInputs['mean_CB_shape']+")"+"+@0*@1*@2", ROOT.RooArgList(self.MH, CMS_zz4l_mean_m_sig,CMS_zz4l_mean_m_err))
elif (self.channel == self.ID_4e) :
if self.isHighMass : rfv_mean_CB = ROOT.RooFormulaVar(name,"("+theInputs['mean_CB_shape_HM']+")"+"+@0*@1*@2", ROOT.RooArgList(self.MH, CMS_zz4l_mean_e_sig,CMS_zz4l_mean_e_err))
else : rfv_mean_CB = ROOT.RooFormulaVar(name,"("+theInputs['mean_CB_shape']+")"+"+@0*@1*@2", ROOT.RooArgList(self.MH, CMS_zz4l_mean_e_sig,CMS_zz4l_mean_e_err))
elif (self.channel == self.ID_2e2mu) :
if self.isHighMass : rfv_mean_CB = ROOT.RooFormulaVar(name,"("+theInputs['mean_CB_shape_HM']+")"+"+ (@0*@1*@3 + @0*@2*@4)/2", ROOT.RooArgList(self.MH, CMS_zz4l_mean_m_sig,CMS_zz4l_mean_e_sig,CMS_zz4l_mean_m_err,CMS_zz4l_mean_e_err))
else : rfv_mean_CB = ROOT.RooFormulaVar(name,"("+theInputs['mean_CB_shape']+")"+"+ (@0*@1*@3 + @0*@2*@4)/2", ROOT.RooArgList(self.MH, CMS_zz4l_mean_m_sig,CMS_zz4l_mean_e_sig,CMS_zz4l_mean_m_err,CMS_zz4l_mean_e_err))
name = "CMS_zz4l_sigma_sig_{0:.0f}_{1:.0f}_centralValue".format(self.channel,self.sqrts)
if (self.channel == self.ID_4mu) :
if self.isHighMass : rfv_sigma_CB = ROOT.RooFormulaVar(name,"("+theInputs['sigma_CB_shape_HM']+")"+"*(1+@1)", ROOT.RooArgList(self.MH, CMS_zz4l_sigma_m_sig))
else : rfv_sigma_CB = ROOT.RooFormulaVar(name,"("+theInputs['sigma_CB_shape']+")"+"*(1+@1)", ROOT.RooArgList(self.MH, CMS_zz4l_sigma_m_sig))
elif (self.channel == self.ID_4e) :
if self.isHighMass : rfv_sigma_CB = ROOT.RooFormulaVar(name,"("+theInputs['sigma_CB_shape_HM']+")"+"*(1+@1)", ROOT.RooArgList(self.MH, CMS_zz4l_sigma_e_sig))
else : rfv_sigma_CB = ROOT.RooFormulaVar(name,"("+theInputs['sigma_CB_shape']+")"+"*(1+@1)", ROOT.RooArgList(self.MH, CMS_zz4l_sigma_e_sig))
elif (self.channel == self.ID_2e2mu) :
if self.isHighMass : rfv_sigma_CB = ROOT.RooFormulaVar(name,"("+theInputs['sigma_CB_shape_HM']+")"+"*TMath::Sqrt((1+@1)*(1+@2))", ROOT.RooArgList(self.MH, CMS_zz4l_sigma_m_sig,CMS_zz4l_sigma_e_sig))
else : rfv_sigma_CB = ROOT.RooFormulaVar(name,"("+theInputs['sigma_CB_shape']+")"+"*TMath::Sqrt((1+@1)*(1+@2))", ROOT.RooArgList(self.MH, CMS_zz4l_sigma_m_sig,CMS_zz4l_sigma_e_sig))
name = "CMS_zz4l_gamma_{0:.0f}_{1:.0f}_centralValue".format(self.channel,self.sqrts)
rfv_gamma_BW = ROOT.RooFormulaVar(name,"("+theInputs['gamma_BW_shape_HM']+")"+"*(1+@1*0.05)",ROOT.RooArgList(self.MH,CMS_zz4l_gamma))
if (DEBUG): print " DEBUG ********* ", theInputs['sigma_CB_shape']
print "n_CB ", rfv_n_CB.getVal()
print "alpha_CB ", rfv_alpha_CB.getVal()
print "n2_CB ", rfv_n2_CB.getVal()
print "alpha2_CB ", rfv_alpha2_CB.getVal()
print "mean_CB ", rfv_mean_CB.getVal()
print "sigma_CB ", rfv_sigma_CB.getVal()
print "gamma_BW ", rfv_gamma_BW.getVal()
CMS_zz4l_mean_sig_NoConv = ROOT.RooFormulaVar("CMS_zz4l_mean_sig_NoConv_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts),"@0+@1", ROOT.RooArgList(rfv_mean_CB, self.MH))
print "mean_sig_NoConv ", CMS_zz4l_mean_sig_NoConv.getVal()
## --------------------- SHAPE FUNCTIONS ---------------------- ##
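        # Signal model: a double-sided Crystal Ball (detector resolution)
        # convolved with a relativistic Breit-Wigner (natural line shape) via
        # RooFFTConvPdf. For very narrow widths (< 0.12 GeV, bUseCBnoConvolution)
        # the convolution is skipped and the Crystal Ball alone is used, with its
        # mean shifted to mH through CMS_zz4l_mean_sig_NoConv.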
signalCB_ggH = ROOT.RooDoubleCB("signalCB_ggH","signalCB_ggH",CMS_zz4l_mass, self.getVariable(CMS_zz4l_mean_sig_NoConv,rfv_mean_CB, self.bUseCBnoConvolution) , rfv_sigma_CB,rfv_alpha_CB,rfv_n_CB, rfv_alpha2_CB, rfv_n2_CB)
#Low mass pdf
signalBW_ggH = ROOT.RooRelBWUFParam("signalBW_ggH", "signalBW_ggH",CMS_zz4l_mass,CMS_zz4l_mean_BW,CMS_zz4l_widthScale)
sig_ggH = ROOT.RooFFTConvPdf("sig_ggH","BW (X) CB",CMS_zz4l_mass,signalBW_ggH,signalCB_ggH, 2)
#High mass pdf
signalBW_ggH_HM = ROOT.RooRelBWHighMass("signalBW_ggH", "signalBW_ggH",CMS_zz4l_mass,CMS_zz4l_mean_BW,rfv_gamma_BW)
sig_ggH_HM = ROOT.RooFFTConvPdf("sig_ggH","BW (X) CB",CMS_zz4l_mass,signalBW_ggH_HM,signalCB_ggH, 2)
signalCB_VBF = ROOT.RooDoubleCB("signalCB_VBF","signalCB_VBF",CMS_zz4l_mass,self.getVariable(CMS_zz4l_mean_sig_NoConv,rfv_mean_CB,self.bUseCBnoConvolution),rfv_sigma_CB,rfv_alpha_CB,rfv_n_CB, rfv_alpha2_CB, rfv_n2_CB)
#Low mass pdf
signalBW_VBF = ROOT.RooRelBWUFParam("signalBW_VBF", "signalBW_VBF",CMS_zz4l_mass,CMS_zz4l_mean_BW,CMS_zz4l_widthScale)
sig_VBF = ROOT.RooFFTConvPdf("sig_VBF","BW (X) CB",CMS_zz4l_mass,signalBW_VBF,signalCB_VBF, 2)
#High mass pdf
signalBW_VBF_HM = ROOT.RooRelBWHighMass("signalBW_VBF", "signalBW_VBF",CMS_zz4l_mass,CMS_zz4l_mean_BW,rfv_gamma_BW)
sig_VBF_HM = ROOT.RooFFTConvPdf("sig_VBF","BW (X) CB",CMS_zz4l_mass,signalBW_VBF_HM,signalCB_VBF, 2)
signalCB_WH = ROOT.RooDoubleCB("signalCB_WH","signalCB_WH",CMS_zz4l_mass,self.getVariable(CMS_zz4l_mean_sig_NoConv,rfv_mean_CB,self.bUseCBnoConvolution),rfv_sigma_CB,rfv_alpha_CB,rfv_n_CB, rfv_alpha2_CB, rfv_n2_CB)
#Low mass pdf
signalBW_WH = ROOT.RooRelBWUFParam("signalBW_WH", "signalBW_WH",CMS_zz4l_mass,CMS_zz4l_mean_BW,CMS_zz4l_widthScale)
sig_WH = ROOT.RooFFTConvPdf("sig_WH","BW (X) CB",CMS_zz4l_mass,signalBW_WH,signalCB_WH, 2)
#High mass pdf
signalBW_WH_HM = ROOT.RooRelBWHighMass("signalBW_WH", "signalBW_WH",CMS_zz4l_mass,CMS_zz4l_mean_BW,rfv_gamma_BW)
sig_WH_HM = ROOT.RooFFTConvPdf("sig_WH","BW (X) CB",CMS_zz4l_mass,signalBW_WH_HM,signalCB_WH, 2)
signalCB_ZH = ROOT.RooDoubleCB("signalCB_ZH","signalCB_ZH",CMS_zz4l_mass,self.getVariable(CMS_zz4l_mean_sig_NoConv,rfv_mean_CB,self.bUseCBnoConvolution),rfv_sigma_CB,rfv_alpha_CB,rfv_n_CB, rfv_alpha2_CB, rfv_n2_CB)
#Low mass pdf
signalBW_ZH = ROOT.RooRelBWUFParam("signalBW_ZH", "signalBW_ZH",CMS_zz4l_mass,CMS_zz4l_mean_BW,CMS_zz4l_widthScale)
sig_ZH = ROOT.RooFFTConvPdf("sig_ZH","BW (X) CB",CMS_zz4l_mass,signalBW_ZH,signalCB_ZH, 2)
#High mass pdf
signalBW_ZH_HM = ROOT.RooRelBWHighMass("signalBW_ZH", "signalBW_ZH",CMS_zz4l_mass,CMS_zz4l_mean_BW,rfv_gamma_BW)
sig_ZH_HM = ROOT.RooFFTConvPdf("sig_ZH","BW (X) CB",CMS_zz4l_mass,signalBW_ZH_HM,signalCB_ZH, 2)
signalCB_ttH = ROOT.RooDoubleCB("signalCB_ttH","signalCB_ttH",CMS_zz4l_mass,self.getVariable(CMS_zz4l_mean_sig_NoConv,rfv_mean_CB,self.bUseCBnoConvolution),rfv_sigma_CB,rfv_alpha_CB,rfv_n_CB, rfv_alpha2_CB, rfv_n2_CB)
#Low mass pdf
signalBW_ttH = ROOT.RooRelBWUFParam("signalBW_ttH", "signalBW_ttH",CMS_zz4l_mass,CMS_zz4l_mean_BW,CMS_zz4l_widthScale)
sig_ttH = ROOT.RooFFTConvPdf("sig_ttH","BW (X) CB",CMS_zz4l_mass,signalBW_ttH,signalCB_ttH, 2)
#High mass pdf
signalBW_ttH_HM = ROOT.RooRelBWHighMass("signalBW_ttH", "signalBW_ttH",CMS_zz4l_mass,CMS_zz4l_mean_BW,rfv_gamma_BW)
sig_ttH_HM = ROOT.RooFFTConvPdf("sig_ttH","BW (X) CB",CMS_zz4l_mass,signalBW_ttH_HM,signalCB_ttH, 2)
        ## Buffer fraction protects the FFT convolution against cyclic wrap-around artifacts
sig_ggH.setBufferFraction(0.2)
sig_VBF.setBufferFraction(0.2)
sig_WH.setBufferFraction(0.2)
sig_ZH.setBufferFraction(0.2)
sig_ttH.setBufferFraction(0.2)
sig_ggH_HM.setBufferFraction(0.2)
sig_VBF_HM.setBufferFraction(0.2)
sig_WH_HM.setBufferFraction(0.2)
sig_ZH_HM.setBufferFraction(0.2)
sig_ttH_HM.setBufferFraction(0.2)
## -------------------- 2D SIGNAL SHAPES FOR PROPERTIES ------------------------- ##
print '2D signal shapes for Properties'
templateSigName = "{0}_{2:.0f}TeV/{1}_{0}.root".format(self.templateDir,self.appendName, self.sqrts)#templates3D_AdapSmoothMirror_Signal_{1}.root
sigTempFile = ROOT.TFile(templateSigName)
Sig_T_1 = sigTempFile.Get("template0PlusAdapSmoothMirror")
Sig_T_2 = sigTempFile.Get("template0MinusAdapSmoothMirror")
Sig_T_4 = sigTempFile.Get("templateIntAdapSmoothMirror")
Sig_T_5 = sigTempFile.Get("templateIntPi2AdapSmoothMirror")
templateSigName = "{0}_{2:.0f}TeV/{1}_{0}_ResScaleUp.root".format(self.templateDir,self.appendName, self.sqrts)
sigTempFile_ResUp = ROOT.TFile(templateSigName)
Sig_T_1_ResUp = sigTempFile_ResUp.Get("template0PlusAdapSmoothMirror")
Sig_T_2_ResUp = sigTempFile_ResUp.Get("template0MinusAdapSmoothMirror")
Sig_T_4_ResUp = sigTempFile_ResUp.Get("templateIntAdapSmoothMirror")
Sig_T_5_ResUp = sigTempFile_ResUp.Get("templateIntPi2AdapSmoothMirror")
templateSigName = "{0}_{2:.0f}TeV/{1}_{0}_ResScaleDown.root".format(self.templateDir,self.appendName, self.sqrts)
sigTempFile_ResDown = ROOT.TFile(templateSigName)
Sig_T_1_ResDown = sigTempFile_ResDown.Get("template0PlusAdapSmoothMirror")
Sig_T_2_ResDown = sigTempFile_ResDown.Get("template0MinusAdapSmoothMirror")
Sig_T_4_ResDown = sigTempFile_ResDown.Get("templateIntAdapSmoothMirror")
Sig_T_5_ResDown = sigTempFile_ResDown.Get("templateIntPi2AdapSmoothMirror")
dBinsX = Sig_T_1.GetXaxis().GetNbins()
print "X bins: ",dBinsX
dLowX = Sig_T_1.GetXaxis().GetXmin()
dHighX = Sig_T_1.GetXaxis().GetXmax()
dBinsY = Sig_T_1.GetYaxis().GetNbins()
print "Y bins: ",dBinsY
dLowY = Sig_T_1.GetYaxis().GetXmin()
dHighY = Sig_T_1.GetYaxis().GetXmax()
dBinsZ = Sig_T_1.GetZaxis().GetNbins()
print "Z bins: ",dBinsZ
dLowZ = Sig_T_1.GetZaxis().GetXmin()
dHighZ = Sig_T_1.GetZaxis().GetXmax()
D1 = ROOT.RooRealVar(D1Name,D1Name,dLowX,dHighX)
D2 = ROOT.RooRealVar(D2Name,D2Name,dLowY,dHighY)
D3 = ROOT.RooRealVar(D3Name,D3Name,dLowZ,dHighZ)
D1.setBins(dBinsX)
D2.setBins(dBinsY)
D3.setBins(dBinsZ)
Sig_T_1_hist = ROOT.RooDataHist ("T_1_hist","", ROOT.RooArgList(D1,D2,D3),Sig_T_1)
Sig_T_2_hist = ROOT.RooDataHist ("T_2_hist","", ROOT.RooArgList(D1,D2,D3),Sig_T_2)
Sig_T_4_hist = ROOT.RooDataHist ("T_3_hist","", ROOT.RooArgList(D1,D2,D3),Sig_T_4)
Sig_T_5_hist = ROOT.RooDataHist ("T_5_hist","", ROOT.RooArgList(D1,D2,D3),Sig_T_5)
Sig_T_1_ResUp_hist = ROOT.RooDataHist ("T_1_ResUp_hist","", ROOT.RooArgList(D1,D2,D3),Sig_T_1_ResUp)
Sig_T_2_ResUp_hist = ROOT.RooDataHist ("T_2_ResUp_hist","", ROOT.RooArgList(D1,D2,D3),Sig_T_2_ResUp)
Sig_T_4_ResUp_hist = ROOT.RooDataHist ("T_3_ResUp_hist","", ROOT.RooArgList(D1,D2,D3),Sig_T_4_ResUp)
Sig_T_5_ResUp_hist = ROOT.RooDataHist ("T_5_ResUp_hist","", ROOT.RooArgList(D1,D2,D3),Sig_T_5_ResUp)
Sig_T_1_ResDown_hist = ROOT.RooDataHist ("T_1_ResDown_hist","", ROOT.RooArgList(D1,D2,D3),Sig_T_1_ResDown)
Sig_T_2_ResDown_hist = ROOT.RooDataHist ("T_2_ResDown_hist","", ROOT.RooArgList(D1,D2,D3),Sig_T_2_ResDown)
Sig_T_4_ResDown_hist = ROOT.RooDataHist ("T_3_ResDown_hist","", ROOT.RooArgList(D1,D2,D3),Sig_T_4_ResDown)
Sig_T_5_ResDown_hist = ROOT.RooDataHist ("T_5_ResDown_hist","", ROOT.RooArgList(D1,D2,D3),Sig_T_5_ResDown)
Sig_T_1_histfunc = ROOT.RooHistFunc ("T_1_histfunc","", ROOT.RooArgSet(D1,D2,D3),Sig_T_1_hist)
Sig_T_2_histfunc = ROOT.RooHistFunc ("T_2_histfunc","", ROOT.RooArgSet(D1,D2,D3),Sig_T_2_hist)
Sig_T_4_histfunc = ROOT.RooHistFunc ("T_3_histfunc","", ROOT.RooArgSet(D1,D2,D3),Sig_T_4_hist)
Sig_T_5_histfunc = ROOT.RooHistFunc ("T_5_histfunc","", ROOT.RooArgSet(D1,D2,D3),Sig_T_5_hist)
Sig_T_1_ResUp_histfunc = ROOT.RooHistFunc ("T_1_ResUp_histfunc","", ROOT.RooArgSet(D1,D2,D3),Sig_T_1_ResUp_hist)
Sig_T_2_ResUp_histfunc = ROOT.RooHistFunc ("T_2_ResUp_histfunc","", ROOT.RooArgSet(D1,D2,D3),Sig_T_2_ResUp_hist)
Sig_T_4_ResUp_histfunc = ROOT.RooHistFunc ("T_3_ResUp_histfunc","", ROOT.RooArgSet(D1,D2,D3),Sig_T_4_ResUp_hist)
Sig_T_5_ResUp_histfunc = ROOT.RooHistFunc ("T_5_ResUp_histfunc","", ROOT.RooArgSet(D1,D2,D3),Sig_T_5_ResUp_hist)
Sig_T_1_ResDown_histfunc = ROOT.RooHistFunc ("T_1_ResDown_histfunc","", ROOT.RooArgSet(D1,D2,D3),Sig_T_1_ResDown_hist)
Sig_T_2_ResDown_histfunc = ROOT.RooHistFunc ("T_2_ResDown_histfunc","", ROOT.RooArgSet(D1,D2,D3),Sig_T_2_ResDown_hist)
Sig_T_4_ResDown_histfunc = ROOT.RooHistFunc ("T_3_ResDown_histfunc","", ROOT.RooArgSet(D1,D2,D3),Sig_T_4_ResDown_hist)
Sig_T_5_ResDown_histfunc = ROOT.RooHistFunc ("T_5_ResDown_histfunc","", ROOT.RooArgSet(D1,D2,D3),Sig_T_5_ResDown_hist)
ggHpdfName = "ggH_RooSpinZeroPdf_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
ggHpdf = ROOT.HZZ4L_RooSpinZeroPdf_phase(ggHpdfName,ggHpdfName,D1,D2,D3,x,phi,ROOT.RooArgList(Sig_T_1_histfunc,Sig_T_2_histfunc,Sig_T_4_histfunc, Sig_T_5_histfunc))
#r_fa3 = ggHpdf.createIntegral(ROOT.RooArgSet(D1,D2,D3))
#r_fa3 = ggHpdf.getNormObj(0,ROOT.RooArgSet(D1,D2,D3))
#x.setVal(0.)
#normfa3 = ROOT.RooConstVar ("normfa3","normfa3",ggHpdf.createIntegral(ROOT.RooArgSet(D1,D2,D3)).getVal())
T1_integralName = "normt1_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
T2_integralName = "normt2_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
T4_integralName = "normt4_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
T5_integralName = "normt5_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
T1_integral = ROOT.RooConstVar (T1_integralName,T1_integralName,Sig_T_1.Integral())
T2_integral = ROOT.RooConstVar (T2_integralName,T2_integralName,Sig_T_2.Integral())
T4_integral = ROOT.RooConstVar (T4_integralName,T4_integralName,Sig_T_4.Integral())
T5_integral = ROOT.RooConstVar (T5_integralName,T5_integralName,Sig_T_5.Integral())
print "T1 ",T1_integral.getVal()
print "T2 ",T2_integral.getVal()
print "T4 ",T4_integral.getVal()
print "T5 ",T5_integral.getVal()
r_fa3_norm = ROOT.RooFormulaVar("ggH_norm","ggH_norm","( (1-abs(@0))*@1+abs(@0)*@2 + (@0>0 ? 1.: -1.)*sqrt (abs(@0)*(1-abs(@0)))* (cos(@5)*@3 +sin(@5)*@4) )/@1",RooArgList(x,T1_integral,T2_integral, T4_integral, T5_integral, phi))
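        # ggH_norm rescales the pure-0+ yield for a CP admixture: with
        # f = CMS_zz4l_fg4 (@0) and phase phi (@5), the yield is
        # (1-|f|)*I1 + |f|*I2 + sign(f)*sqrt(|f|*(1-|f|))*(cos(phi)*I4 + sin(phi)*I5),
        # normalized to the SM integral I1, where I1, I2, I4, I5 are the template
        # integrals defined above.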
ggHpdfName_syst1Up = "ggH_RooSpinZeroPdf_ResUp_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
ggHpdf_syst1Up = ROOT.HZZ4L_RooSpinZeroPdf_phase(ggHpdfName_syst1Up,ggHpdfName_syst1Up,D1,D2,D3,x,phi,ROOT.RooArgList(Sig_T_1_ResUp_histfunc,Sig_T_2_ResUp_histfunc,Sig_T_4_ResUp_histfunc, Sig_T_5_ResUp_histfunc))
ggHpdfName_syst1Down = "ggH_RooSpinZeroPdf_ResDown_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
ggHpdf_syst1Down = ROOT.HZZ4L_RooSpinZeroPdf_phase(ggHpdfName_syst1Down,ggHpdfName_syst1Down,D1,D2,D3,x,phi,ROOT.RooArgList(Sig_T_1_ResDown_histfunc,Sig_T_2_ResDown_histfunc,Sig_T_4_ResDown_histfunc,Sig_T_5_ResDown_histfunc))
## ------------------ END 2D SIGNAL SHAPES FOR PROPERTIES ------------------------ ##
## -------------------------- BACKGROUND SHAPES ---------------------------------- ##
## qqZZ contribution
name = "CMS_qqzzbkg_a0_{0:.0f}_{1:.0f}".format( self.channel,self.sqrts )
CMS_qqzzbkg_a0 = ROOT.RooRealVar(name,"CMS_qqzzbkg_a0",115.3,0.,200.)
name = "CMS_qqzzbkg_a1_{0:.0f}_{1:.0f}".format( self.channel,self.sqrts )
CMS_qqzzbkg_a1 = ROOT.RooRealVar(name,"CMS_qqzzbkg_a1",21.96,0.,200.)
name = "CMS_qqzzbkg_a2_{0:.0f}_{1:.0f}".format( self.channel,self.sqrts )
CMS_qqzzbkg_a2 = ROOT.RooRealVar(name,"CMS_qqzzbkg_a2",122.8,0.,200.)
name = "CMS_qqzzbkg_a3_{0:.0f}_{1:.0f}".format( self.channel,self.sqrts )
CMS_qqzzbkg_a3 = ROOT.RooRealVar(name,"CMS_qqzzbkg_a3",0.03479,0.,1.)
name = "CMS_qqzzbkg_a4_{0:.0f}_{1:.0f}".format( self.channel,self.sqrts )
CMS_qqzzbkg_a4 = ROOT.RooRealVar(name,"CMS_qqzzbkg_a4",185.5,0.,200.)
name = "CMS_qqzzbkg_a5_{0:.0f}_{1:.0f}".format( self.channel,self.sqrts )
CMS_qqzzbkg_a5 = ROOT.RooRealVar(name,"CMS_qqzzbkg_a5",12.67,0.,200.)
name = "CMS_qqzzbkg_a6_{0:.0f}_{1:.0f}".format( self.channel,self.sqrts )
CMS_qqzzbkg_a6 = ROOT.RooRealVar(name,"CMS_qqzzbkg_a6",34.81,0.,100.)
name = "CMS_qqzzbkg_a7_{0:.0f}_{1:.0f}".format( self.channel,self.sqrts )
CMS_qqzzbkg_a7 = ROOT.RooRealVar(name,"CMS_qqzzbkg_a7",0.1393,0.,1.)
name = "CMS_qqzzbkg_a8_{0:.0f}_{1:.0f}".format( self.channel,self.sqrts )
CMS_qqzzbkg_a8 = ROOT.RooRealVar(name,"CMS_qqzzbkg_a8",66.,0.,200.)
name = "CMS_qqzzbkg_a9_{0:.0f}_{1:.0f}".format( self.channel,self.sqrts )
CMS_qqzzbkg_a9 = ROOT.RooRealVar(name,"CMS_qqzzbkg_a9",0.07191,0.,1.)
name = "CMS_qqzzbkg_a10_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts )
CMS_qqzzbkg_a10 = ROOT.RooRealVar(name,"CMS_qqzzbkg_a10",94.11,0.,200.)
name = "CMS_qqzzbkg_a11_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts )
CMS_qqzzbkg_a11 = ROOT.RooRealVar(name,"CMS_qqzzbkg_a11",-5.111,-100.,100.)
name = "CMS_qqzzbkg_a12_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts )
CMS_qqzzbkg_a12 = ROOT.RooRealVar(name,"CMS_qqzzbkg_a12",4834,0.,10000.)
name = "CMS_qqzzbkg_a13_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts )
CMS_qqzzbkg_a13 = ROOT.RooRealVar(name,"CMS_qqzzbkg_a13",0.2543,0.,1.)
if (DEBUG) :
print "qqZZshape_a0 = ",theInputs['qqZZshape_a0']
print "qqZZshape_a1 = ",theInputs['qqZZshape_a1']
print "qqZZshape_a2 = ",theInputs['qqZZshape_a2']
print "qqZZshape_a3 = ",theInputs['qqZZshape_a3']
print "qqZZshape_a4 = ",theInputs['qqZZshape_a4']
print "qqZZshape_a5 = ",theInputs['qqZZshape_a5']
print "qqZZshape_a6 = ",theInputs['qqZZshape_a6']
print "qqZZshape_a7 = ",theInputs['qqZZshape_a7']
print "qqZZshape_a8 = ",theInputs['qqZZshape_a8']
print "qqZZshape_a9 = ",theInputs['qqZZshape_a9']
print "qqZZshape_a10 = ",theInputs['qqZZshape_a10']
print "qqZZshape_a11 = ",theInputs['qqZZshape_a11']
print "qqZZshape_a12 = ",theInputs['qqZZshape_a12']
print "qqZZshape_a13 = ",theInputs['qqZZshape_a13']
CMS_qqzzbkg_a0.setVal(theInputs['qqZZshape_a0'])
CMS_qqzzbkg_a1.setVal(theInputs['qqZZshape_a1'])
CMS_qqzzbkg_a2.setVal(theInputs['qqZZshape_a2'])
CMS_qqzzbkg_a3.setVal(theInputs['qqZZshape_a3'])
CMS_qqzzbkg_a4.setVal(theInputs['qqZZshape_a4'])
CMS_qqzzbkg_a5.setVal(theInputs['qqZZshape_a5'])
CMS_qqzzbkg_a6.setVal(theInputs['qqZZshape_a6'])
CMS_qqzzbkg_a7.setVal(theInputs['qqZZshape_a7'])
CMS_qqzzbkg_a8.setVal(theInputs['qqZZshape_a8'])
CMS_qqzzbkg_a9.setVal(theInputs['qqZZshape_a9'])
CMS_qqzzbkg_a10.setVal(theInputs['qqZZshape_a10'])
CMS_qqzzbkg_a11.setVal(theInputs['qqZZshape_a11'])
CMS_qqzzbkg_a12.setVal(theInputs['qqZZshape_a12'])
CMS_qqzzbkg_a13.setVal(theInputs['qqZZshape_a13'])
CMS_qqzzbkg_a0.setConstant(True)
CMS_qqzzbkg_a1.setConstant(True)
CMS_qqzzbkg_a2.setConstant(True)
CMS_qqzzbkg_a3.setConstant(True)
CMS_qqzzbkg_a4.setConstant(True)
CMS_qqzzbkg_a5.setConstant(True)
CMS_qqzzbkg_a6.setConstant(True)
CMS_qqzzbkg_a7.setConstant(True)
CMS_qqzzbkg_a8.setConstant(True)
CMS_qqzzbkg_a9.setConstant(True)
CMS_qqzzbkg_a10.setConstant(True)
CMS_qqzzbkg_a11.setConstant(True)
CMS_qqzzbkg_a12.setConstant(True)
CMS_qqzzbkg_a13.setConstant(True)
bkg_qqzz = ROOT.RooqqZZPdf_v2("bkg_qqzzTmp","bkg_qqzzTmp",CMS_zz4l_mass,CMS_qqzzbkg_a0,CMS_qqzzbkg_a1,CMS_qqzzbkg_a2,CMS_qqzzbkg_a3,CMS_qqzzbkg_a4,CMS_qqzzbkg_a5,CMS_qqzzbkg_a6,CMS_qqzzbkg_a7,CMS_qqzzbkg_a8,CMS_qqzzbkg_a9,CMS_qqzzbkg_a10,CMS_qqzzbkg_a11,CMS_qqzzbkg_a12,CMS_qqzzbkg_a13)
## ggZZ contribution
name = "CMS_ggzzbkg_a0_{0:.0f}_{1:.0f}".format( self.channel, self.sqrts )
CMS_ggzzbkg_a0 = ROOT.RooRealVar(name,"CMS_ggzzbkg_a0",115.3,0.,200.)
name = "CMS_ggzzbkg_a1_{0:.0f}_{1:.0f}".format( self.channel, self.sqrts )
CMS_ggzzbkg_a1 = ROOT.RooRealVar(name,"CMS_ggzzbkg_a1",21.96,0.,200.)
name = "CMS_ggzzbkg_a2_{0:.0f}_{1:.0f}".format( self.channel, self.sqrts )
CMS_ggzzbkg_a2 = ROOT.RooRealVar(name,"CMS_ggzzbkg_a2",122.8,0.,200.)
name = "CMS_ggzzbkg_a3_{0:.0f}_{1:.0f}".format( self.channel, self.sqrts )
CMS_ggzzbkg_a3 = ROOT.RooRealVar(name,"CMS_ggzzbkg_a3",0.03479,0.,1.)
name = "CMS_ggzzbkg_a4_{0:.0f}_{1:.0f}".format( self.channel, self.sqrts )
CMS_ggzzbkg_a4 = ROOT.RooRealVar(name,"CMS_ggzzbkg_a4",185.5,0.,200.)
name = "CMS_ggzzbkg_a5_{0:.0f}_{1:.0f}".format( self.channel, self.sqrts )
CMS_ggzzbkg_a5 = ROOT.RooRealVar(name,"CMS_ggzzbkg_a5",12.67,0.,200.)
name = "CMS_ggzzbkg_a6_{0:.0f}_{1:.0f}".format( self.channel, self.sqrts )
CMS_ggzzbkg_a6 = ROOT.RooRealVar(name,"CMS_ggzzbkg_a6",34.81,0.,100.)
name = "CMS_ggzzbkg_a7_{0:.0f}_{1:.0f}".format( self.channel, self.sqrts )
CMS_ggzzbkg_a7 = ROOT.RooRealVar(name,"CMS_ggzzbkg_a7",0.1393,0.,1.)
name = "CMS_ggzzbkg_a8_{0:.0f}_{1:.0f}".format( self.channel, self.sqrts )
CMS_ggzzbkg_a8 = ROOT.RooRealVar(name,"CMS_ggzzbkg_a8",66.,0.,200.)
name = "CMS_ggzzbkg_a9_{0:.0f}_{1:.0f}".format( self.channel, self.sqrts )
CMS_ggzzbkg_a9 = ROOT.RooRealVar(name,"CMS_ggzzbkg_a9",0.07191,0.,1.)
CMS_ggzzbkg_a0.setVal(theInputs['ggZZshape_a0'])
CMS_ggzzbkg_a1.setVal(theInputs['ggZZshape_a1'])
CMS_ggzzbkg_a2.setVal(theInputs['ggZZshape_a2'])
CMS_ggzzbkg_a3.setVal(theInputs['ggZZshape_a3'])
CMS_ggzzbkg_a4.setVal(theInputs['ggZZshape_a4'])
CMS_ggzzbkg_a5.setVal(theInputs['ggZZshape_a5'])
CMS_ggzzbkg_a6.setVal(theInputs['ggZZshape_a6'])
CMS_ggzzbkg_a7.setVal(theInputs['ggZZshape_a7'])
CMS_ggzzbkg_a8.setVal(theInputs['ggZZshape_a8'])
CMS_ggzzbkg_a9.setVal(theInputs['ggZZshape_a9'])
CMS_ggzzbkg_a0.setConstant(True)
CMS_ggzzbkg_a1.setConstant(True)
CMS_ggzzbkg_a2.setConstant(True)
CMS_ggzzbkg_a3.setConstant(True)
CMS_ggzzbkg_a4.setConstant(True)
CMS_ggzzbkg_a5.setConstant(True)
CMS_ggzzbkg_a6.setConstant(True)
CMS_ggzzbkg_a7.setConstant(True)
CMS_ggzzbkg_a8.setConstant(True)
CMS_ggzzbkg_a9.setConstant(True)
if (DEBUG) :
print "ggZZshape_a0 = ",theInputs['ggZZshape_a0']
print "ggZZshape_a1 = ",theInputs['ggZZshape_a1']
print "ggZZshape_a2 = ",theInputs['ggZZshape_a2']
print "ggZZshape_a3 = ",theInputs['ggZZshape_a3']
print "ggZZshape_a4 = ",theInputs['ggZZshape_a4']
print "ggZZshape_a5 = ",theInputs['ggZZshape_a5']
print "ggZZshape_a6 = ",theInputs['ggZZshape_a6']
print "ggZZshape_a7 = ",theInputs['ggZZshape_a7']
print "ggZZshape_a8 = ",theInputs['ggZZshape_a8']
print "ggZZshape_a9 = ",theInputs['ggZZshape_a9']
bkg_ggzz = ROOT.RooggZZPdf_v2("bkg_ggzzTmp","bkg_ggzzTmp",CMS_zz4l_mass,CMS_ggzzbkg_a0,CMS_ggzzbkg_a1,CMS_ggzzbkg_a2,CMS_ggzzbkg_a3,CMS_ggzzbkg_a4,CMS_ggzzbkg_a5,CMS_ggzzbkg_a6,CMS_ggzzbkg_a7,CMS_ggzzbkg_a8,CMS_ggzzbkg_a9)
## Reducible backgrounds
val_meanL_3P1F = float(theInputs['zjetsShape_mean_3P1F'])
val_sigmaL_3P1F = float(theInputs['zjetsShape_sigma_3P1F'])
val_normL_3P1F = float(theInputs['zjetsShape_norm_3P1F'])
val_meanL_2P2F = float(theInputs['zjetsShape_mean_2P2F'])
val_sigmaL_2P2F = float(theInputs['zjetsShape_sigma_2P2F'])
val_normL_2P2F = float(theInputs['zjetsShape_norm_2P2F'])
val_pol0_2P2F = float(theInputs['zjetsShape_pol0_2P2F'])
val_pol1_2P2F = float(theInputs['zjetsShape_pol1_2P2F'])
val_meanL_2P2F_2 = float(theInputs['zjetsShape_mean_2P2F_2e2mu'])
val_sigmaL_2P2F_2 = float(theInputs['zjetsShape_sigma_2P2F_2e2mu'])
val_normL_2P2F_2 = float(theInputs['zjetsShape_norm_2P2F_2e2mu'])
if (self.channel == self.ID_4mu):
name = "mlZjet_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
mlZjet = ROOT.RooRealVar(name,"mean landau Zjet",val_meanL_2P2F)
name = "slZjet_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
slZjet = ROOT.RooRealVar(name,"sigma landau Zjet",val_sigmaL_2P2F)
print "mean 4mu: ",mlZjet.getVal()
print "sigma 4mu: ",slZjet.getVal()
bkg_zjets = ROOT.RooLandau("bkg_zjetsTmp","bkg_zjetsTmp",CMS_zz4l_mass,mlZjet,slZjet)
elif (self.channel == self.ID_4e):
name = "mlZjet_2p2f_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
mlZjet_2p2f = ROOT.RooRealVar(name,"mean landau Zjet 2p2f",val_meanL_2P2F)
name = "slZjet_2p2f_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
slZjet_2p2f = ROOT.RooRealVar(name,"sigma landau Zjet 2p2f",val_sigmaL_2P2F)
name = "nlZjet_2p2f_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
nlZjet_2p2f = ROOT.RooRealVar(name,"norm landau Zjet 2p2f",val_normL_2P2F)
name = "p0Zjet_2p2f_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
p0Zjet_2p2f = ROOT.RooRealVar(name,"p0 Zjet 2p2f",val_pol0_2P2F)
name = "p1Zjet_2p2f_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
p1Zjet_2p2f = ROOT.RooRealVar(name,"p1 Zjet 2p2f",val_pol1_2P2F)
print "mean 2p2f 4e: ",mlZjet_2p2f.getVal()
print "sigma 2p2f 4e: ",slZjet_2p2f.getVal()
print "norm 2p2f 4e: ",nlZjet_2p2f.getVal()
print "pol0 2p2f 4e: ",p0Zjet_2p2f.getVal()
print "pol1 2p2f 4e: ",p1Zjet_2p2f.getVal()
bkg_zjets_2p2f = ROOT.RooGenericPdf("bkg_zjetsTmp_2p2f","bkg_zjetsTmp_2p2f","(TMath::Landau(@0,@1,@2))*@3*(1.+ TMath::Exp(@4+@5*@0))",RooArgList(CMS_zz4l_mass,mlZjet_2p2f,slZjet_2p2f,nlZjet_2p2f,p0Zjet_2p2f,p1Zjet_2p2f))
name = "mlZjet_3p1f_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
mlZjet_3p1f = ROOT.RooRealVar(name,"mean landau Zjet 3p1f",val_meanL_3P1F)
name = "slZjet_3p1f_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
slZjet_3p1f = ROOT.RooRealVar(name,"sigma landau Zjet 3p1f",val_sigmaL_3P1F)
name = "nlZjet_3p1f_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
nlZjet_3p1f = ROOT.RooRealVar(name,"norm landau Zjet 3p1f",val_normL_3P1F)
print "mean 3p1f 4e: ",mlZjet_3p1f.getVal()
print "sigma 3p1f 4e: ",slZjet_3p1f.getVal()
print "norm 3p1f 4e: ",nlZjet_3p1f.getVal()
bkg_zjets_3p1f = ROOT.RooLandau("bkg_zjetsTmp_3p1f","bkg_zjetsTmp_3p1f",CMS_zz4l_mass,mlZjet_3p1f,slZjet_3p1f)
bkg_zjets = ROOT.RooAddPdf("bkg_zjetsTmp","bkg_zjetsTmp",ROOT.RooArgList(bkg_zjets_2p2f,bkg_zjets_3p1f),ROOT.RooArgList(nlZjet_2p2f,nlZjet_3p1f))
elif (self.channel == self.ID_2e2mu):
name = "mlZjet_2p2f_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
mlZjet_2p2f = ROOT.RooRealVar(name,"mean landau Zjet 2p2f",val_meanL_2P2F)
name = "slZjet_2p2f_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
slZjet_2p2f = ROOT.RooRealVar(name,"sigma landau Zjet 2p2f",val_sigmaL_2P2F)
name = "nlZjet_2p2f_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
nlZjet_2p2f = ROOT.RooRealVar(name,"norm landau Zjet 2p2f",val_normL_2P2F)
print "mean 2p2f 2mu2e: ",mlZjet_2p2f.getVal()
print "sigma 2p2f 2mu2e: ",slZjet_2p2f.getVal()
print "norm 2p2f 2mu2e: ",nlZjet_2p2f.getVal()
bkg_zjets_2p2f = ROOT.RooLandau("bkg_zjetsTmp_2p2f","bkg_zjetsTmp_2p2f",CMS_zz4l_mass,mlZjet_2p2f,slZjet_2p2f)
name = "mlZjet_2p2f_2_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
mlZjet_2p2f_2 = ROOT.RooRealVar(name,"mean landau Zjet 2p2f 2e2mu",val_meanL_2P2F_2)
name = "slZjet_2p2f_2_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
slZjet_2p2f_2 = ROOT.RooRealVar(name,"sigma landau Zjet 2p2f 2e2mu",val_sigmaL_2P2F_2)
name = "nlZjet_2p2f_2_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
nlZjet_2p2f_2 = ROOT.RooRealVar(name,"norm landau Zjet 2p2f 2e2mu",val_normL_2P2F_2)
print "mean 2p2f 2e2mu: ",mlZjet_2p2f_2.getVal()
print "sigma 2p2f 2e2mu: ",slZjet_2p2f_2.getVal()
print "norm 2p2f 2e2mu: ",nlZjet_2p2f_2.getVal()
bkg_zjets_2p2f_2 = ROOT.RooLandau("bkg_zjetsTmp_2p2f_2","bkg_zjetsTmp_2p2f_2",CMS_zz4l_mass,mlZjet_2p2f_2,slZjet_2p2f_2)
name = "mlZjet_3p1f_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
mlZjet_3p1f = ROOT.RooRealVar(name,"mean landau Zjet 3p1f",val_meanL_3P1F)
name = "slZjet_3p1f_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
slZjet_3p1f = ROOT.RooRealVar(name,"sigma landau Zjet 3p1f",val_sigmaL_3P1F)
name = "nlZjet_3p1f_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
nlZjet_3p1f = ROOT.RooRealVar(name,"norm landau Zjet 3p1f",val_normL_3P1F)
print "mean 3p1f 2mu2e: ",mlZjet_3p1f.getVal()
print "sigma 3p1f 2mu2e: ",slZjet_3p1f.getVal()
print "norm 3p1f 2mu2e: ",nlZjet_3p1f.getVal()
bkg_zjets_3p1f = ROOT.RooLandau("bkg_zjetsTmp_3p1f","bkg_zjetsTmp_3p1f",CMS_zz4l_mass,mlZjet_3p1f,slZjet_3p1f)
bkg_zjets = ROOT.RooAddPdf("bkg_zjetsTmp","bkg_zjetsTmp",ROOT.RooArgList(bkg_zjets_2p2f,bkg_zjets_3p1f,bkg_zjets_2p2f_2),ROOT.RooArgList(nlZjet_2p2f,nlZjet_3p1f,nlZjet_2p2f_2))
## ------------------ 2D BACKGROUND SHAPES FOR PROPERTIES ------------------- ##
        print '2D background shapes for Properties'
templateBkgName = "{0}_{2:.0f}TeV/{1}_{0}_bkg.root".format(self.templateDir,self.appendName, self.sqrts)#templates3D_AdapSmoothMirror_Background_
#templateBkgName = "{0}/{1}_Interf_templates_bkg_dc_3D_5bin.root".format(self.templateDir, self.appendName )
bkgTempFile = ROOT.TFile(templateBkgName)
#qqZZTemplate = bkgTempFile.Get("template3D_qqZZAdapSmoothMirror{0}".format(self.getVariable("_{0}".format(self.appendName),"",self.channel != self.ID_2e2mu)))
#qqZZTemplate = bkgTempFile.Get("template3D_qqZZFixed10x10x5Mirror_{0}".format(self.appendName))#template3D_qqZZAdapSmoothMirror_
#qqZZTemplate = bkgTempFile.Get("T_1")
qqZZTemplate = bkgTempFile.Get("template_qqZZ")
TemplateName = "qqZZTempDataHist_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
qqZZTempDataHist = ROOT.RooDataHist(TemplateName,TemplateName,ROOT.RooArgList(D1,D2,D3),qqZZTemplate)
PdfName = "qqZZ_TemplatePdf_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
qqZZTemplatePdf = ROOT.RooHistPdf(PdfName,PdfName,ROOT.RooArgSet(D1,D2,D3),qqZZTempDataHist)
#ggZZTemplate = bkgTempFile.Get("template3D_ggZZAdapSmoothMirror{0}".format(self.getVariable("_{0}".format(self.appendName),"",self.channel != self.ID_2e2mu)))
#ggZZTemplate = bkgTempFile.Get("template3D_ggZZFixed10x10x5Mirror_{0}".format(self.appendName))
#ggZZTemplate = bkgTempFile.Get("T_2")
ggZZTemplate = bkgTempFile.Get("template_ggZZ")
TemplateName = "ggZZTempDataHist_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
ggZZTempDataHist = ROOT.RooDataHist(TemplateName,TemplateName,ROOT.RooArgList(D1,D2,D3),ggZZTemplate)
PdfName = "ggZZ_TemplatePdf_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
ggZZTemplatePdf = ROOT.RooHistPdf(PdfName,PdfName,ROOT.RooArgSet(D1,D2,D3),ggZZTempDataHist)
#ZjetsTemplate = bkgTempFile.Get("template3D_ZXAdapSmoothMirror{0}".format(self.getVariable("_{0}".format(self.appendName),"",self.channel != self.ID_2e2mu)))
#ZjetsTemplate = bkgTempFile.Get("template3D_ZXFixed10x10x5Mirror_{0}".format(self.appendName))
#ZjetsTemplate = bkgTempFile.Get("T_3")
ZjetsTemplate = bkgTempFile.Get("template_ZX")
TemplateName = "ZjetsTempDataHist_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
ZjetsTempDataHist = ROOT.RooDataHist(TemplateName,TemplateName,ROOT.RooArgList(D1,D2,D3),ZjetsTemplate)
PdfName = "Zjets_TemplatePdf_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
ZjetsTemplatePdf = ROOT.RooHistPdf(PdfName,PdfName,ROOT.RooArgSet(D1,D2,D3),ZjetsTempDataHist)
ZjetsTemplateDown = bkgTempFile.Get("T_mirror")
TemplateName = "ZjetsTempDownDataHist_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
ZjetsTempDataHistDown = ROOT.RooDataHist(TemplateName,TemplateName,ROOT.RooArgList(D1,D2,D3),ZjetsTemplateDown)
PdfName = "Zjets_TemplatDownePdf_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
ZjetsTemplatePdfDown = ROOT.RooHistPdf(PdfName,PdfName,ROOT.RooArgSet(D1,D2,D3),ZjetsTempDataHistDown)
funcList_zjets = ROOT.RooArgList()
morphBkgVarName = "CMS_zz4l_smd_zjets_bkg_{0:.0f}".format(self.channel)
alphaMorphBkg = ROOT.RooRealVar(morphBkgVarName,morphBkgVarName,0,-20,20)
morphVarListBkg = ROOT.RooArgList()
if(self.bkgMorph):
funcList_zjets.add(ZjetsTemplatePdf)
funcList_zjets.add(qqZZTemplatePdf)
funcList_zjets.add(ZjetsTemplatePdfDown)
alphaMorphBkg.setConstant(False)
morphVarListBkg.add(alphaMorphBkg)
else:
funcList_zjets.add(ZjetsTemplatePdf)
alphaMorphBkg.setConstant(True)
MorphName = "ZX_TemplateMorphPdf_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
#qqZZTemplateMorphPdf = ROOT.FastVerticalInterpHistPdf3D(MorphName,MorphName,D1,D2,D3,False,ROOT.RooArgList(qqZZTemplatePdf),ROOT.RooArgList(),1.0,1)
#ggZZTemplateMorphPdf = ROOT.FastVerticalInterpHistPdf3D(MorphName,MorphName,D1,D2,D3,False,ROOT.RooArgList(ggZZTemplatePdf),ROOT.RooArgList(),1.0,1)
ZjetsTemplateMorphPdf = ROOT.FastVerticalInterpHistPdf3D(MorphName,MorphName,D1,D2,D3,False,funcList_zjets,morphVarListBkg,1.0,1)
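        # FastVerticalInterpHistPdf3D interpolates vertically between the nominal
        # Z+X template and the alternate shapes (qqZZ-like up, mirrored down) as
        # the nuisance alphaMorphBkg moves, providing a shape systematic for the
        # reducible background; with bkgMorph off, the nominal template is used
        # as-is.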
## ---------------- END 2D BACKGROUND SHAPES FOR PROPERTIES ----------------- ##
## ----------------------- PLOTS FOR SANITY CHECKS -------------------------- ##
canv_name = "czz_{0}_{1}".format(self.mH,self.appendName)
czz = ROOT.TCanvas( canv_name, canv_name, 750, 700 )
czz.cd()
zzframe_s = CMS_zz4l_mass.frame(45)
if self.bUseCBnoConvolution: super(RooDoubleCB,signalCB_ggH).plotOn(zzframe_s, ROOT.RooFit.LineStyle(1), ROOT.RooFit.LineColor(1) )
elif self.isHighMass : super(ROOT.RooFFTConvPdf,sig_ggH_HM).plotOn(zzframe_s, ROOT.RooFit.LineStyle(1), ROOT.RooFit.LineColor(1) )
else : super(ROOT.RooFFTConvPdf,sig_ggH).plotOn(zzframe_s, ROOT.RooFit.LineStyle(1), ROOT.RooFit.LineColor(1) )
super(ROOT.RooqqZZPdf_v2,bkg_qqzz).plotOn(zzframe_s, ROOT.RooFit.LineStyle(1), ROOT.RooFit.LineColor(4) )
super(ROOT.RooggZZPdf_v2,bkg_ggzz).plotOn(zzframe_s, ROOT.RooFit.LineStyle(1), ROOT.RooFit.LineColor(6) )
super(ROOT.RooAbsPdf,bkg_zjets).plotOn(zzframe_s, ROOT.RooFit.LineStyle(2), ROOT.RooFit.LineColor(6) )
zzframe_s.Draw()
figName = "{0}/figs/mzz_{1}_{2}.png".format(self.outputDir, self.mH, self.appendName)
czz.SaveAs(figName)
del czz
## ------------------- LUMI -------------------- ##
rrvLumi = ROOT.RooRealVar("cmshzz4l_lumi","cmshzz4l_lumi",self.lumi)
## ----------------------- SIGNAL RATES ----------------------- ##
CMS_zz4l_mass.setRange("shape",self.low_M,self.high_M)
fr_low_M = self.low_M
fr_high_M = self.high_M
if (self.mH >= 450):
fr_low_M = 100
fr_high_M = 1000
if (self.mH >= 750):
fr_low_M = 100
fr_high_M = 1400
CMS_zz4l_mass.setRange("fullrangesignal",fr_low_M,fr_high_M)
CMS_zz4l_mass.setRange("fullrange",100,1400)
sigEffName = "hzz4lggHeff_{0:.0f}_{1:.0f}_a1".format(self.channel,self.sqrts)
rrva1 = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_a1'])
sigEffName = "hzz4lggHeff_{0:.0f}_{1:.0f}_a2".format(self.channel,self.sqrts)
rrva2 = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_a2'])
sigEffName = "hzz4lggHeff_{0:.0f}_{1:.0f}_a3".format(self.channel,self.sqrts)
rrva3 = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_a3'])
sigEffName = "hzz4lggHeff_{0:.0f}_{1:.0f}_a4".format(self.channel,self.sqrts)
rrva4 = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_a4'])
sigEffName = "hzz4lggHeff_{0:.0f}_{1:.0f}_b1".format(self.channel,self.sqrts)
rrvb1 = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_b1'])
sigEffName = "hzz4lggHeff_{0:.0f}_{1:.0f}_b2".format(self.channel,self.sqrts)
rrvb2 = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_b2'])
sigEffName = "hzz4lggHeff_{0:.0f}_{1:.0f}_b3".format(self.channel,self.sqrts)
rrvb3 = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_b3'])
sigEffName = "hzz4lggHeff_{0:.0f}_{1:.0f}_g1".format(self.channel,self.sqrts)
rrvg1 = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_g1'])
sigEffName = "hzz4lggHeff_{0:.0f}_{1:.0f}_g2".format(self.channel,self.sqrts)
rrvg2 = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_g2'])
sigEffName = "hzz4lggHeff_{0:.0f}_{1:.0f}_g3".format(self.channel,self.sqrts)
rrvg3 = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_g3'])
sigEffName = "hzz4lqqHeff_{0:.0f}_{1:.0f}_a1".format(self.channel,self.sqrts)
rrva1_qqh = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_qqHa1'])
sigEffName = "hzz4lqqHeff_{0:.0f}_{1:.0f}_a2".format(self.channel,self.sqrts)
rrva2_qqh = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_qqHa2'])
sigEffName = "hzz4lqqHeff_{0:.0f}_{1:.0f}_a3".format(self.channel,self.sqrts)
rrva3_qqh = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_qqHa3'])
sigEffName = "hzz4lqqHeff_{0:.0f}_{1:.0f}_a4".format(self.channel,self.sqrts)
rrva4_qqh = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_qqHa4'])
sigEffName = "hzz4lqqHeff_{0:.0f}_{1:.0f}_b1".format(self.channel,self.sqrts)
rrvb1_qqh = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_qqHb1'])
sigEffName = "hzz4lqqHeff_{0:.0f}_{1:.0f}_b2".format(self.channel,self.sqrts)
rrvb2_qqh = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_qqHb2'])
sigEffName = "hzz4lqqHeff_{0:.0f}_{1:.0f}_b3".format(self.channel,self.sqrts)
rrvb3_qqh = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_qqHb3'])
sigEffName = "hzz4lqqHeff_{0:.0f}_{1:.0f}_g1".format(self.channel,self.sqrts)
rrvg1_qqh = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_qqHg1'])
sigEffName = "hzz4lqqHeff_{0:.0f}_{1:.0f}_g2".format(self.channel,self.sqrts)
rrvg2_qqh = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_qqHg2'])
sigEffName = "hzz4lqqHeff_{0:.0f}_{1:.0f}_g3".format(self.channel,self.sqrts)
rrvg3_qqh = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_qqHg3'])
sigEffName = "hzz4lZHeff_{0:.0f}_{1:.0f}_a1".format(self.channel,self.sqrts)
rrva1_zh = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_ZHa1'])
sigEffName = "hzz4lZHeff_{0:.0f}_{1:.0f}_a2".format(self.channel,self.sqrts)
rrva2_zh = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_ZHa2'])
sigEffName = "hzz4lZHeff_{0:.0f}_{1:.0f}_a3".format(self.channel,self.sqrts)
rrva3_zh = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_ZHa3'])
sigEffName = "hzz4lZHeff_{0:.0f}_{1:.0f}_a4".format(self.channel,self.sqrts)
rrva4_zh = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_ZHa4'])
sigEffName = "hzz4lZHeff_{0:.0f}_{1:.0f}_b1".format(self.channel,self.sqrts)
rrvb1_zh = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_ZHb1'])
sigEffName = "hzz4lZHeff_{0:.0f}_{1:.0f}_b2".format(self.channel,self.sqrts)
rrvb2_zh = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_ZHb2'])
sigEffName = "hzz4lZHeff_{0:.0f}_{1:.0f}_b3".format(self.channel,self.sqrts)
rrvb3_zh = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_ZHb3'])
sigEffName = "hzz4lZHeff_{0:.0f}_{1:.0f}_g1".format(self.channel,self.sqrts)
rrvg1_zh = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_ZHg1'])
sigEffName = "hzz4lZHeff_{0:.0f}_{1:.0f}_g2".format(self.channel,self.sqrts)
rrvg2_zh = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_ZHg2'])
sigEffName = "hzz4lZHeff_{0:.0f}_{1:.0f}_g3".format(self.channel,self.sqrts)
rrvg3_zh = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_ZHg3'])
sigEffName = "hzz4lWHeff_{0:.0f}_{1:.0f}_a1".format(self.channel,self.sqrts)
rrva1_wh = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_WHa1'])
sigEffName = "hzz4lWHeff_{0:.0f}_{1:.0f}_a2".format(self.channel,self.sqrts)
rrva2_wh = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_WHa2'])
sigEffName = "hzz4lWHeff_{0:.0f}_{1:.0f}_a3".format(self.channel,self.sqrts)
rrva3_wh = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_WHa3'])
sigEffName = "hzz4lWHeff_{0:.0f}_{1:.0f}_a4".format(self.channel,self.sqrts)
rrva4_wh = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_WHa4'])
sigEffName = "hzz4lWHeff_{0:.0f}_{1:.0f}_b1".format(self.channel,self.sqrts)
rrvb1_wh = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_WHb1'])
sigEffName = "hzz4lWHeff_{0:.0f}_{1:.0f}_b2".format(self.channel,self.sqrts)
rrvb2_wh = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_WHb2'])
sigEffName = "hzz4lWHeff_{0:.0f}_{1:.0f}_b3".format(self.channel,self.sqrts)
rrvb3_wh = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_WHb3'])
sigEffName = "hzz4lWHeff_{0:.0f}_{1:.0f}_g1".format(self.channel,self.sqrts)
rrvg1_wh = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_WHg1'])
sigEffName = "hzz4lWHeff_{0:.0f}_{1:.0f}_g2".format(self.channel,self.sqrts)
rrvg2_wh = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_WHg2'])
sigEffName = "hzz4lWHeff_{0:.0f}_{1:.0f}_g3".format(self.channel,self.sqrts)
rrvg3_wh = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_WHg3'])
sigEffName = "hzz4lttHeff_{0:.0f}_{1:.0f}_a1".format(self.channel,self.sqrts)
rrva1_tth = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_ttHa1'])
sigEffName = "hzz4lttHeff_{0:.0f}_{1:.0f}_a2".format(self.channel,self.sqrts)
rrva2_tth = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_ttHa2'])
sigEffName = "hzz4lttHeff_{0:.0f}_{1:.0f}_a3".format(self.channel,self.sqrts)
rrva3_tth = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_ttHa3'])
sigEffName = "hzz4lttHeff_{0:.0f}_{1:.0f}_a4".format(self.channel,self.sqrts)
rrva4_tth = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_ttHa4'])
sigEffName = "hzz4lttHeff_{0:.0f}_{1:.0f}_b1".format(self.channel,self.sqrts)
rrvb1_tth = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_ttHb1'])
sigEffName = "hzz4lttHeff_{0:.0f}_{1:.0f}_b2".format(self.channel,self.sqrts)
rrvb2_tth = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_ttHb2'])
sigEffName = "hzz4lttHeff_{0:.0f}_{1:.0f}_b3".format(self.channel,self.sqrts)
rrvb3_tth = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_ttHb3'])
sigEffName = "hzz4lttHeff_{0:.0f}_{1:.0f}_g1".format(self.channel,self.sqrts)
rrvg1_tth = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_ttHg1'])
sigEffName = "hzz4lttHeff_{0:.0f}_{1:.0f}_g2".format(self.channel,self.sqrts)
rrvg2_tth = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_ttHg2'])
sigEffName = "hzz4lttHeff_{0:.0f}_{1:.0f}_g3".format(self.channel,self.sqrts)
rrvg3_tth = ROOT.RooRealVar(sigEffName,sigEffName, theInputs['sigEff_ttHg3'])
if(DEBUG):
print "sigEff_a1 = ",theInputs['sigEff_a1']
print "sigEff_a2 = ",theInputs['sigEff_a2']
print "sigEff_a3 = ",theInputs['sigEff_a3']
print "sigEff_a4 = ",theInputs['sigEff_a4']
print "sigEff_b1 = ",theInputs['sigEff_b1']
print "sigEff_b2 = ",theInputs['sigEff_b2']
print "sigEff_b3 = ",theInputs['sigEff_b3']
print "sigEff_g1 = ",theInputs['sigEff_g1']
print "sigEff_g2 = ",theInputs['sigEff_g2']
print "sigEff_g3 = ",theInputs['sigEff_g3']
print "sigEff_qqHa1 = ",theInputs['sigEff_qqHa1']
print "sigEff_qqHa2 = ",theInputs['sigEff_qqHa2']
print "sigEff_qqHa3 = ",theInputs['sigEff_qqHa3']
print "sigEff_qqHa4 = ",theInputs['sigEff_qqHa4']
print "sigEff_qqHb1 = ",theInputs['sigEff_qqHb1']
print "sigEff_qqHb2 = ",theInputs['sigEff_qqHb2']
print "sigEff_qqHb3 = ",theInputs['sigEff_qqHb3']
print "sigEff_qqHg1 = ",theInputs['sigEff_qqHg1']
print "sigEff_qqHg2 = ",theInputs['sigEff_qqHg2']
print "sigEff_qqHg3 = ",theInputs['sigEff_qqHg3']
print "sigEff_ZHa1 = ",theInputs['sigEff_ZHa1']
print "sigEff_ZHa2 = ",theInputs['sigEff_ZHa2']
print "sigEff_ZHa3 = ",theInputs['sigEff_ZHa3']
print "sigEff_ZHa4 = ",theInputs['sigEff_ZHa4']
print "sigEff_ZHb1 = ",theInputs['sigEff_ZHb1']
print "sigEff_ZHb2 = ",theInputs['sigEff_ZHb2']
print "sigEff_ZHb3 = ",theInputs['sigEff_ZHb3']
print "sigEff_ZHg1 = ",theInputs['sigEff_ZHg1']
print "sigEff_ZHg2 = ",theInputs['sigEff_ZHg2']
print "sigEff_ZHg3 = ",theInputs['sigEff_ZHg3']
print "sigEff_WHa1 = ",theInputs['sigEff_WHa1']
print "sigEff_WHa2 = ",theInputs['sigEff_WHa2']
print "sigEff_WHa3 = ",theInputs['sigEff_WHa3']
print "sigEff_WHa4 = ",theInputs['sigEff_WHa4']
print "sigEff_WHb1 = ",theInputs['sigEff_WHb1']
print "sigEff_WHb2 = ",theInputs['sigEff_WHb2']
print "sigEff_WHb3 = ",theInputs['sigEff_WHb3']
print "sigEff_WHg1 = ",theInputs['sigEff_WHg1']
print "sigEff_WHg2 = ",theInputs['sigEff_WHg2']
print "sigEff_WHg3 = ",theInputs['sigEff_WHg3']
print "sigEff_ttHa1 = ",theInputs['sigEff_ttHa1']
print "sigEff_ttHa2 = ",theInputs['sigEff_ttHa2']
print "sigEff_ttHa3 = ",theInputs['sigEff_ttHa3']
print "sigEff_ttHa4 = ",theInputs['sigEff_ttHa4']
print "sigEff_ttHb1 = ",theInputs['sigEff_ttHb1']
print "sigEff_ttHb2 = ",theInputs['sigEff_ttHb2']
print "sigEff_ttHb3 = ",theInputs['sigEff_ttHb3']
print "sigEff_ttHg1 = ",theInputs['sigEff_ttHg1']
print "sigEff_ttHg2 = ",theInputs['sigEff_ttHg2']
print "sigEff_ttHg3 = ",theInputs['sigEff_ttHg3']
sigEffName_ggH = "hzz4lggHeff_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
sigEffName_qqH = "hzz4lqqHeff_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
sigEffName_WH = "hzz4lWHeff_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
sigEffName_ZH = "hzz4lZHeff_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
sigEffName_ttH = "hzz4lttHeff_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
listEff = ROOT.RooArgList(rrva1,rrva2,rrva3,rrva4,rrvb1,rrvb2,rrvb3,self.MH)
listEff.add(rrvg1)
listEff.add(rrvg2)
listEff.add(rrvg3)
listEff_qqh = ROOT.RooArgList(rrva1_qqh,rrva2_qqh,rrva3_qqh,rrva4_qqh,rrvb1_qqh,rrvb2_qqh,rrvb3_qqh,self.MH)
listEff_qqh.add(rrvg1_qqh)
listEff_qqh.add(rrvg2_qqh)
listEff_qqh.add(rrvg3_qqh)
listEff_wh = ROOT.RooArgList(rrva1_wh,rrva2_wh,rrva3_wh,rrva4_wh,rrvb1_wh,rrvb2_wh,rrvb3_wh,self.MH)
listEff_wh.add(rrvg1_wh)
listEff_wh.add(rrvg2_wh)
listEff_wh.add(rrvg3_wh)
listEff_zh = ROOT.RooArgList(rrva1_zh,rrva2_zh,rrva3_zh,rrva4_zh,rrvb1_zh,rrvb2_zh,rrvb3_zh,self.MH)
listEff_zh.add(rrvg1_zh)
listEff_zh.add(rrvg2_zh)
listEff_zh.add(rrvg3_zh)
listEff_tth = ROOT.RooArgList(rrva1_tth,rrva2_tth,rrva3_tth,rrva4_tth,rrvb1_tth,rrvb2_tth,rrvb3_tth,self.MH)
listEff_tth.add(rrvg1_tth)
listEff_tth.add(rrvg2_tth)
listEff_tth.add(rrvg3_tth)
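# Signal efficiency vs mH, same functional form for every production mode:
# eff(mH) = (a1 + a2*Erf((mH - a3)/a4)) * (b1 + b2*mH + b3*mH^2) + g1*Gaus(mH, g2, g3)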
rfvSigEff_ggH = ROOT.RooFormulaVar(sigEffName_ggH,"(@0+@1*TMath::Erf((@7-@2)/@3))*(@4+@5*@7+@6*@7*@7)+@8*TMath::Gaus(@7,@9,@10)",listEff) #ROOT.RooArgList(rrva1,rrva2,rrva3,rrva4,rrvb1,rrvb2,rrvb3,self.MH,rrvg1,rrvg2,rrvg3))
rfvSigEff_qqH = ROOT.RooFormulaVar(sigEffName_qqH,"(@0+@1*TMath::Erf((@7-@2)/@3))*(@4+@5*@7+@6*@7*@7)+@8*TMath::Gaus(@7,@9,@10)",listEff_qqh)
rfvSigEff_ZH = ROOT.RooFormulaVar(sigEffName_ZH,"(@0+@1*TMath::Erf((@7-@2)/@3))*(@4+@5*@7+@6*@7*@7)+@8*TMath::Gaus(@7,@9,@10)",listEff_zh)
rfvSigEff_WH = ROOT.RooFormulaVar(sigEffName_WH,"(@0+@1*TMath::Erf((@7-@2)/@3))*(@4+@5*@7+@6*@7*@7)+@8*TMath::Gaus(@7,@9,@10)",listEff_wh)
rfvSigEff_ttH = ROOT.RooFormulaVar(sigEffName_ttH,"(@0+@1*TMath::Erf((@7-@2)/@3))*(@4+@5*@7+@6*@7*@7)+@8*TMath::Gaus(@7,@9,@10)",listEff_tth)
#from TF1 *polyFunc= new TF1("polyFunc","([0]+[1]*TMath::Erf( (x-[2])/[3] ))*([4]+[5]*x+[6]*x*x)+[7]*TMath::Gaus(x,[8],[9])", 110., xMax);
## the following printout is needed, don't remove it
print " @@@@@@@@ ggHeff ",rfvSigEff_ggH.getVal()
print " @@@@@@@@ qqHeff ",rfvSigEff_qqH.getVal()
print " @@@@@@@@ ZHeff ",rfvSigEff_ZH.getVal()
print " @@@@@@@@ WHeff ",rfvSigEff_WH.getVal()
print " @@@@@@@@ ttHeff ",rfvSigEff_ttH.getVal()
CS_ggH = myCSW.HiggsCS(1,self.mH,self.sqrts)
CS_VBF = myCSW.HiggsCS(2,self.mH,self.sqrts)
CS_WH = myCSW.HiggsCS(3,self.mH,self.sqrts)
CS_ZH = myCSW.HiggsCS(4,self.mH,self.sqrts)
CS_ttH = myCSW.HiggsCS(5,self.mH,self.sqrts)
BRH2e2mu = myCSW.HiggsBR(13,self.mH)
BRH4mu = myCSW.HiggsBR(12,self.mH)
BRH4e = myCSW.HiggsBR(12,self.mH)
BR = 0.0
if( self.channel == self.ID_4mu ): BR = BRH4mu
if( self.channel == self.ID_4e ): BR = BRH4e
if( self.channel == self.ID_2e2mu ): BR = BRH2e2mu
#HZZ Branching ratio for ZH,WH,ttH samples
BRZZ = myCSW.HiggsBR(11,self.mH)
sigEfficiency_ggH = rfvSigEff_ggH.getVal()
sigEfficiency_qqH = rfvSigEff_qqH.getVal()
sigEfficiency_ZH = rfvSigEff_ZH.getVal()
sigEfficiency_WH = rfvSigEff_WH.getVal()
sigEfficiency_ttH = rfvSigEff_ttH.getVal()
if(DEBUG):
print "CS_ggH: ",CS_ggH,", CS_VBF: ",CS_VBF,", CS_WH: ",CS_WH,", CS_ZH: ",CS_ZH
print ", CS_ttH: ",CS_ttH,", BRH2e2mu: ",BRH2e2mu,", BRH4mu: ",BRH4mu,", BRH4e: ",BRH4e,", BRZZ: ",BRZZ
## SIG YIELDS
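# yield = cross section x branching ratio x efficiency x luminosity; the factor
# of 1000 presumably converts the cross section from pb to fb to match a
# luminosity given in fb^-1.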
sigRate_ggH = CS_ggH*BR*sigEfficiency_ggH*1000.*self.lumi
#sigRate_VBF = CS_VBF*BR*sigEfficiency_qqH*1000.*self.lumi
qqh_eff = theInputs['qqH_eff']
sigRate_VBF = CS_VBF*BR*qqh_eff*1000.*self.lumi
sigRate_WH = CS_WH*BRZZ*sigEfficiency_WH*1000.*self.lumi
sigRate_ZH = CS_ZH*BRZZ*sigEfficiency_ZH*1000.*self.lumi
sigRate_ttH = CS_ttH*BRZZ*sigEfficiency_ttH*1000.*self.lumi
rfvSMD_Ratio_ggH = ROOT.RooFormulaVar()
rfvSMD_Ratio_qqH = ROOT.RooFormulaVar()
rfvSMD_Ratio_WH = ROOT.RooFormulaVar()
rfvSMD_Ratio_ZH = ROOT.RooFormulaVar()
rfvSMD_Ratio_ttH = ROOT.RooFormulaVar()
tag_Ratio_Name = "hzz4l_SMD_ratio_ggH_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
rfvSMD_Ratio_ggH = ROOT.RooRealVar(tag_Ratio_Name,tag_Ratio_Name,self.SMDsigCut)
tag_Ratio_Name = "hzz4l_SMD_ratio_qqH_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
rfvSMD_Ratio_qqH = ROOT.RooRealVar(tag_Ratio_Name,tag_Ratio_Name,self.SMDsigCut)
tag_Ratio_Name = "hzz4l_SMD_ratio_WH_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
rfvSMD_Ratio_WH = ROOT.RooRealVar(tag_Ratio_Name,tag_Ratio_Name,self.SMDsigCut)
tag_Ratio_Name = "hzz4l_SMD_ratio_ZH_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
rfvSMD_Ratio_ZH = ROOT.RooRealVar(tag_Ratio_Name,tag_Ratio_Name,self.SMDsigCut)
tag_Ratio_Name = "hzz4l_SMD_ratio_ttH_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
rfvSMD_Ratio_ttH = ROOT.RooRealVar(tag_Ratio_Name,tag_Ratio_Name,self.SMDsigCut)
print "@@@@@@@@@@@@@@@@@@@@@@ ", rfvSMD_Ratio_ggH.getVal()
print "@@@@@@@@@@@@@@@@@@@@@@ ", rfvSMD_Ratio_qqH.getVal()
print "@@@@@@@@@@@@@@@@@@@@@@ ", rfvSMD_Ratio_WH.getVal()
print "@@@@@@@@@@@@@@@@@@@@@@ ", rfvSMD_Ratio_ZH.getVal()
print "@@@@@@@@@@@@@@@@@@@@@@ ", rfvSMD_Ratio_ttH.getVal()
sigRate_ggH *= rfvSMD_Ratio_ggH.getVal()
sigRate_VBF *= rfvSMD_Ratio_qqH.getVal()
sigRate_WH *= rfvSMD_Ratio_WH.getVal()
sigRate_ZH *= rfvSMD_Ratio_ZH.getVal()
sigRate_ttH *= rfvSMD_Ratio_ttH.getVal()
tmpNormSigNoConv = signalCB_ggH.createIntegral( ROOT.RooArgSet(CMS_zz4l_mass), ROOT.RooFit.Range("fullrangesignal") ).getVal()
tmpNormSigConv = sig_ggH.createIntegral( ROOT.RooArgSet(CMS_zz4l_mass), ROOT.RooFit.Range("fullrangesignal") ).getVal()
tmpNormSigHM = sig_ggH_HM.createIntegral( ROOT.RooArgSet(CMS_zz4l_mass), ROOT.RooFit.Range("fullrangesignal") ).getVal()
normalizationSignal = 0.0
if self.isHighMass : normalizationSignal = tmpNormSigHM
else : normalizationSignal = self.getVariable(tmpNormSigNoConv,tmpNormSigConv,self.bUseCBnoConvolution)
print "#################### ",signalCB_ggH.createIntegral( ROOT.RooArgSet(CMS_zz4l_mass), ROOT.RooFit.Range("fullrangesignal") ).getVal()
print "#################### ",signalCB_ggH.createIntegral( ROOT.RooArgSet(CMS_zz4l_mass), ROOT.RooFit.Range("shape") ).getVal()
print "#################### ",sig_ggH.createIntegral( ROOT.RooArgSet(CMS_zz4l_mass), ROOT.RooFit.Range("fullrangesignal") ).getVal()
print "#################### norm Signal",normalizationSignal
sclFactorSig_ggH = sigRate_ggH/normalizationSignal
sclFactorSig_VBF = sigRate_VBF/normalizationSignal
sclFactorSig_WH = sigRate_WH/normalizationSignal
sclFactorSig_ZH = sigRate_ZH/normalizationSignal
sclFactorSig_ttH = sigRate_ttH/normalizationSignal
integral_ggH = 0.0
integral_VBF = 0.0
integral_WH = 0.0
integral_ZH = 0.0
integral_ttH = 0.0
if self.isHighMass : integral_ggH = sig_ggH_HM.createIntegral( ROOT.RooArgSet(CMS_zz4l_mass), ROOT.RooFit.Range("shape") ).getVal()
else : integral_ggH = self.getVariable(signalCB_ggH.createIntegral( ROOT.RooArgSet(CMS_zz4l_mass), ROOT.RooFit.Range("shape") ).getVal(),sig_ggH.createIntegral( ROOT.RooArgSet(CMS_zz4l_mass), ROOT.RooFit.Range("shape") ).getVal(),self.bUseCBnoConvolution)
if self.isHighMass : integral_VBF = sig_VBF_HM.createIntegral( ROOT.RooArgSet(CMS_zz4l_mass), ROOT.RooFit.Range("shape") ).getVal()
else : integral_VBF = self.getVariable(signalCB_VBF.createIntegral( ROOT.RooArgSet(CMS_zz4l_mass), ROOT.RooFit.Range("shape") ).getVal(),sig_VBF.createIntegral( ROOT.RooArgSet(CMS_zz4l_mass), ROOT.RooFit.Range("shape") ).getVal(),self.bUseCBnoConvolution)
if self.isHighMass : integral_WH = sig_WH_HM.createIntegral( ROOT.RooArgSet(CMS_zz4l_mass), ROOT.RooFit.Range("shape") ).getVal()
else : integral_WH = self.getVariable(signalCB_WH.createIntegral( ROOT.RooArgSet(CMS_zz4l_mass), ROOT.RooFit.Range("shape") ).getVal(),sig_WH.createIntegral( ROOT.RooArgSet(CMS_zz4l_mass), ROOT.RooFit.Range("shape") ).getVal(),self.bUseCBnoConvolution)
if self.isHighMass : integral_ZH = sig_ZH_HM.createIntegral( ROOT.RooArgSet(CMS_zz4l_mass), ROOT.RooFit.Range("shape") ).getVal()
else : integral_ZH = self.getVariable(signalCB_ZH.createIntegral( ROOT.RooArgSet(CMS_zz4l_mass), ROOT.RooFit.Range("shape") ).getVal(),sig_ZH.createIntegral( ROOT.RooArgSet(CMS_zz4l_mass), ROOT.RooFit.Range("shape") ).getVal(),self.bUseCBnoConvolution)
if self.isHighMass : integral_ttH = sig_ttH_HM.createIntegral( ROOT.RooArgSet(CMS_zz4l_mass), ROOT.RooFit.Range("shape") ).getVal()
else : integral_ttH = self.getVariable(signalCB_ttH.createIntegral( ROOT.RooArgSet(CMS_zz4l_mass), ROOT.RooFit.Range("shape") ).getVal(),sig_ttH.createIntegral( ROOT.RooArgSet(CMS_zz4l_mass), ROOT.RooFit.Range("shape") ).getVal(),self.bUseCBnoConvolution)
sigRate_ggH_Shape = sclFactorSig_ggH*integral_ggH
sigRate_VBF_Shape = sclFactorSig_VBF*integral_VBF
sigRate_WH_Shape = sclFactorSig_WH*integral_WH
sigRate_ZH_Shape = sclFactorSig_ZH*integral_ZH
sigRate_ttH_Shape = sclFactorSig_ttH*integral_ttH
normSigName = "cmshzz4l_normalizationSignal_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
rrvNormSig = ROOT.RooRealVar()
if self.isHighMass :
rrvNormSig = ROOT.RooRealVar(normSigName,normSigName, sig_ggH_HM.createIntegral(ROOT.RooArgSet(CMS_zz4l_mass)).getVal())
else :
rrvNormSig = ROOT.RooRealVar(normSigName,normSigName, self.getVariable(signalCB_ggH.createIntegral(ROOT.RooArgSet(CMS_zz4l_mass)).getVal(),sig_ggH.createIntegral(ROOT.RooArgSet(CMS_zz4l_mass)).getVal(),self.bUseCBnoConvolution))
rrvNormSig.setConstant(True)
print "!!!%%%*** ",rrvNormSig.getVal()
print "!!!%%%*** ",integral_ggH
#rfvSigRate_ggH = ROOT.RooFormulaVar("ggH_norm","@0*@1*1000*{0}*{2}/{1}".format(self.lumi,rrvNormSig.getVal(),self.getVariable(signalCB_ggH.createIntegral(RooArgSet(CMS_zz4l_mass),ROOT.RooFit.Range("shape")).getVal(),sig_ggH.createIntegral(RooArgSet(CMS_zz4l_mass),ROOT.RooFit.Range("shape")).getVal(),self.bUseCBnoConvolution)),ROOT.RooArgList(rfvSigEff_ggH, rhfXsBrFuncV_1))
rfvSigRate_ggH = ROOT.RooFormulaVar("ggH_norm","@0*@1*1000*{0}*{2}/{1}".format(self.lumi,rrvNormSig.getVal(),integral_ggH),ROOT.RooArgList(rfvSigEff_ggH, rhfXsBrFuncV_1))
print "Compare integrals: integral_ggH=",integral_ggH," ; calculated=",self.getVariable(signalCB_ggH.createIntegral(RooArgSet(CMS_zz4l_mass),ROOT.RooFit.Range("shape")).getVal(),sig_ggH.createIntegral(RooArgSet(CMS_zz4l_mass),ROOT.RooFit.Range("shape")).getVal(),self.bUseCBnoConvolution)
rfvSigRate_VBF = ROOT.RooFormulaVar("qqH_norm","@0*@1*1000*{0}*{2}/{1}".format(self.lumi,rrvNormSig.getVal(),integral_VBF),ROOT.RooArgList(rfvSigEff_qqH, rhfXsBrFuncV_2))
rfvSigRate_WH = ROOT.RooFormulaVar("WH_norm","@0*@1*1000*{0}*{2}/{1}".format(self.lumi,rrvNormSig.getVal(),integral_WH),ROOT.RooArgList(rfvSigEff_WH, rhfXsBrFuncV_3))
rfvSigRate_ZH = ROOT.RooFormulaVar("ZH_norm","@0*@1*1000*{0}*{2}/{1}".format(self.lumi,rrvNormSig.getVal(),integral_ZH),ROOT.RooArgList(rfvSigEff_ZH, rhfXsBrFuncV_4))
rfvSigRate_ttH = ROOT.RooFormulaVar("ttH_norm","@0*@1*1000*{0}*{2}/{1}".format(self.lumi,rrvNormSig.getVal(),integral_ttH),ROOT.RooArgList(rfvSigEff_ttH, rhfXsBrFuncV_5))
print signalCB_ggH.createIntegral(ROOT.RooArgSet(CMS_zz4l_mass)).getVal()," ",sig_ggH.createIntegral(ROOT.RooArgSet(CMS_zz4l_mass)).getVal()
print signalCB_ggH.createIntegral(ROOT.RooArgSet(CMS_zz4l_mass),ROOT.RooFit.Range("shape")).getVal()," ",sig_ggH.createIntegral(ROOT.RooArgSet(CMS_zz4l_mass),ROOT.RooFit.Range("shape")).getVal()
print " @@@@@@@ norm sig = ",rrvNormSig.getVal()
print " @@@@@@@ rfvSigRate_ggH = ",rfvSigRate_ggH.getVal()
print " sigRate_ggH_Shape=",sigRate_ggH_Shape
print " @@@@@@@ rfvSigRate_VBF = ",rfvSigRate_VBF.getVal()
print " sigRate_VBF_Shape=",sigRate_VBF_Shape
print " @@@@@@@ rfvSigRate_WH = ",rfvSigRate_WH.getVal()
print " sigRate_WH_Shape=",sigRate_WH_Shape
print " @@@@@@@ rfvSigRate_ZH = ",rfvSigRate_ZH.getVal()
print " sigRate_ZH_Shape=",sigRate_ZH_Shape
print " @@@@@@@ rfvSigRate_ttH = ",rfvSigRate_ttH.getVal()
print " sigRate_ttH_Shape=",sigRate_ttH_Shape
print "Sum of sigRate_XYZ_Shape=",sigRate_ggH_Shape+sigRate_VBF_Shape+sigRate_WH_Shape+sigRate_ZH_Shape+sigRate_ttH_Shape
## SET RATES TO 1
## DC RATES WILL BE MULTIPLIED
## BY RATES IMPORTED TO WS
#sigRate_ggH_Shape = 1
#sigRate_VBF_Shape = 1
#sigRate_WH_Shape = 1
#sigRate_ZH_Shape = 1
#sigRate_ttH_Shape = 1
#sigRate_ggH_Shape += sigRate_VBF_Shape + sigRate_WH_Shape + sigRate_ZH_Shape + sigRate_ttH_Shape
#total_ratio = 1+ (sigRate_VBF_Shape+sigRate_WH_Shape+sigRate_ZH_Shape+sigRate_ttH_Shape)/sigRate_ggH_Shape
sigRate_ggH_Shape = theInputs['ggH_rate']
print " sigRate_ggH_Shape=",sigRate_ggH_Shape
#print "Sum of sigRate_XYZ_Shape=", sigRate_ggH_Shape*total_ratio
#sigRate_ggH_Shape *= total_ratio
sigRate_ggH_Shape += sigRate_VBF_Shape+sigRate_WH_Shape+sigRate_ZH_Shape+sigRate_ttH_Shape
## ----------------------- BACKGROUND RATES ----------------------- ##
## rates per lumi for scaling
bkgRate_qqzz = theInputs['qqZZ_rate']/theInputs['qqZZ_lumi']
bkgRate_ggzz = theInputs['ggZZ_rate']/theInputs['qqZZ_lumi']
bkgRate_zjets = theInputs['zjets_rate']/theInputs['zjets_lumi']
## Get Normalizations
normalizationBackground_qqzz = bkg_qqzz.createIntegral( ROOT.RooArgSet(CMS_zz4l_mass), ROOT.RooFit.Range("fullrange") ).getVal()
normalizationBackground_ggzz = bkg_ggzz.createIntegral( ROOT.RooArgSet(CMS_zz4l_mass), ROOT.RooFit.Range("fullrange") ).getVal()
normalizationBackground_zjets = bkg_zjets.createIntegral( ROOT.RooArgSet(CMS_zz4l_mass), ROOT.RooFit.Range("fullrange") ).getVal()
print "channel: "+self.appendName
print "fullrange zjets: ",normalizationBackground_zjets
sclFactorBkg_qqzz = self.lumi*bkgRate_qqzz/normalizationBackground_qqzz
sclFactorBkg_ggzz = self.lumi*bkgRate_ggzz/normalizationBackground_ggzz
sclFactorBkg_zjets = self.lumi*bkgRate_zjets/normalizationBackground_zjets
bkgRate_qqzz_Shape = sclFactorBkg_qqzz * bkg_qqzz.createIntegral( ROOT.RooArgSet(CMS_zz4l_mass), ROOT.RooFit.Range("shape") ).getVal()
bkgRate_ggzz_Shape = sclFactorBkg_ggzz * bkg_ggzz.createIntegral( ROOT.RooArgSet(CMS_zz4l_mass), ROOT.RooFit.Range("shape") ).getVal()
bkgRate_zjets_Shape = sclFactorBkg_zjets * bkg_zjets.createIntegral( ROOT.RooArgSet(CMS_zz4l_mass), ROOT.RooFit.Range("shape") ).getVal()
rfvSMD_Ratio_qqZZ = ROOT.RooFormulaVar()
rfvSMD_Ratio_ggZZ = ROOT.RooFormulaVar()
rfvSMD_Ratio_Zjets = ROOT.RooFormulaVar()
tag_Ratio_Name = "hzz4l_SMD_ratio_qqZZ_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
rfvSMD_Ratio_qqZZ = ROOT.RooRealVar(tag_Ratio_Name,tag_Ratio_Name,self.SMDbkgCut)
tag_Ratio_Name = "hzz4l_SMD_ratio_ggZZ_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
rfvSMD_Ratio_ggZZ = ROOT.RooRealVar(tag_Ratio_Name,tag_Ratio_Name,self.SMDbkgCut)
tag_Ratio_Name = "hzz4l_SMD_ratio_Zjets_{0:.0f}_{1:.0f}".format(self.channel,self.sqrts)
rfvSMD_Ratio_Zjets = ROOT.RooRealVar(tag_Ratio_Name,tag_Ratio_Name,self.SMDbkgCut)
print "@@@@@@@@@@@@@@@@@@@@@@ ", rfvSMD_Ratio_qqZZ.getVal()
print "@@@@@@@@@@@@@@@@@@@@@@ ", rfvSMD_Ratio_ggZZ.getVal()
print "@@@@@@@@@@@@@@@@@@@@@@ ", rfvSMD_Ratio_Zjets.getVal()
bkgRate_qqzz_Shape *= rfvSMD_Ratio_qqZZ.getVal()
bkgRate_ggzz_Shape *= rfvSMD_Ratio_ggZZ.getVal()
bkgRate_zjets_Shape *= rfvSMD_Ratio_Zjets.getVal()
if(DEBUG):
print "Shape signal rate: ",sigRate_ggH_Shape,", background rate: ",bkgRate_qqzz_Shape,", ",bkgRate_zjets_Shape," in ",low_M," - ",high_M
CMS_zz4l_mass.setRange("lowmassregion",100.,160.)
bkgRate_qqzz_lowmassregion = sclFactorBkg_qqzz * bkg_qqzz.createIntegral( ROOT.RooArgSet(CMS_zz4l_mass), ROOT.RooFit.Range("lowmassregion") ).getVal()
bkgRate_ggzz_lowmassregion = sclFactorBkg_ggzz * bkg_ggzz.createIntegral( ROOT.RooArgSet(CMS_zz4l_mass), ROOT.RooFit.Range("lowmassregion") ).getVal()
bkgRate_zjets_lowmassregion = sclFactorBkg_zjets * bkg_zjets.createIntegral( ROOT.RooArgSet(CMS_zz4l_mass), ROOT.RooFit.Range("lowmassregion") ).getVal()
lowmassyield = bkgRate_qqzz_lowmassregion + bkgRate_ggzz_lowmassregion + bkgRate_zjets_lowmassregion
print "low mass yield: ",lowmassyield
## --------------------------- DATASET --------------------------- ##
dataFileDir = "CMSdata"
dataTreeName = "data_obs"
dataFileName = "{0}/hzz{1}_{2}_{3}.root".format(dataFileDir,self.appendName,self.lumi,self.templateDir)
if (DEBUG): print dataFileName," ",dataTreeName
data_obs_file = ROOT.TFile(dataFileName)
print data_obs_file.Get(dataTreeName)
if not (data_obs_file.Get(dataTreeName)):
print "File, \"",dataFileName,"\", or tree, \"",dataTreeName,"\", not found"
print "Exiting..."
sys.exit()
data_obs_tree = data_obs_file.Get(dataTreeName)
data_obs = ROOT.RooDataSet()
datasetName = "data_obs_{0}".format(self.appendName)
data_obs = ROOT.RooDataSet(datasetName,datasetName,data_obs_tree,ROOT.RooArgSet(CMS_zz4l_mass,D1,D2,D3))
## --------------------------- WORKSPACE -------------------------- ##
endsInP5 = False
tmpMH = self.mH
if ( math.fabs(math.floor(tmpMH)-self.mH) > 0.001): endsInP5 = True
if (DEBUG): print "ENDS IN P5 ",endsInP5
name_Shape = ""
name_ShapeWS = ""
name_ShapeWS2 = ""
name_ShapeWSXSBR = ""
if (endsInP5): name_Shape = "{0}/HCG/{1:.1f}/hzz4l_{2}S_{3:.0f}TeV.txt".format(self.outputDir,self.mH,self.appendName,self.sqrts)
else: name_Shape = "{0}/HCG/{1:.0f}/hzz4l_{2}S_{3:.0f}TeV.txt".format(self.outputDir,self.mH,self.appendName,self.sqrts)
if (endsInP5): name_ShapeWS = "{0}/HCG/{1:.1f}/hzz4l_{2}S_{3:.0f}TeV.input.root".format(self.outputDir,self.mH,self.appendName,self.sqrts)
else: name_ShapeWS = "{0}/HCG/{1:.0f}/hzz4l_{2}S_{3:.0f}TeV.input.root".format(self.outputDir,self.mH,self.appendName,self.sqrts)
if (endsInP5): name_ShapeWSXSBR = "{0}/HCG_XSxBR/{1:.1f}/hzz4l_{2}S_{3:.0f}TeV.input.root".format(self.outputDir,self.mH,self.appendName,self.sqrts)
else: name_ShapeWSXSBR = "{0}/HCG_XSxBR/{1:.0f}/hzz4l_{2}S_{3:.0f}TeV.input.root".format(self.outputDir,self.mH,self.appendName,self.sqrts)
name_ShapeWS2 = "hzz4l_{0}S_{1:.0f}TeV.input.root".format(self.appendName,self.sqrts)
if(DEBUG): print name_Shape," ",name_ShapeWS2
w = ROOT.RooWorkspace("w","w")
w.importClassCode(ROOT.RooqqZZPdf_v2.Class(),True)
w.importClassCode(ROOT.RooggZZPdf_v2.Class(),True)
w.importClassCode(ROOT.HZZ4L_RooSpinZeroPdf_phase.Class(),True)
w.importClassCode(ROOT.RooFormulaVar.Class(),True)
if self.isHighMass :
w.importClassCode(ROOT.RooRelBWHighMass.Class(),True)
getattr(w,'import')(data_obs,ROOT.RooFit.Rename("data_obs")) ### Should this be renamed?
getattr(w,'import')(r_fa3_norm) ### Should this be renamed?
#Sig_T_1.SetNameTitle("template_0Plus","template_0Plus")
#Sig_T_2.SetNameTitle("template_0Minus","template_0Plus")
#Sig_T_4.SetNameTitle("template_Int","template_Int")
#Sig_T_1.SetNameTitle("T_1","T_1")
#Sig_T_2.SetNameTitle("T_2","T_2")
#Sig_T_4.SetNameTitle("T_3","T_3")
#Sig_T_1_ResUp.SetNameTitle("T_1_ResUp","T_1_ResUp")
#Sig_T_2_ResUp.SetNameTitle("T_2_ResUp","T_2_ResUp")
#Sig_T_4_ResUp.SetNameTitle("T_3_ResUp","T_3_ResUp")
#getattr(w,'import')(Sig_T_1_ResUp_hist,0)
#getattr(w,'import')(Sig_T_2_ResUp_hist,0 )
#getattr(w,'import')(Sig_T_4_ResUp_hist,0 )
#Sig_T_1_ResDown.SetNameTitle("T_1_ResDown","T_1_ResDown")
#Sig_T_2_ResDown.SetNameTitle("T_2_ResDown","T_2_ResDown")
#Sig_T_4_ResDown.SetNameTitle("T_3_ResDown","T_3_ResDown")
#getattr(w,'import')(Sig_T_1_ResDown_hist,0)
#getattr(w,'import')(Sig_T_2_ResDown_hist,0 )
#getattr(w,'import')(Sig_T_4_ResDown_hist,0 )
ggHpdf.SetNameTitle("ggH","ggH")
getattr(w,'import')(ggHpdf, ROOT.RooFit.RecycleConflictNodes())
ggHpdf_syst1Up.SetNameTitle("ggH_Res{0}Up".format(self.appendName),"ggH_Res{0}Up".format(self.appendName))
getattr(w,'import')(ggHpdf_syst1Up, ROOT.RooFit.RecycleConflictNodes())
ggHpdf_syst1Down.SetNameTitle("ggH_Res{0}Down".format(self.appendName),"ggH_Res{0}Down".format(self.appendName))
getattr(w,'import')(ggHpdf_syst1Down, ROOT.RooFit.RecycleConflictNodes())
#ggHpdf_syst2Up.SetNameTitle("ggH_Scale{0}Up".format(self.appendName),"ggH_Scale{0}Up".format(self.appendName))
#getattr(w,'import')(ggHpdf_syst2Up, ROOT.RooFit.RecycleConflictNodes())
#ggHpdf_syst2Down.SetNameTitle("ggH_Scale{0}Down".format(self.appendName),"ggH_Scale{0}Down".format(self.appendName))
#getattr(w,'import')(ggHpdf_syst2Down, ROOT.RooFit.RecycleConflictNodes())
#getattr(w,'import')(Sig_T_1,"T_1")
#getattr(w,'import')(Sig_T_2,"T_2")
#getattr(w,'import')(Sig_T_4,"T_3")
#getattr(w,'import')(Sig_T_1,"T_1_{0}_{1}".format(self.appendName,self.sqrts))
#getattr(w,'import')(Sig_T_2,"T_2_{0}_{1}".format(self.appendName,self.sqrts))
#getattr(w,'import')(Sig_T_4,"T_3_{0}_{1}".format(self.appendName,self.sqrts))
qqZZTemplatePdf.SetNameTitle("bkg_qqzz","bkg_qqzz")
ggZZTemplatePdf.SetNameTitle("bkg_ggzz","bkg_ggzz")
#ZjetsTemplatePdf.SetNameTitle("bkg_zjets","bkg_zjets")
ZjetsTemplateMorphPdf.SetNameTitle("bkg_zjets","bkg_zjets")
getattr(w,'import')(qqZZTemplatePdf, ROOT.RooFit.RecycleConflictNodes())
getattr(w,'import')(ggZZTemplatePdf, ROOT.RooFit.RecycleConflictNodes())
getattr(w,'import')(ZjetsTemplateMorphPdf, ROOT.RooFit.RecycleConflictNodes())
#getattr(w,'import')(ZjetsTemplatePdf, ROOT.RooFit.RecycleConflictNodes())
#qqZZTemplateMorphPdf.SetNameTitle("bkg_qqzzMorph","bkg_qqzzMorph")
#getattr(w,'import')(qqZZTemplateMorphPdf, ROOT.RooFit.RecycleConflictNodes())
#testpdf.SetNameTitle("testpdf","testpdf")
#getattr(w,'import')(testpdf, ROOT.RooFit.RecycleConflictNodes())
w.writeToFile(name_ShapeWS)
w.writeToFile(name_ShapeWSXSBR)
## --------------------------- DATACARDS -------------------------- ##
systematics.setSystematics(bkgRate_qqzz_Shape, bkgRate_ggzz_Shape, bkgRate_zjets_Shape)
systematics_forXSxBR.setSystematics(bkgRate_qqzz_Shape, bkgRate_ggzz_Shape,bkgRate_zjets_Shape)
## If the channel is not declared in inputs, set rate = 0
if not self.ggH_chan: sigRate_ggH_Shape = 0
if not self.qqH_chan: sigRate_VBF_Shape = 0
if not self.WH_chan: sigRate_WH_Shape = 0
if not self.ZH_chan: sigRate_ZH_Shape = 0
if not self.ttH_chan: sigRate_ttH_Shape = 0
if not self.qqZZ_chan: bkgRate_qqzz_Shape = 0
if not self.ggZZ_chan: bkgRate_ggzz_Shape = 0
if not self.zjets_chan: bkgRate_zjets_Shape = 0
rates = {}
rates['ggH'] = sigRate_ggH_Shape
rates['qqH'] = sigRate_VBF_Shape
rates['WH'] = sigRate_WH_Shape
rates['ZH'] = sigRate_ZH_Shape
rates['ttH'] = sigRate_ttH_Shape
rates['qqZZ'] = bkgRate_qqzz_Shape
rates['ggZZ'] = bkgRate_ggzz_Shape
rates['zjets'] = bkgRate_zjets_Shape
rates['ttbar'] = 0
rates['zbb'] = 0
## Write Datacards
fo = open( name_Shape, "wb")
self.WriteDatacard(fo,theInputs, name_ShapeWS2, rates, data_obs.numEntries())
systematics.WriteSystematics(fo, theInputs)
systematics.WriteShapeSystematics(fo,theInputs)
fo.close()
## forXSxBR
if (endsInP5): name_Shape = "{0}/HCG_XSxBR/{2:.1f}/hzz4l_{1}S_{3:.0f}TeV.txt".format(self.outputDir,self.appendName,self.mH,self.sqrts)
else: name_Shape = "{0}/HCG_XSxBR/{2:.0f}/hzz4l_{1}S_{3:.0f}TeV.txt".format(self.outputDir,self.appendName,self.mH,self.sqrts)
fo = open( name_Shape, "wb" )
self.WriteDatacard(fo, theInputs,name_ShapeWS2, rates, data_obs.numEntries())
systematics_forXSxBR.WriteSystematics(fo, theInputs)
systematics_forXSxBR.WriteShapeSystematics(fo,theInputs)
fo.close()
def WriteDatacard(self,file,theInputs,nameWS,theRates,obsEvents):
numberSig = self.numberOfSigChan(theInputs)
numberBg = self.numberOfBgChan(theInputs)
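# Standard combine datacard header: imax = number of channels (bins), jmax =
# number of processes minus one, kmax = number of nuisance parameters ("*"
# lets the tool count them itself).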
file.write("imax 1\n")
file.write("jmax {0}\n".format(numberSig+numberBg-1))
file.write("kmax *\n")
file.write("------------\n")
file.write("shapes * * {0} w:$PROCESS w:$PROCESS_$SYSTEMATIC\n".format(nameWS))
file.write("------------\n")
file.write("bin a{0} \n".format(self.channel))
file.write("observation {0} \n".format(obsEvents))
file.write("------------\n")
file.write("## mass window [{0},{1}] \n".format(self.low_M,self.high_M))
file.write("bin ")
channelList=['ggH','qqH','WH','ZH','ttH','qqZZ','ggZZ','zjets','ttbar','zbb']
channelName=['ggH','qqH','WH','ZH','ttH','bkg_qqzz','bkg_ggzz','bkg_zjets','bkg_ttbar','bkg_zbb']
for chan in channelList:
if theInputs[chan]:
file.write("a{0} ".format(self.channel))
file.write("\n")
file.write("process ")
i=0
for chan in channelList:
#print 'checking if ',chan,' is in the list of to-do'
#print "{0} ".format(channelName[i])
if theInputs[chan]:
file.write("{0} ".format(channelName[i]))
#print 'writing in card index=',i,' chan=',chan
#print "{0} ".format(channelName[i])
i+=1
file.write("\n")
processLine = "process "
for x in range(-numberSig+1,1):
processLine += "{0} ".format(x)
for y in range(1,numberBg+1):
processLine += "{0} ".format(y)
file.write(processLine)
file.write("\n")
file.write("rate ")
for chan in channelList:
if theInputs[chan]:
file.write("{0:.4f} ".format(theRates[chan]))
file.write("\n")
file.write("------------\n")
def numberOfSigChan(self,inputs):
counter=0
if inputs['ggH']: counter+=1
if inputs['qqH']: counter+=1
if inputs['WH']: counter+=1
if inputs['ZH']: counter+=1
if inputs['ttH']: counter+=1
return counter
def numberOfBgChan(self,inputs):
counter=0
if inputs['qqZZ']: counter+=1
if inputs['ggZZ']: counter+=1
if inputs['zjets']: counter+=1
if inputs['ttbar']: counter+=1
if inputs['zbb']: counter+=1
return counter
|
[
"xiaomengphy@gmail.com"
] |
xiaomengphy@gmail.com
|
bdd539bb9bc7e64f304e429bb91e39fb93ee3dd1
|
1c7cb3154854a0d5f628c4285aa209affd8d2fc9
|
/chaos-ns-3/ns-3.27/src/nix-vector-routing/bindings/callbacks_list.py
|
0fba47bd06742fae23799b018f8d47fa7e15e009
|
[
"GPL-2.0-only",
"LicenseRef-scancode-free-unknown",
"MIT"
] |
permissive
|
ErikNatanael/royal-chaos
|
fac8e5891044490cd2e61b7367f284d99951d0d1
|
a8e763d3720cc3e158e98143cabce8106d8de2aa
|
refs/heads/master
| 2022-07-16T12:21:31.494237
| 2020-05-12T12:50:08
| 2020-05-12T12:50:08
| 263,333,134
| 0
| 0
|
MIT
| 2020-05-12T12:42:08
| 2020-05-12T12:42:08
| null |
UTF-8
|
Python
| false
| false
| 1,563
|
py
|
callback_classes = [
['ns3::ObjectBase *', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['bool', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<const ns3::Packet>', 'unsigned short', 'const ns3::Address &', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['bool', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<const ns3::Packet>', 'unsigned short', 'const ns3::Address &', 'const ns3::Address &', 'ns3::NetDevice::PacketType', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['void', 'ns3::Ptr<ns3::Socket>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['bool', 'ns3::Ptr<ns3::Socket>', 'const ns3::Address &', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['void', 'ns3::Ptr<ns3::Socket>', 'const ns3::Address &', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['void', 'ns3::Ptr<ns3::Socket>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['void', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<const ns3::Packet>', 'unsigned short', 'const ns3::Address &', 'const ns3::Address &', 'ns3::NetDevice::PacketType', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['void', 'ns3::Ptr<ns3::NetDevice>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
]
|
[
"zhanglong3030@qq.com"
] |
zhanglong3030@qq.com
|
a9e4e3dd729477646d9f940cba33c1e17f67a29a
|
c1f11a43753dad1744e13008c1ba103826867421
|
/File_task.py
|
01e473778b872a3343a55d9c797548760b8f4471
|
[] |
no_license
|
99002561/My_Python
|
5aecd073e2dc7f2841e9306bd9289a11b448904c
|
d7f1ba5e970073a7aa44d0469998be57019af8fb
|
refs/heads/main
| 2023-03-28T00:07:14.509642
| 2021-03-26T11:14:49
| 2021-03-26T11:14:49
| 343,707,581
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,572
|
py
|
import shutil
import os
from pathlib import Path
def main():
txtBaseFilePath = [] # Stores base folder text file addresses
txtFile = [] # Stores base folder text file names
txtDestFilePath = [] # Stores destination folder paths
Source_path = r"D:\sai"
source = Path(r"D:\sai")
for root, dirs, files in os.walk(Source_path):
for file in files:
if file.endswith(".txt"): # Selecting the type of file
txtFile.append(file) # Storing all the text file names
pattern = "*.txt"
for j in source.glob(pattern):
print(j)
txtBaseFilePath.append(j)
for i in range(0, len(txtFile)):
print(i, txtFile[i]) # Printing selected files with serial number
select = int(input("Enter the number of files you want to copy "))
selectedfiles = [] # Stores file numbers which needs to be copied
for i in range(0, select):
selectedfiles.append(int(input("Enter file number "))) # Storing file serial numbers
for i in range(0, len(selectedfiles)):
txtDestFilePath.append(input("Enter the dest path name ")) # Providing destination path
if Path(txtDestFilePath[i]).is_dir():
print()
else:
Path(txtDestFilePath[i]).mkdir(parents=True, exist_ok=False)
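# shutil.copy(src, dst) with dst being an existing directory copies the file
# into it under its original name.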
for i in range(0, len(selectedfiles)):
shutil.copy(txtBaseFilePath[selectedfiles[i]], txtDestFilePath[i]) # copying process from source to dest
print("Copying is Done")
if __name__ == "__main__":
main()
|
[
"noreply@github.com"
] |
99002561.noreply@github.com
|
638f5ba55fba06ccfd360c4cff56e77791f1da9d
|
9cba1347ff1c2aa8ea57e5d5a232e32dc170cb81
|
/04_the_fifteen_puzzle.py
|
1dfb6d095909b0af969ccbeb0750f047d984a2cc
|
[] |
no_license
|
Alhxor/python-poc2
|
73f6a8b32399f58fcd587c5f71ba197088be14a8
|
f45ed3389030b5b665105878d3c17c1c667879cb
|
refs/heads/master
| 2020-05-26T21:08:33.835611
| 2019-05-24T07:28:01
| 2019-05-24T07:28:01
| 188,374,208
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 15,369
|
py
|
"""
Loyd's Fifteen puzzle - solver and visualizer
Note that the solved configuration has the blank (zero) tile in the upper left.
Use the arrow keys to swap this tile with its neighbors.
"""
import poc_fifteen_gui
class Puzzle:
"""
Class representation for the Fifteen puzzle
"""
def __init__(self, puzzle_height, puzzle_width, initial_grid=None):
"""
Initialize puzzle with default height and width
Returns a Puzzle object
"""
self._height = puzzle_height
self._width = puzzle_width
self._grid = [[col + puzzle_width * row
for col in range(self._width)]
for row in range(self._height)]
if initial_grid != None:
for row in range(puzzle_height):
for col in range(puzzle_width):
self._grid[row][col] = initial_grid[row][col]
def __str__(self):
"""
Generate string representation for puzzle
Returns a string
"""
ans = ""
for row in range(self._height):
ans += str(self._grid[row])
ans += "\n"
return ans
#####################################
# GUI methods
def get_height(self):
"""
Getter for puzzle height
Returns an integer
"""
return self._height
def get_width(self):
"""
Getter for puzzle width
Returns an integer
"""
return self._width
def get_number(self, row, col):
"""
Getter for the number at tile position (row, col)
Returns an integer
"""
return self._grid[row][col]
def set_number(self, row, col, value):
"""
Setter for the number at tile position (row, col)
"""
self._grid[row][col] = value
def clone(self):
"""
Make a copy of the puzzle to update during solving
Returns a Puzzle object
"""
new_puzzle = Puzzle(self._height, self._width, self._grid)
return new_puzzle
########################################################
# Core puzzle methods
def current_position(self, solved_row, solved_col):
"""
Locate the current position of the tile that will be at
position (solved_row, solved_col) when the puzzle is solved
Returns a tuple of two integers
"""
solved_value = (solved_col + self._width * solved_row)
for row in range(self._height):
for col in range(self._width):
if self._grid[row][col] == solved_value:
return (row, col)
assert False, "Value " + str(solved_value) + " not found"
def update_puzzle(self, move_string):
"""
Updates the puzzle state based on the provided move string
"""
zero_row, zero_col = self.current_position(0, 0)
for direction in move_string:
if direction == "l":
assert zero_col > 0, "move off grid: " + direction
self._grid[zero_row][zero_col] = self._grid[zero_row][zero_col - 1]
self._grid[zero_row][zero_col - 1] = 0
zero_col -= 1
elif direction == "r":
assert zero_col < self._width - 1, "move off grid: " + direction
self._grid[zero_row][zero_col] = self._grid[zero_row][zero_col + 1]
self._grid[zero_row][zero_col + 1] = 0
zero_col += 1
elif direction == "u":
assert zero_row > 0, "move off grid: " + direction
self._grid[zero_row][zero_col] = self._grid[zero_row - 1][zero_col]
self._grid[zero_row - 1][zero_col] = 0
zero_row -= 1
elif direction == "d":
assert zero_row < self._height - 1, "move off grid: " + direction
self._grid[zero_row][zero_col] = self._grid[zero_row + 1][zero_col]
self._grid[zero_row + 1][zero_col] = 0
zero_row += 1
else:
assert False, "invalid direction: " + direction
##################################################################
# Phase one methods
def move_zero(self, target_row, target_col):
"""
Generates a move string that moves zero tile
to specified position, going up/down first then right/left
"""
pos = self.current_position(0, 0)
move = ""
move_row = target_row - pos[0]
move_col = target_col - pos[1]
if move_row < 0: # moving up
move += 'u' * abs(move_row)
if move_col < 0: # moving left
move += 'l' * abs(move_col)
if move_row > 0: # moving down
move += 'd' * move_row
if move_col > 0: # moving right
move += 'r' * move_col
#self.update_puzzle(move)
return move
def move_to_zero(self, target_row, target_col):
"""
Generates a move string that moves a tile that should be
at target coordinates to the current position of zero tile
then moves zero tile to the left
"""
zero_pos = self.current_position(0, 0)
tile_pos = self.current_position(target_row, target_col)
#print "tile_pos:", tile_pos
move_zero = self.move_zero(tile_pos[0], tile_pos[1])
move = ""
if zero_pos[1] == tile_pos[1]: # up
move = 'lddru' * (zero_pos[0] - tile_pos[0] - 1) + 'ld' # moving down
elif zero_pos[0] == tile_pos[0]: # same row
if zero_pos[1] < tile_pos[1]: # right
move = 'ulldr' * (tile_pos[1] - zero_pos[1] - 1) + 'ulld' # moving left
elif zero_pos[1] > tile_pos[1]: # left
move = 'urrdl' * (zero_pos[1] - tile_pos[1] - 1) # moving right
else:
vert_first, vert_back = 'u', 'd'
if tile_pos[0] == 0:
vert_first, vert_back = 'd', 'u'
if zero_pos[1] < tile_pos[1]: # up right
move = (vert_first + 'll' + vert_back + 'r')\
* (tile_pos[1] - zero_pos[1] - 1)\
+ ('dlu' if tile_pos[0] == 0 else 'ullddru') # moving left
elif zero_pos[1] > tile_pos[1]: # up left
move = 'drrul' * (zero_pos[1] - tile_pos[1] - 1) + 'dru' # moving right
move += 'lddru' * (zero_pos[0] - tile_pos[0] - 1) + 'ld' # moving down
return move_zero + move
def _lower_row_invariant_nozero(self, target_row, target_col):
"""
Helper invariant method, checks conditions in lower_row_invariant
except zero position
"""
for col in range(target_col + 1, self._width):
if self._grid[target_row][col] != col + self._width * target_row:
return False
for row in range(target_row + 1, self._height):
for col in range(0, self._width):
if self._grid[row][col] != col + self._width * row:
return False
return True
def lower_row_invariant(self, target_row, target_col):
"""
Check whether the puzzle satisfies the specified invariant
at the given position in the bottom rows of the puzzle (target_row > 1)
Returns a boolean
"""
#print self._grid[target_row][target_col] == 0
#print self._lower_row_invariant_nozero(target_row, target_col)
return self._grid[target_row][target_col] == 0 and \
self._lower_row_invariant_nozero(target_row, target_col)
def solve_interior_tile(self, target_row, target_col):
"""
Place correct tile at target position
Updates puzzle and returns a move string
"""
#print "Calling lower_row_invariant(" + str(target_row) + ", " + str(target_col) + ")"
assert self.lower_row_invariant(target_row, target_col)
move = self.move_to_zero(target_row, target_col)
self.update_puzzle(move)
return move
def solve_col0_tile(self, target_row):
"""
Solve tile in column zero on specified row (> 1)
Updates puzzle and returns a move string
"""
assert self.lower_row_invariant(target_row, 0)
tile_pos = self.current_position(target_row, 0)
solution, prefix = "", ""
if tile_pos[1] == 0 and tile_pos[0] == target_row - 1: # tile is just one square up
solution = 'u' + 'r' * (self._width - 1)
else: # tile is somewhere else
prefix = 'ur'
self.update_puzzle(prefix)
solution = self.move_to_zero(target_row, 0)
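# "ruldrdlurdluurddlu" is the canonical 3x2 sequence that, with the tile at
# (target_row - 1, 1) and the blank to its left, places the tile into column
# zero; the trailing 'r's then walk the blank to the end of the row above.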
solution += "ruldrdlurdluurddlu" + 'r' * (self._width - 1)
self.update_puzzle(solution)
return prefix + solution
#############################################################
# Phase two methods
def _upper_row_invariant_nozero(self, target_col):
"""
Helper invariant method, checks columns to the right
for upper (row < 2) invariants
"""
for row in range(0, 2):
for col in range(target_col + 1, self._width):
if self._grid[row][col] != col + self._width * row:
return False
return True
def row0_invariant(self, target_col):
"""
Check whether the puzzle satisfies the row zero invariant
at the given column (col > 1)
Returns a boolean
"""
return self._grid[0][target_col] == 0 and \
self._grid[1][target_col] == target_col + self._width and \
self._upper_row_invariant_nozero(target_col) and \
self._lower_row_invariant_nozero(1, self._width - 1)
def row1_invariant(self, target_col):
"""
Check whether the puzzle satisfies the row one invariant
at the given column (col > 1)
Returns a boolean
"""
return self._grid[1][target_col] == 0 and \
self._upper_row_invariant_nozero(target_col) and \
self._lower_row_invariant_nozero(1, self._width - 1)
def solve_row0_tile(self, target_col):
"""
Solve the tile in row zero at the specified column
Updates puzzle and returns a move string
"""
assert self.row0_invariant(target_col)
tile_pos = self.current_position(0, target_col)
solution, prefix = "", ""
if tile_pos[0] == 0 and tile_pos[1] == target_col - 1:
solution = 'ld'
else:
prefix = 'ld'
self.update_puzzle(prefix)
solution = self.move_to_zero(0, target_col)
# urdl urrd luld rruld
solution += "urdlurrdluldrruld"
self.update_puzzle(solution)
return prefix + solution
def solve_row1_tile(self, target_col):
"""
Solve the tile in row one at the specified column
Updates puzzle and returns a move string
"""
assert self.row1_invariant(target_col)
move = self.move_to_zero(1, target_col) + 'ur'
self.update_puzzle(move)
return move
###########################################################
# Phase 3 methods
def solve_2x2(self):
"""
Solve the upper left 2x2 part of the puzzle
Updates the puzzle and returns a move string
"""
solution = ""
if self._grid[1][0] == 1 + self._width: # left
solution = 'lu'
elif self._grid[0][1] == 1 + self._width: # up
solution = 'ul'
else:
solution = 'lurdlu'
self.update_puzzle(solution)
return solution
def _is_solved(self):
"""
Checks if a part of the puzzle is already solved
Returns first unsolved position in (row, col) format
"""
for row in range(self._height - 1, -1, -1):
for col in range(self._width - 1, -1, -1):
if self._grid[row][col] != col + row * self._width:
return row, col
return 0, 0
def solve_puzzle(self):
"""
Generate a solution string for a puzzle
Updates the puzzle and returns a move string
"""
#zero_pos = self.current_position(0, 0)
move_zero, solution = "", ""
starting_row, starting_col = self._is_solved()
#print starting_row, starting_col
if starting_row == 0 and starting_col == 0:
#print "already solved"
return ""
move_zero = self.move_zero(starting_row, starting_col)
self.update_puzzle(move_zero)
if starting_row > 1: # phase 1
#print "phase 1"
#print self
if starting_col < self._width - 1:
for col in range(starting_col, 0, -1):
solution += self.solve_interior_tile(starting_row, col)
solution += self.solve_col0_tile(starting_row)
starting_row -= 1
starting_col = self._width - 1
for row in range(starting_row, 1, -1):
for col in range(starting_col, 0, -1):
#print self
#print "Calling solve_interior_tile(" + str(row), ", " + str(col) + ")"
solution += self.solve_interior_tile(row, col)
#print "Calling solve_col0_tile(" + str(row) + ")"
#print self
solution += self.solve_col0_tile(row)
starting_row, starting_col = 1, self._width - 1
if starting_row < 2:
if starting_col > 1: # phase 2
#print "phase 2"
#print "starting_row, starting_col: ", starting_row, starting_col
#print self
for col in range(starting_col, 1, -1):
solution += self.solve_row1_tile(col)
solution += self.solve_row0_tile(col)
starting_col = 1
if starting_col < 2: # phase 3
#print "phase 3"
#print self
solution += self.solve_2x2()
return move_zero + solution
# Start interactive simulation
poc_fifteen_gui.FifteenGUI(Puzzle(5, 6))
def create_grid(width, height):
"""
Creates a 15-puzzle style grid
"""
return [[col + width * row for col in range(width)]
for row in range(height)]
def test_puzzle():
"""
Helper function for testing Puzzle objects
"""
p2x4 = Puzzle(2, 4, [[0, 3, 2, 7], [4, 5, 6, 1]])
print p2x4
print p2x4.solve_puzzle()
print p2x4
grid = [[3, 2], [1, 0]]
p2x2 = Puzzle(2, 2, grid)
p2x2.update_puzzle('lurd')
print " --- Testing 2x2 --- "
print p2x2
print p2x2.solve_puzzle()
print p2x2
print " --- Testing 3x3 --- "
p3x3 = Puzzle(3, 3)
p3x3.update_puzzle('rdrdlluurrddlulu')
print p3x3
print p3x3.solve_puzzle()
print p3x3
print " --- Testing 4x4 --- "
p4x4 = Puzzle(4, 4)
p4x4.update_puzzle('rdrdrdllluuurrrdddlululu')
print p4x4
print p4x4.solve_puzzle()
print p4x4
print " --- Testing 4x5 --- "
p4x5 = Puzzle(4, 5, [[15, 16, 0, 3, 4], [5, 6, 7, 8, 9], [10, 11, 12, 13, 14], [1, 2, 17, 18, 19]])
print p4x5
print p4x5.solve_puzzle()
print p4x5
#test_puzzle()
|
[
"hellk.pub@gmail.com"
] |
hellk.pub@gmail.com
|
32a1a54c0fe5c4dd686c12138a6fa988e738d0bd
|
e0831b4a9b356df4e396523ce8ba8be9a9dd5081
|
/addresses/apps.py
|
3c934e1a4046912603f10897ed9263b8711879b9
|
[
"Unlicense"
] |
permissive
|
Xenoyr24/django-project
|
f5835e0350d6234fb8a4fed22cb7bf158af6ef1a
|
0a525178324f822afc39f6989b7df09731672d9f
|
refs/heads/master
| 2021-09-11T07:38:00.554827
| 2020-03-26T15:33:13
| 2020-03-26T15:33:13
| 250,049,557
| 0
| 0
|
Unlicense
| 2020-03-25T19:55:27
| 2020-03-25T17:45:40
|
HTML
|
UTF-8
|
Python
| false
| false
| 92
|
py
|
from django.apps import AppConfig
class AddressesConfig(AppConfig):
name = 'addresses'
|
[
"kevishen.naidu@hotmail.com"
] |
kevishen.naidu@hotmail.com
|
42503fc4888d5bd1f8e8ba85d53d4f916bbb9850
|
23f5361dcce6db5561defc285134a115744649ca
|
/config.py
|
510b72328a6e36d5315420ab72253e27e2214899
|
[] |
no_license
|
stepsame/flask
|
70f5856e7b7564b48f8e33e916022a10c39db7cf
|
1e6e8b263096caeab0f7babf67f98ca330503077
|
refs/heads/master
| 2021-01-10T03:28:12.984115
| 2016-01-06T09:23:19
| 2016-01-06T09:23:19
| 46,393,867
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,975
|
py
|
import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config:
SECRET_KEY = os.environ.get('SECRET_KEY') or 'hard to guess string'
SSL_DISABLE = True
SQLALCHEMY_COMMIT_ON_TEARDOWN = True
SQLALCHEMY_TRACK_MODIFICATIONS = True
SQLALCHEMY_RECORD_QUERIES = True
MAIL_SERVER = 'smtp.mail.yahoo.com'
MAIL_PORT = 587
MAIL_USE_TLS = True
MAIL_USERNAME = os.environ.get('MAIL_USERNAME')
MAIL_PASSWORD = os.environ.get('MAIL_PASSWORD')
FLASKY_MAIL_SUBJECT_PREFIX = '[Flasky]'
FLASKY_MAIL_SENDER = 'Flasky Admin <stepsame@yahoo.com>'
FLASKY_ADMIN = os.environ.get('FLASKY_ADMIN') or 'stepsame@gmail.com'
FLASKY_POSTS_PER_PAGE = 20
FLASKY_FOLLOWERS_PER_PAGE = 50
FLASKY_COMMENTS_PER_PAGE = 30
FLASKY_SLOW_DB_QUERY_TIME = 0.5
@staticmethod
def init_app(app):
pass
class DevelopmentConfig(Config):
DEBUG = True
SQLALCHEMY_DATABASE_URI = os.environ.get('DEV_DATABASE_URL') or \
'sqlite:///' + os.path.join(basedir, 'data-dev.sqlite')
class TestingConfig(Config):
TESTING = True
SQLALCHEMY_DATABASE_URI = os.environ.get('TEST_DATABASE_URL') or \
'sqlite:///' + os.path.join(basedir, 'data-test.sqlite')
WTF_CSRF_ENABLED = False
class ProductionConfig(Config):
SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL') or \
'sqlite:///' + os.path.join(basedir, 'data.sqlite')
@classmethod
def init_app(cls, app):
Config.init_app(app)
# email errors to the administrators
import logging
from logging.handlers import SMTPHandler
credentials = None
secure = None
if getattr(cls, 'MAIL_USERNAME', None) is not None:
credentials = (cls.MAIL_USERNAME, cls.MAIL_PASSWORD)
if getattr(cls, 'MAIL_USE_SSL', None):
secure = ()
mail_handler = SMTPHandler(
mailhost=(cls.MAIL_SERVER, cls.MAIL_PORT),
fromaddr=cls.FLASKY_MAIL_SENDER,
toaddrs=[cls.FLASKY_ADMIN],
subject=cls.FLASKY_MAIL_SUBJECT_PREFIX + ' Application Error',
credentials=credentials,
secure=secure)
mail_handler.setLevel(logging.ERROR)
app.logger.addHandler(mail_handler)
class HerokuConfig(ProductionConfig):
SSL_DISABLE = bool(os.environ.get('SSL_DISABLE'))
@classmethod
def init_app(cls, app):
ProductionConfig.init_app(app)
# log to stderr
import logging
from logging import StreamHandler
file_handler = StreamHandler()
file_handler.setLevel(logging.WARNING)
app.logger.addHandler(file_handler)
# handle proxy server headers
from werkzeug.contrib.fixers import ProxyFix
app.wsgi_app = ProxyFix(app.wsgi_app)
class UnixConfig(ProductionConfig):
@classmethod
def init_app(cls, app):
ProductionConfig.init_app(app)
# log to syslog
import logging
from logging.handlers import SysLogHandler
syslog_handler = SysLogHandler()
syslog_handler.setLevel(logging.WARNING)
app.logger.addHandler(syslog_handler)
config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'production': ProductionConfig,
'heroku': HerokuConfig,
'unix': UnixConfig,
'default': DevelopmentConfig
}
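# Typical usage in the app factory (as in the Flasky pattern this mirrors):
#   app.config.from_object(config[config_name])
#   config[config_name].init_app(app)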
|
[
"stepsame@gmail.com"
] |
stepsame@gmail.com
|
7edc05364e74d79daf9bbbc2819ff4fbdb5cc0f3
|
dfa2698e6b10650cfd3e1c696cdbb2e03c1351aa
|
/assignments/a2/src/q2/minimax.py
|
d59d4ced472cc2f28303166fdc6174b7928d3a40
|
[] |
no_license
|
zahin-mohammad/ece457a
|
4aebd3bcd05f0e176bfd70d582d9b871e71655b7
|
eea169a12d7e329f55c2d019ce7a243c01185fb3
|
refs/heads/master
| 2022-12-01T16:32:18.490101
| 2020-08-08T18:49:23
| 2020-08-08T18:49:23
| 263,969,355
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,306
|
py
|
MAX_DEPTH = 2
# Alpha/beta pruning is implemented inside minimax_dfs below; the root loop
# only tightens alpha, since it needs a score for every top-level child.
def configure_minimax(evaluate, get_children):
def minimax(root):
def minimax_dfs(node, is_max, depth, alpha, beta):
''' Pseudo Code:
get parent_alpha
get parent_beta
if is leaf node or max_depth:
evaluate my cost
evaluate alpha to return if i am max
evaluate beta to return If i am min
return cost, alpha, beta
initialize my_cost to None
initialize my_alpha to parent_alpha
initialize my_beta to parent_beta
get children
for child in children:
get child_cost
get child_alpha and child_beta
if i am max node:
if child_alpha >= parent_beta:
my_cost = child_alpha
prune
my_cost = max of my_cost and child_cost
my_alpha = max of my_alpha or child_alpha
else if i am min node:
if child_beta <= parent_alpha:
my_cost = child_beta
prune
my_cost = min of my_cost and child_cost
my_beta = min of my_beta or child_beta
return my_cost, my_alpha, my_beta
'''
def is_leaf_node(n):
return next(get_children(n), None) is None
if depth == MAX_DEPTH or is_leaf_node(node):
return evaluate(node)
if is_max:
value = float('-inf')
for child in get_children(node):
value = max(value, minimax_dfs(
child, False, depth + 1, alpha, beta))
alpha = max(alpha, value)
if alpha >= beta:
break
return value
else:
value = float('inf')
for child in get_children(node):
value = min(value, minimax_dfs(
child, True, depth + 1, alpha, beta))
beta = min(beta, value)
if beta <= alpha:
break
return value
'''Pseudo Code for Root:
initialize my_children to []
initialize my_alpha to float('-inf')
initialize my_beta to float('inf')
get children
for child in children:
get child_cost, child_alpha, child_beta using minimax
update my_alpha and my_beta
append (child_cost, child)
set max_child to max of children based on cost
return max_child
'''
my_children_and_their_cost = []
my_alpha = float('-inf')
my_beta = float('inf')
children = get_children(root)
for child in children:
child_cost = minimax_dfs(
child, False, 1, my_alpha, my_beta)
my_alpha = max(my_alpha, child_cost)
my_children_and_their_cost.append((child_cost, child))
return my_children_and_their_cost
return minimax
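# --- Editor's addition: a toy usage sketch of configure_minimax. The tree,
# evaluate and get_children below are hypothetical stand-ins. Note that
# get_children must return an iterator, since is_leaf_node() probes it with next().
tree = {'root': ['a', 'b'], 'a': ['a1', 'a2'], 'b': ['b1', 'b2']}
leaf_values = {'a1': 3, 'a2': 5, 'b1': 2, 'b2': 9}

def evaluate(node):
    return leaf_values.get(node, 0)

def get_children(node):
    return iter(tree.get(node, []))

minimax = configure_minimax(evaluate, get_children)
# Each entry pairs a minimax value with a first move for the maximising player;
# pruning cuts the 'b' subtree after seeing b1 = 2 <= alpha = 3.
assert minimax('root') == [(3, 'a'), (2, 'b')]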
|
[
"zahin.mohammad@gmail.com"
] |
zahin.mohammad@gmail.com
|
fddb59f4f81a6548008ed0e8a49983aeaa48c430
|
c0a387e235662787940e3296ad1a4d9c62ad03eb
|
/get_team_info.py
|
885020d2acf2608cd4ed323c73dae68055226649
|
[] |
no_license
|
Emptymu/NBA-Stats-Analysis
|
1e4b23c9225a5517dcb455ffc2538234c87e13b4
|
56e336a6b6ef683ecf97b11983d147348959764b
|
refs/heads/master
| 2021-01-21T18:11:06.070676
| 2018-02-02T08:03:59
| 2018-02-02T08:03:59
| 92,021,097
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 957
|
py
|
import requests
import csv
def get_team_info():
''' GET TEAM INFO (ID AND NAME) '''
    with open('team_id.csv', 'w', newline='') as f:  # newline='' avoids blank rows on Windows
fieldnames = ['TEAM_ID', 'SEASON_YEAR', 'TEAM_CITY', 'TEAM_NAME', 'TEAM_ABBREVIATION', 'TEAM_CONFERENCE', 'W', 'L', 'PCT', 'CONF_RANK', 'MIN_YEAR', 'MAX_YEAR']
writer = csv.DictWriter(f, fieldnames=fieldnames)
writer.writeheader()
builder = {} # USE FIELDNAMES AS THE FILTER AND PASS EACH ROW INTO BUILDER
# NBA TEAMS IDS ARE FROM 1610612737 ~ 1610612766
for team_id in range(1610612737, 1610612767):
        response = requests.get('http://stats.nba.com/stats/teaminfocommon?LeagueID=00&SeasonType=Regular+Season&TeamID=%d&season=2015-16' % team_id).json()
        info_list_header = response['resultSets'][0]['headers']
        info_list = response['resultSets'][0]['rowSet']
for info in info_list:
for name in fieldnames:
builder[name] = info[info_list_header.index(name)]
writer.writerow(builder)
get_team_info()
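# --- Editor's note: the header/rowSet pairing above can also be written with
# dict(zip(...)), avoiding the repeated .index() lookups. The mock payload
# below is illustrative only, not a real stats.nba.com response.
header = ['TEAM_ID', 'TEAM_NAME', 'W', 'L']
row = [1610612737, 'Hawks', 48, 34]
record = dict(zip(header, row))
assert record['TEAM_NAME'] == 'Hawks'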
|
[
"jasper920011@gmail.com"
] |
jasper920011@gmail.com
|
835d827423d0a97d4ccfcce37748731b22decdad
|
81294aa48764ba76cb6dda2784d3dfe08784ffd6
|
/mkproj/core.py
|
c8fe8d697b16cfa8717afe909a24027cffeeba6a
|
[
"MIT"
] |
permissive
|
tjquillan/mkproj
|
33a367aa3b6851004f3843437f630e7d61e6ae3a
|
8c0839b5a4a38693e8b9f3ee3552af428754ea7a
|
refs/heads/master
| 2023-03-14T22:24:40.240903
| 2020-11-02T18:53:59
| 2020-11-02T18:53:59
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,681
|
py
|
import sys
from collections.abc import MutableMapping
from pathlib import Path
from typing import Any, Dict, List, Tuple
import networkx
from pluginbase import PluginBase, PluginSource
from . import LockingDict, config, environment, spinner
from .bases import BaseTask, TaskFailedException
def depends(*tasks):
def depends() -> set: # pylint: disable=redefined-outer-name
return set(tasks)
def wrapper(cls):
setattr(cls, depends.__name__, staticmethod(depends))
return cls
return wrapper
def overrides(*tasks):
def overrides() -> set: # pylint: disable=redefined-outer-name
return set(tasks)
def wrapper(cls):
setattr(cls, overrides.__name__, staticmethod(overrides))
return cls
return wrapper
class TaskIndex(MutableMapping):
"""
    An index of tasks, built subject to two constraints: langs and mixins.
    Langs specifies the languages the project should be created with. Mixins
    differentiate within a language (e.g. within Python, pipenv and poetry
    would be mixins on top of the base Python tasks, as they accomplish many
    of the same goals).
"""
def __init__(
self,
project_name: str,
project_path: Path,
langs: List[str],
mixins: List[str],
*args,
**kwargs,
):
self._langs: List[str] = langs
self._mixins: List[str] = mixins
self._data: LockingDict = LockingDict(
{"project-name": project_name, "project-path": project_path}
)
self._tasks: Dict[str, Dict[str, Any]] = dict(*args, **kwargs)
self._index()
def _index(self):
skip_tasks: List[str] = list(config.get_config("tasks", "skip"))
# fmt: off
self._tasks = {
n.task_id(): {
"class": n(self._data),
"overridden": False,
"overrider": None,
}
for n in BaseTask.__subclasses__()
if n.task_id() not in skip_tasks # Check if the task should be skipped
and n.lang_id() in self._langs # Check if the task is in the langs specified
and (n.mixin_id() in self._mixins # Check if the task is in the mixins specified
or n.mixin_id() is None) # or if it is not a mixin
}
# fmt: on
for task in self._tasks:
for overridden_task in self._tasks[task]["class"].overrides():
self._override(overridden_task, task)
def _override(self, overridden: str, overrider: str):
if not self._tasks[overridden]["overridden"]:
self._tasks[overridden]["overridden"] = True
self._tasks[overridden]["overrider"] = overrider
else:
spinner.print_error(
"Tasks: {} both attempt to override task: '{}'".format(
[self._tasks[overridden]["overrider"], overrider], overridden
)
)
sys.exit(1)
def __setitem__(self, key: str, value):
self._tasks[key] = value
def __getitem__(self, key: str):
return self._tasks[key]
def __delitem__(self, key: str):
del self._tasks[key]
def __iter__(self):
return iter(self._tasks)
def __len__(self):
return len(self._tasks)
def __repr__(self):
return self._tasks.__repr__()
class TaskGraph:
"""
    A dependency graph of tasks in a task index. This class is responsible for
    tracking task dependencies and running tasks.
"""
def __init__(self, tasks: TaskIndex):
self._graph: networkx.DiGraph = networkx.DiGraph()
self._tasks: TaskIndex = tasks
self._build_graph()
@property
def tasks(self) -> TaskIndex:
return self._tasks
def _build_graph(self):
# Assemble the initial nodes and edges
        nodes: List[str] = list(self._tasks.keys())
        edges: List[Tuple[str, str]] = [
            (
                task,
                self._tasks[dep]["overrider"]
                if self._tasks[dep]["overridden"]
                else dep,
            )
            for task in nodes
            if not self._tasks[task]["overridden"]
            for dep in self._tasks[task]["class"].depends()
        ]
self._graph.add_nodes_from(nodes, success=False, error=False)
self._graph.add_edges_from(edges)
def _run_nodes(self, nodes: networkx.classes.reportviews.NodeView):
rerun_nodes: list = []
for node in nodes:
if not self._tasks[node]["overridden"]:
if (
not self._graph.nodes[node]["error"]
and not list(self._graph.successors(node))
):
try:
self._tasks[node]["class"].run()
self._graph.nodes[node]["success"] = True
edges: list = [
(dep, node) for dep in self._graph.predecessors(node)
]
self._graph.remove_edges_from(edges)
except TaskFailedException:
self._graph.nodes[node]["error"] = True
for dep in self._graph.predecessors(node):
self._graph.nodes[dep]["error"] = True
elif not self._graph.nodes[node]["error"]:
rerun_nodes.append(node)
if rerun_nodes:
self._run_nodes(rerun_nodes)
def run_nodes(self):
self._run_nodes(self._graph.nodes)
from .cli.options import State # isort:skip # pylint: disable=wrong-import-position
def create_project(project_name: str, state: State):
project_path: Path = Path("{0}/{1}".format(Path.cwd(), project_name))
if project_path.exists():
spinner.print_error("Project already exists. Aborting...")
sys.exit(1)
langs: list = ["generic"]
if state.lang is not None:
langs.append(state.lang)
mixins: list = list(config.get_config(state.lang, "mixins")) + state.mixins
# Load external tasks
plugin_base: PluginBase = PluginBase(package="mkproj.plugins")
plugin_source: PluginSource = plugin_base.make_plugin_source(
searchpath=["{}/tasks".format(environment.APP_DIRS.user_data_dir)]
)
for plugin in plugin_source.list_plugins():
plugin_source.load_plugin(plugin)
tasks: TaskIndex = TaskIndex(project_name, project_path, langs, mixins)
graph: TaskGraph = TaskGraph(tasks)
spinner.start()
graph.run_nodes()
spinner.ok()
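# --- Editor's addition: a self-contained sketch of the depends()/overrides()
# decorators above. DummyTask and the task ids are hypothetical; a real task
# would subclass BaseTask so that TaskIndex picks it up.
@depends("git-init")
@overrides("generic-readme")
class DummyTask:
    pass

assert DummyTask.depends() == {"git-init"}
assert DummyTask.overrides() == {"generic-readme"}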
|
[
"tjquillan@gmail.com"
] |
tjquillan@gmail.com
|
6518580d8c0f42beea9b03ecc5cf5026c5eb4b0b
|
a2f6e449e6ec6bf54dda5e4bef82ba75e7af262c
|
/venv/Lib/site-packages/pandas/tests/io/pytables/__init__.py
|
04573ec7273cbbee29a9587f2fd75e67ef512d86
|
[] |
no_license
|
mylonabusiness28/Final-Year-Project-
|
e4b79ccce6c19a371cac63c7a4ff431d6e26e38f
|
68455795be7902b4032ee1f145258232212cc639
|
refs/heads/main
| 2023-07-08T21:43:49.300370
| 2021-06-05T12:34:16
| 2021-06-05T12:34:16
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 128
|
py
|
version https://git-lfs.github.com/spec/v1
oid sha256:7899383beb479b67e688296a1f549aed94a571607226a1afea25dce1a3dc152c
size 411
|
[
"chuksajeh1@gmail.com"
] |
chuksajeh1@gmail.com
|
06246345ad959b165ea82542b3e9cc96cdc417fc
|
d4bc3ca7499b16495a1303f72fd423fda7a52d00
|
/mysite/misc_pages/models.py
|
fd9677d67d2226ca462f3d5ff68010a1317190d8
|
[] |
no_license
|
MichaelHayward/Web-Dev-Site-2018-Revision
|
f80252475a28078941124de4be5cb47f8279f26b
|
3b831460f30686a631d70a448457f3a1a01152e1
|
refs/heads/master
| 2020-03-23T11:05:31.120976
| 2018-09-07T13:29:55
| 2018-09-07T13:29:55
| 141,482,221
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 477
|
py
|
from django.db import models
from wagtail.core.models import Page
from wagtail.core.fields import RichTextField
from wagtail.admin.edit_handlers import FieldPanel
from wagtail.images.edit_handlers import ImageChooserPanel
class WritablePage(Page):
intro = models.CharField(max_length=250)
body = RichTextField(blank=True)
content_panels = Page.content_panels + [
FieldPanel("intro", classname="full"),
FieldPanel("body", classname="full"),
]
|
[
"33433507+MichaelHayward@users.noreply.github.com"
] |
33433507+MichaelHayward@users.noreply.github.com
|
d8ad74ba8fd7cbc3a2478cf802bcc9433cd489cc
|
85a9590f334729c06f927732040ab1cedd563ed5
|
/modules/subsecuencias_cdk2.py
|
f8654f709cb8622ad1abf5be9da54898193056f2
|
[] |
no_license
|
jRicciL/ERK2_notebooks
|
47d50e0ca17598201c986dd74e498cdbc423ac53
|
713cca983502076d1711861a0199def697e0b0eb
|
refs/heads/master
| 2020-08-10T01:51:43.263583
| 2020-06-02T00:40:33
| 2020-06-02T00:40:33
| 214,226,869
| 5
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,122
|
py
|
def _get_seq(ranges, x, sep=' '):
    from itertools import chain
    lista = [list(range(valor[0], valor[1] + 1)) for valor in ranges]
    # Build the flat list of residues, plus a text-string version of it
    seq_residues = list(chain.from_iterable(lista))
    seq_residues_str = sep.join(str(e) for e in seq_residues)
    if x == 'str':
        final_seq = seq_residues_str
    elif x == 'list':
        final_seq = seq_residues
    else:
        final_seq = "Specify the return type: 'str' or 'list'"
    return final_seq
def get_pocket_residues(x='str', sep = ' '):
# Pocket (4FKW y su ligando a 7 A): 8-19, 30-33, 64-65, 79-90, 129-134, 143-146
# resid 8 to 19 30 to 33 64 65 79 to 90 129 to 134 143 to 146
pocket_rangeResidues = [[8,19], [30,33], [64,65], [79,90], [129,134], [143,146]]
final_seq = _get_seq(pocket_rangeResidues, x, sep)
    return final_seq
def get_pisani_residues(x='str', sep = ' '):
pisiani_rangeResidues = [ [4,12], [17, 24], [29,34], [46,55], [66,71], [76,81],
[87,93], [101, 120], [121, 135], [140, 150], [182, 194], [277, 282]]
final_seq = _get_seq(pisiani_rangeResidues, x, sep)
    return final_seq
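# --- Editor's addition: usage example for the helpers above.
# x='list' returns the expanded residue numbers; x='str' joins them with sep.
assert get_pocket_residues('list')[:5] == [8, 9, 10, 11, 12]
assert get_pocket_residues('str').startswith('8 9 10')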
|
[
"ricci.lob@gmail.com"
] |
ricci.lob@gmail.com
|
2d9e9f6a853e2154bde067a6cf0432985bf90bb3
|
5bf6b61a3a2398e1622d9ff3566e9de62582480f
|
/User interfaces/CGM/management_step_3/functions/Data.py
|
6c198a99077e5c1ff8cd6f18758130115bba3b7f
|
[] |
no_license
|
uzairov/FE-800
|
7a0e1df876bcc80a43ccf23d54f293dc8293a0c4
|
32e8c8d57de50adf62fbf153a31a6a7e5d80016b
|
refs/heads/master
| 2023-04-21T21:20:49.068732
| 2021-04-26T07:57:08
| 2021-04-26T07:57:08
| 355,610,204
| 1
| 0
| null | 2021-04-15T08:16:45
| 2021-04-07T16:19:42
| null |
UTF-8
|
Python
| false
| false
| 4,316
|
py
|
import numpy as np
from .data_preprocessing import *
class Data:
# def __init__(self, tickers, from_data, to_data, database = ):
def __init__(self, tickers, start_date, end_date, host):
self.tickers = tickers
#self.assetclass = get_asset_class(host=,tickers = )
self.host = host
self.price = get_daily_price(self.host, tickers, start_date, end_date)
self.num = len(tickers)
self.risk_free_rate = 0.0075/360
self.returns = self.get_stock_return(self.price)
        self.affiliate_return = self.get_affiliate_return_from_database(tickers)
self.capital_weights = get_mktcap_weight(tickers, host=self.host)
# def get_daily_price(self, ETF=None, startdate=None, enddate=None):
# # ETF selection
# if ETF == None:
# price = get_hist_data_close_for_many(host=self.host)
# else:
# price = get_hist_data_close_for_many(ETF=ETF, host=self.host)
#
# # Time period selection
# if startdate == None:
# if enddate == None:
# price = price[:][:]
# else:
# price = price[:enddate][:]
# else:
# if enddate == 0:
# price = price[startdate:][:]
# else:
# price = price[startdate:enddate][:]
# return price
def get_all_price_from_database(self, tickers):
# price_dataframe = pd.read_csv(r"C:\My Files\Study\17 Spring\800 - Special Problems in FE (MS)\Code\FE-800\csv\test.csv",index_col="Date")
price_dataframe = get_hist_data_close_for_many(tickers)
price_dataframe = price_dataframe.dropna()
return price_dataframe
def get_covariance_matrix(self, if_print=False):
# self.stock_return = get_stock_return(self.stock_price)
covariance_matrix = np.cov(self.returns, rowvar=False)
if if_print:
print("Covariance Matrix is \n", covariance_matrix)
return covariance_matrix
def set_risk_free_rate(self, risk_free_rate):
self.risk_free_rate = risk_free_rate
def get_stock_return(self, price_data,if_print=False):
# stock_returns = self.price.shift(1) / self.price - 1
stock_returns = self.price / self.price.shift(1) - 1 # date under ascending order
# self.stock_return = self.stock_price.apply(get_stock_return)
stock_returns = stock_returns.dropna()
if if_print:
print("Return Matrix is \n", stock_returns)
return stock_returns
def get_equal_weighted_return(self, if_print=False):
        # Mean daily equal-weighted portfolio return, averaged over time
        # (the original .stack().mean(axis=1) raises on a Series).
        equal_weighted_return = self.returns.mean(axis=1).mean()
if if_print:
print("Equal weighted portfolio return (benchmark): ", equal_weighted_return)
return equal_weighted_return
def get_equal_weighted_std(self, if_print=False):
equal_weighted_std = np.std(self.returns.mean(axis = 1))
if if_print:
print("Equal weighted portfolio std (benchmark): ",equal_weighted_std)
return equal_weighted_std
def get_equal_weighted_daily_return(self):
# todo: daily return
pass
    def get_affiliate_return_from_database(self, tickers):
Q_affiliate_list = get_RA_views(tickers,host='localhost:27017')
return Q_affiliate_list
def get_risk_aversion_coefficient(self, if_print=False):
risk_aversion = (self.get_equal_weighted_return() - self.risk_free_rate) / (self.get_equal_weighted_std() ** 2)
if if_print:
print("risk aversion coefficient is ", risk_aversion)
return risk_aversion
def main():
# csv_file_path = r"C:\My Files\Study\17 Spring\800 - Special Problems in FE (MS)\Code\FE-800\csv\test.csv"
# import_price_data = pd.read_csv(filepath_or_buffer=csv_file_path, index_col=0).dropna()
# data=Data(import_price_data)
# print(data.returns.columns)
ETF_tickers = ['VTI', 'ITOT', 'SCHB', 'VEA', 'IXUS', 'SCHF', 'VWO', 'IEMG', 'SCHE', 'VIG', 'DVY', 'SCHD', 'VGSH',
'IEF', 'TLT', 'MUB', 'TFI', 'PZA', 'SCHP', 'TIP', 'IPE', 'XLE', 'DJP', 'VDE']
    # NOTE (editor): Data() requires start/end dates and a Mongo host; the
    # values below are illustrative placeholders only.
    data = Data(ETF_tickers, '2016-01-01', '2016-12-31', host='localhost:27017')
path = r"C:\My Files\Study\17 Spring\800 - Special Problems in FE (MS)\Code\FE-800"
print(data.capital_weights)
if __name__ == "__main__":
main()
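# --- Editor's addition: the risk-aversion formula above on toy numbers,
# independent of the database-backed Data class (all values illustrative).
# lambda = (E[r_p] - r_f) / sigma_p^2
toy_mean_return = 0.0005        # 5 bps per day
toy_risk_free = 0.0075 / 360    # same daily risk-free rate as Data
toy_std = 0.01                  # 1% daily volatility
toy_risk_aversion = (toy_mean_return - toy_risk_free) / toy_std ** 2
assert round(toy_risk_aversion, 2) == 4.79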
|
[
"sit.songyang.guo@gmail.com"
] |
sit.songyang.guo@gmail.com
|
e150a57696a86898a7fafe09dbcf8b112fcf6890
|
8c8973a3024b7deda89bd105452245691d66215a
|
/03_EstruturasRepeticao/29_tabela_199.py
|
02baf0864c8ed7490ccadaff8c0e768a27bd4957
|
[] |
no_license
|
xtreia/pythonBrasilExercicios
|
3782226d693a93e4394e654b15fe54900ae709f4
|
948f3a8a0c32ad661ef0da0e242774c5f8eeffcc
|
refs/heads/master
| 2020-07-16T05:19:48.640536
| 2018-04-27T21:38:49
| 2018-04-27T21:38:49
| 205,727,630
| 1
| 0
| null | 2019-09-01T20:16:24
| 2019-09-01T20:16:24
| null |
UTF-8
|
Python
| false
| false
| 107
|
py
|
print('Loja Quase Dois - Tabela de precos')
for i in range(1, 51):
    print('%d - R$ %.2f' % (i, i * 1.99))
|
[
"tales.viegas@corp.terra.com.br"
] |
tales.viegas@corp.terra.com.br
|
d13b04569a6a88744f44c03224c3d6b96d58161c
|
1bc2a254cc1a0405f1c5257af693d03c131b4155
|
/eval_hog_custom.py
|
aa4c445b6890006a2b668a6594664fe3073a8e97
|
[] |
no_license
|
sm354/Pedestrian-Detection
|
580b6128bb7e2ed52319ee4fd7810eca48bd367e
|
5f693929562cb17e968aed4e87fefa948b2faf77
|
refs/heads/main
| 2023-08-26T15:39:38.048041
| 2021-11-10T19:48:59
| 2021-11-10T19:48:59
| 425,338,235
| 3
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,259
|
py
|
import json
import os
import cv2
import numpy as np
import argparse
import pandas as pd
import torch  # used below for sigmoid score normalisation
from tqdm import tqdm
from utils import *
import pickle
from skimage.feature import hog
import imutils
from ipdb import set_trace  # deduplicated: was imported twice
def parse_args():
parser = argparse.ArgumentParser(description='Pedestrian Detection using Custom HoG Person Detector')
parser.add_argument('--root', type=str, default="./")
parser.add_argument('--test', type=str, default="PennFudanPed_val.json")
parser.add_argument('--out', type=str, default="PennFudanPed_custom_hog_pred.json")
parser.add_argument('--model',type=str, default="hog_custom.pt")
parser.add_argument('--num_pyr_lyrs',type=int, default=1, help='number of pyramid layers')
args = parser.parse_args()
return args
def make_predictions(clf, root, test_json, output_json, num_pyr_lyrs, patch_size):
# predictions will be saved iteratively
predictions = []
no_pred_count = 0
nms_count = 0
sigmoid = torch.nn.Sigmoid() # use sigmoid to normalize svm scores
# for saving images with predicted bboxes, and comparing them with annotations
annotations = test_json['annotations']
annotations = pd.json_normalize(annotations)
save_preds_dir = os.path.join(args.root, "predictions_hog_custom")
if os.path.exists(save_preds_dir) == False:
os.mkdir(save_preds_dir)
# read the images using the file name in the json file
print("\nstarting inference over given test.json")
img_dicts = test_json['images']
    # sliding-window hyperparameters
    winStride = (12, 24)
    padding = (10, 10)  # kept from the cv2 HOG API; unused by the custom loop below
for img_dict in tqdm(img_dicts):
img = cv2.imread(os.path.join(root,img_dict['file_name']))
img_id = img_dict['id']
# go into various pyramid levels, get all the predicted bbs
# after getting all possible bbs, apply nms finally
# img_list = [img]
bboxes, scores = [], []
curr_img = img
for level_num in range(num_pyr_lyrs):
if level_num != 0:
curr_img = cv2.pyrDown(curr_img)
for y in range(0, curr_img.shape[0] - patch_size[1], winStride[1]):
for x in range(0, curr_img.shape[1] - patch_size[0], winStride[0]):
patch = curr_img[y:y+patch_size[1], x:x+patch_size[0]]
if patch.shape[0] < patch_size[1] or patch.shape[1] < patch_size[0]:
continue
hog_descriptor = hog(
patch,
orientations=9, pixels_per_cell=(8, 8),
cells_per_block=(3, 3), block_norm='L2-Hys',
visualize=False, transform_sqrt=False,
feature_vector=True, multichannel=True
)
hog_descriptor = hog_descriptor.reshape(1, -1)
svm_pred = clf.predict(hog_descriptor)
if svm_pred[0] == 1:
svm_score = abs(clf.decision_function(hog_descriptor)[0])
# set_trace()
x1 = x * (2 ** level_num)
y1 = y * (2 ** level_num)
w = patch_size[0] * (2 ** level_num)
h = patch_size[1] * (2 ** level_num)
bbox = [x1, y1, w, h]
bboxes.append(bbox)
scores.append(svm_score)
if len(scores) != 0:
bboxes = np.array(bboxes).astype(int)
scores = np.array(scores).astype(float).reshape(-1)
# do NMS and append the predictions in COCO format
init = len(scores)
bboxes, scores = do_NMS(bboxes, scores, overlapThresh=0.8) # bboxes.dtype is int, scores.dtype is float
final = len(scores)
nms_count += (init-final)
if len(scores) == 0:
# no predictions
# print("no prediction encountered")
no_pred_count+=1
continue
for bb, score in zip(bboxes, scores):
pred = {}
pred["image_id"] = img_id
pred["score"] = sigmoid(torch.tensor(float(score))).item()
pred["category_id"] = 1
pred["bbox"] = bb.astype(float).tolist()
predictions.append(pred)
# for visualization of bboxes and comparison with annotations
save_img_with_pred(img, img_id, bboxes, scores, list(annotations.loc[annotations['image_id'] == img_id]['bbox']), save_preds_dir)
print("no predictions for %u images out of %u"%(no_pred_count, len(img_dicts)))
with open(output_json, "w") as f:
json.dump(predictions, f)
print("Non-Maximal Suppression reduced %u Bounding Boxes"%(nms_count))
def main(root, test_json, output_json, num_pyr_lyrs):
# pretrained hog model
clf = pickle.load(open(args.model, 'rb'))
make_predictions(clf,root,test_json, output_json, num_pyr_lyrs, patch_size=(120,240))
if __name__ == "__main__":
fix_seed(seed=4)
args = parse_args()
test_json = json.loads(open(args.test,'r').read())
main(args.root, test_json, args.out, args.num_pyr_lyrs)
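# --- Editor's addition: the pyramid/sliding-window pattern from
# make_predictions(), reduced to a standalone helper (illustrative sketch).
def sliding_windows(img, patch_w, patch_h, stride_x, stride_y):
    """Yield (x, y, patch) for every full-size window, mirroring the
    nested y/x loops above; img is any array with .shape and 2-D slicing."""
    for y in range(0, img.shape[0] - patch_h, stride_y):
        for x in range(0, img.shape[1] - patch_w, stride_x):
            yield x, y, img[y:y + patch_h, x:x + patch_w]

# Example (not executed): score each patch with the HoG + SVM pipeline.
#   for x, y, patch in sliding_windows(img, 120, 240, 12, 24):
#       ...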
|
[
"shubhamiitd18@gmail.com"
] |
shubhamiitd18@gmail.com
|
250bdd434c3ca14198d3b1dc4817974af8a02c58
|
36f91525be7418d90f77687e31554c86561013be
|
/venv/bin/easy_install
|
27b20fdc7f13220c7148e3122347c50d4db02d9a
|
[] |
no_license
|
rizkimn/attendance-system
|
a2bfafa0470ca76e25bd64d2beee1abb77a510b7
|
16e90d397dee2036d8183f06d635daab48f55645
|
refs/heads/main
| 2023-04-22T19:38:40.630024
| 2021-05-04T08:41:16
| 2021-05-04T08:41:16
| 359,022,477
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 281
|
#!/home/rizkimnur/Documents/python/attendance-system/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
|
[
"rizkimnur0@gmail.com"
] |
rizkimnur0@gmail.com
|
|
ea0fa413a310a9b8bdfc40c852953889d422ebbe
|
a55dbf229ae9a38b003158d16e0ad436a91d616d
|
/python/codejam2017/round1/problemA_alphabet_cake/test_alphabet_cake.py
|
d0e85981d7f0d96d7907eb999cf176a03eedfe0b
|
[] |
no_license
|
lala7573/algorithm
|
175d03cad51d85b0cf0fbf4df6178e35c7a7deac
|
d361b991426232c9d5111eac7d8367187cf83e04
|
refs/heads/master
| 2021-03-27T20:50:14.895003
| 2017-05-15T11:59:44
| 2017-05-15T11:59:44
| 91,326,155
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,988
|
py
|
import unittest
from codejam2017.round1.problemA_alphabet_cake.alphabet_cake import AlphabetCake
class TestAlphabetCake(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_1(self):
self.assertEqual(AlphabetCake.solve(
3, 3, [
['G', '?', '?'],
['?', 'C', '?'],
['?', '?', 'J']
]), 'GGG\nCCC\nJJJ')
def test_2(self):
self.assertEqual(AlphabetCake.solve(
3, 3, [
['G', '?', '?'],
['?', 'G', '?'],
['?', '?', 'J']
]), 'GGG\nGGG\nJJJ')
def test_3(self):
self.assertEqual(AlphabetCake.solve(
4, 4, [
['G', '?', '?', '?'],
['?', 'G', '?', '?'],
['?', '?', '?', '?'],
['?', '?', '?', 'J']
]), 'GGGG\nGGGG\nGGGG\nJJJJ')
def test_4(self):
# 3 4
# CGED
# ?AFB
# ????
self.assertEqual(AlphabetCake.solve(
3, 4, [
['C', 'G', 'E', 'D'],
['?', 'A', 'F', 'B'],
['?', '?', '?', '?']
]), 'CGED\nAAFB\nAAFB')
def test_5(self):
self.assertEqual(AlphabetCake.solve(
3, 4, [
['?', '?', '?', '?'],
['?', 'C', 'J', '?'],
['?', '?', '?', '?']
]
), 'CCJJ\nCCJJ\nCCJJ')
def test_6(self):
self.assertEqual(AlphabetCake.solve(
3, 4, [
['?', '?', '?', '?'],
['?', 'C', 'A', '?'],
['D', '?', 'B', 'E']
]
), 'CCAA\nCCAA\nDDBE')
def test_7(self):
self.assertEqual(AlphabetCake.solve(
4, 3, [
['?', '?', '?'],
['E', '?', '?'],
['A', 'B', 'F'],
['?', 'D', 'C'],
]
), 'EEE\nEEE\nABF\nDDC')
|
[
"joanne.hwang@kakaocorp.com"
] |
joanne.hwang@kakaocorp.com
|
18d8d5ce0227d7af58c2e7063a16ebfd0aaef731
|
b0b6a0c9d000c5bf7ace9b8d3fe1229551b5c1c2
|
/LoadData/migrations/0001_initial.py
|
4058901ff38eac849de9492398b6e2df2c17e1f2
|
[] |
no_license
|
gyanezfeliu/FlapWeb
|
c14dd08bcb600d9d0197c765cf96a08aff5587b7
|
9421547f3e9905775a103fe19e4f78ac120f7116
|
refs/heads/master
| 2022-11-28T14:17:50.964784
| 2019-09-27T20:47:18
| 2019-09-27T20:47:18
| 152,670,469
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,290
|
py
|
# Generated by Django 2.1.2 on 2018-11-16 19:02
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Dna',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.TextField()),
('sboluri', models.TextField()),
],
),
migrations.CreateModel(
name='Experiment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.TextField()),
('machine', models.TextField()),
],
),
migrations.CreateModel(
name='Inducer',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('concentration', models.FloatField()),
('puchemid', models.TextField()),
],
),
migrations.CreateModel(
name='LoadProcess',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('content', models.TextField()),
('file', models.TextField()),
],
),
migrations.CreateModel(
name='Measurement',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.TextField()),
('value', models.FloatField()),
('time', models.FloatField()),
],
),
migrations.CreateModel(
name='Sample',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('row', models.TextField()),
('col', models.IntegerField()),
('media', models.TextField()),
('strain', models.TextField()),
('experiment_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='LoadData.Experiment')),
],
),
migrations.CreateModel(
name='Vector',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('dna_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='LoadData.Dna')),
('sample_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='LoadData.Sample')),
],
),
migrations.AddField(
model_name='measurement',
name='sample_id',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='LoadData.Sample'),
),
migrations.AddField(
model_name='inducer',
name='sample_id',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='LoadData.Sample'),
),
]
|
[
"gyanezfeliu@gmail.com"
] |
gyanezfeliu@gmail.com
|
edd4a21019c7cc1c1b9b7eb3abe2e8acb32f929b
|
d2b0c67b919783cceb58bc25ae0b18dc7d4ce892
|
/ExoCTK/tor/contam_tool/f_visibilityPeriods.py
|
48b7d3824ab4feb7b5673f3b13fa18fcd8eadd15
|
[] |
no_license
|
natashabatalha/ExoCTK
|
7cff16184bd999e5eb50e1c935e12020594c8e50
|
7b996f77fd7b87eac381ca396877bda4121f18a8
|
refs/heads/master
| 2021-07-01T01:47:51.028537
| 2017-09-07T18:32:53
| 2017-09-07T18:32:53
| 106,414,418
| 2
| 0
| null | 2017-10-10T12:30:31
| 2017-10-10T12:30:30
| null |
UTF-8
|
Python
| false
| false
| 17,229
|
py
|
# =====================================================================================
# Series of functions to compute the visibility periods for a given (RA,DEC) with
# in some cases the possibility to select a PA value.
#
# Functions derived from the code of Wayne Kinzel provided by Jeff Valenti
# Extract from the e-mail of Wayne Kinzel:
# As before, the code is not officially tested, nor is it an official STScI product.
# Users should be warned that the apparent position of the Sun changes ~+/-0.2 degrees
# depending upon where JWST is in its orbit. So do not rely strongly on these results
# if the target is within ~0.2 degrees of |ecliptic latitude| 45 degrees or 85 degrees.
# For example if a target is at 84.9 degrees latitude and the tool says it is CVZ, it
# may not be with the operational orbit.
#
# =====================================================================================
import sys
import math
import ephemeris_old2x
D2R = math.pi / 180. #degrees to radians
R2D = 180. / math.pi #radians to degrees
PI2 = 2. * math.pi # 2 pi
def f_computeVisibilityPeriods(ephemeris, mjdmin, mjdmax, ra, dec):
'''
# -----------------------------------------------------------
# METHOD f_computeVisibilityPeriods()
# TYPE function
#
# DESCRIPTION function that will compute the visibility
# periods for a given (RA,DEC) over a given
# time period.
#
# SYNTAX f_computeVisibilityPeriods(ephemeris, mjdmin,
# mjdmax, ra, dec)
#
# ephemeris: input ephemeris object
# mjdmin: beginning of the search interval (modified
# Julian date). It must be covered by the ephemeris.
# mjdmax: end of the search interval (modified
# Julian date). It must be covered by the ephemeris.
# ra: input RA coordinate (equatorial coordinate, in rad)
# dec: input DEC coordinate (equatorial coordinate, in rad)
#
    # Returns two lists containing the start and end of each
# visibility period and a list containing a status flag:
# flag = 0 visibility period fully in the search interval
# flag = -1 start of the visibility period truncated by
# the start of the search interval
# flag = -2 end of the visibility period truncated by
# the end of the search interval
# flag = +1 the search interval is fully included in
# the visibility period
#
# -----------------------------------------------------------
'''
# ===========================================================
# Paranoid checks
# ===========================================================
# print "# RA = {:12.8f} rad = {:12.8f} deg".format(ra, ra / D2R)
# print "# DEC = {:12.8f} rad = {:12.8f} deg".format(dec, dec / D2R)
# print"# No constraint on the PA."
if (ephemeris.amin > mjdmin):
print("f_computeVisibilityPeriods(): the start of the search interval is not covered by the ephemeris.")
print("Ephemeris start date (modified Julian date): {:8.5f}".format(ephemeris.amin))
print("Search interval start date (modified Julian date): {:8.5f}".format(mjdmin))
raise ValueError
if (ephemeris.amax < mjdmax):
print("f_computeVisibilityPeriods(): the end of the search interval is not covered by the ephemeris.")
print("Ephemeris end date (modified Julian date): {:8.5f}".format(ephemeris.amax))
print("Search interval end date (modified Julian date): {:8.5f}".format(mjdmax))
raise ValueError
# ===========================================================
# Scanning the search period
# ===========================================================
# Flag used to track the beginning and the end of a
# visibility period
iflip = False
wstart = mjdmin
startList = []
endList = []
statusList = []
    # Scanning step size (must be small enough to make sure that
    # it cannot contain a full visibility period, which we would
    # otherwise miss)
scanningStepSize = 0.1
span = int((mjdmax - mjdmin) / scanningStepSize)
    # Initialisation (the first step of the scan is outside of the
    # loop)
iflag_old = ephemeris.in_FOR(mjdmin,ra,dec)
for i in range(span):
# Current date (the last step may be partial to remain
# within the search interval
currentdate = mjdmin + (i + 1) * scanningStepSize
if (currentdate >= mjdmax):
currentdate = mjdmax
iflag = ephemeris.in_FOR(currentdate, ra, dec)
# Checking if we are reaching the beginning or the end of a visibility period
# (in which case the iflag value will change)
if iflag != iflag_old:
            # Setting the iflip flag to True to keep track of the change (in order to
            # detect CVZ objects which are permanently visible)
# If iflag = True we are starting a visibility period and use a bisection method
# to find the exact transition date. This assumes that there is a single
# transition in the interval => it looks like a step size of 0.1 day is
# sufficient to ensure that.
if (iflag):
wstart = ephemeris.bisect_by_FOR(currentdate, currentdate-scanningStepSize, ra, dec)
            # If iflag = False we are reaching the end of a visibility period.
# Like for the previous case a bisection method is used to locate
# accurately the end of the visibility period.
else:
wend = ephemeris.bisect_by_FOR(currentdate-scanningStepSize, currentdate, ra, dec)
startList.append(wstart)
endList.append(wend)
if (iflip):
statusList.append(0)
else:
statusList.append(-1)
iflip = True
iflag_old = iflag
# If there was a transition and we end up with a valid date, we close the interval with the
# end of the search interval
if (iflag and iflip):
startList.append(wstart)
endList.append(currentdate)
statusList.append(-2)
    # There is also the case where the visibility period covers the complete search interval
if (iflag and (not iflip)):
startList.append(mjdmin)
endList.append(mjdmax)
statusList.append(1)
# End of the function
return startList, endList, statusList
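# --- Editor's addition: the bisection idea used by bisect_by_FOR(), reduced
# to a standalone sketch on a generic boolean predicate of time (hypothetical).
def bisect_transition(predicate, t_false, t_true, tol=1e-6):
    """Locate the date where predicate flips from False to True.

    t_false and t_true must bracket a single transition; each pass halves
    the bracket until it is narrower than tol.
    """
    while abs(t_true - t_false) > tol:
        mid = 0.5 * (t_false + t_true)
        if predicate(mid):
            t_true = mid
        else:
            t_false = mid
    return t_true

# Example: a target that becomes visible at t = 2.5
assert abs(bisect_transition(lambda t: t >= 2.5, 0.0, 5.0) - 2.5) < 1e-5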
def f_computeVisibilityPeriodsWithPA(ephemeris, mjdmin, mjdmax, ra, dec, pa):
'''
# -----------------------------------------------------------
# METHOD f_computeVisibilityPeriodsWithPA()
# TYPE function
#
# DESCRIPTION function that will compute the visibility
# periods for a given (RA,DEC), a given PA and
# over a given time period.
#
# SYNTAX f_computeVisibilityPeriodsWithPA(ephemeris, mjdmin,
# mjdmax, ra, dec, pa)
#
# ephemeris: input ephemeris object
# mjdmin: beginning of the search interval (modified
# Julian date). It must be covered by the ephemeris.
# mjdmax: end of the search interval (modified
# Julian date). It must be covered by the ephemeris.
# ra: input RA coordinate (equatorial coordinate, in rad)
# dec: input DEC coordinate (equatorial coordinate, in rad)
# pa: input PA (in rad)
#
    # Returns two lists containing the start and end of each
# visibility period and a list containing a status flag:
# flag = 0 visibility period fully in the search interval
# flag = -1 start of the visibility period truncated by
# the start of the search interval
# flag = -2 end of the visibility period truncated by
# the end of the search interval
# flag = +1 the search interval is fully included in
# the visibility period
#
# -----------------------------------------------------------
'''
# ===========================================================
# Paranoid checks
# ===========================================================
# print "# RA = {:12.8f} rad = {:12.8f} deg".format(ra, ra / D2R)
# print "# DEC = {:12.8f} rad = {:12.8f} deg".format(dec, dec / D2R)
# print"# No constraint on the PA."
if (ephemeris.amin > mjdmin):
print("f_computeVisibilityPeriodsWithPA(): the start of the search interval is not covered by the ephemeris.")
print("Ephemeris start date (modified Julian date): {:8.5f}".format(ephemeris.amin))
print("Search interval start date (modified Julian date): {:8.5f}".format(mjdmin))
raise ValueError
if (ephemeris.amax < mjdmax):
print("f_computeVisibilityPeriodsWithPA(): the end of the search interval is not covered by the ephemeris.")
print("Ephemeris end date (modified Julian date): {:8.5f}".format(ephemeris.amax))
print("Search interval end date (modified Julian date): {:8.5f}".format(mjdmax))
raise ValueError
# ===========================================================
# Scanning the search period
# ===========================================================
# Flag used to track the beginning and the end of a
# visibility period
iflip = False
wstart = mjdmin
startList = []
endList = []
statusList = []
    # Scanning step size (must be small enough to make sure that
    # it cannot contain a full visibility period, which we would
    # otherwise miss)
scanningStepSize = 0.1
span = int((mjdmax - mjdmin) / scanningStepSize)
    # Initialisation (the first step of the scan is outside of the
    # loop)
iflag_old = ephemeris.is_valid(mjdmin, ra, dec, pa)
for i in range(span):
# Current date (the last step may be partial to remain
# within the search interval
currentdate = mjdmin + (i + 1) * scanningStepSize
if (currentdate >= mjdmax):
currentdate = mjdmax
iflag = ephemeris.is_valid(currentdate, ra, dec, pa)
# Checking if we are reaching the beginning or the end of a visibility period
# (in which case the iflag value will change)
if iflag != iflag_old:
            # Setting the iflip flag to True to keep track of the change (in order to
            # detect CVZ objects which are permanently visible)
# If iflag = True we are starting a visibility period and use a bisection method
# to find the exact transition date. This assumes that there is a single
# transition in the interval => it looks like a step size of 0.1 day is
# sufficient to ensure that.
if (iflag):
wstart = ephemeris.bisect_by_attitude(currentdate, currentdate-scanningStepSize, ra, dec, pa)
            # If iflag = False we are reaching the end of a visibility period.
# Like for the previous case a bisection method is used to locate
# accurately the end of the visibility period.
else:
wend = ephemeris.bisect_by_attitude(currentdate-scanningStepSize, currentdate, ra, dec, pa)
startList.append(wstart)
endList.append(wend)
if (iflip):
statusList.append(0)
else:
statusList.append(-1)
iflip = True
iflag_old = iflag
# If there was a transition and we end up with a valid date, we close the interval with the
# end of the search interval
if (iflag and iflip):
startList.append(wstart)
endList.append(currentdate)
statusList.append(-2)
    # There is also the case where the visibility period covers the complete search interval
if (iflag and (not iflip)):
startList.append(mjdmin)
endList.append(mjdmax)
statusList.append(1)
# End of the function
return startList, endList, statusList
def f_computeDurationOfVisibilityPeriodWithPA(ephemeris, mjdmin, mjdmax, ra, dec, pa, mjdc):
'''
# -----------------------------------------------------------
# METHOD f_computeDurationOfVisibilityPeriodWithPA()
# TYPE function
#
# DESCRIPTION function that will compute the duration of
# a specific visibility period associated to
# a given (RA,DEC), a given PA and given
# date.
#
# SYNTAX f_computeDurationOfVisibilityPeriodWithPA(ephemeris,
# mjdmin, mjdmax, ra, dec, pa, mjdc)
#
# ephemeris: input ephemeris object
# mjdmin: beginning of the search interval (modified
# Julian date). It must be covered by the ephemeris.
# mjdmax: end of the search interval (modified
# Julian date). It must be covered by the ephemeris.
# ra: input RA coordinate (equatorial coordinate, in rad)
# dec: input DEC coordinate (equatorial coordinate, in rad)
# pa: input PA (in rad)
# mjdc: date within the visibility period (i.e. compatible
# with (RA,DEC) and PA.
#
# Returns start,end,status
# Status flag:
# flag = 0 visibility period fully in the search interval
# flag = -1 start of the visibility period truncated by
# the start of the search interval
# flag = -2 end of the visibility period truncated by
# the end of the search interval
# flag = +1 the search interval is fully included in
# the visibility period
#
# -----------------------------------------------------------
'''
# ===========================================================
# Paranoid checks
# ===========================================================
# print "# RA = {:12.8f} rad = {:12.8f} deg".format(ra, ra / D2R)
# print "# DEC = {:12.8f} rad = {:12.8f} deg".format(dec, dec / D2R)
# print"# No constraint on the PA."
if (ephemeris.amin > mjdmin):
print("f_computeDurationOfVisibilityPeriodWithPA(): the start of the search interval is not covered by the ephemeris.")
print("Ephemeris start date (modified Julian date): {:8.5f}".format(ephemeris.amin))
print("Search interval start date (modified Julian date): {:8.5f}".format(mjdmin))
raise ValueError
if (ephemeris.amax < mjdmax):
print("f_computeDurationOfVisibilityPeriodWithPA(): the end of the search interval is not covered by the ephemeris.")
print("Ephemeris end date (modified Julian date): {:8.5f}".format(ephemeris.amax))
print("Search interval end date (modified Julian date): {:8.5f}".format(mjdmax))
raise ValueError
if (mjdmin > mjdc):
print("f_computeDurationOfVisibilityPeriodWithPA(): initial date is not included in the search interval.")
print("Search interval start date (modified Julian date): {:8.5f}".format(mjdmin))
print("Initial date (modified Julian date): {:8.5f}".format(mjdc))
raise ValueError
if (mjdmax < mjdc):
print("f_computeDurationOfVisibilityPeriodWithPA(): initial date is not included in the search interval.")
print("Search interval end date (modified Julian date): {:8.5f}".format(mjdmax))
print("Initial date (modified Julian date): {:8.5f}".format(mjdc))
raise ValueError
iflag = ephemeris.is_valid(mjdc, ra, dec, pa)
if (not iflag):
print("f_computeDurationOfVisibilityPeriodWithPA(): invalid date (not in a vsibility period).")
print("Date (modified Julian date): {:8.5f}".format(mjdc))
raise ValueError
# ===========================================================
    # Looking for the start of the visibility period
# ===========================================================
scanningStepSize = 0.1
iflipLeft = False
currentmjd = mjdc
continueFlag = True
boundaryFlag = False
while (continueFlag):
currentmjd -= scanningStepSize
if (currentmjd < mjdmin):
currentmjd = mjdmin
boundaryFlag = True
continueFlag = False
iflag = ephemeris.is_valid(currentmjd, ra, dec, pa)
if (not iflag):
wstart = ephemeris.bisect_by_attitude(currentmjd, currentmjd+scanningStepSize, ra, dec, pa)
iflipLeft = True
continueFlag = False
elif (boundaryFlag):
wstart = mjdmin
iflipRight = False
currentmjd = mjdc
boundaryFlag = False
continueFlag = True
while (continueFlag):
currentmjd += scanningStepSize
if (currentmjd > mjdmax):
currentmjd = mjdmax
boundaryFlag = True
continueFlag = False
iflag = ephemeris.is_valid(currentmjd, ra, dec, pa)
if (not iflag):
wend = ephemeris.bisect_by_attitude(currentmjd-scanningStepSize, currentmjd, ra, dec, pa)
iflipRight = True
continueFlag = False
elif (boundaryFlag):
wend = mjdmax
if ((not iflipLeft) and (not iflipRight)):
status = 1
elif (not iflipLeft):
status = -1
elif (not iflipRight):
status = -2
else:
status = 0
# End of the function
return wstart, wend, status
|
[
"rafia0037@gmail.com"
] |
rafia0037@gmail.com
|
d1772e7e5ce09349017f1c2dd30cdfbab93383ed
|
977396938e6a077423276eda152d4541578eb527
|
/migrations/versions/f9155326f52d_.py
|
6b91c18d0de33f955ebc4eeda030c79a1e03e91c
|
[] |
no_license
|
Areum0921/web_pybo
|
688c741a5a8b5fa3d8df51f058c7ec0a8288ae91
|
0c830eda270dbbe3257e3458af4576b38d5dbaa8
|
refs/heads/master
| 2023-06-19T06:40:41.327188
| 2021-07-16T02:29:34
| 2021-07-16T02:29:34
| 355,765,108
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,375
|
py
|
"""empty message
Revision ID: f9155326f52d
Revises: 5496eea3137d
Create Date: 2021-03-29 14:31:37.557367
"""
from alembic import op
import sqlalchemy as sa
import sqlalchemy_utils
# revision identifiers, used by Alembic.
revision = 'f9155326f52d'
down_revision = '5496eea3137d'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('_alembic_tmp_answer')
with op.batch_alter_table('answer', schema=None) as batch_op:
batch_op.alter_column('ip',
existing_type=sa.VARCHAR(length=50),
nullable=False)
with op.batch_alter_table('question', schema=None) as batch_op:
batch_op.add_column(sa.Column('user_id', sa.Integer(), server_default='1', nullable=True))
batch_op.create_foreign_key(batch_op.f('fk_question_user_id_user'), 'user', ['user_id'], ['id'], ondelete='CASCADE')
with op.batch_alter_table('user', schema=None) as batch_op:
batch_op.create_unique_constraint(batch_op.f('uq_user_email'), ['email'])
batch_op.create_unique_constraint(batch_op.f('uq_user_username'), ['username'])
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('user', schema=None) as batch_op:
batch_op.drop_constraint(batch_op.f('uq_user_username'), type_='unique')
batch_op.drop_constraint(batch_op.f('uq_user_email'), type_='unique')
with op.batch_alter_table('question', schema=None) as batch_op:
batch_op.drop_constraint(batch_op.f('fk_question_user_id_user'), type_='foreignkey')
batch_op.drop_column('user_id')
with op.batch_alter_table('answer', schema=None) as batch_op:
batch_op.alter_column('ip',
existing_type=sa.VARCHAR(length=50),
nullable=True)
op.create_table('_alembic_tmp_answer',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('question_id', sa.INTEGER(), nullable=True),
sa.Column('content', sa.TEXT(), nullable=False),
sa.Column('create_date', sa.DATETIME(), nullable=False),
sa.Column('ip', sa.VARCHAR(length=50), nullable=False),
sa.ForeignKeyConstraint(['question_id'], ['question.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id')
)
# ### end Alembic commands ###
|
[
"a90907@gmail.com"
] |
a90907@gmail.com
|
5ac18cbdef9d19e5b20538cf4607c5551ca81f13
|
750269b63dedbf2d3b402a15346681d9abcb991b
|
/crops/views.py
|
5d9d0cc6dc69a8be2bd48bd24dc0e293c9b7efe2
|
[] |
no_license
|
SRaiz/Krishi-Karma
|
36f67a45d4f9306ed10a3ced633a808b6ccd7a5b
|
7eb7348575cf9152b006eb0328dc8138fddd2d3b
|
refs/heads/master
| 2022-12-12T15:57:08.809116
| 2020-07-17T19:20:31
| 2020-07-17T19:20:31
| 243,632,577
| 0
| 1
| null | 2022-12-08T07:22:40
| 2020-02-27T22:48:33
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 3,587
|
py
|
import pandas as pd
from django.http import HttpResponse
from django.shortcuts import render
from apps.ml.cropsyield_classifier import random_forest
from .models import Crop, Yield
crops = Crop.objects.all()
yields = Yield.objects.all()
yield_df = pd.DataFrame.from_records(yields.values())
crops_df = pd.DataFrame.from_records(crops.values())
def index(request):
data_sd = values_for_homepage(yield_df)
return render(request, 'index.html', {
'crops': crops,
'states': data_sd
})
def values_for_homepage(yield_df):
    '''Return the unique states to be shown on the homepage (districts are fetched separately).'''
    return yield_df['state_name'].unique()
def filter_districts(request):
if request.method == 'POST':
state = request.POST['state']
filtered_df = yield_df[yield_df.state_name == state]
uniq_dist = filtered_df['district_name'].unique()
districts_string = ','.join(map(str, uniq_dist))
return HttpResponse(districts_string)
def filter_crops(request):
if request.method == 'POST':
state = request.POST['state']
district = request.POST['district']
filtered_df = yield_df[ (yield_df.state_name == state) & (yield_df.district_name == district) ]
uniq_crops = filtered_df['crop'].unique()
crops_string = ','.join(map(str, uniq_crops))
# Get all crops and also send it for comparison and hiding
all_crops = crops_df['name'].unique()
all_crops_string = ','.join(map(str, all_crops))
string_to_send = all_crops_string + '====' + crops_string
        return HttpResponse(string_to_send)
def predict_yield(request):
if request.method == 'POST':
        state = request.POST.get('state', False)
        district = request.POST.get('district', False)
        year = request.POST.get('year', False)
        season = request.POST.get('season', False)
        landArea = request.POST.get('landArea', False)
        crop = request.POST.get('crop', False)
# Filter the dataframe on basis of district state and year to get the rainfall data
filtered_df = yield_df[ (yield_df.state_name == state) & (yield_df.district_name == district) & (yield_df.crop_year == int(year)) ]
filtered_df_prod = yield_df[ (yield_df.state_name == state) & (yield_df.district_name == district) & (yield_df.crop_year == int(year)) & (yield_df.crop == crop) ]
minimum_rainfall = filtered_df['min_rainfall'].unique()[0]
maximum_rainfall = filtered_df['max_rainfall'].unique()[0]
average_rainfall = filtered_df['mean_rainfall'].unique()[0]
total_annual_rainfall = filtered_df['annual_rainfall'].unique()[0]
production = filtered_df_prod['production'].unique()[0].max()
crop_yield = (production / float(landArea)).round(3)
# Get the prediction and show it on screen
input_data = {
"state_name": state,
"district_name": district,
"crop_year": int(year),
"season": season,
"crop": crop,
"area": float(landArea),
"min_rainfall": minimum_rainfall,
"max_rainfall": maximum_rainfall,
"mean_rainfall": average_rainfall,
"annual_rainfall": total_annual_rainfall,
"production": production,
"yield": crop_yield
}
rf_alg = random_forest.RandomForestClassifier()
response = rf_alg.compute_prediction(input_data)
return HttpResponse(response.get('label'))
|
[
"sidharthraizada07@gmail.com"
] |
sidharthraizada07@gmail.com
|
a3c15c175c51765051f69df3b52980e1fd7a3f0a
|
e3ec7260806c1e2b045a0de93a150a5c3fc1b9df
|
/test_sin.py
|
cfd93e45de3207cebfeb5d1bfd66b21c78b149ef
|
[
"Apache-2.0"
] |
permissive
|
FearFactor1/SPA
|
58a21c9ec7a72a78f5ff50214e58faac43a3059d
|
a05aaa924c5bebb52cd508ebdf7fd3b81c49fac7
|
refs/heads/master
| 2021-07-07T04:25:12.525595
| 2020-11-16T14:35:33
| 2020-11-16T14:35:33
| 204,684,720
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 801
|
py
|
from selenium import webdriver
import time
from selenium.webdriver.support.ui import Select
link = "http://suninjuly.github.io/selects2.html"
#nav__item-auto > a
try:
browser = webdriver.Chrome()
browser.get(link)
    num1 = browser.find_element_by_css_selector("#num1").text
    num2 = browser.find_element_by_css_selector("#num2").text
    total = int(num1) + int(num2)  # avoid shadowing the built-in sum()
    select = Select(browser.find_element_by_tag_name("select"))
    select.select_by_value(str(total))
button = browser.find_element_by_xpath("//*/button[contains(text(), 'Submit')]")
button.click()
finally:
# успеваем скопировать код за 30 секунд
time.sleep(30)
# закрываем браузер после всех манипуляций
browser.quit()
|
[
"zelezodoroznik@yandex.ru"
] |
zelezodoroznik@yandex.ru
|
dc3f8793d740e0cf8d825bacb6e97764c8c288b2
|
be20ff4fe04864c6f48317e9bbebdf6546358caa
|
/Enthought/exercises/python_language/roman_dictionary/roman_dictionary_solution.py
|
9e3df15d356eb328498ddce6f8d12cd904c5c386
|
[] |
no_license
|
cardsrock10/Python-Training
|
3267e20ee9e70683b0daba0007e87aaf4acf5022
|
7bc83cdd6955cb1498e6f391ce9274d4c75a0a3b
|
refs/heads/master
| 2021-04-15T11:56:52.197773
| 2018-03-23T23:09:27
| 2018-03-23T23:09:27
| 126,546,647
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,311
|
py
|
"""
Roman Dictionary
----------------
Mark Antony keeps a list of the people he knows in several dictionaries
based on their relationship to him::
friends = {'julius': '100 via apian', 'cleopatra': '000 pyramid parkway'}
romans = dict(brutus='234 via tratorium', cassius='111 aqueduct lane')
countrymen = dict([('plebius','786 via bunius'),
('plebia', '786 via bunius')])
1. Print out the names for all of Antony's friends.
2. Now all of their addresses.
3. Now print them as "pairs".
4. Hmmm. Something unfortunate befell Julius. Remove him from the
friends list.
5. Antony needs to mail everyone for his second-triumvirate party. Make
a single dictionary containing everyone.
6. Antony's stopping over in Egypt and wants to swing by Cleopatra's
place while he is there. Get her address.
7. The barbarian hordes have invaded and destroyed all of Rome.
Clear out everyone from the dictionary you created in step 5.
"""
friends = {'julius': '100 via apian', 'cleopatra': '000 pyramid parkway'}
romans = dict(brutus='234 via tratorium', cassius='111 aqueduct lane')
countrymen = dict([('plebius','786 via bunius'), ('plebia', '786 via bunius')])
# Print out the names for all of Antony's friends:
print('friend names:', list(friends.keys()))
print()

# Now all of their addresses:
print('friend addresses:', list(friends.values()))
print()

# Now print them as "pairs":
print('friend (name, address) pairs:', list(friends.items()))
print()

# Hmmm. Something unfortunate befell Julius. Remove him from the friends
# list:
del friends['julius']

# Antony needs to mail everyone for his second-triumvirate party. Make a
# single dictionary containing everyone:
mailing_list = {}
mailing_list.update(friends)
mailing_list.update(romans)
mailing_list.update(countrymen)
print('party mailing list:')
print(mailing_list)
print()

# Or, using a loop (which we haven't learned about yet...):
print('party mailing list:')
for name, address in mailing_list.items():
    print(name, ':\t', address)
print()

# Antony's stopping over in Egypt and wants to swing by Cleopatra's place
# while he is there. Get her address:
print("Cleopatra's address:", friends['cleopatra'])

# The barbarian hordes have invaded and destroyed all of Rome. Clear out
# everyone from the dictionary:
mailing_list.clear()
|
[
"rmbirmi@srn.sandia.gov"
] |
rmbirmi@srn.sandia.gov
|
3a7c1d7adfb59f00b11ae77e1d37b1885d33f881
|
d1ad7bfeb3f9e3724f91458277284f7d0fbe4b2d
|
/react/003-react-django-justdjango/backend/env/bin/sqlformat
|
b08eaac3345a9fc3b0a7dbb48e6607276b57395a
|
[] |
no_license
|
qu4ku/tutorials
|
01d2d5a3e8740477d896476d02497d729a833a2b
|
ced479c5f81c8aff0c4c89d2a572227824445a38
|
refs/heads/master
| 2023-03-10T20:21:50.590017
| 2023-03-04T21:57:08
| 2023-03-04T21:57:08
| 94,262,493
| 0
| 0
| null | 2023-01-04T21:37:16
| 2017-06-13T22:07:54
|
PHP
|
UTF-8
|
Python
| false
| false
| 307
|
#!/Users/kamilwroniewicz/_code/_github/_tutorials/react/003-react-django-justdjango/backend/env/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from sqlparse.__main__ import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
|
[
"qu4ku@hotmail.com"
] |
qu4ku@hotmail.com
|
|
3720c2cfb59920028d138cfe49a9a780696b3a31
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03804/s226430328.py
|
40ea2fbbe5322fb1e9b734e857d574fcafee112b
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 532
|
py
|
N, M = map(int, input().split())
A = [input() for _ in range(N)]
B = [input() for _ in range(M)]
for i in range(N - M + 1):
for j in range(N - M + 1):
check = True
count = 0
for k in range(M):
if (A[i + k][j: j + M] == B[k]):
# print(A[i + k][j:j + M], B[k])
count += 1
continue
else:
check = False
break
if (check and count == M):
print('Yes')
exit()
print('No')
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
66ff77da4530f172e873831475c4198c3aa8c691
|
94e54c1e885808cab39fc6de3aca906b72d09d7b
|
/src/day5/d5part1.py
|
56170599cfebed4876b3137ea39f5bbeeed1737d
|
[] |
no_license
|
jondarrer/advent-of-code-2020
|
d7a54f9d6fb1869796cc972ec0ffd47bfa743e83
|
c942f950231d5a5585acf1357eb578776d7bf2e9
|
refs/heads/main
| 2023-01-31T15:45:34.052160
| 2020-12-06T11:00:58
| 2020-12-06T11:00:58
| 317,469,169
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,466
|
py
|
import read_input_file
def convert_boarding_pass_to_seat(boarding_pass):
'''Converts a boarding pass, e.g. FBFBBFFRLR, to seat, e.g. row 44 column 5'''
row = convert_binary_to_decimal(boarding_pass[0:7], 'B')
column = convert_binary_to_decimal(boarding_pass[7:10], 'R')
return {'row': row, 'column': column}
def convert_seat_to_seat_id(seat):
'''The seat as a seat id, e.g. row 44 column 5 is seat id 357 (row + (column * 8))'''
return seat['row'] * 8 + seat['column']
def convert_binary_to_decimal(binary, one_char):
'''Converts a binary number to decimal, with a specified character for 1, so FBF with B as 1 gives 3'''
decimal = 0
# https://www.w3schools.com/python/python_howto_reverse_string.asp
for index, bit in enumerate(binary[::-1]):
if (bit == one_char):
            decimal += 2 ** index
return decimal
def highest_seat_id_from_boarding_passes(boarding_passes):
'''The highest seat id from a list of boarding passes'''
boarding_pass_with_highest_seat_id = max(boarding_passes, key=lambda boarding_pass: convert_seat_to_seat_id(
convert_boarding_pass_to_seat(boarding_pass)))
return convert_seat_to_seat_id(convert_boarding_pass_to_seat(boarding_pass_with_highest_seat_id))
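# --- Editor's addition: the docstring example checked end-to-end
# (FBFBBFFRLR -> row 44, column 5 -> seat id 44 * 8 + 5 = 357).
assert convert_boarding_pass_to_seat('FBFBBFFRLR') == {'row': 44, 'column': 5}
assert convert_seat_to_seat_id({'row': 44, 'column': 5}) == 357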
if __name__ == '__main__':
lines = read_input_file.read(
'/Users/jondarrer/Code/advent-of-code-2020/src/input/day5.txt')
print(highest_seat_id_from_boarding_passes(lines))
|
[
"jonny@jondarrer.me.uk"
] |
jonny@jondarrer.me.uk
|