Dataset schema (29 columns, one record per row):

blob_id: string (40) | directory_id: string (40) | path: string (2-616) | content_id: string (40)
detected_licenses: list (0-69 items) | license_type: string (2 classes) | repo_name: string (5-118)
snapshot_id: string (40) | revision_id: string (40) | branch_name: string (4-63)
visit_date: timestamp[us] | revision_date: timestamp[us] | committer_date: timestamp[us]
github_id: int64 (2.91k-686M, nullable) | star_events_count: int64 (0-209k) | fork_events_count: int64 (0-110k)
gha_license_id: string (23 classes) | gha_event_created_at: timestamp[us] | gha_created_at: timestamp[us] | gha_language: string (213 classes)
src_encoding: string (30 classes) | language: string (1 class) | is_vendor: bool | is_generated: bool
length_bytes: int64 (2-10.3M) | extension: string (246 classes) | content: string (2-10.3M)
authors: list (1 item) | author_id: string (0-212)
blob_id: 9bb472c06e0fb2a411f3f967bb9670ec5de3153e | directory_id: a5562be95afeb6d3dca8dffa29ea47a89d5df8e3
path: /{{cookiecutter.project_slug}}/robot.py | content_id: 348f1280b4d39a203f8dbcc5e55c5354ffeeee85
detected_licenses: [] | license_type: no_license | repo_name: rt-learn-python/cookiecutter-default
snapshot_id: 8a1c0a2cc746bf42e8ebe941df8136cbdcf7ae7a | revision_id: 68f745b8913fc2659fff7d00fda0ab7ccc235d7a | branch_name: refs/heads/master
visit_date: 2020-08-03T13:59:08.413928 | revision_date: 2019-09-30T06:18:48 | committer_date: 2019-09-30T06:18:48
github_id: 211,776,736 | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 55 | extension: py
content:
class Robot:
    def walk(self):
        return 'walk'
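
A minimal usage sketch (editor-added, assuming the cookiecutter template has been rendered so the module imports cleanly):

# The templated class holds no state; walk() simply reports the action.
robot = Robot()
assert robot.walk() == 'walk'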
authors: ["royce.com@gmail.com"] | author_id: royce.com@gmail.com
blob_id: 579b307f1c0389a2cde3850053508f999877efb7 | directory_id: c286de21b8fd5f367f5a8cb4ee929d68fe453c28
path: /users/migrations/0001_initial.py | content_id: 84af6812b69819c432a226cb291d54c9cf5fb2f1
detected_licenses: [] | license_type: no_license | repo_name: afarntrog/barter
snapshot_id: ca764f4996fdb2cb1ac794e975c8e4a83ed6ca09 | revision_id: dec1c06f6d215a7bfb6afced881274785eeb0c05 | branch_name: refs/heads/master
visit_date: 2022-07-13T20:49:32.903143 | revision_date: 2020-05-10T17:33:15 | committer_date: 2020-05-10T17:33:15
github_id: 255,227,949 | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 5,268 | extension: py
content:
# Generated by Django 3.0.5 on 2020-04-12 19:58

from django.conf import settings
import django.contrib.auth.models
import django.contrib.auth.validators
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import phonenumber_field.modelfields


class Migration(migrations.Migration):

    initial = True

    dependencies = [
        ('auth', '0011_update_proxy_permissions'),
    ]

    operations = [
        migrations.CreateModel(
            name='CustomUser',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('password', models.CharField(max_length=128, verbose_name='password')),
                ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
                ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
                ('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username')),
                ('first_name', models.CharField(blank=True, max_length=30, verbose_name='first name')),
                ('last_name', models.CharField(blank=True, max_length=150, verbose_name='last name')),
                ('email', models.EmailField(blank=True, max_length=254, verbose_name='email address')),
                ('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
                ('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
                ('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
                ('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
                ('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
            ],
            options={
                'verbose_name': 'user',
                'verbose_name_plural': 'users',
                'abstract': False,
            },
            managers=[
                ('objects', django.contrib.auth.models.UserManager()),
            ],
        ),
        migrations.CreateModel(
            name='Address',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('address_line1', models.CharField(max_length=45, verbose_name='Address line 1')),
                ('address_line2', models.CharField(blank=True, max_length=45, verbose_name='Address line 2')),
                ('zip_code', models.CharField(max_length=10, verbose_name='Postal Code')),
                ('city', models.CharField(max_length=50)),
                ('state', models.CharField(max_length=40, verbose_name='State/Province')),
                ('country', models.CharField(max_length=40, verbose_name='Country')),
                ('updated_at', models.DateTimeField(auto_now=True)),
            ],
            options={
                'verbose_name_plural': 'Addresses',
                'unique_together': {('address_line1', 'address_line2', 'zip_code', 'city', 'state', 'country')},
            },
        ),
        migrations.CreateModel(
            name='Review',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('review', models.CharField(max_length=250)),
                ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Profile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('profile_pic', models.ImageField(null=True, upload_to='profile_pics/')),
                ('phone', phonenumber_field.modelfields.PhoneNumberField(blank=True, max_length=128, region=None)),
                ('bio', models.TextField(blank=True, max_length=500, null=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('address', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='users.Address')),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='profile', to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
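
A hedged sketch of applying this migration programmatically; it assumes a configured Django project with this app installed under the label the path implies (`users`):

# Equivalent to "python manage.py migrate users"; requires
# DJANGO_SETTINGS_MODULE to point at the project's settings module.
import django
from django.core.management import call_command

django.setup()
call_command('migrate', 'users')  # applies users/migrations/0001_initial.py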
authors: ["aaronfarntrog@gmail.com"] | author_id: aaronfarntrog@gmail.com
blob_id: 3e18da620546972c8c1f090984e40ef751199ba2 | directory_id: 487ce91881032c1de16e35ed8bc187d6034205f7
path: /codes/CodeJamCrawler/16_0_1_neat/16_0_1_shrucis1_ProblemA.py | content_id: 27f595965602f0b634f8ccd8be151ad017db5273
detected_licenses: [] | license_type: no_license | repo_name: DaHuO/Supergraph
snapshot_id: 9cd26d8c5a081803015d93cf5f2674009e92ef7e | revision_id: c88059dc66297af577ad2b8afa4e0ac0ad622915 | branch_name: refs/heads/master
visit_date: 2021-06-14T16:07:52.405091 | revision_date: 2016-08-21T13:39:13 | committer_date: 2016-08-21T13:39:13
github_id: 49,829,508 | star_events_count: 2 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: 2021-03-19T21:55:46 | gha_created_at: 2016-01-17T18:23:00 | gha_language: Python
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 521 | extension: py
content:
# Google Code Jam 2016 Qualification Round, Problem A ("Counting Sheep"):
# keep writing multiples of N until every digit 0-9 has appeared at least
# once; N == 0 loops on 0 forever, hence "INSOMNIA".
def lastnum(N):
    remaining = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]
    if N == 0:
        return "INSOMNIA"
    i = 1
    while True:
        a = str(N * i)
        for e in a:
            if e in remaining:
                remaining.remove(e)
        if len(remaining) == 0:
            return a
        i += 1


f = open(r"C:\Users\Neil\Downloads\A-large.in")
s = f.read().split("\n")
f.close()
t = int(s[0])
for j in range(t):
    print("Case #" + (str(j + 1)) + ": " + lastnum(int(s[1 + j])))
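
A quick hand-check of the two boundary behaviours above (assuming `lastnum` as defined in this record):

# N == 1: the multiples 1..9 eliminate digits 1-9, and the tenth
# multiple, "10", supplies the final 0.
assert lastnum(1) == "10"
# N == 0 never produces any digit other than 0, so the count never ends.
assert lastnum(0) == "INSOMNIA"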
authors: ["[dhuo@tcd.ie]"] | author_id: [dhuo@tcd.ie]
blob_id: 3b7e003816a39418ffd9b83fa55b72d6be8d5911 | directory_id: c916156034bccbe54ec5dfee48950aeacfb31f15
path: /RPS.py | content_id: 4466e38b552f048dc467684574ccaaab609aff25
detected_licenses: [] | license_type: no_license | repo_name: Aaronphilip2003/Voice-Controlled-Rock-Paper-Scissors
snapshot_id: ebbeb9e69b4bfbe226e5a18762771ec93b25b5a7 | revision_id: 06779673103e7c79218a49ca02e393a44baf6d6d | branch_name: refs/heads/main
visit_date: 2023-04-30T12:49:14.315209 | revision_date: 2021-05-09T17:08:45 | committer_date: 2021-05-09T17:08:45
github_id: 365,734,706 | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 2,486 | extension: py
content:
import pyaudio
import speech_recognition as sr
import pyttsx3
import random

# Making the computer Speak
engine = pyttsx3.init('sapi5')
voices = engine.getProperty('voices')
engine.setProperty('voice', voices[0].id)
print(voices[0].id)


def speak(audio):
    engine.say(audio)
    engine.runAndWait()


# Speech Recognition
def takeCommand():
    r = sr.Recognizer()
    with sr.Microphone() as source:
        print("Listening...........")
        r.pause_threshold = 1
        audio = r.listen(source)
    try:
        print("Recognising...........")
        query = r.recognize_google(audio, language='en-in')
        print(query)
    except:
        print("Please Say that again.......")
        return "None"
    return query


if __name__ == "__main__":
    while True:
        query = takeCommand().lower()
        comp = random.randint(1, 3)
        player = 0
        if query == "rock":
            player = 1
        elif query == "paper":
            player = 2
        elif query == "scissors":
            player = 3
        if player == 1:
            if comp == 1:
                speak("TIE! The computer chose Rock")
                print("TIE! The computer chose Rock")
            elif comp == 2:
                speak("You Lost! The computer chose Paper")
                print("You Lost! The computer chose Paper")
            elif comp == 3:
                speak("You Won! The computer chose Scissors")
                print("You Won! The computer chose Scissors")
        if player == 2:
            if comp == 1:
                speak("You Won! The computer chose Rock")
                print("You Won! The computer chose Rock")
            elif comp == 2:
                speak("Tie! The computer chose Paper")
                print("Tie! The computer chose Paper")
            elif comp == 3:
                speak("You Lost! The computer chose Scissors")
                print("You Lost! The computer chose Scissors")
        if player == 3:
            if comp == 1:
                speak("You Lost! The computer chose Rock")
                print("You Lost! The computer chose Rock")
            elif comp == 2:
                speak("You Won! The computer chose Paper")
                print("You Won! The computer chose Paper")
            elif comp == 3:
                speak("TIE! The computer chose Scissors")
                print("TIE! The computer chose Scissors")
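
One portability caveat: 'sapi5' is the Windows-only speech driver, so `pyttsx3.init('sapi5')` fails on other platforms. A minimal cross-platform variant (a sketch, not part of the original file):

import pyttsx3

# With no argument, pyttsx3 picks the platform's default driver
# (sapi5 on Windows, nsss on macOS, espeak on Linux).
engine = pyttsx3.init()
engine.say("rock paper scissors")
engine.runAndWait()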
|
[
"noreply@github.com"
] |
Aaronphilip2003.noreply@github.com
|
blob_id: e71a8321f27c9018ac8caa3a6a8bb1bc8abd87ae | directory_id: 4536452b8fdadae1e4365cda1ccd6d5d9cb06d8d
path: /airflow/tests/executors/test_base_executor.py | content_id: 06a47541e404ce9f1009f411acda9b43609581e7
detected_licenses: ["BSD-3-Clause", "MIT", "Apache-2.0", "BSD-2-Clause", "LicenseRef-scancode-unknown-license-reference"]
license_type: permissive | repo_name: kira-lin/ve450-declarative-deployment-framework
snapshot_id: 73cb6e3d82810aaef800cb654b3651b334f65673 | revision_id: f28e8b468568c8623134db5a1a8757860788799f | branch_name: refs/heads/master
visit_date: 2020-04-02T09:26:45.915721 | revision_date: 2018-12-08T14:04:24 | committer_date: 2018-12-08T14:04:24
github_id: 154,293,242 | star_events_count: 2 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 1,657 | extension: py
content:
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

import unittest
from datetime import datetime

from airflow.executors.base_executor import BaseExecutor
from airflow.utils.state import State


class BaseExecutorTest(unittest.TestCase):

    def test_get_event_buffer(self):
        executor = BaseExecutor()

        date = datetime.utcnow()
        try_number = 1
        key1 = ("my_dag1", "my_task1", date, try_number)
        key2 = ("my_dag2", "my_task1", date, try_number)
        key3 = ("my_dag2", "my_task2", date, try_number)
        state = State.SUCCESS
        executor.event_buffer[key1] = state
        executor.event_buffer[key2] = state
        executor.event_buffer[key3] = state

        self.assertEqual(len(executor.get_event_buffer(("my_dag1",))), 1)
        self.assertEqual(len(executor.get_event_buffer()), 2)
        self.assertEqual(len(executor.event_buffer), 0)
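
The three assertions only hold because `get_event_buffer` drains the entries it returns; a toy model of that contract (an illustration of the behaviour the test exercises, not Airflow's actual implementation):

class ToyExecutor:
    """Mimics the drain-on-read event buffer the test relies on."""

    def __init__(self):
        self.event_buffer = {}

    def get_event_buffer(self, dag_ids=None):
        # Pop (and return) every buffered event, or only those whose
        # key starts with one of the requested dag_ids.
        cleared = {}
        for key in list(self.event_buffer):
            if dag_ids is None or key[0] in dag_ids:
                cleared[key] = self.event_buffer.pop(key)
        return cleared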
|
[
"linzhilynn@gmail.com"
] |
linzhilynn@gmail.com
|
blob_id: 0f729841b590b519b15a1eb654dbe0a9ab8e5838 | directory_id: 5d3fd9328cf3fab1056d79cd8464df3f1719b30e
path: /MG5_aMC_v2_6_7/tests/unit_tests/iolibs/test_export_cpp.py | content_id: 5d2db30939e4bcd16b537fb06e7208e2dec1f326
detected_licenses: [] | license_type: no_license | repo_name: BKailasapathy/madgraph
snapshot_id: c8d34147146edda1f147e8259539c0e86e6209c2 | revision_id: 949fcf00f111eadf8948827e2933952b7823778d | branch_name: refs/heads/master
visit_date: 2023-07-15T08:38:08.382422 | revision_date: 2021-08-21T09:12:23 | committer_date: 2021-08-21T09:12:23
github_id: 398,511,168 | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 96,439 | extension: py
content:
################################################################################
#
# Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors
#
# This file is a part of the MadGraph5_aMC@NLO project, an application which
# automatically generates Feynman diagrams and matrix elements for arbitrary
# high-energy processes in the Standard Model and beyond.
#
# It is subject to the MadGraph5_aMC@NLO license which should accompany this
# distribution.
#
# For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch
#
################################################################################
"""Unit test library for the export Pythia8 format routines"""
import StringIO
import copy
import fractions
import os
import re
import tests.IOTests as IOTests
import tests.unit_tests as unittest
import aloha.aloha_writers as aloha_writers
import aloha.create_aloha as create_aloha
import madgraph.iolibs.export_cpp as export_cpp
import madgraph.iolibs.file_writers as writers
import madgraph.iolibs.helas_call_writers as helas_call_writer
import models.import_ufo as import_ufo
import madgraph.iolibs.save_load_object as save_load_object
import madgraph.iolibs.group_subprocs as group_subprocs
import madgraph.core.base_objects as base_objects
import madgraph.core.color_algebra as color
import madgraph.core.helas_objects as helas_objects
import madgraph.core.diagram_generation as diagram_generation
import madgraph.various.misc as misc
from madgraph import MG5DIR
import tests.unit_tests.core.test_helas_objects as test_helas_objects
import tests.unit_tests.iolibs.test_file_writers as test_file_writers
pjoin = os.path.join
#===============================================================================
# IOExportPythia8Test
#===============================================================================
class IOExportPythia8Test(IOTests.IOTestManager, test_file_writers.CheckFileCreate):
    """Test class for the export v4 module"""

    mymodel = base_objects.Model()
    mymatrixelement = helas_objects.HelasMatrixElement()
    created_files = ['test.h', 'test.cc']

    def assertFileContains(self, *args, **opts):
        """Wrapper to make sure that the function assertFileContains, of
        test_file_writers is used. We cannot put IOTests.IOTestManager last
        in the hierarchy because the structure requires it to be first always."""
        return test_file_writers.CheckFileCreate.assertFileContains(
            self, *args, **opts)

    def setUp(self):
        # NOTE: the original line was a bare attribute reference (a no-op);
        # actually calling it is almost certainly what was intended.
        test_file_writers.CheckFileCreate.clean_files(self)

        # Set up model
        mypartlist = base_objects.ParticleList()
        myinterlist = base_objects.InteractionList()

        # u and c quarks and their antiparticles
        mypartlist.append(base_objects.Particle({
            'name': 'u',
            'antiname': 'u~',
            'spin': 2,
            'color': 3,
            'mass': 'ZERO',
            'width': 'ZERO',
            'texname': 'u',
            'antitexname': '\bar u',
            'line': 'straight',
            'charge': 2. / 3.,
            'pdg_code': 2,
            'propagating': True,
            'is_part': True,
            'self_antipart': False}))
        u = mypartlist[len(mypartlist) - 1]
        antiu = copy.copy(u)
        antiu.set('is_part', False)

        mypartlist.append(base_objects.Particle({
            'name': 'c',
            'antiname': 'c~',
            'spin': 2,
            'color': 3,
            'mass': 'MC',
            'width': 'ZERO',
            'texname': 'c',
            'antitexname': '\bar c',
            'line': 'straight',
            'charge': 2. / 3.,
            'pdg_code': 4,
            'propagating': True,
            'is_part': True,
            'self_antipart': False}))
        c = mypartlist[len(mypartlist) - 1]
        antic = copy.copy(c)
        antic.set('is_part', False)

        # A gluon
        mypartlist.append(base_objects.Particle({
            'name': 'g',
            'antiname': 'g',
            'spin': 3,
            'color': 8,
            'mass': 'ZERO',
            'width': 'ZERO',
            'texname': 'g',
            'antitexname': 'g',
            'line': 'curly',
            'charge': 0.,
            'pdg_code': 21,
            'propagating': True,
            'is_part': True,
            'self_antipart': True}))
        g = mypartlist[len(mypartlist) - 1]

        # A Z boson
        mypartlist.append(base_objects.Particle({
            'name': 'Z',
            'antiname': 'Z',
            'spin': 3,
            'color': 1,
            'mass': 'MZ',
            'width': 'WZ',
            'texname': 'Z',
            'antitexname': 'Z',
            'line': 'wavy',
            'charge': 0.,
            'pdg_code': 23,
            'propagating': True,
            'is_part': True,
            'self_antipart': True}))
        z = mypartlist[len(mypartlist) - 1]

        # A gluino
        mypartlist.append(base_objects.Particle({
            'name': 'go',
            'antiname': 'go',
            'spin': 2,
            'color': 8,
            'mass': 'MGO',
            'width': 'WGO',
            'texname': 'go',
            'antitexname': 'go',
            'line': 'straight',
            'charge': 0.,
            'pdg_code': 1000021,
            'propagating': True,
            'is_part': True,
            'self_antipart': True}))
        go = mypartlist[len(mypartlist) - 1]

        # A sextet diquark
        mypartlist.append(base_objects.Particle({
            'name': 'six',
            'antiname': 'six~',
            'spin': 1,
            'color': 6,
            'mass': 'MSIX',
            'width': 'WSIX',
            'texname': 'six',
            'antitexname': 'sixbar',
            'line': 'straight',
            'charge': 4. / 3.,
            'pdg_code': 6000001,
            'propagating': True,
            'is_part': True,
            'self_antipart': False}))
        six = mypartlist[len(mypartlist) - 1]
        antisix = copy.copy(six)
        antisix.set('is_part', False)

        # Gluon couplings to quarks
        myinterlist.append(base_objects.Interaction({
            'id': 1,
            'particles': base_objects.ParticleList([antiu, u, g]),
            'color': [color.ColorString([color.T(2, 1, 0)])],
            'lorentz': ['FFV1'],
            'couplings': {(0, 0): 'GC_10'},
            'orders': {'QCD': 1}}))

        # Z couplings to quarks
        myinterlist.append(base_objects.Interaction({
            'id': 2,
            'particles': base_objects.ParticleList([antiu, u, z]),
            'color': [color.ColorString([color.T(1, 0)])],
            'lorentz': ['FFV2', 'FFV5'],
            'couplings': {(0, 0): 'GC_35', (0, 1): 'GC_47'},
            'orders': {'QED': 1}}))

        # Gluon couplings to gluinos
        myinterlist.append(base_objects.Interaction({
            'id': 3,
            'particles': base_objects.ParticleList([go, go, g]),
            'color': [color.ColorString([color.f(0, 1, 2)])],
            'lorentz': ['FFV1'],
            'couplings': {(0, 0): 'GC_8'},
            'orders': {'QCD': 1}}))

        # Sextet couplings to quarks
        myinterlist.append(base_objects.Interaction({
            'id': 4,
            'particles': base_objects.ParticleList([u, u, antisix]),
            'color': [color.ColorString([color.K6Bar(2, 0, 1)])],
            'lorentz': ['FFS1'],
            'couplings': {(0, 0): 'GC_24'},
            'orders': {'QSIX': 1}}))
        myinterlist.append(base_objects.Interaction({
            'id': 5,
            'particles': base_objects.ParticleList([antiu, antiu, six]),
            'color': [color.ColorString([color.K6(2, 0, 1)])],
            'lorentz': ['FFS1'],
            'couplings': {(0, 0): 'GC_24'},
            'orders': {'QSIX': 1}}))

        self.mymodel.set('particles', mypartlist)
        self.mymodel.set('interactions', myinterlist)
        self.mymodel.set('name', 'sm')

        myleglist = base_objects.LegList()
        myleglist.append(base_objects.Leg({'id': 2, 'state': False}))
        myleglist.append(base_objects.Leg({'id': -2, 'state': False}))
        myleglist.append(base_objects.Leg({'id': 2, 'state': True}))
        myleglist.append(base_objects.Leg({'id': -2, 'state': True}))

        myproc = base_objects.Process({'legs': myleglist,
                                       'model': self.mymodel,
                                       'orders': {'QSIX': 0}})
        myamplitude = diagram_generation.Amplitude({'process': myproc})
        self.mymatrixelement = helas_objects.HelasMultiProcess(myamplitude)

        myleglist = base_objects.LegList()
        myleglist.append(base_objects.Leg({'id': 4, 'state': False, 'number': 1}))
        myleglist.append(base_objects.Leg({'id': -4, 'state': False, 'number': 2}))
        myleglist.append(base_objects.Leg({'id': 4, 'state': True, 'number': 3}))
        myleglist.append(base_objects.Leg({'id': -4, 'state': True, 'number': 4}))

        myproc = base_objects.Process({'legs': myleglist,
                                       'model': self.mymodel,
                                       'orders': {'QSIX': 0}})
        self.mymatrixelement.get('matrix_elements')[0].\
            get('processes').append(myproc)

        self.mycppwriter = helas_call_writer.CPPUFOHelasCallWriter(self.mymodel)
        self.pythia8_exporter = export_cpp.OneProcessExporterPythia8(
            self.mymatrixelement, self.mycppwriter,
            process_string="q q~ > q q~")
        self.cpp_exporter = export_cpp.OneProcessExporterCPP(
            self.mymatrixelement, self.mycppwriter,
            process_string="q q~ > q q~")

    tearDown = test_file_writers.CheckFileCreate.clean_files

    def test_pythia8_export_functions(self):
        """Test functions used by the Pythia export"""

        # Test the exporter setup
        self.assertEqual(self.pythia8_exporter.model, self.mymodel)
        self.assertEqual(self.pythia8_exporter.matrix_elements, self.mymatrixelement.get('matrix_elements'))
        self.assertEqual(self.pythia8_exporter.process_string, "q q~ > q q~")
        self.assertEqual(self.pythia8_exporter.process_name, "Sigma_sm_qqx_qqx")
        self.assertEqual(self.pythia8_exporter.nexternal, 4)
        self.assertEqual(self.pythia8_exporter.ninitial, 2)
        self.assertEqual(self.pythia8_exporter.nfinal, 2)
        self.assertTrue(self.pythia8_exporter.single_helicities)
        self.assertEqual(self.pythia8_exporter.wavefunctions, self.mymatrixelement.get('matrix_elements')[0].get_all_wavefunctions())

        # Test get_process_influx
        processes = self.mymatrixelement.get('matrix_elements')[0].get('processes')
        self.assertEqual(self.pythia8_exporter.get_process_influx(), "qqbarSame")
        self.assertEqual(self.pythia8_exporter.get_id_masses(processes[0]), "")
        self.assertEqual(self.pythia8_exporter.get_id_masses(processes[1]),
"""int id3Mass() const {return 4;}
int id4Mass() const {return 4;}""")
        self.assertEqual(self.pythia8_exporter.get_resonance_lines(),
                         "virtual int resonanceA() const {return 23;}")

    def test_write_process_h_file(self):
        """Test writing the .h Pythia file for a matrix element"""

        goal_string = \
"""//==========================================================================
// This file has been automatically generated for Pythia 8
// MadGraph5_aMC@NLO v. %(version)s, %(date)s
// By the MadGraph5_aMC@NLO Development Team
// Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch
//==========================================================================
#ifndef Pythia8_Sigma_sm_qqx_qqx_H
#define Pythia8_Sigma_sm_qqx_qqx_H
#include <complex>
#include "Pythia8/SigmaProcess.h"
#include "Parameters_sm.h"
using namespace std;
namespace Pythia8
{
//==========================================================================
// A class for calculating the matrix elements for
// Process: u u~ > u u~ QSIX=0
// Process: c c~ > c c~ QSIX=0
//--------------------------------------------------------------------------
class Sigma_sm_qqx_qqx : public Sigma2Process
{
public:
// Constructor.
Sigma_sm_qqx_qqx() {}
// Initialize process.
virtual void initProc();
// Calculate flavour-independent parts of cross section.
virtual void sigmaKin();
// Evaluate sigmaHat(sHat).
virtual double sigmaHat();
// Select flavour, colour and anticolour.
virtual void setIdColAcol();
// Evaluate weight for decay angles.
virtual double weightDecay(Event& process, int iResBeg, int iResEnd);
// Info on the subprocess.
virtual string name() const {return "q q~ > q q~ (sm)";}
virtual int code() const {return 10000;}
virtual string inFlux() const {return "qqbarSame";}
virtual int resonanceA() const {return 23;}
// Tell Pythia that sigmaHat returns the ME^2
virtual bool convertM2() const {return true;}
private:
// Private functions to calculate the matrix element for all subprocesses
// Calculate wavefunctions
void calculate_wavefunctions(const int perm[], const int hel[]);
static const int nwavefuncs = 8;
std::complex<double> w[nwavefuncs][18];
static const int namplitudes = 4;
std::complex<double> amp[namplitudes];
double matrix_uux_uux();
// Constants for array limits
static const int nexternal = 4;
static const int nprocesses = 1;
// Store the matrix element value from sigmaKin
double matrix_element[nprocesses];
// Color flows, used when selecting color
double * jamp2[nprocesses];
// Pointer to the model parameters
Parameters_sm * pars;
};
} // end namespace Pythia8
#endif // Pythia8_Sigma_sm_qqx_qqx_H
""" % misc.get_pkg_info()
        self.pythia8_exporter.write_process_h_file(
            writers.CPPWriter(self.give_pos('test.h')))
        #print open(self.give_pos('test.h')).read()
        self.assertFileContains('test.h', goal_string)

    def test_write_process_cc_file(self):
        """Test writing the .cc Pythia file for a matrix element"""

        goal_string = \
"""//==========================================================================
// This file has been automatically generated for Pythia 8 by
// MadGraph5_aMC@NLO v. %(version)s, %(date)s
// By the MadGraph5_aMC@NLO Development Team
// Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch
//==========================================================================
#include "Sigma_sm_qqx_qqx.h"
#include "HelAmps_sm.h"
using namespace Pythia8_sm;
namespace Pythia8
{
//==========================================================================
// Class member functions for calculating the matrix elements for
// Process: u u~ > u u~ QSIX=0
// Process: c c~ > c c~ QSIX=0
//--------------------------------------------------------------------------
// Initialize process.
void Sigma_sm_qqx_qqx::initProc()
{
// Instantiate the model class and set parameters that stay fixed during run
pars = Parameters_sm::getInstance();
pars->setIndependentParameters(particleDataPtr, couplingsPtr, slhaPtr);
pars->setIndependentCouplings();
// Set massive/massless matrix elements for c/b/mu/tau
mcME = particleDataPtr->m0(4);
mbME = 0.;
mmuME = 0.;
mtauME = 0.;
jamp2[0] = new double[2];
}
//--------------------------------------------------------------------------
// Evaluate |M|^2, part independent of incoming flavour.
void Sigma_sm_qqx_qqx::sigmaKin()
{
// Set the parameters which change event by event
pars->setDependentParameters(particleDataPtr, couplingsPtr, slhaPtr, alpS);
pars->setDependentCouplings();
// Reset color flows
for(int i = 0; i < 2; i++ )
jamp2[0][i] = 0.;
// Local variables and constants
const int ncomb = 16;
static bool goodhel[ncomb] = {ncomb * false};
static int ntry = 0, sum_hel = 0, ngood = 0;
static int igood[ncomb];
static int jhel;
double t[nprocesses];
// Helicities for the process
static const int helicities[ncomb][nexternal] = {{-1, -1, -1, -1}, {-1, -1,
-1, 1}, {-1, -1, 1, -1}, {-1, -1, 1, 1}, {-1, 1, -1, -1}, {-1, 1, -1, 1},
{-1, 1, 1, -1}, {-1, 1, 1, 1}, {1, -1, -1, -1}, {1, -1, -1, 1}, {1, -1,
1, -1}, {1, -1, 1, 1}, {1, 1, -1, -1}, {1, 1, -1, 1}, {1, 1, 1, -1}, {1,
1, 1, 1}};
// Denominators: spins, colors and identical particles
const int denominators[nprocesses] = {36};
ntry = ntry + 1;
// Reset the matrix elements
for(int i = 0; i < nprocesses; i++ )
{
matrix_element[i] = 0.;
t[i] = 0.;
}
// Define permutation
int perm[nexternal];
for(int i = 0; i < nexternal; i++ )
{
perm[i] = i;
}
// For now, call setupForME() here
id1 = 2;
id2 = -2;
if( !setupForME())
{
return;
}
if (sum_hel == 0 || ntry < 10)
{
// Calculate the matrix element for all helicities
for(int ihel = 0; ihel < ncomb; ihel++ )
{
if (goodhel[ihel] || ntry < 2)
{
calculate_wavefunctions(perm, helicities[ihel]);
t[0] = matrix_uux_uux();
double tsum = 0;
for(int iproc = 0; iproc < nprocesses; iproc++ )
{
matrix_element[iproc] += t[iproc];
tsum += t[iproc];
}
// Store which helicities give non-zero result
if (tsum != 0. && !goodhel[ihel])
{
goodhel[ihel] = true;
ngood++;
igood[ngood] = ihel;
}
}
}
jhel = 0;
sum_hel = min(sum_hel, ngood);
}
else
{
// Only use the "good" helicities
for(int j = 0; j < sum_hel; j++ )
{
jhel++;
if (jhel >= ngood)
jhel = 0;
double hwgt = double(ngood)/double(sum_hel);
int ihel = igood[jhel];
calculate_wavefunctions(perm, helicities[ihel]);
t[0] = matrix_uux_uux();
for(int iproc = 0; iproc < nprocesses; iproc++ )
{
matrix_element[iproc] += t[iproc] * hwgt;
}
}
}
for (int i = 0; i < nprocesses; i++ )
matrix_element[i] /= denominators[i];
}
//--------------------------------------------------------------------------
// Evaluate |M|^2, including incoming flavour dependence.
double Sigma_sm_qqx_qqx::sigmaHat()
{
// Select between the different processes
if(id1 == 4 && id2 == -4)
{
// Add matrix elements for processes with beams (4, -4)
return matrix_element[0];
}
else if(id1 == 2 && id2 == -2)
{
// Add matrix elements for processes with beams (2, -2)
return matrix_element[0];
}
else
{
// Return 0 if not correct initial state assignment
return 0.;
}
}
//--------------------------------------------------------------------------
// Select identity, colour and anticolour.
void Sigma_sm_qqx_qqx::setIdColAcol()
{
if(id1 == 4 && id2 == -4)
{
// Pick one of the flavor combinations (4, -4)
int flavors[1][2] = {{4, -4}};
vector<double> probs;
double sum = matrix_element[0];
probs.push_back(matrix_element[0]/sum);
int choice = rndmPtr->pick(probs);
id3 = flavors[choice][0];
id4 = flavors[choice][1];
}
else if(id1 == 2 && id2 == -2)
{
// Pick one of the flavor combinations (2, -2)
int flavors[1][2] = {{2, -2}};
vector<double> probs;
double sum = matrix_element[0];
probs.push_back(matrix_element[0]/sum);
int choice = rndmPtr->pick(probs);
id3 = flavors[choice][0];
id4 = flavors[choice][1];
}
setId(id1, id2, id3, id4);
// Pick color flow
int ncolor[1] = {2};
if((id1 == 2 && id2 == -2 && id3 == 2 && id4 == -2) || (id1 == 4 && id2 == -4
&& id3 == 4 && id4 == -4))
{
vector<double> probs;
double sum = jamp2[0][0] + jamp2[0][1];
for(int i = 0; i < ncolor[0]; i++ )
probs.push_back(jamp2[0][i]/sum);
int ic = rndmPtr->pick(probs);
static int colors[2][8] = {{1, 0, 0, 1, 2, 0, 0, 2}, {2, 0, 0, 1, 2, 0, 0,
1}};
setColAcol(colors[ic][0], colors[ic][1], colors[ic][2], colors[ic][3],
colors[ic][4], colors[ic][5], colors[ic][6], colors[ic][7]);
}
}
//--------------------------------------------------------------------------
// Evaluate weight for angles of decay products in process
double Sigma_sm_qqx_qqx::weightDecay(Event& process, int iResBeg, int iResEnd)
{
// Just use isotropic decay (default)
return 1.;
}
//==========================================================================
// Private class member functions
//--------------------------------------------------------------------------
// Evaluate |M|^2 for each subprocess
void Sigma_sm_qqx_qqx::calculate_wavefunctions(const int perm[], const int
hel[])
{
// Calculate wavefunctions for all processes
double p[nexternal][4];
int i;
// Convert Pythia 4-vectors to double[]
for(i = 0; i < nexternal; i++ )
{
p[i][0] = pME[i].e();
p[i][1] = pME[i].px();
p[i][2] = pME[i].py();
p[i][3] = pME[i].pz();
}
// Calculate all wavefunctions
ixxxxx(p[perm[0]], mME[0], hel[0], +1, w[0]);
oxxxxx(p[perm[1]], mME[1], hel[1], -1, w[1]);
oxxxxx(p[perm[2]], mME[2], hel[2], +1, w[2]);
ixxxxx(p[perm[3]], mME[3], hel[3], -1, w[3]);
FFV1_3(w[0], w[1], pars->GC_10, pars->ZERO, pars->ZERO, w[4]);
FFV2_5_3(w[0], w[1], pars->GC_35, pars->GC_47, pars->MZ, pars->WZ, w[5]);
FFV1_3(w[0], w[2], pars->GC_10, pars->ZERO, pars->ZERO, w[6]);
FFV2_5_3(w[0], w[2], pars->GC_35, pars->GC_47, pars->MZ, pars->WZ, w[7]);
// Calculate all amplitudes
// Amplitude(s) for diagram number 0
FFV1_0(w[3], w[2], w[4], pars->GC_10, amp[0]);
FFV2_5_0(w[3], w[2], w[5], pars->GC_35, pars->GC_47, amp[1]);
FFV1_0(w[3], w[1], w[6], pars->GC_10, amp[2]);
FFV2_5_0(w[3], w[1], w[7], pars->GC_35, pars->GC_47, amp[3]);
}
double Sigma_sm_qqx_qqx::matrix_uux_uux()
{
int i, j;
// Local variables
const int ngraphs = 4;
const int ncolor = 2;
std::complex<double> ztemp;
std::complex<double> jamp[ncolor];
// The color matrix;
static const double denom[ncolor] = {1, 1};
static const double cf[ncolor][ncolor] = {{9, 3}, {3, 9}};
// Calculate color flows
jamp[0] = +1./6. * amp[0] - amp[1] + 1./2. * amp[2];
jamp[1] = -1./2. * amp[0] - 1./6. * amp[2] + amp[3];
// Sum and square the color flows to get the matrix element
double matrix = 0;
for(i = 0; i < ncolor; i++ )
{
ztemp = 0.;
for(j = 0; j < ncolor; j++ )
ztemp = ztemp + cf[i][j] * jamp[j];
matrix = matrix + real(ztemp * conj(jamp[i]))/denom[i];
}
// Store the leading color flows for choice of color
for(i = 0; i < ncolor; i++ )
jamp2[0][i] += real(jamp[i] * conj(jamp[i]));
return matrix;
}
} // end namespace Pythia8
""" % misc.get_pkg_info()
        exporter = export_cpp.OneProcessExporterPythia8(self.mymatrixelement,
            self.mycppwriter, process_string="q q~ > q q~")
        exporter.write_process_cc_file(
            writers.CPPWriter(self.give_pos('test.cc')))
        #print open(self.give_pos('test.cc')).read()
        self.assertFileContains('test.cc', goal_string)

    def test_write_process_cc_file_uu_six(self):
        """Test writing the .cc Pythia file for u u > six"""

        myleglist = base_objects.LegList()
        myleglist.append(base_objects.Leg({'id': 2, 'state': False, 'number': 1}))
        myleglist.append(base_objects.Leg({'id': 2, 'state': False, 'number': 2}))
        myleglist.append(base_objects.Leg({'id': 6000001, 'number': 3}))
        myproc = base_objects.Process({'legs': myleglist,
                                       'model': self.mymodel})
        myamplitude = diagram_generation.Amplitude({'process': myproc})
        mymatrixelement = helas_objects.HelasMultiProcess(myamplitude)
        exporter = export_cpp.OneProcessExporterPythia8(
            mymatrixelement, self.mycppwriter,
            process_string="q q > six")

        goal_string = \
"""//==========================================================================
// This file has been automatically generated for Pythia 8 by
// MadGraph5_aMC@NLO v. %(version)s, %(date)s
// By the MadGraph5_aMC@NLO Development Team
// Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch
//==========================================================================
#include "Sigma_sm_qq_six.h"
#include "HelAmps_sm.h"
using namespace Pythia8_sm;
namespace Pythia8
{
//==========================================================================
// Class member functions for calculating the matrix elements for
// Process: u u > six
//--------------------------------------------------------------------------
// Initialize process.
void Sigma_sm_qq_six::initProc()
{
// Instantiate the model class and set parameters that stay fixed during run
pars = Parameters_sm::getInstance();
pars->setIndependentParameters(particleDataPtr, couplingsPtr, slhaPtr);
pars->setIndependentCouplings();
// Set massive/massless matrix elements for c/b/mu/tau
mcME = particleDataPtr->m0(4);
mbME = 0.;
mmuME = 0.;
mtauME = 0.;
jamp2[0] = new double[1];
}
//--------------------------------------------------------------------------
// Evaluate |M|^2, part independent of incoming flavour.
void Sigma_sm_qq_six::sigmaKin()
{
// Set the parameters which change event by event
pars->setDependentParameters(particleDataPtr, couplingsPtr, slhaPtr, alpS);
pars->setDependentCouplings();
// Reset color flows
for(int i = 0; i < 1; i++ )
jamp2[0][i] = 0.;
// Local variables and constants
const int ncomb = 4;
static bool goodhel[ncomb] = {ncomb * false};
static int ntry = 0, sum_hel = 0, ngood = 0;
static int igood[ncomb];
static int jhel;
double t[nprocesses];
// Helicities for the process
static const int helicities[ncomb][nexternal] = {{-1, -1, 0}, {-1, 1, 0}, {1,
-1, 0}, {1, 1, 0}};
// Denominators: spins, colors and identical particles
const int denominators[nprocesses] = {36};
ntry = ntry + 1;
// Reset the matrix elements
for(int i = 0; i < nprocesses; i++ )
{
matrix_element[i] = 0.;
t[i] = 0.;
}
// Define permutation
int perm[nexternal];
for(int i = 0; i < nexternal; i++ )
{
perm[i] = i;
}
// For now, call setupForME() here
id1 = 2;
id2 = 2;
if( !setupForME())
{
return;
}
if (sum_hel == 0 || ntry < 10)
{
// Calculate the matrix element for all helicities
for(int ihel = 0; ihel < ncomb; ihel++ )
{
if (goodhel[ihel] || ntry < 2)
{
calculate_wavefunctions(perm, helicities[ihel]);
t[0] = matrix_uu_six();
double tsum = 0;
for(int iproc = 0; iproc < nprocesses; iproc++ )
{
matrix_element[iproc] += t[iproc];
tsum += t[iproc];
}
// Store which helicities give non-zero result
if (tsum != 0. && !goodhel[ihel])
{
goodhel[ihel] = true;
ngood++;
igood[ngood] = ihel;
}
}
}
jhel = 0;
sum_hel = min(sum_hel, ngood);
}
else
{
// Only use the "good" helicities
for(int j = 0; j < sum_hel; j++ )
{
jhel++;
if (jhel >= ngood)
jhel = 0;
double hwgt = double(ngood)/double(sum_hel);
int ihel = igood[jhel];
calculate_wavefunctions(perm, helicities[ihel]);
t[0] = matrix_uu_six();
for(int iproc = 0; iproc < nprocesses; iproc++ )
{
matrix_element[iproc] += t[iproc] * hwgt;
}
}
}
for (int i = 0; i < nprocesses; i++ )
matrix_element[i] /= denominators[i];
}
//--------------------------------------------------------------------------
// Evaluate |M|^2, including incoming flavour dependence.
double Sigma_sm_qq_six::sigmaHat()
{
// Select between the different processes
if(id1 == 2 && id2 == 2)
{
// Add matrix elements for processes with beams (2, 2)
return matrix_element[0];
}
else
{
// Return 0 if not correct initial state assignment
return 0.;
}
}
//--------------------------------------------------------------------------
// Select identity, colour and anticolour.
void Sigma_sm_qq_six::setIdColAcol()
{
if(id1 == 2 && id2 == 2)
{
// Pick one of the flavor combinations (6000001,)
int flavors[1][1] = {{6000001}};
vector<double> probs;
double sum = matrix_element[0];
probs.push_back(matrix_element[0]/sum);
int choice = rndmPtr->pick(probs);
id3 = flavors[choice][0];
}
setId(id1, id2, id3);
// Pick color flow
int ncolor[1] = {1};
if((id1 == 2 && id2 == 2 && id3 == 6000001))
{
vector<double> probs;
double sum = jamp2[0][0];
for(int i = 0; i < ncolor[0]; i++ )
probs.push_back(jamp2[0][i]/sum);
int ic = rndmPtr->pick(probs);
static int colors[1][6] = {{1, 0, 2, 0, 1, -2}};
setColAcol(colors[ic][0], colors[ic][1], colors[ic][2], colors[ic][3],
colors[ic][4], colors[ic][5]);
}
}
//--------------------------------------------------------------------------
// Evaluate weight for angles of decay products in process
double Sigma_sm_qq_six::weightDecay(Event& process, int iResBeg, int iResEnd)
{
// Just use isotropic decay (default)
return 1.;
}
//==========================================================================
// Private class member functions
//--------------------------------------------------------------------------
// Evaluate |M|^2 for each subprocess
void Sigma_sm_qq_six::calculate_wavefunctions(const int perm[], const int hel[])
{
// Calculate wavefunctions for all processes
double p[nexternal][4];
int i;
// Convert Pythia 4-vectors to double[]
for(i = 0; i < nexternal; i++ )
{
p[i][0] = pME[i].e();
p[i][1] = pME[i].px();
p[i][2] = pME[i].py();
p[i][3] = pME[i].pz();
}
// Calculate all wavefunctions
oxxxxx(p[perm[0]], mME[0], hel[0], -1, w[0]);
ixxxxx(p[perm[1]], mME[1], hel[1], +1, w[1]);
sxxxxx(p[perm[2]], +1, w[2]);
// Calculate all amplitudes
// Amplitude(s) for diagram number 0
FFS1C1_0(w[1], w[0], w[2], pars->GC_24, amp[0]);
}
double Sigma_sm_qq_six::matrix_uu_six()
{
int i, j;
// Local variables
const int ngraphs = 1;
const int ncolor = 1;
std::complex<double> ztemp;
std::complex<double> jamp[ncolor];
// The color matrix;
static const double denom[ncolor] = {1};
static const double cf[ncolor][ncolor] = {{6}};
// Calculate color flows
jamp[0] = -amp[0];
// Sum and square the color flows to get the matrix element
double matrix = 0;
for(i = 0; i < ncolor; i++ )
{
ztemp = 0.;
for(j = 0; j < ncolor; j++ )
ztemp = ztemp + cf[i][j] * jamp[j];
matrix = matrix + real(ztemp * conj(jamp[i]))/denom[i];
}
// Store the leading color flows for choice of color
for(i = 0; i < ncolor; i++ )
jamp2[0][i] += real(jamp[i] * conj(jamp[i]));
return matrix;
}
} // end namespace Pythia8
""" % misc.get_pkg_info()
        exporter.write_process_cc_file(
            writers.CPPWriter(self.give_pos('test.cc')))
        #print open(self.give_pos('test.cc')).read()
        self.assertFileContains('test.cc', goal_string)

    @IOTests.createIOTest()
    def testIO_write_dec_multiprocess_files(self):
        """target: write_dec_multiprocess_files.h
        target: write_dec_multiprocess_files.cc
        """
        # Setup a model
        mypartlist = base_objects.ParticleList()
        myinterlist = base_objects.InteractionList()

        # A gluon
        mypartlist.append(base_objects.Particle({
            'name': 'g',
            'antiname': 'g',
            'spin': 3,
            'color': 8,
            'mass': 'zero',
            'width': 'zero',
            'texname': 'g',
            'antitexname': 'g',
            'line': 'curly',
            'charge': 0.,
            'pdg_code': 21,
            'propagating': True,
            'is_part': True,
            'self_antipart': True}))
        g = mypartlist[-1]

        # A quark U and its antiparticle
        mypartlist.append(base_objects.Particle({
            'name': 'u',
            'antiname': 'u~',
            'spin': 2,
            'color': 3,
            'mass': 'zero',
            'width': 'zero',
            'texname': 'u',
            'antitexname': '\bar u',
            'line': 'straight',
            'charge': 2. / 3.,
            'pdg_code': 2,
            'propagating': True,
            'is_part': True,
            'self_antipart': False}))
        u = mypartlist[-1]
        antiu = copy.copy(u)
        antiu.set('is_part', False)

        # A quark S and its antiparticle
        mypartlist.append(base_objects.Particle({
            'name': 's',
            'antiname': 's~',
            'spin': 2,
            'color': 3,
            'mass': 'zero',
            'width': 'zero',
            'texname': 'd',
            'antitexname': '\bar d',
            'line': 'straight',
            'charge': -1. / 3.,
            'pdg_code': 3,
            'propagating': True,
            'is_part': True,
            'self_antipart': False}))
        s = mypartlist[-1]
        antis = copy.copy(s)
        antis.set('is_part', False)

        # A quark D and its antiparticle
        mypartlist.append(base_objects.Particle({
            'name': 'd',
            'antiname': 'd~',
            'spin': 2,
            'color': 3,
            'mass': 'zero',
            'width': 'zero',
            'texname': 'd',
            'antitexname': '\bar d',
            'line': 'straight',
            'charge': -1. / 3.,
            'pdg_code': 1,
            'propagating': True,
            'is_part': True,
            'self_antipart': False}))
        d = mypartlist[-1]
        antid = copy.copy(d)
        antid.set('is_part', False)

        # A Z
        mypartlist.append(base_objects.Particle({
            'name': 'z',
            'antiname': 'z',
            'spin': 3,
            'color': 1,
            'mass': 'MZ',
            'width': 'WZ',
            'texname': 'Z',
            'antitexname': 'Z',
            'line': 'wavy',
            'charge': 0.,
            'pdg_code': 23,
            'propagating': True,
            'is_part': True,
            'self_antipart': True}))
        z = mypartlist[-1]

        # Gluon couplings to quarks
        myinterlist.append(base_objects.Interaction({
            'id': 1,
            'particles': base_objects.ParticleList([antiu, u, g]),
            'color': [color.ColorString([color.T(2, 1, 0)])],
            'lorentz': ['FFV1'],
            'couplings': {(0, 0): 'GQQ'},
            'orders': {'QCD': 1}}))
        myinterlist.append(base_objects.Interaction({
            'id': 2,
            'particles': base_objects.ParticleList([antid, d, g]),
            'color': [color.ColorString([color.T(2, 1, 0)])],
            'lorentz': ['FFV1'],
            'couplings': {(0, 0): 'GQQ'},
            'orders': {'QCD': 1}}))
        myinterlist.append(base_objects.Interaction({
            'id': 3,
            'particles': base_objects.ParticleList([antis, s, g]),
            'color': [color.ColorString([color.T(2, 1, 0)])],
            'lorentz': ['FFV1'],
            'couplings': {(0, 0): 'GQQ'},
            'orders': {'QCD': 1}}))

        # Coupling of Z to quarks
        myinterlist.append(base_objects.Interaction({
            'id': 6,
            'particles': base_objects.ParticleList([antiu, u, z]),
            'color': [color.ColorString([color.T(1, 0)])],
            'lorentz': ['FFV1', 'FFV2'],
            'couplings': {(0, 0): 'GUZ1', (0, 1): 'GUZ2'},
            'orders': {'QED': 1}}))
        myinterlist.append(base_objects.Interaction({
            'id': 7,
            'particles': base_objects.ParticleList([antid, d, z]),
            'color': [color.ColorString([color.T(1, 0)])],
            'lorentz': ['FFV1', 'FFV2'],
            # The original dict repeated the (0, 0) key for GDZ1 and GDZ2,
            # which would silently drop GDZ1; (0, 1) matches the u-quark
            # interaction above and is almost certainly what was meant.
            'couplings': {(0, 0): 'GDZ1', (0, 1): 'GDZ2'},
            'orders': {'QED': 1}}))
        myinterlist.append(base_objects.Interaction({
            'id': 8,
            'particles': base_objects.ParticleList([antis, s, z]),
            'color': [color.ColorString([color.T(1, 0)])],
            'lorentz': ['FFV1', 'FFV2'],
            'couplings': {(0, 0): 'GDZ1', (0, 1): 'GDZ2'},  # same fix as id 7
            'orders': {'QED': 1}}))

        mymodel = base_objects.Model()
        mymodel.set('particles', mypartlist)
        mymodel.set('interactions', myinterlist)
        mymodel.set('name', 'sm')

        # Set parameters
        external_parameters = [
            base_objects.ParamCardVariable('zero', 0., 'DUM', 1),
            base_objects.ParamCardVariable('MZ', 91., 'MASS', 23),
            base_objects.ParamCardVariable('WZ', 2., 'DECAY', 23)]
        couplings = [
            base_objects.ModelVariable('GQQ', '1.', 'complex'),
            base_objects.ModelVariable('GQED', '0.1', 'complex'),
            base_objects.ModelVariable('G', '1.', 'complex'),
            base_objects.ModelVariable('GUZ1', '0.1', 'complex'),
            base_objects.ModelVariable('GUZ2', '0.1', 'complex'),
            base_objects.ModelVariable('GDZ1', '0.05', 'complex'),
            base_objects.ModelVariable('GDZ2', '0.05', 'complex'),
            base_objects.ModelVariable('ZZQQ', '0.01', 'complex')]
        mymodel.set('parameters', {('external',): external_parameters})
        mymodel.set('couplings', {(): couplings})
        mymodel.set('functions', [])

        p = [21, 1, 2, 3, -1, -2, -3]
        q = [1, 2, -1, -2]
        procs = [[p, p, [23], p]]
        decays = [[[23], p, p]]
        my_processes = base_objects.ProcessDefinitionList()
        decayprocs = base_objects.ProcessDefinitionList()

        for proc in procs:
            # Define the multiprocess
            my_leglist = base_objects.MultiLegList([
                base_objects.MultiLeg({'ids': id, 'state': True}) for id in proc])
            my_leglist[0].set('state', False)
            my_leglist[1].set('state', False)
            my_process = base_objects.ProcessDefinition({'legs': my_leglist,
                                                         'model': mymodel})
            my_processes.append(my_process)

        for proc in decays:
            # Define the decays
            my_leglist = base_objects.MultiLegList([
                base_objects.MultiLeg({'ids': id, 'state': True}) for id in proc])
            my_leglist[0].set('state', False)
            my_process = base_objects.ProcessDefinition({'legs': my_leglist,
                                                         'model': mymodel,
                                                         'is_decay_chain': True})
            decayprocs.append(my_process)

        for proc in my_processes:
            proc.set('decay_chains', decayprocs)
        decay_chains = diagram_generation.MultiProcess(my_processes,
            collect_mirror_procs=True)

        dc_subproc_group = group_subprocs.DecayChainSubProcessGroup.\
            group_amplitudes(diagram_generation.DecayChainAmplitudeList(
                decay_chains.get('amplitudes')))
        subproc_groups = \
            dc_subproc_group.generate_helas_decay_chain_subproc_groups()

        # Check number of groups
        self.assertEqual(len(subproc_groups), 2)
        self.assertEqual([g.get('name') for g in subproc_groups],
                         ['gq_zq_z_qq', 'qq_zg_z_qq'])

        subprocess_group = subproc_groups[0]
        matrix_elements = subprocess_group.get('matrix_elements')
        exporter = export_cpp.OneProcessExporterPythia8(matrix_elements,
            self.mycppwriter)

        # Test .h file output
        exporter.write_process_h_file(
            writers.CPPWriter(pjoin(self.IOpath, 'write_dec_multiprocess_files.h')))

        # Test .cc file output
        text = exporter.get_process_function_definitions()
        my_writer = writers.CPPWriter(pjoin(
            self.IOpath, 'write_dec_multiprocess_files.cc'))
        my_writer.write(text)

    @IOTests.createIOTest()
    def testIO_write_cpp_go_process_cc_file(self):
        """ target: cpp_go_process.cc
        """
        # Test writing the .cc C++ standalone file for u u~ > go go
        myleglist = base_objects.LegList()
        myleglist.append(base_objects.Leg({'id': 2, 'state': False}))
        myleglist.append(base_objects.Leg({'id': -2, 'state': False}))
        myleglist.append(base_objects.Leg({'id': 1000021, 'state': True}))
        myleglist.append(base_objects.Leg({'id': 1000021, 'state': True}))
        myproc = base_objects.Process({'legs': myleglist,
                                       'model': self.mymodel})
        myamplitude = diagram_generation.Amplitude({'process': myproc})
        matrix_element = helas_objects.HelasMultiProcess(myamplitude)
        matrix_element.get('matrix_elements')[0].set('has_mirror_process',
                                                     True)
        exporter = export_cpp.OneProcessExporterCPP(matrix_element,
                                                    self.mycppwriter)
        exporter.write_process_cc_file(
            writers.CPPWriter(os.path.join(self.IOpath, 'cpp_go_process.cc')))

    def disabled_test_write_process_files(self):
        """Test writing the .h and .cc Pythia file for a matrix element"""
        export_cpp.generate_process_files_pythia8(self.mymatrixelement,
            self.mycppwriter,
            process_string="q q~ > q q~",
            path="/tmp")
        # Python 2 print statements: this file targets MG5_aMC v2.6.x
        print "Please try compiling the file /tmp/Sigma_sm_qqx_qqx.cc:"
        print "cd /tmp; g++ -c -I $PATH_TO_PYTHIA8/include Sigma_sm_qqx_qqx.cc"


#===============================================================================
# ExportUFOModelPythia8Test
#===============================================================================
class ExportUFOModelPythia8Test(unittest.TestCase,
                                test_file_writers.CheckFileCreate):

    created_files = []

    def setUp(self):
        model_pkl = os.path.join(MG5DIR, 'models', 'sm', 'model.pkl')
        if os.path.isfile(model_pkl):
            self.model = save_load_object.load_from_file(model_pkl)
        else:
            sm_path = import_ufo.find_ufo_path('sm')
            self.model = import_ufo.import_model(sm_path)
            self.model = save_load_object.load_from_file(model_pkl)
        self.model_builder = export_cpp.UFOModelConverterPythia8(
            self.model, "/tmp",
            replace_dict={'include_prefix': 'Pythia8/'})
        # Originally a bare, no-op attribute reference; call it as intended.
        test_file_writers.CheckFileCreate.clean_files(self)

    tearDown = test_file_writers.CheckFileCreate.clean_files

    def test_write_pythia8_parameter_files(self):
        """Test writing the Pythia model parameter files"""

        goal_file_h = \
"""//==========================================================================
// This file has been automatically generated for Pythia 8
# MadGraph5_aMC@NLO v. %(version)s, %(date)s
# By the MadGraph5_aMC@NLO Development Team
# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch
//==========================================================================
#ifndef Pythia8_parameters_sm_H
#define Pythia8_parameters_sm_H
#include <complex>
#include "Pythia8/ParticleData.h"
#include "Pythia8/StandardModel.h"
#include "Pythia8/SusyLesHouches.h"
using namespace std;
using namespace Pythia8;
class Parameters_sm
{
public:
static Parameters_sm* getInstance();
// Model parameters independent of aS
double mdl_WTau,mdl_WH,mdl_WT,mdl_WW,mdl_WZ,mdl_MTA,mdl_MM,mdl_Me,mdl_MH,mdl_MB,mdl_MT,mdl_MC,mdl_MZ,mdl_ymtau,mdl_ymm,mdl_yme,mdl_ymt,mdl_ymb,mdl_ymc,mdl_etaWS,mdl_rhoWS,mdl_AWS,mdl_lamWS,mdl_Gf,aEWM1,ZERO,mdl_lamWS__exp__2,mdl_lamWS__exp__3,mdl_MZ__exp__2,mdl_MZ__exp__4,mdl_sqrt__2,mdl_MH__exp__2,mdl_aEW,mdl_MW,mdl_sqrt__aEW,mdl_ee,mdl_MW__exp__2,mdl_sw2,mdl_cw,mdl_sqrt__sw2,mdl_sw,mdl_g1,mdl_gw,mdl_vev,mdl_vev__exp__2,mdl_lam,mdl_yb,mdl_yc,mdl_ye,mdl_ym,mdl_yt,mdl_ytau,mdl_muH,mdl_ee__exp__2,mdl_sw__exp__2,mdl_cw__exp__2;
std::complex<double> mdl_CKM1x1,mdl_CKM1x2,mdl_complexi,mdl_CKM1x3,mdl_CKM2x1,mdl_CKM2x2,mdl_CKM2x3,mdl_CKM3x1,mdl_CKM3x2,mdl_CKM3x3,mdl_conjg__CKM1x3,mdl_conjg__CKM2x3,mdl_conjg__CKM3x3,mdl_conjg__CKM2x1,mdl_conjg__CKM3x1,mdl_conjg__CKM2x2,mdl_conjg__CKM3x2,mdl_conjg__CKM1x1,mdl_conjg__CKM1x2,mdl_I1x31,mdl_I1x32,mdl_I1x33,mdl_I2x12,mdl_I2x13,mdl_I2x22,mdl_I2x23,mdl_I2x32,mdl_I2x33,mdl_I3x21,mdl_I3x22,mdl_I3x23,mdl_I3x31,mdl_I3x32,mdl_I3x33,mdl_I4x13,mdl_I4x23,mdl_I4x33;
// Model parameters dependent on aS
double aS,mdl_sqrt__aS,G,mdl_G__exp__2;
// Model couplings independent of aS
std::complex<double> GC_1,GC_2,GC_3,GC_4,GC_5,GC_6,GC_7,GC_8,GC_9,GC_13,GC_14,GC_15,GC_16,GC_17,GC_18,GC_19,GC_20,GC_21,GC_22,GC_23,GC_24,GC_25,GC_26,GC_27,GC_28,GC_29,GC_30,GC_31,GC_32,GC_33,GC_34,GC_35,GC_36,GC_37,GC_38,GC_39,GC_40,GC_41,GC_42,GC_43,GC_44,GC_45,GC_46,GC_47,GC_48,GC_49,GC_50,GC_51,GC_52,GC_53,GC_54,GC_55,GC_56,GC_57,GC_58,GC_59,GC_60,GC_61,GC_62,GC_63,GC_64,GC_65,GC_66,GC_67,GC_68,GC_69,GC_70,GC_71,GC_72,GC_73,GC_74,GC_75,GC_76,GC_77,GC_78,GC_79,GC_80,GC_81,GC_82,GC_83,GC_84,GC_85,GC_86,GC_87,GC_88,GC_89,GC_90,GC_91,GC_92,GC_93,GC_94,GC_95,GC_96,GC_97,GC_98,GC_99,GC_100,GC_101,GC_102,GC_103,GC_104,GC_105,GC_106,GC_107,GC_108;
// Model couplings dependent on aS
std::complex<double> GC_12,GC_11,GC_10;
// Set parameters that are unchanged during the run
void setIndependentParameters(ParticleData*& pd, Couplings*& csm, SusyLesHouches*& slhaPtr);
// Set couplings that are unchanged during the run
void setIndependentCouplings();
// Set parameters that are changed event by event
void setDependentParameters(ParticleData*& pd, Couplings*& csm, SusyLesHouches*& slhaPtr, double alpS);
// TMP: hardcoded bogus implementation with no arguments since this
// is being called from within the matrix elements.
void setDependentParameters() {};
// Set couplings that are changed event by event
void setDependentCouplings();
// Print parameters that are unchanged during the run
void printIndependentParameters();
// Print couplings that are unchanged during the run
void printIndependentCouplings();
// Print parameters that are changed event by event
void printDependentParameters();
// Print couplings that are changed event by event
void printDependentCouplings();
private:
static Parameters_sm* instance;
};
#endif // Pythia8_parameters_sm_H
"""% misc.get_pkg_info()
        goal_file_cc = \
"""//==========================================================================
// This file has been automatically generated for Pythia 8 by
# MadGraph5_aMC@NLO v. %(version)s, %(date)s
# By the MadGraph5_aMC@NLO Development Team
# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch
//==========================================================================
#include <iostream>
#include "Parameters_sm.h"
#include "Pythia8/PythiaStdlib.h"
using namespace Pythia8;
// Initialize static instance
Parameters_sm* Parameters_sm::instance = 0;
// Function to get static instance - only one instance per program
Parameters_sm* Parameters_sm::getInstance(){
if (instance == 0)
instance = new Parameters_sm();
return instance;
}
void Parameters_sm::setIndependentParameters(ParticleData*& pd, Couplings*& csm, SusyLesHouches*& slhaPtr){
mdl_WTau=pd->mWidth(15);
mdl_WH=pd->mWidth(25);
mdl_WT=pd->mWidth(6);
mdl_WW=pd->mWidth(24);
mdl_WZ=pd->mWidth(23);
mdl_MTA=pd->m0(15);
mdl_MM=pd->m0(13);
mdl_Me=pd->m0(11);
mdl_MH=pd->m0(25);
mdl_MB=pd->m0(5);
mdl_MT=pd->m0(6);
mdl_MC=pd->m0(4);
mdl_MZ=pd->m0(23);
mdl_ymtau=pd->mRun(15, pd->m0(24));
mdl_ymm=pd->mRun(13, pd->m0(24));
mdl_yme=pd->mRun(11, pd->m0(24));
mdl_ymt=pd->mRun(6, pd->m0(24));
mdl_ymb=pd->mRun(5, pd->m0(24));
mdl_ymc=pd->mRun(4, pd->m0(24));
if(!slhaPtr->getEntry<double>("wolfenstein", 4, mdl_etaWS)){
cout << "Warning, setting mdl_etaWS to 3.410000e-01" << endl;
mdl_etaWS = 3.410000e-01;}
if(!slhaPtr->getEntry<double>("wolfenstein", 3, mdl_rhoWS)){
cout << "Warning, setting mdl_rhoWS to 1.320000e-01" << endl;
mdl_rhoWS = 1.320000e-01;}
if(!slhaPtr->getEntry<double>("wolfenstein", 2, mdl_AWS)){
cout << "Warning, setting mdl_AWS to 8.080000e-01" << endl;
mdl_AWS = 8.080000e-01;}
if(!slhaPtr->getEntry<double>("wolfenstein", 1, mdl_lamWS)){
cout << "Warning, setting mdl_lamWS to 2.253000e-01" << endl;
mdl_lamWS = 2.253000e-01;}
mdl_Gf = M_PI*csm->alphaEM(((pd->m0(23))*(pd->m0(23))))*((pd->m0(23))*(pd->m0(23)))/(sqrt(2.)*((pd->m0(24))*(pd->m0(24)))*(((pd->m0(23))*(pd->m0(23)))-((pd->m0(24))*(pd->m0(24)))));
aEWM1 = 1./csm->alphaEM(((pd->m0(23))*(pd->m0(23))));
ZERO = 0.;
mdl_lamWS__exp__2 = ((mdl_lamWS)*(mdl_lamWS));
mdl_CKM1x1 = 1.-mdl_lamWS__exp__2/2.;
mdl_CKM1x2 = mdl_lamWS;
mdl_complexi = std::complex<double>(0.,1.);
mdl_lamWS__exp__3 = ((mdl_lamWS)*(mdl_lamWS)*(mdl_lamWS));
mdl_CKM1x3 = mdl_AWS*mdl_lamWS__exp__3*(-(mdl_etaWS*mdl_complexi)+mdl_rhoWS);
mdl_CKM2x1 = -mdl_lamWS;
mdl_CKM2x2 = 1.-mdl_lamWS__exp__2/2.;
mdl_CKM2x3 = mdl_AWS*mdl_lamWS__exp__2;
mdl_CKM3x1 = mdl_AWS*mdl_lamWS__exp__3*(1.-mdl_etaWS*mdl_complexi-mdl_rhoWS);
mdl_CKM3x2 = -(mdl_AWS*mdl_lamWS__exp__2);
mdl_CKM3x3 = 1.;
mdl_MZ__exp__2 = ((mdl_MZ)*(mdl_MZ));
mdl_MZ__exp__4 = ((mdl_MZ)*(mdl_MZ)*(mdl_MZ)*(mdl_MZ));
mdl_sqrt__2 = sqrt(2.);
mdl_MH__exp__2 = ((mdl_MH)*(mdl_MH));
mdl_conjg__CKM1x3 = conj(mdl_CKM1x3);
mdl_conjg__CKM2x3 = conj(mdl_CKM2x3);
mdl_conjg__CKM3x3 = conj(mdl_CKM3x3);
mdl_conjg__CKM2x1 = conj(mdl_CKM2x1);
mdl_conjg__CKM3x1 = conj(mdl_CKM3x1);
mdl_conjg__CKM2x2 = conj(mdl_CKM2x2);
mdl_conjg__CKM3x2 = conj(mdl_CKM3x2);
mdl_conjg__CKM1x1 = conj(mdl_CKM1x1);
mdl_conjg__CKM1x2 = conj(mdl_CKM1x2);
mdl_aEW = 1./aEWM1;
mdl_MW = sqrt(mdl_MZ__exp__2/2.+sqrt(mdl_MZ__exp__4/4.-(mdl_aEW*M_PI*mdl_MZ__exp__2)/(mdl_Gf*mdl_sqrt__2)));
mdl_sqrt__aEW = sqrt(mdl_aEW);
mdl_ee = 2.*mdl_sqrt__aEW*sqrt(M_PI);
mdl_MW__exp__2 = ((mdl_MW)*(mdl_MW));
mdl_sw2 = 1.-mdl_MW__exp__2/mdl_MZ__exp__2;
mdl_cw = sqrt(1.-mdl_sw2);
mdl_sqrt__sw2 = sqrt(mdl_sw2);
mdl_sw = mdl_sqrt__sw2;
mdl_g1 = mdl_ee/mdl_cw;
mdl_gw = mdl_ee/mdl_sw;
mdl_vev = (2.*mdl_MW*mdl_sw)/mdl_ee;
mdl_vev__exp__2 = ((mdl_vev)*(mdl_vev));
mdl_lam = mdl_MH__exp__2/(2.*mdl_vev__exp__2);
mdl_yb = (mdl_ymb*mdl_sqrt__2)/mdl_vev;
mdl_yc = (mdl_ymc*mdl_sqrt__2)/mdl_vev;
mdl_ye = (mdl_yme*mdl_sqrt__2)/mdl_vev;
mdl_ym = (mdl_ymm*mdl_sqrt__2)/mdl_vev;
mdl_yt = (mdl_ymt*mdl_sqrt__2)/mdl_vev;
mdl_ytau = (mdl_ymtau*mdl_sqrt__2)/mdl_vev;
mdl_muH = sqrt(mdl_lam*mdl_vev__exp__2);
mdl_I1x31 = mdl_yb*mdl_conjg__CKM1x3;
mdl_I1x32 = mdl_yb*mdl_conjg__CKM2x3;
mdl_I1x33 = mdl_yb*mdl_conjg__CKM3x3;
mdl_I2x12 = mdl_yc*mdl_conjg__CKM2x1;
mdl_I2x13 = mdl_yt*mdl_conjg__CKM3x1;
mdl_I2x22 = mdl_yc*mdl_conjg__CKM2x2;
mdl_I2x23 = mdl_yt*mdl_conjg__CKM3x2;
mdl_I2x32 = mdl_yc*mdl_conjg__CKM2x3;
mdl_I2x33 = mdl_yt*mdl_conjg__CKM3x3;
mdl_I3x21 = mdl_CKM2x1*mdl_yc;
mdl_I3x22 = mdl_CKM2x2*mdl_yc;
mdl_I3x23 = mdl_CKM2x3*mdl_yc;
mdl_I3x31 = mdl_CKM3x1*mdl_yt;
mdl_I3x32 = mdl_CKM3x2*mdl_yt;
mdl_I3x33 = mdl_CKM3x3*mdl_yt;
mdl_I4x13 = mdl_CKM1x3*mdl_yb;
mdl_I4x23 = mdl_CKM2x3*mdl_yb;
mdl_I4x33 = mdl_CKM3x3*mdl_yb;
mdl_ee__exp__2 = ((mdl_ee)*(mdl_ee));
mdl_sw__exp__2 = ((mdl_sw)*(mdl_sw));
mdl_cw__exp__2 = ((mdl_cw)*(mdl_cw));
}
void Parameters_sm::setIndependentCouplings(){
GC_1 = -(mdl_ee*mdl_complexi)/3.;
GC_2 = (2.*mdl_ee*mdl_complexi)/3.;
GC_3 = -(mdl_ee*mdl_complexi);
GC_4 = mdl_ee*mdl_complexi;
GC_5 = mdl_ee__exp__2*mdl_complexi;
GC_6 = 2.*mdl_ee__exp__2*mdl_complexi;
GC_7 = -mdl_ee__exp__2/(2.*mdl_cw);
GC_8 = (mdl_ee__exp__2*mdl_complexi)/(2.*mdl_cw);
GC_9 = mdl_ee__exp__2/(2.*mdl_cw);
GC_13 = mdl_I1x31;
GC_14 = mdl_I1x32;
GC_15 = mdl_I1x33;
GC_16 = -mdl_I2x12;
GC_17 = -mdl_I2x13;
GC_18 = -mdl_I2x22;
GC_19 = -mdl_I2x23;
GC_20 = -mdl_I2x32;
GC_21 = -mdl_I2x33;
GC_22 = mdl_I3x21;
GC_23 = mdl_I3x22;
GC_24 = mdl_I3x23;
GC_25 = mdl_I3x31;
GC_26 = mdl_I3x32;
GC_27 = mdl_I3x33;
GC_28 = -mdl_I4x13;
GC_29 = -mdl_I4x23;
GC_30 = -mdl_I4x33;
GC_31 = -2.*mdl_complexi*mdl_lam;
GC_32 = -4.*mdl_complexi*mdl_lam;
GC_33 = -6.*mdl_complexi*mdl_lam;
GC_34 = (mdl_ee__exp__2*mdl_complexi)/(2.*mdl_sw__exp__2);
GC_35 = -((mdl_ee__exp__2*mdl_complexi)/mdl_sw__exp__2);
GC_36 = (mdl_cw__exp__2*mdl_ee__exp__2*mdl_complexi)/mdl_sw__exp__2;
GC_37 = -mdl_ee/(2.*mdl_sw);
GC_38 = -(mdl_ee*mdl_complexi)/(2.*mdl_sw);
GC_39 = (mdl_ee*mdl_complexi)/(2.*mdl_sw);
GC_40 = (mdl_ee*mdl_complexi)/(mdl_sw*mdl_sqrt__2);
GC_41 = (mdl_CKM1x1*mdl_ee*mdl_complexi)/(mdl_sw*mdl_sqrt__2);
GC_42 = (mdl_CKM1x2*mdl_ee*mdl_complexi)/(mdl_sw*mdl_sqrt__2);
GC_43 = (mdl_CKM1x3*mdl_ee*mdl_complexi)/(mdl_sw*mdl_sqrt__2);
GC_44 = (mdl_CKM2x1*mdl_ee*mdl_complexi)/(mdl_sw*mdl_sqrt__2);
GC_45 = (mdl_CKM2x2*mdl_ee*mdl_complexi)/(mdl_sw*mdl_sqrt__2);
GC_46 = (mdl_CKM2x3*mdl_ee*mdl_complexi)/(mdl_sw*mdl_sqrt__2);
GC_47 = (mdl_CKM3x1*mdl_ee*mdl_complexi)/(mdl_sw*mdl_sqrt__2);
GC_48 = (mdl_CKM3x2*mdl_ee*mdl_complexi)/(mdl_sw*mdl_sqrt__2);
GC_49 = (mdl_CKM3x3*mdl_ee*mdl_complexi)/(mdl_sw*mdl_sqrt__2);
GC_50 = -(mdl_cw*mdl_ee*mdl_complexi)/(2.*mdl_sw);
GC_51 = (mdl_cw*mdl_ee*mdl_complexi)/(2.*mdl_sw);
GC_52 = -((mdl_cw*mdl_ee*mdl_complexi)/mdl_sw);
GC_53 = (mdl_cw*mdl_ee*mdl_complexi)/mdl_sw;
GC_54 = -mdl_ee__exp__2/(2.*mdl_sw);
GC_55 = -(mdl_ee__exp__2*mdl_complexi)/(2.*mdl_sw);
GC_56 = mdl_ee__exp__2/(2.*mdl_sw);
GC_57 = (-2.*mdl_cw*mdl_ee__exp__2*mdl_complexi)/mdl_sw;
GC_58 = -(mdl_ee*mdl_complexi*mdl_sw)/(6.*mdl_cw);
GC_59 = (mdl_ee*mdl_complexi*mdl_sw)/(2.*mdl_cw);
GC_60 = -(mdl_cw*mdl_ee)/(2.*mdl_sw)-(mdl_ee*mdl_sw)/(2.*mdl_cw);
GC_61 = -(mdl_cw*mdl_ee*mdl_complexi)/(2.*mdl_sw)+(mdl_ee*mdl_complexi*mdl_sw)/(2.*mdl_cw);
GC_62 = (mdl_cw*mdl_ee*mdl_complexi)/(2.*mdl_sw)+(mdl_ee*mdl_complexi*mdl_sw)/(2.*mdl_cw);
GC_63 = (mdl_cw*mdl_ee__exp__2*mdl_complexi)/mdl_sw-(mdl_ee__exp__2*mdl_complexi*mdl_sw)/mdl_cw;
GC_64 = -(mdl_ee__exp__2*mdl_complexi)+(mdl_cw__exp__2*mdl_ee__exp__2*mdl_complexi)/(2.*mdl_sw__exp__2)+(mdl_ee__exp__2*mdl_complexi*mdl_sw__exp__2)/(2.*mdl_cw__exp__2);
GC_65 = mdl_ee__exp__2*mdl_complexi+(mdl_cw__exp__2*mdl_ee__exp__2*mdl_complexi)/(2.*mdl_sw__exp__2)+(mdl_ee__exp__2*mdl_complexi*mdl_sw__exp__2)/(2.*mdl_cw__exp__2);
GC_66 = -(mdl_ee__exp__2*mdl_vev)/(2.*mdl_cw);
GC_67 = (mdl_ee__exp__2*mdl_vev)/(2.*mdl_cw);
GC_68 = -2.*mdl_complexi*mdl_lam*mdl_vev;
GC_69 = -6.*mdl_complexi*mdl_lam*mdl_vev;
GC_70 = -(mdl_ee__exp__2*mdl_vev)/(4.*mdl_sw__exp__2);
GC_71 = -(mdl_ee__exp__2*mdl_complexi*mdl_vev)/(4.*mdl_sw__exp__2);
GC_72 = (mdl_ee__exp__2*mdl_complexi*mdl_vev)/(2.*mdl_sw__exp__2);
GC_73 = (mdl_ee__exp__2*mdl_vev)/(4.*mdl_sw__exp__2);
GC_74 = -(mdl_ee__exp__2*mdl_vev)/(2.*mdl_sw);
GC_75 = (mdl_ee__exp__2*mdl_vev)/(2.*mdl_sw);
GC_76 = -(mdl_ee__exp__2*mdl_vev)/(4.*mdl_cw)-(mdl_cw*mdl_ee__exp__2*mdl_vev)/(4.*mdl_sw__exp__2);
GC_77 = (mdl_ee__exp__2*mdl_vev)/(4.*mdl_cw)-(mdl_cw*mdl_ee__exp__2*mdl_vev)/(4.*mdl_sw__exp__2);
GC_78 = -(mdl_ee__exp__2*mdl_vev)/(4.*mdl_cw)+(mdl_cw*mdl_ee__exp__2*mdl_vev)/(4.*mdl_sw__exp__2);
GC_79 = (mdl_ee__exp__2*mdl_vev)/(4.*mdl_cw)+(mdl_cw*mdl_ee__exp__2*mdl_vev)/(4.*mdl_sw__exp__2);
GC_80 = -(mdl_ee__exp__2*mdl_complexi*mdl_vev)/2.-(mdl_cw__exp__2*mdl_ee__exp__2*mdl_complexi*mdl_vev)/(4.*mdl_sw__exp__2)-(mdl_ee__exp__2*mdl_complexi*mdl_sw__exp__2*mdl_vev)/(4.*mdl_cw__exp__2);
GC_81 = mdl_ee__exp__2*mdl_complexi*mdl_vev+(mdl_cw__exp__2*mdl_ee__exp__2*mdl_complexi*mdl_vev)/(2.*mdl_sw__exp__2)+(mdl_ee__exp__2*mdl_complexi*mdl_sw__exp__2*mdl_vev)/(2.*mdl_cw__exp__2);
GC_82 = -(mdl_yb/mdl_sqrt__2);
GC_83 = -((mdl_complexi*mdl_yb)/mdl_sqrt__2);
GC_84 = -((mdl_complexi*mdl_yc)/mdl_sqrt__2);
GC_85 = mdl_yc/mdl_sqrt__2;
GC_86 = -mdl_ye;
GC_87 = mdl_ye;
GC_88 = -(mdl_ye/mdl_sqrt__2);
GC_89 = -((mdl_complexi*mdl_ye)/mdl_sqrt__2);
GC_90 = -mdl_ym;
GC_91 = mdl_ym;
GC_92 = -(mdl_ym/mdl_sqrt__2);
GC_93 = -((mdl_complexi*mdl_ym)/mdl_sqrt__2);
GC_94 = -((mdl_complexi*mdl_yt)/mdl_sqrt__2);
GC_95 = mdl_yt/mdl_sqrt__2;
GC_96 = -mdl_ytau;
GC_97 = mdl_ytau;
GC_98 = -(mdl_ytau/mdl_sqrt__2);
GC_99 = -((mdl_complexi*mdl_ytau)/mdl_sqrt__2);
GC_100 = (mdl_ee*mdl_complexi*mdl_conjg__CKM1x1)/(mdl_sw*mdl_sqrt__2);
GC_101 = (mdl_ee*mdl_complexi*mdl_conjg__CKM1x2)/(mdl_sw*mdl_sqrt__2);
GC_102 = (mdl_ee*mdl_complexi*mdl_conjg__CKM1x3)/(mdl_sw*mdl_sqrt__2);
GC_103 = (mdl_ee*mdl_complexi*mdl_conjg__CKM2x1)/(mdl_sw*mdl_sqrt__2);
GC_104 = (mdl_ee*mdl_complexi*mdl_conjg__CKM2x2)/(mdl_sw*mdl_sqrt__2);
GC_105 = (mdl_ee*mdl_complexi*mdl_conjg__CKM2x3)/(mdl_sw*mdl_sqrt__2);
GC_106 = (mdl_ee*mdl_complexi*mdl_conjg__CKM3x1)/(mdl_sw*mdl_sqrt__2);
GC_107 = (mdl_ee*mdl_complexi*mdl_conjg__CKM3x2)/(mdl_sw*mdl_sqrt__2);
GC_108 = (mdl_ee*mdl_complexi*mdl_conjg__CKM3x3)/(mdl_sw*mdl_sqrt__2);
}
void Parameters_sm::setDependentParameters(ParticleData*& pd, Couplings*& csm, SusyLesHouches*& slhaPtr, double alpS){
aS = alpS;
mdl_sqrt__aS = sqrt(aS);
G = 2.*mdl_sqrt__aS*sqrt(M_PI);
mdl_G__exp__2 = ((G)*(G));
}
void Parameters_sm::setDependentCouplings(){
GC_12 = mdl_complexi*mdl_G__exp__2;
GC_11 = mdl_complexi*G;
GC_10 = -G;
}
// Routines for printing out parameters
void Parameters_sm::printIndependentParameters(){
cout << "sm model parameters independent of event kinematics:" << endl;
cout << setw(20) << "mdl_WTau " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_WTau << endl;
cout << setw(20) << "mdl_WH " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_WH << endl;
cout << setw(20) << "mdl_WT " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_WT << endl;
cout << setw(20) << "mdl_WW " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_WW << endl;
cout << setw(20) << "mdl_WZ " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_WZ << endl;
cout << setw(20) << "mdl_MTA " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_MTA << endl;
cout << setw(20) << "mdl_MM " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_MM << endl;
cout << setw(20) << "mdl_Me " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_Me << endl;
cout << setw(20) << "mdl_MH " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_MH << endl;
cout << setw(20) << "mdl_MB " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_MB << endl;
cout << setw(20) << "mdl_MT " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_MT << endl;
cout << setw(20) << "mdl_MC " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_MC << endl;
cout << setw(20) << "mdl_MZ " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_MZ << endl;
cout << setw(20) << "mdl_ymtau " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_ymtau << endl;
cout << setw(20) << "mdl_ymm " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_ymm << endl;
cout << setw(20) << "mdl_yme " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_yme << endl;
cout << setw(20) << "mdl_ymt " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_ymt << endl;
cout << setw(20) << "mdl_ymb " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_ymb << endl;
cout << setw(20) << "mdl_ymc " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_ymc << endl;
cout << setw(20) << "mdl_etaWS " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_etaWS << endl;
cout << setw(20) << "mdl_rhoWS " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_rhoWS << endl;
cout << setw(20) << "mdl_AWS " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_AWS << endl;
cout << setw(20) << "mdl_lamWS " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_lamWS << endl;
cout << setw(20) << "mdl_Gf " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_Gf << endl;
cout << setw(20) << "aEWM1 " << "= " << setiosflags(ios::scientific) << setw(10) << aEWM1 << endl;
cout << setw(20) << "ZERO " << "= " << setiosflags(ios::scientific) << setw(10) << ZERO << endl;
cout << setw(20) << "mdl_lamWS__exp__2 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_lamWS__exp__2 << endl;
cout << setw(20) << "mdl_CKM1x1 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_CKM1x1 << endl;
cout << setw(20) << "mdl_CKM1x2 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_CKM1x2 << endl;
cout << setw(20) << "mdl_complexi " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_complexi << endl;
cout << setw(20) << "mdl_lamWS__exp__3 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_lamWS__exp__3 << endl;
cout << setw(20) << "mdl_CKM1x3 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_CKM1x3 << endl;
cout << setw(20) << "mdl_CKM2x1 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_CKM2x1 << endl;
cout << setw(20) << "mdl_CKM2x2 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_CKM2x2 << endl;
cout << setw(20) << "mdl_CKM2x3 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_CKM2x3 << endl;
cout << setw(20) << "mdl_CKM3x1 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_CKM3x1 << endl;
cout << setw(20) << "mdl_CKM3x2 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_CKM3x2 << endl;
cout << setw(20) << "mdl_CKM3x3 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_CKM3x3 << endl;
cout << setw(20) << "mdl_MZ__exp__2 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_MZ__exp__2 << endl;
cout << setw(20) << "mdl_MZ__exp__4 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_MZ__exp__4 << endl;
cout << setw(20) << "mdl_sqrt__2 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_sqrt__2 << endl;
cout << setw(20) << "mdl_MH__exp__2 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_MH__exp__2 << endl;
cout << setw(20) << "mdl_conjg__CKM1x3 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_conjg__CKM1x3 << endl;
cout << setw(20) << "mdl_conjg__CKM2x3 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_conjg__CKM2x3 << endl;
cout << setw(20) << "mdl_conjg__CKM3x3 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_conjg__CKM3x3 << endl;
cout << setw(20) << "mdl_conjg__CKM2x1 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_conjg__CKM2x1 << endl;
cout << setw(20) << "mdl_conjg__CKM3x1 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_conjg__CKM3x1 << endl;
cout << setw(20) << "mdl_conjg__CKM2x2 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_conjg__CKM2x2 << endl;
cout << setw(20) << "mdl_conjg__CKM3x2 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_conjg__CKM3x2 << endl;
cout << setw(20) << "mdl_conjg__CKM1x1 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_conjg__CKM1x1 << endl;
cout << setw(20) << "mdl_conjg__CKM1x2 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_conjg__CKM1x2 << endl;
cout << setw(20) << "mdl_aEW " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_aEW << endl;
cout << setw(20) << "mdl_MW " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_MW << endl;
cout << setw(20) << "mdl_sqrt__aEW " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_sqrt__aEW << endl;
cout << setw(20) << "mdl_ee " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_ee << endl;
cout << setw(20) << "mdl_MW__exp__2 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_MW__exp__2 << endl;
cout << setw(20) << "mdl_sw2 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_sw2 << endl;
cout << setw(20) << "mdl_cw " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_cw << endl;
cout << setw(20) << "mdl_sqrt__sw2 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_sqrt__sw2 << endl;
cout << setw(20) << "mdl_sw " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_sw << endl;
cout << setw(20) << "mdl_g1 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_g1 << endl;
cout << setw(20) << "mdl_gw " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_gw << endl;
cout << setw(20) << "mdl_vev " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_vev << endl;
cout << setw(20) << "mdl_vev__exp__2 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_vev__exp__2 << endl;
cout << setw(20) << "mdl_lam " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_lam << endl;
cout << setw(20) << "mdl_yb " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_yb << endl;
cout << setw(20) << "mdl_yc " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_yc << endl;
cout << setw(20) << "mdl_ye " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_ye << endl;
cout << setw(20) << "mdl_ym " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_ym << endl;
cout << setw(20) << "mdl_yt " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_yt << endl;
cout << setw(20) << "mdl_ytau " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_ytau << endl;
cout << setw(20) << "mdl_muH " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_muH << endl;
cout << setw(20) << "mdl_I1x31 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_I1x31 << endl;
cout << setw(20) << "mdl_I1x32 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_I1x32 << endl;
cout << setw(20) << "mdl_I1x33 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_I1x33 << endl;
cout << setw(20) << "mdl_I2x12 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_I2x12 << endl;
cout << setw(20) << "mdl_I2x13 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_I2x13 << endl;
cout << setw(20) << "mdl_I2x22 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_I2x22 << endl;
cout << setw(20) << "mdl_I2x23 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_I2x23 << endl;
cout << setw(20) << "mdl_I2x32 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_I2x32 << endl;
cout << setw(20) << "mdl_I2x33 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_I2x33 << endl;
cout << setw(20) << "mdl_I3x21 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_I3x21 << endl;
cout << setw(20) << "mdl_I3x22 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_I3x22 << endl;
cout << setw(20) << "mdl_I3x23 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_I3x23 << endl;
cout << setw(20) << "mdl_I3x31 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_I3x31 << endl;
cout << setw(20) << "mdl_I3x32 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_I3x32 << endl;
cout << setw(20) << "mdl_I3x33 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_I3x33 << endl;
cout << setw(20) << "mdl_I4x13 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_I4x13 << endl;
cout << setw(20) << "mdl_I4x23 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_I4x23 << endl;
cout << setw(20) << "mdl_I4x33 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_I4x33 << endl;
cout << setw(20) << "mdl_ee__exp__2 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_ee__exp__2 << endl;
cout << setw(20) << "mdl_sw__exp__2 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_sw__exp__2 << endl;
cout << setw(20) << "mdl_cw__exp__2 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_cw__exp__2 << endl;
}
void Parameters_sm::printIndependentCouplings(){
cout << "sm model couplings independent of event kinematics:" << endl;
cout << setw(20) << "GC_1 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_1 << endl;
cout << setw(20) << "GC_2 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_2 << endl;
cout << setw(20) << "GC_3 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_3 << endl;
cout << setw(20) << "GC_4 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_4 << endl;
cout << setw(20) << "GC_5 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_5 << endl;
cout << setw(20) << "GC_6 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_6 << endl;
cout << setw(20) << "GC_7 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_7 << endl;
cout << setw(20) << "GC_8 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_8 << endl;
cout << setw(20) << "GC_9 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_9 << endl;
cout << setw(20) << "GC_13 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_13 << endl;
cout << setw(20) << "GC_14 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_14 << endl;
cout << setw(20) << "GC_15 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_15 << endl;
cout << setw(20) << "GC_16 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_16 << endl;
cout << setw(20) << "GC_17 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_17 << endl;
cout << setw(20) << "GC_18 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_18 << endl;
cout << setw(20) << "GC_19 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_19 << endl;
cout << setw(20) << "GC_20 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_20 << endl;
cout << setw(20) << "GC_21 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_21 << endl;
cout << setw(20) << "GC_22 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_22 << endl;
cout << setw(20) << "GC_23 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_23 << endl;
cout << setw(20) << "GC_24 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_24 << endl;
cout << setw(20) << "GC_25 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_25 << endl;
cout << setw(20) << "GC_26 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_26 << endl;
cout << setw(20) << "GC_27 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_27 << endl;
cout << setw(20) << "GC_28 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_28 << endl;
cout << setw(20) << "GC_29 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_29 << endl;
cout << setw(20) << "GC_30 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_30 << endl;
cout << setw(20) << "GC_31 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_31 << endl;
cout << setw(20) << "GC_32 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_32 << endl;
cout << setw(20) << "GC_33 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_33 << endl;
cout << setw(20) << "GC_34 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_34 << endl;
cout << setw(20) << "GC_35 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_35 << endl;
cout << setw(20) << "GC_36 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_36 << endl;
cout << setw(20) << "GC_37 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_37 << endl;
cout << setw(20) << "GC_38 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_38 << endl;
cout << setw(20) << "GC_39 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_39 << endl;
cout << setw(20) << "GC_40 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_40 << endl;
cout << setw(20) << "GC_41 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_41 << endl;
cout << setw(20) << "GC_42 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_42 << endl;
cout << setw(20) << "GC_43 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_43 << endl;
cout << setw(20) << "GC_44 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_44 << endl;
cout << setw(20) << "GC_45 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_45 << endl;
cout << setw(20) << "GC_46 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_46 << endl;
cout << setw(20) << "GC_47 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_47 << endl;
cout << setw(20) << "GC_48 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_48 << endl;
cout << setw(20) << "GC_49 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_49 << endl;
cout << setw(20) << "GC_50 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_50 << endl;
cout << setw(20) << "GC_51 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_51 << endl;
cout << setw(20) << "GC_52 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_52 << endl;
cout << setw(20) << "GC_53 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_53 << endl;
cout << setw(20) << "GC_54 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_54 << endl;
cout << setw(20) << "GC_55 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_55 << endl;
cout << setw(20) << "GC_56 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_56 << endl;
cout << setw(20) << "GC_57 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_57 << endl;
cout << setw(20) << "GC_58 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_58 << endl;
cout << setw(20) << "GC_59 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_59 << endl;
cout << setw(20) << "GC_60 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_60 << endl;
cout << setw(20) << "GC_61 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_61 << endl;
cout << setw(20) << "GC_62 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_62 << endl;
cout << setw(20) << "GC_63 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_63 << endl;
cout << setw(20) << "GC_64 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_64 << endl;
cout << setw(20) << "GC_65 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_65 << endl;
cout << setw(20) << "GC_66 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_66 << endl;
cout << setw(20) << "GC_67 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_67 << endl;
cout << setw(20) << "GC_68 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_68 << endl;
cout << setw(20) << "GC_69 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_69 << endl;
cout << setw(20) << "GC_70 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_70 << endl;
cout << setw(20) << "GC_71 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_71 << endl;
cout << setw(20) << "GC_72 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_72 << endl;
cout << setw(20) << "GC_73 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_73 << endl;
cout << setw(20) << "GC_74 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_74 << endl;
cout << setw(20) << "GC_75 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_75 << endl;
cout << setw(20) << "GC_76 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_76 << endl;
cout << setw(20) << "GC_77 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_77 << endl;
cout << setw(20) << "GC_78 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_78 << endl;
cout << setw(20) << "GC_79 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_79 << endl;
cout << setw(20) << "GC_80 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_80 << endl;
cout << setw(20) << "GC_81 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_81 << endl;
cout << setw(20) << "GC_82 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_82 << endl;
cout << setw(20) << "GC_83 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_83 << endl;
cout << setw(20) << "GC_84 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_84 << endl;
cout << setw(20) << "GC_85 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_85 << endl;
cout << setw(20) << "GC_86 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_86 << endl;
cout << setw(20) << "GC_87 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_87 << endl;
cout << setw(20) << "GC_88 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_88 << endl;
cout << setw(20) << "GC_89 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_89 << endl;
cout << setw(20) << "GC_90 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_90 << endl;
cout << setw(20) << "GC_91 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_91 << endl;
cout << setw(20) << "GC_92 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_92 << endl;
cout << setw(20) << "GC_93 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_93 << endl;
cout << setw(20) << "GC_94 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_94 << endl;
cout << setw(20) << "GC_95 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_95 << endl;
cout << setw(20) << "GC_96 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_96 << endl;
cout << setw(20) << "GC_97 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_97 << endl;
cout << setw(20) << "GC_98 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_98 << endl;
cout << setw(20) << "GC_99 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_99 << endl;
cout << setw(20) << "GC_100 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_100 << endl;
cout << setw(20) << "GC_101 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_101 << endl;
cout << setw(20) << "GC_102 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_102 << endl;
cout << setw(20) << "GC_103 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_103 << endl;
cout << setw(20) << "GC_104 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_104 << endl;
cout << setw(20) << "GC_105 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_105 << endl;
cout << setw(20) << "GC_106 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_106 << endl;
cout << setw(20) << "GC_107 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_107 << endl;
cout << setw(20) << "GC_108 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_108 << endl;
}
void Parameters_sm::printDependentParameters(){
cout << "sm model parameters dependent on event kinematics:" << endl;
cout << setw(20) << "aS " << "= " << setiosflags(ios::scientific) << setw(10) << aS << endl;
cout << setw(20) << "mdl_sqrt__aS " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_sqrt__aS << endl;
cout << setw(20) << "G " << "= " << setiosflags(ios::scientific) << setw(10) << G << endl;
cout << setw(20) << "mdl_G__exp__2 " << "= " << setiosflags(ios::scientific) << setw(10) << mdl_G__exp__2 << endl;
}
void Parameters_sm::printDependentCouplings(){
cout << "sm model couplings dependent on event kinematics:" << endl;
cout << setw(20) << "GC_12 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_12 << endl;
cout << setw(20) << "GC_11 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_11 << endl;
cout << setw(20) << "GC_10 " << "= " << setiosflags(ios::scientific) << setw(10) << GC_10 << endl;
}
""" % misc.get_pkg_info()
file_h, file_cc = self.model_builder.generate_parameters_class_files()
self.assertEqual(file_h.split('\n'), goal_file_h.split('\n'))
self.assertEqual(file_cc.replace('\t', ' ').split('\n'), goal_file_cc.replace('\t', ' ').split('\n'))
#===============================================================================
# IOExportMatchBox
#===============================================================================
class IOExportMatchBox(unittest.TestCase,
test_file_writers.CheckFileCreate):
"""Test class for the export v4 module"""
def setUp(self):
        if not hasattr(self, 'mymodel'):
self.mymodel = base_objects.Model()
self.mymatrixelement = helas_objects.HelasMatrixElement()
        test_file_writers.CheckFileCreate.clean_files(self)
# Set up model
mypartlist = base_objects.ParticleList()
myinterlist = base_objects.InteractionList()
        # u and c quarks and their antiparticles
mypartlist.append(base_objects.Particle({'name':'u',
'antiname':'u~',
'spin':2,
'color':3,
'mass':'ZERO',
'width':'ZERO',
'texname':'u',
'antitexname':'\bar u',
'line':'straight',
'charge':2. / 3.,
'pdg_code':2,
'propagating':True,
'is_part':True,
'self_antipart':False}))
u = mypartlist[len(mypartlist) - 1]
antiu = copy.copy(u)
antiu.set('is_part', False)
mypartlist.append(base_objects.Particle({'name':'c',
'antiname':'c~',
'spin':2,
'color':3,
'mass':'MC',
'width':'ZERO',
'texname':'c',
'antitexname':'\bar c',
'line':'straight',
'charge':2. / 3.,
'pdg_code':4,
'propagating':True,
'is_part':True,
'self_antipart':False}))
c = mypartlist[len(mypartlist) - 1]
antic = copy.copy(c)
antic.set('is_part', False)
# A gluon
mypartlist.append(base_objects.Particle({'name':'g',
'antiname':'g',
'spin':3,
'color':8,
'mass':'ZERO',
'width':'ZERO',
'texname':'g',
'antitexname':'g',
'line':'curly',
'charge':0.,
'pdg_code':21,
'propagating':True,
'is_part':True,
'self_antipart':True}))
g = mypartlist[len(mypartlist) - 1]
        # A Z boson
mypartlist.append(base_objects.Particle({'name':'Z',
'antiname':'Z',
'spin':3,
'color':1,
'mass':'MZ',
'width':'WZ',
'texname':'Z',
'antitexname':'Z',
'line':'wavy',
'charge':0.,
'pdg_code':23,
'propagating':True,
'is_part':True,
'self_antipart':True}))
z = mypartlist[len(mypartlist) - 1]
# A gluino
mypartlist.append(base_objects.Particle({'name':'go',
'antiname':'go',
'spin':2,
'color':8,
'mass':'MGO',
'width':'WGO',
'texname':'go',
'antitexname':'go',
'line':'straight',
'charge':0.,
'pdg_code':1000021,
'propagating':True,
'is_part':True,
'self_antipart':True}))
go = mypartlist[len(mypartlist) - 1]
# A sextet diquark
mypartlist.append(base_objects.Particle({'name':'six',
'antiname':'six~',
'spin':1,
'color':6,
'mass':'MSIX',
'width':'WSIX',
'texname':'six',
'antitexname':'sixbar',
'line':'straight',
'charge':4./3.,
'pdg_code':6000001,
'propagating':True,
'is_part':True,
'self_antipart':False}))
six = mypartlist[len(mypartlist) - 1]
antisix = copy.copy(six)
antisix.set('is_part', False)
# Gluon couplings to quarks
myinterlist.append(base_objects.Interaction({
'id': 1,
'particles': base_objects.ParticleList(\
[antiu, \
u, \
g]),
'color': [color.ColorString([color.T(2, 1, 0)])],
'lorentz':['FFV1'],
'couplings':{(0, 0):'GC_10'},
'orders':{'QCD':1}}))
        # Z couplings to quarks
myinterlist.append(base_objects.Interaction({
'id': 2,
'particles': base_objects.ParticleList(\
[antiu, \
u, \
z]),
'color': [color.ColorString([color.T(1, 0)])],
'lorentz':['FFV2', 'FFV5'],
'couplings':{(0,0): 'GC_35', (0,1): 'GC_47'},
'orders':{'QED':1}}))
# Gluon couplings to gluinos
myinterlist.append(base_objects.Interaction({
'id': 3,
'particles': base_objects.ParticleList(\
[go, \
go, \
g]),
'color': [color.ColorString([color.f(0,1,2)])],
'lorentz':['FFV1'],
'couplings':{(0, 0):'GC_8'},
'orders':{'QCD':1}}))
# Sextet couplings to quarks
myinterlist.append(base_objects.Interaction({
'id': 4,
'particles': base_objects.ParticleList(\
[u, \
u, \
antisix]),
'color': [color.ColorString([color.K6Bar(2, 0, 1)])],
'lorentz':['FFS1'],
'couplings':{(0,0): 'GC_24'},
'orders':{'QSIX':1}}))
myinterlist.append(base_objects.Interaction({
'id': 5,
'particles': base_objects.ParticleList(\
[antiu, \
antiu, \
six]),
'color': [color.ColorString([color.K6(2, 0, 1)])],
'lorentz':['FFS1'],
'couplings':{(0,0): 'GC_24'},
'orders':{'QSIX':1}}))
self.mymodel.set('particles', mypartlist)
self.mymodel.set('interactions', myinterlist)
self.mymodel.set('name', 'sm')
myleglist = base_objects.LegList()
myleglist.append(base_objects.Leg({'id':2,
'state':False}))
myleglist.append(base_objects.Leg({'id':-2,
'state':False}))
myleglist.append(base_objects.Leg({'id':2,
'state':True}))
myleglist.append(base_objects.Leg({'id':-2,
'state':True}))
myproc = base_objects.Process({'legs':myleglist,
'model':self.mymodel,
'orders':{'QSIX':0}})
myamplitude = diagram_generation.Amplitude({'process': myproc})
self.mymatrixelement = helas_objects.HelasMultiProcess(myamplitude)
myleglist = base_objects.LegList()
myleglist.append(base_objects.Leg({'id':4,
'state':False,
'number' : 1}))
myleglist.append(base_objects.Leg({'id':-4,
'state':False,
'number' : 2}))
myleglist.append(base_objects.Leg({'id':4,
'state':True,
'number' : 3}))
myleglist.append(base_objects.Leg({'id':-4,
'state':True,
'number' : 4}))
myproc = base_objects.Process({'legs':myleglist,
'model':self.mymodel,
'orders':{'QSIX':0}})
self.mymatrixelement.get('matrix_elements')[0].\
get('processes').append(myproc)
self.mycppwriter = helas_call_writer.CPPUFOHelasCallWriter(self.mymodel)
# self.pythia8_exporter = export_cpp.ProcessExporterMatchbox(\
# self.mymatrixelement, self.mycppwriter,
# process_string = "q q~ > q q~")
#
# self.cpp_exporter = export_cpp.ProcessExporterCPP(\
# self.mymatrixelement, self.mycppwriter,
# process_string = "q q~ > q q~")
tearDown = test_file_writers.CheckFileCreate.clean_files
def test_fail_on_process_cc_file_uu_six(self):
"""Test writing the .cc Pythia file for u u > six"""
myleglist = base_objects.LegList()
myleglist.append(base_objects.Leg({'id':2,
'state':False,
'number' : 1}))
myleglist.append(base_objects.Leg({'id':2,
'state':False,
'number' : 2}))
myleglist.append(base_objects.Leg({'id':6000001,
'number' : 3}))
myproc = base_objects.Process({'legs':myleglist,
'model':self.mymodel})
myamplitude = diagram_generation.Amplitude({'process': myproc})
mymatrixelement = helas_objects.HelasMultiProcess(myamplitude)
exporter = export_cpp.OneProcessExporterMatchbox( mymatrixelement, self.mycppwriter, process_string="q q > six")
self.assertRaises(export_cpp.OneProcessExporterCPP.ProcessExporterCPPError,
exporter.write_process_cc_file,
writers.CPPWriter(self.give_pos('test.cc')))
def test_write_match_go_process_cc_file(self):
"""Test writing the .cc C++ standalone file for u u~ > go go"""
myleglist = base_objects.LegList()
myleglist.append(base_objects.Leg({'id':2,
'state':False}))
myleglist.append(base_objects.Leg({'id':-2,
'state':False}))
myleglist.append(base_objects.Leg({'id':1000021,
'state':True}))
myleglist.append(base_objects.Leg({'id':1000021,
'state':True}))
myproc = base_objects.Process({'legs':myleglist,
'model':self.mymodel})
myamplitude = diagram_generation.Amplitude({'process': myproc})
matrix_element = helas_objects.HelasMultiProcess(myamplitude)
matrix_element.get('matrix_elements')[0].set('has_mirror_process',
True)
exporter = export_cpp.OneProcessExporterMatchbox(matrix_element,
self.mycppwriter)
exporter.write_process_cc_file(\
writers.CPPWriter(self.give_pos('test.cc')))
goal_string = """int CPPProcess::colorstring(int i, int j)
{
static const double res[2][5] = {{3, 4, 2, 1, 0}, {4, 3, 2, 1, 0}};
return res[i][j];
}"""
#print open(self.give_pos('test.cc')).read()
self.assertFileContains('test.cc', goal_string, partial=True)
|
[
"balashangar.kailasapathy@cern.ch"
] |
balashangar.kailasapathy@cern.ch
|
0156d263e845c44ce2d41e2066fd9cda26b28d2e
|
342da3c20e1d6fb4c2f5bd59de318afa987047b5
|
/quickSort.py
|
38a326bfab12fafa9981d73108aa995861c0db64
|
[] |
no_license
|
Franktian/Algorithms
|
f3cea0050eb2bf156cb9cd0ec028bcfa80b12746
|
ddadab2dc31e034645fb31ae262d5ca07da32b58
|
refs/heads/master
| 2021-01-19T03:13:19.590527
| 2015-10-30T21:55:36
| 2015-10-30T21:55:36
| 19,363,849
| 1
| 1
| null | 2015-10-30T21:55:37
| 2014-05-02T02:19:28
|
Python
|
UTF-8
|
Python
| false
| false
| 688
|
py
|
def quickSort(lst):
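    # the global counter c tallies len(lst) - 1 on every recursive call,
    # a rough proxy for the number of comparisons performed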
global c
c = c + len(lst) - 1
if len(lst) <= 1:
return lst
less = []
equal = []
greater = []
pivot = lst[0]
print "pivot: " + str(pivot)
for item in lst:
if item < pivot:
less.append(item)
if item == pivot:
equal.append(item)
if item > pivot:
greater.append(item)
return quickSort(less) + equal + quickSort(greater)
if __name__ == "__main__":
a = [1, 2, 3, 4, 5, 12]
b = [6, 7, 8, 9, 10, 100]
c = [10000, 967, 87, 91, 117, 819, 403, 597, 1201, 12090]
d = [4, 3, 2, 1]
c = 0
quickSort(numbers)
print c
|
[
"tianyawen201209@hotmail.com"
] |
tianyawen201209@hotmail.com
|
f1614ef1ca2cd40039dd12e808c1521574964990
|
1c01e583b5e2a5003f60593defe6b5f3d0715176
|
/sentry_django_example/sentry_django_example/wsgi.py
|
8626f315d2d604c6f8fd0f6dc2c244c91b0f91b5
|
[
"MIT"
] |
permissive
|
PostHog/posthog-python
|
bf430e71d9efab69f1b2575f3257266352cc73d4
|
139258cacb7956f1bc025ad9c42095125bd6cd81
|
refs/heads/master
| 2023-09-01T17:35:44.747266
| 2023-08-17T17:03:28
| 2023-08-17T17:03:28
| 239,432,717
| 21
| 18
|
MIT
| 2023-08-17T12:14:08
| 2020-02-10T05:11:45
|
Python
|
UTF-8
|
Python
| false
| false
| 419
|
py
|
"""
WSGI config for sentry_django_example project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sentry_django_example.settings")
application = get_wsgi_application()
|
[
"noreply@github.com"
] |
PostHog.noreply@github.com
|
ee79c75e29e94296e1066a936f82f309d36d347b
|
264413890b3e1321283885fe247bba642637484a
|
/blog/settings.py
|
0d82e49336bf7331f3d2471c3ebed5b2e3c0e984
|
[] |
no_license
|
yuuki-foll/my_blog_django
|
ec64f07cf7be2f5eb0edc70ab3a38779b17353b2
|
09ec71165cf1ca3b91fdbf4392c5c7c0a139203b
|
refs/heads/main
| 2023-09-03T01:51:08.934616
| 2021-11-21T05:19:01
| 2021-11-21T05:19:01
| 429,872,967
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,737
|
py
|
"""
Django settings for blog project.
Generated by 'django-admin startproject' using Django 3.2.9.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from pathlib import Path
import os
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
import json
con_file = open("./blog/config.json")
config = json.load(con_file)
con_file.close()
SECRET_KEY = config["SECRET_KEY"]
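# config.json is expected to hold the keys used in this module
# (placeholder values): {"SECRET_KEY": "...", "USER": "...", "PASSWORD": "..."}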
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'myapp.apps.MyappConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
    #'mdeditor',  # added for markdown support
    'markdownx',  # added for markdown support
]
# X_FRAME_OPTIONS = 'SAMEORIGIN'  # added for markdown support
# for markdown file uploads
MEDIA_ROOT = os.path.join(BASE_DIR,'uploads')
MEDIA_URL = '/media/'
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'blog.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR,'template')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'blog.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'my_blog',  # name of the database created for this project
'USER': config["USER"],
'PASSWORD': config["PASSWORD"],
'HOST': 'localhost',
'PORT': '',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'ja'
TIME_ZONE = 'Asia/Tokyo'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
|
[
"tu.over.ym@gmail.com"
] |
tu.over.ym@gmail.com
|
f95cc22cf0455587084764ef11e134e16ed3df7a
|
39ab08b1dc2097b0f553f59bc67fe2e7c0f672d1
|
/comments_remover
|
9d1eb0c9c7e8c6fcc90ecbafc15ae4f4b4b2b7ab
|
[] |
no_license
|
ramzan545/XComment.io
|
70958260b5f43299887dd5e217770580fb5a751c
|
f7bb49ae13e9959d0a2c56f76f97f1f1dea76757
|
refs/heads/master
| 2021-08-17T20:46:06.735324
| 2017-11-21T17:26:36
| 2017-11-21T17:26:36
| 111,712,472
| 1
| 0
| null | 2017-11-22T17:17:32
| 2017-11-22T17:17:32
| null |
UTF-8
|
Python
| false
| false
| 75
|
#!/usr/bin/env python
from XComment.comments_remover import main
main()
|
[
"bernard@savannahinformatics.com"
] |
bernard@savannahinformatics.com
|
|
7302ef914e929d58793225fc1771f869a4aa86ea
|
c916cccaef3f0b382f8471f201c1b2f78c639d01
|
/script/cut.pic.py
|
7cb7f7d7e2d3b307778a273a208674710fb16166
|
[] |
no_license
|
karlzheng/bashrc
|
ff90dd433e3407679be59e3fdd37d08b91d58dd1
|
c7b862cd82136d72385e9a33dfd6b526596d7afc
|
refs/heads/master
| 2023-02-19T01:22:45.159454
| 2023-02-09T01:22:15
| 2023-02-09T01:22:15
| 6,240,508
| 0
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 658
|
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from PIL import Image
img = Image.open('screenshot.png')
Img = img.convert('L')
Img.save("gray.jpg")
threshold = 200
#threshold = 100
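# build a 256-entry lookup table: gray levels below the threshold map to
# 0 (black), the rest to 1 (white); Image.point() applies it per pixel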
table = []
for i in range(256):
if i < threshold:
table.append(0)
else:
table.append(1)
photo = Img.point(table, '1')
#photo.save("test2.jpg")
# 480 -> 540
# 340 -> 368
x = 480
y = 340
w = 60
h = 28
region = photo.crop((x, y, x + w, y + h))
region.save("./z.jpeg")
#zt 736 344 -> 786,368
zt = (736,344,786,368)
region = photo.crop(zt)
region.save("./zt.jpeg")
x = 430
y = 340
region = photo.crop((x, y, 1100, 430))
region.save("./num.jpeg")
|
[
"Karl.Zheng@anker.com"
] |
Karl.Zheng@anker.com
|
445b9589568b27b49c62d92211be9a0f750b8f95
|
5aa2bcdf6039154ad488b4de9019760d65534936
|
/kbapi/asgi.py
|
be5bc9f97233d487067c67de32138f29d044dbf9
|
[] |
no_license
|
labiod/my_page
|
8774eb4bc148bf7a83b79d83bae182d7d5a5c112
|
f8e1513996e105d2b14b62b4aab24c2e78f74a75
|
refs/heads/master
| 2023-05-28T00:39:04.706538
| 2021-05-05T16:27:26
| 2021-05-05T16:27:26
| 364,639,099
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 387
|
py
|
"""
ASGI config for kbapi project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'kbapi.settings')
application = get_asgi_application()
|
[
"labiod@wp.pl"
] |
labiod@wp.pl
|
342862c87becf3b0599a8ded8a6d3fa73263cf73
|
b672072a6f4323db1d4e24aec97a714c282e8498
|
/virtual/bin/pip3.8
|
9a7eaeb4c259b4084cb016b273eba52fa4e59f19
|
[] |
no_license
|
Daniel-darnell/The-Tribune
|
9e4a3601d4e42ecd00e67e6f31d9708c823fc6cb
|
95f1f0340e366cc60a910daba6ad89d641d637bc
|
refs/heads/master
| 2022-12-31T07:53:54.051494
| 2020-10-22T18:31:52
| 2020-10-22T18:31:52
| 306,426,400
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 267
|
8
|
#!/home/moringa/Desktop/FullStack/Python/News/virtual/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
|
[
"darnelldanny1997@gmail.com"
] |
darnelldanny1997@gmail.com
|
be3f4ee78626bbab9c8b57610dacbf3595b90df9
|
7e2f26d89e2e0116dcac7403e75998701bc11776
|
/Resize.py
|
ae43fe8255d01b1bdd638bd84beb9976436c8118
|
[
"Apache-2.0"
] |
permissive
|
PENGsBIT/PIL-demo
|
952c318272ec5945a781c6d0cc5f3d4eb3fe6e19
|
36181d79ca6ab9fe177d734ab20ec093a9ba2246
|
refs/heads/master
| 2022-11-11T07:44:53.331903
| 2020-06-30T13:10:29
| 2020-06-30T13:10:29
| 275,868,730
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,133
|
py
|
# -*-coding:utf-8-*-
import numpy as np
from PIL import Image
from scipy import misc
def resize(img):
# img = Image.open(cirFileName)
# w, h = img.size
    # # drop the fractional part to avoid errors
# w, h = round(w * 0.2), round(h * 0.2)
img = img.resize((200, 200), Image.ANTIALIAS).convert("RGBA")
return img
def getResultArray():
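    # resample each YCbCr channel separately with bicubic interpolation,
    # clamp values to [0, 1], then rebuild a YCbCr image from the stacked channels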
fileName = "1.png"
imgBeforeExpand = misc.imread(fileName, flatten=False, mode='YCbCr')
imgBeforeExpand = imgBeforeExpand / 255.0
# imgBeforeExpand = np.uint8(imgBeforeExpand*255)
# h, w = imgBeforeExpand.shape[:2]
# print(imgBeforeExpand.shape)
h = 150
w = 160
data = list()
data.append(misc.imresize(imgBeforeExpand[:, :, 0], [h, w], 'bicubic', mode="F")[:, :, None])
data.append(misc.imresize(imgBeforeExpand[:, :, 1], [h, w], 'bicubic', mode="F")[:, :, None])
data.append(misc.imresize(imgBeforeExpand[:, :, 2], [h, w], 'bicubic', mode="F")[:, :, None])
data_out = np.concatenate(data, axis=2)
data_out[data_out > 1] = 1.0
data_out = np.uint8(data_out * 255)
img = misc.toimage(arr=data_out, mode="YCbCr")
return img
|
[
"924824195@qq.com"
] |
924824195@qq.com
|
62609e899e3b0ea2a0d36beb37304e6293577899
|
f746c01c37a0ca8b8ecc2267c44e363586cd092d
|
/mysite/blog/models.py
|
85f712350fd60423cb1886922a123c158f7abd6d
|
[] |
no_license
|
viniciuscunhavcm/my-first-blog
|
1ae53cf3976f9d50844ba419af86df8887b06df0
|
cdf017aa79d094d6ad492cdf4ee2749ed47bef53
|
refs/heads/master
| 2021-05-01T21:06:59.899133
| 2018-02-10T00:42:46
| 2018-02-10T00:42:46
| 120,971,805
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 925
|
py
|
from django.db import models
from django.utils import timezone
# Create your models here.
class Post(models.Model):
author = models.ForeignKey('auth.User')
title = models.CharField(max_length=200)
text = models.TextField()
created_date = models.DateTimeField(
default=timezone.now)
published_date = models.DateTimeField(
blank=True, null=True)
def publish(self):
self.published_date = timezone.now()
self.save()
def __str__(self):
return self.title
class Equipamentos(models.Model):
codigo_equipamento = models.CharField(max_length=200)
marca_equipamento = models.CharField(max_length=200)
modelo_equipamento = models.CharField(max_length=200)
nome_equipamento = models.CharField(max_length=200)
foto_equipamento = models.FileField(upload_to='', null=True, blank=True)
def __unicode__(self):
return "%s" %(self.nome_equipamento)
|
[
"vinicius.cunhamachado@gmail.com"
] |
vinicius.cunhamachado@gmail.com
|
cbe50cecf77e88e734f2da5b2a0435718646c991
|
a3ca521158892710fa786c89ee1c10c1768dcd0e
|
/heroi/migrations/0004_auto_20190930_1841.py
|
112f8187b1bdb1fdabcc6e9be27b09744cc34a2f
|
[] |
no_license
|
LucasBarletta/hero-wiki
|
4fff51a891fe0fd5005f6fa8cfe0cc56f02915d0
|
cfa402b8bead983690e708ff2117b06eb443ef07
|
refs/heads/master
| 2020-09-09T00:41:06.999305
| 2019-10-01T20:56:35
| 2019-10-01T20:56:35
| 221,292,251
| 1
| 0
| null | 2019-11-12T19:10:07
| 2019-11-12T19:10:07
| null |
UTF-8
|
Python
| false
| false
| 519
|
py
|
# Generated by Django 2.2.5 on 2019-09-30 18:41
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('heroi', '0003_heroi_universo'),
]
operations = [
migrations.AlterField(
model_name='heroi',
name='universo',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='universo', to='universo.Universo', verbose_name='universo'),
),
]
|
[
"foto.hiago@gmail.com"
] |
foto.hiago@gmail.com
|
cd311565cef0393d20b9126f149c01765b2cebdd
|
f44cdfbcc445d3cb910bada228baf16648480f5f
|
/tutoriais/desenho.py
|
d3a8874ba3af1be73b7033e63f88dcb1e04b40e4
|
[
"Apache-2.0"
] |
permissive
|
Camilotk/python-pooii
|
9c4ce4153cf5931643285c4b8775e623979510d6
|
e855d8d00b70ad23b51d2384980055182f5ccdd3
|
refs/heads/master
| 2021-01-24T12:37:05.381967
| 2018-07-01T21:45:00
| 2018-07-01T21:45:00
| 123,143,230
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 290
|
py
|
import turtle
from math import sin, cos, pi
r = 200
inc = 2*pi/100
t = 0
n = 1.5
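# string-art pattern: step an angle t around a circle of radius r and draw
# a chord from the point at angle t to the point at angle t + n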
for i in range (100):
x1 = r * sin(t)
y1 = r * cos(t)
x2 = r * sin(t+n)
y2 = r * cos(t+n)
turtle.penup()
turtle.goto(x1, y1)
turtle.pendown()
turtle.goto(x2, y2)
t += inc
|
[
"noreply@github.com"
] |
Camilotk.noreply@github.com
|
aa11d4728c3011be8136d004c246ccee156d6afa
|
f623bccab362a7513c2744bfaf937463039e2194
|
/python_100day/day01/04-构造程序逻辑.py
|
a7394e7c594a68fd082b403a269c737e3736f874
|
[] |
no_license
|
wangweiwg/python
|
d0fa7438d63814786b5aacb03c8e8417d0dd84e7
|
d1336f02bc9027664212ea9c677bde51a6561f55
|
refs/heads/master
| 2020-06-19T00:09:31.881584
| 2019-09-03T08:18:31
| 2019-09-03T08:18:31
| 196,500,190
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 597
|
py
|
# Branch and loop structures help us build up the logic of a program. Whether
# simple or complex, every program we write is composed of sequential, branch,
# and loop structures. For beginners in a programming language, the first
# skill to train is translating problem-solving steps described in natural
# human language into code; the next is the fluent use of the operators and
# expressions learned earlier, together with the branch and loop structures
# covered in the last two chapters. Only with these basic abilities can we
# solve all kinds of real-world problems with computer programs. So, start
# doing the exercises!
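# A minimal added illustration (not part of the original notes): one loop
# combined with one branch already expresses a complete piece of logic.
for n in range(1, 11):
    if n % 2 == 0:
        print('%d is even' % n)
    else:
        print('%d is odd' % n)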
|
[
"wangwei641@163.com"
] |
wangwei641@163.com
|
9c2dc2d121d2a81c78f74986909faff271784616
|
8815afb5f3f8b87b69419f4d284af7c7b6376b53
|
/venv/bin/rst2odt.py
|
b7c3a57df8af24f043badae862f48d00d661a0e6
|
[
"MIT"
] |
permissive
|
phillip1029/jobpy
|
e5af973cc7288c3f7326de91d9559bb4d1284caf
|
838085f5b107518534c63c2a593827d9a4b7ba60
|
refs/heads/master
| 2022-10-13T16:48:42.611026
| 2020-06-10T01:52:33
| 2020-06-10T01:52:33
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 791
|
py
|
#!/Users/rodrez/Desktop/jobpy/venv/bin/python
# $Id: rst2odt.py 5839 2009-01-07 19:09:28Z dkuhlman $
# Author: Dave Kuhlman <dkuhlman@rexx.com>
# Copyright: This module has been placed in the public domain.
"""
A front end to the Docutils Publisher, producing OpenOffice documents.
"""
import sys
try:
import locale
locale.setlocale(locale.LC_ALL, '')
except:
pass
from docutils.core import publish_cmdline_to_binary, default_description
from docutils.writers.odf_odt import Writer, Reader
description = ('Generates OpenDocument/OpenOffice/ODF documents from '
'standalone reStructuredText sources. ' + default_description)
writer = Writer()
reader = Reader()
output = publish_cmdline_to_binary(reader=reader, writer=writer,
description=description)
|
[
"rodrez@Fabians-MBP.fios-router.home"
] |
rodrez@Fabians-MBP.fios-router.home
|
cc0a8a091b986e8b557b38ddfaa5d4c813b993ee
|
fd390bfa1f471d09cafb72ad85e5143214abf32d
|
/shakecast/tests/server_test.py
|
23ba29999f7e6b943acd162d075061b698d4c994
|
[
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-public-domain-disclaimer"
] |
permissive
|
usgs/shakecast
|
0ac6ac955aaff3029b133c4ce9264adc90004d86
|
e287d697d93467e5e25c99d27b70754a0a5e6e39
|
refs/heads/master
| 2023-05-13T09:22:13.001195
| 2022-03-08T03:26:18
| 2022-03-08T03:26:18
| 52,902,913
| 9
| 24
|
NOASSERTION
| 2023-05-01T22:45:23
| 2016-03-01T19:15:42
|
Python
|
UTF-8
|
Python
| false
| false
| 4,015
|
py
|
import socket
# connect to server
# submit single job
# submit bad job
# job that fails
class Server_Test(object):
def __init__(self):
self.results = {'error_handling': 'Success'}
self.socket = socket.socket()
def conn_test(self):
try:
self.connect_to_server()
self.socket.close()
self.results['conn_test'] = 'Success'
except:
self.results['conn_test'] = 'Failed'
def task_test(self):
try:
self.connect_to_server()
self.socket.send("{'task_test': {'func': task_test}}")
self.results['task_test'] = self.socket.recv(1000)
except:
self.results['task_test'] = 'Failed'
def job_fail(self):
try:
self.connect_to_server()
self.socket.send("{'job_fail_test': {'func': job_fail_test}}")
result = self.socket.recv(1000)
if 'FAILED' in result:
self.results['job_fail'] = 'Success'
else:
self.results['job_fail'] = 'Failed'
except:
self.results['job_fail'] = 'Failed'
def bad_args(self):
try:
self.connect_to_server()
self.socket.send("{'bad_args_test': {'func': task_test, \
'args_in': {'bad_arg': 'bad_input'}}}")
result = self.socket.recv(1000)
if 'failed to run' in result:
self.results['bad_args'] = 'Success'
else:
self.results['bad_args'] = 'Failed'
except:
self.results['bad_args'] = 'Failed'
def bad_command(self):
try:
self.connect_to_server()
self.socket.send("{'bad_command': {'func': fail_test}")
result = self.socket.recv(1000)
if 'Bad Command' in result:
self.results['bad_command'] = 'Success'
else:
self.results['bad_command'] = 'Failed'
except:
self.results['bad_command'] = 'Failed'
def connect_to_server(self):
self.socket = socket.socket()
self.socket.settimeout(10)
self.socket.connect(('', 1981))
def run(self):
self.conn_test()
self.task_test()
self.job_fail()
self.bad_args()
self.bad_command()
self.analyze()
def analyze(self):
        failed = [job for job in list(self.results.keys())
                  if self.results[job] == 'Failed']
if failed:
return_str = '\nFailed Tests: %s' % failed
if 'conn_test' in failed:
return_str += '\nServer is most likely down'
else:
return_str += '\nServer is up'
if 'task_test' in failed:
return_str += '\nServer is failing to run tasks'
if ('job_fail' in failed or
'bad_args' in failed or
'bad_command' in failed):
self.results['error_handling'] = 'Failed'
return_str += '\nError handling is failing'
else:
self.results['error_handling'] = 'Success'
else:
return_str = "Passed all tests"
self.results['analysis'] = return_str
def __str__(self):
        return_str = '''
#---------------------SERVER TEST---------------------#
Connected to Server: %s
Ran Job: %s
Error Handling: %s
Analysis: %s
#------------------------------------------------------#
''' % (self.results['conn_test'],
self.results['task_test'],
self.results['error_handling'],
self.results['analysis'])
return return_str
if __name__ == '__main__':
st = Server_Test()
st.run()
print(str(st))
|
[
"dslosky@usgs.gov"
] |
dslosky@usgs.gov
|
d6cef67fdc8cab8d72eb7c4ee7b109c14133b181
|
bbd7774ef77ce96a4f0396dfb49d5614e7ffd31f
|
/Competitive Progamming/CodeSignal/Tourements/18_04_2019.py
|
4b9adb1cc525c6921106de41ab7bea6f4b8b5c96
|
[] |
no_license
|
linhnt31/Python
|
95978918a5c08aa11d4c0b955143dfe4e1c6cb0f
|
be2942709ab39166c93152a065d11bc58efdb858
|
refs/heads/master
| 2022-03-25T21:32:16.138624
| 2020-01-18T03:20:38
| 2020-01-18T03:20:38
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,420
|
py
|
"""
-Link: https://app.codesignal.com/tournaments/TBvuWWoyrhLB57Gax
"""
#
def ball_distribution(colors, ballsPerColor, boxsize):
res = 0
capacity = boxsize
current_box = 0
for _ in range(colors):
start_box = current_box
for _ in range(ballsPerColor):
capacity -= 1
if capacity == 0:
capacity = boxsize
current_box += 1
if (start_box < current_box and capacity < boxsize):
res += 1
return res
#
def sequenceElement(a, n):
MOD = 10**5
seq = []
for i in range(5):
seq.append(a[i])
lastFive = (a[0] * 10**4 + a[1] * 10**3 +
a[2] * 10**2 + a[3] * 10 + a[4])
was = {}
was[lastFive] = 4
i = 5
while True:
seq.append((seq[i - 1] + seq[i - 2] +
seq[i - 3] + seq[i - 4] + seq[i - 5]) % 10)
lastFive = (lastFive * 10 + seq[i]) % MOD
if lastFive in was:
last = was[lastFive]
return seq[n % (i - last)]
else:
was[lastFive] = i
i += 1
#
def myConcat(strings, separator):
res = []
for ch in strings:
        res.append(ch + separator)
return "".join(res)
#
def returnLocalValue():
return "local value"
#
def lastDigitRegExp(inputString):
for ch in inputString[::-1]:
if '0' <= ch <= '9':
return ch
|
[
"nguyenthanhlinh58@gmail.com"
] |
nguyenthanhlinh58@gmail.com
|
23fc3795d1765701ecbbf3f694558038140f22b7
|
42a65e1127fa328775804d25b04a968fac63c7b2
|
/read_utils.py
|
623afd7d9917367f8d659f3ca6c63b24ebfa04f1
|
[] |
no_license
|
hacksman/char_rnn
|
8bd33148d7322207d1a7e42bd87a26c7db4f4093
|
1eb174a4165b635641f3a692f32464496ad03261
|
refs/heads/master
| 2020-03-14T22:18:30.445711
| 2018-05-11T00:34:41
| 2018-05-11T00:34:41
| 131,818,387
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,180
|
py
|
# -*- coding: utf-8 -*-
# @Time : 4/25/18 8:29 AM
import pickle
import numpy as np
import copy
def batch_generator(arr, n_seqs, n_steps):
arr = copy.copy(arr)
print('before:{}\n'.format(arr))
batch_size = n_seqs * n_steps
n_batches = int(len(arr)/batch_size)
arr = arr[:n_batches * batch_size]
# print(arr)
arr = arr.reshape((n_seqs, -1))
print('after:{}\n'.format(arr))
while True:
np.random.shuffle(arr)
print('after shuffle: {}'.format(arr))
for n in range(0, arr.shape[1], n_steps):
x = arr[:, n:n + n_steps]
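            # y is x rotated left by one position (the first element wraps to
            # the end): each input character is paired with the one after it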
y = np.zeros_like(x)
y[:, :-1], y[:, -1] = x[:, 1:], x[:, 0]
yield x, y
class TextCoverter(object):
def __init__(self, text=None, max_vocab=5000, filename=None):
if filename is not None:
with open(filename, 'rb') as f:
self.vocab = pickle.load(f)
else:
vocab = set(text)
vocab_count = {}
for word in vocab:
vocab_count[word] = 0
for word in text:
vocab_count[word] += 1
vocab_count_list = [(i, vocab_count[i]) for i in vocab_count]
vocab_count_list.sort(key=lambda x: x[1], reverse=True)
            if len(vocab_count_list) > max_vocab:
                # honour max_vocab: keep only the most frequent characters;
                # out-of-vocab indices map to '<unk>' in int_to_word
                vocab_count_list = vocab_count_list[:max_vocab]
            vocab = [x[0] for x in vocab_count_list]
self.vocab = vocab
self.word_to_int_table = {c: i for i, c in enumerate(self.vocab)}
self.int_to_word_table = dict(enumerate(self.vocab))
def text_to_arr(self, text):
arr = [self.word_to_int_table[word] for word in text]
return np.array(arr)
def arr_to_text(self, arr):
words = [self.int_to_word(index) for index in arr]
return ''.join(words)
def int_to_word(self, index):
if index == len(self.vocab):
return '<unk>'
elif index < len(self.vocab):
return self.int_to_word_table[index]
else:
            raise Exception('Unknown index')
@property
def vocab_size(self):
return len(self.vocab) + 1
def save_to_file(self, filename):
with open(filename, 'wb') as f:
pickle.dump(self.vocab, f)
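# A usage sketch (hypothetical text and filename):
#   conv = TextCoverter(text="hello world", max_vocab=5000)
#   arr = conv.text_to_arr("hello")   # -> np.array of integer ids
#   conv.arr_to_text(arr)             # -> "hello"
#   conv.save_to_file("vocab.pkl")    # later: TextCoverter(filename="vocab.pkl")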
|
[
"funblessu@gmail.com"
] |
funblessu@gmail.com
|
e00523fad4aa6a849be2c319113c838147673ca3
|
37b12eae7842a382b267dd333aaa230cc65f8e6f
|
/run.py
|
6feda5fdb0301026c62fd4cfe9d156d204b50a46
|
[] |
no_license
|
rainygirl/forget-my-past
|
13a0163c74a14412737c9e19648f0c971f64c09f
|
b6f8fc494b6f9cbb6a0a7c35a52c1a3e55789aec
|
refs/heads/master
| 2021-01-01T20:34:27.275591
| 2016-03-11T02:56:54
| 2016-03-11T02:56:54
| 8,945,584
| 4
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 721
|
py
|
import tweepy
import csv
consumer_key = ""
consumer_secret = ""
access_token = ""
access_token_secret = ""
username = ""
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth)
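# The loop below assumes a Twitter-archive style tweets.csv in which column 0
# holds the tweet id and column 3 a human-readable timestamp; only those two
# columns are used.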
with open('tweets.csv', 'rb') as csvfile:
csv_=csv.reader(csvfile)
x=0
for row in csv_ :
x += 1
id = row[0]
try:
api.destroy_status(id)
except tweepy.TweepError as e:
print e
if e.message[0]['code'] in [144,179,34] : pass
else : quit()
print x, row[3], ('https://twitter.com/%s/status/%s [DELETED]' % (username,id) )
print "Twitter timeline removed!"
|
[
"rainygirl@gmail.com"
] |
rainygirl@gmail.com
|
550ea16f13df568da8da7fb5cc32fb024eb5edbb
|
99ccd3e1deff489ccd532dbf9f37885ce5cc5e3d
|
/noise204ss.py
|
982f3d3f6a1444a46053be1e0a76355e27e73503
|
[] |
no_license
|
PavelPll/Earthquake-Prediction
|
1d031ee52637761d950fa1301aab1db6ba06def7
|
c96dd5e65f248b8f8bb2af2bbbf98b9dfb691ed5
|
refs/heads/master
| 2020-05-31T08:10:07.333771
| 2019-06-04T19:25:53
| 2019-06-04T19:25:53
| 190,180,845
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 11,858
|
py
|
#!/usr/bin/env python
# coding: utf-8
# In[ ]:
#Predict the time (single value) remaining before laboratory earthquakes occur
#from real-time seismic data (150000 values)
# In[ ]:
import numpy as np
import pandas as pd
import os
from tensorflow.python.client import device_lib
#print(device_lib.list_local_devices())
# In[ ]:
#read data
float_data = pd.read_csv("train.csv", #nrows=2e100,
dtype={"acoustic_data": np.float32,
"time_to_failure": np.float32})
float_data = float_data.values #np array
# In[ ]:
#divide the segment of 150000 consecutive values into 75 smaller segments
N_STEPS=75
STEP_LENGTH=2000
DEL=8
print("last: ",int(round(STEP_LENGTH/DEL)))
print("last: ",int(round(STEP_LENGTH/(DEL*DEL))))
#SPE is step per epoch, each epochs is trained on 32*1000 segments
SPE=1000
# In[ ]:
#some functions to create augmenters
from scipy.signal import savgol_filter
import pywt
from skimage.restoration import (denoise_wavelet, estimate_sigma)
def running_mean(x, N=3):
cumsum = np.cumsum(np.insert(x, 0, 0))
result=(cumsum[N:] - cumsum[:-N]) / float(N)
result=np.insert(result, 0, x[0])
result=np.append(result,x[len(x)-1])
return np.matrix.round(result,0)
def lowpassfilter(signal, thresh = 0.63, wavelet="db4"):
thresh = thresh*np.nanmax(signal)
coeff = pywt.wavedec(signal, wavelet, mode="per" )
coeff[1:] = (pywt.threshold(i, value=thresh, mode="soft" ) for i in coeff[1:])
reconstructed_signal = pywt.waverec(coeff, wavelet, mode="per" )
return reconstructed_signal
def rolling_window(a, window):
shape = a.shape[:-1] + (a.shape[-1] - window + 1, window)
strides = a.strides + (a.strides[-1],)
return np.lib.stride_tricks.as_strided(a, shape=shape, strides=strides)
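# e.g. rolling_window(np.arange(5), 3) ->
#   array([[0, 1, 2],
#          [1, 2, 3],
#          [2, 3, 4]])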
# In[ ]:
#the idea: convert 150000 values into features to decrease the number of values for RNN input
from scipy.stats import normaltest
from scipy.stats import moment, kurtosis, skew
from tsfresh.feature_extraction import feature_calculators as ts
from statsmodels.tsa.api import ExponentialSmoothing, SimpleExpSmoothing, Holt
from random import randint
def extract_features(z):
#print(z.shape)
#z = z + np.random.normal(0, 0.5, [z.shape[0],z.shape[1]])
#b = normaltest(z, axis=1)
#m3 = np.cbrt( moment(z, 3, axis=1) )
#m21 = autocorr1(z,[1])
#print("m21check ", m21>0)
#print("mean ",z.mean(axis=1).shape)
#print("m21 ",m21.shape)
return np.c_[z.mean(axis=1),
np.median(np.abs(z), axis=1),
z.std(axis=1),
z.max(axis=1),
z.min(axis=1),
#kurtosis(z, axis=1),
#-skew(z, axis=1),
np.quantile(np.abs(z), 0.05, axis=1),
np.quantile(np.abs(z), 0.25, axis=1),
np.quantile(np.abs(z), 0.75, axis=1),
np.quantile(np.abs(z), 0.95, axis=1),
#1-np.quantile(z, 0.75, axis=1),
#b[1],
#-m3,
#m21,
#z.shape[1]
]
# For a given ending position "last_index", we split the last 150'000 values of "x" into 75 pieces of length 2000 each.
# From each piece, 34 features are extracted. This results in a feature matrix of dimension (75 time steps x 34 features).
def create_X(x, last_index=None, n_steps=N_STEPS, step_length=STEP_LENGTH, aug=0):
    if last_index is None:
last_index=len(x)
assert last_index - n_steps * step_length >= 0
# Reshaping and approximate standardization with mean 5 and std 3.
per=x[(last_index - n_steps * step_length):last_index]
#print("per", x.shape)
#for data augmentation
if aug==1:
flag=randint(0, 3)
if flag==0:
s=np.random.normal(0, 1, per.shape[0])
s=np.matrix.round(s,0)
per=per+s
if flag==1:
per=running_mean(per)
if flag==2:
per=savgol_filter(per, 5, polyorder=3)
per=np.matrix.round(per,0)
if flag==3:
per=lowpassfilter(per, thresh = 0.01, wavelet="db4")
per=np.matrix.round(per,0)
temp = (per.reshape(n_steps, -1) - 5 ) / 3
# Extracts features of sequences of full length 1000, of the last 100 values and finally also
# of the last 10 observations.
q05_roll_std_10=np.zeros(n_steps)
R_std=np.zeros(n_steps)
av_change_abs_roll_mean_10=np.zeros(n_steps)
Imean=np.zeros(n_steps)
mac=np.zeros(n_steps)
mc=np.zeros(n_steps)
for i in range(n_steps):
#s=pd.DataFrame(temp[i, :])
#x_roll_std=s.rolling(10).std().dropna().values
x_roll_std=rolling_window(temp[i,:], 10).std(axis=1)
#print("rol ", x_roll_std.shape)
q05_roll_std_10[i]=np.quantile(x_roll_std, 0.05)
zc=np.fft.fft(temp[i, :])
realFFT=np.real(zc)
R_std[i]=realFFT.std()
imagFFT=np.imag(zc)
Imean[i]=imagFFT.mean()
#x_roll_mean = s.rolling(10).mean().dropna().values
x_roll_mean=rolling_window(temp[i,:], 10).mean(axis=1)
av_change_abs_roll_mean_10[i] = np.mean(np.diff(x_roll_mean))
mac[i]=ts.mean_abs_change(temp[i,:])
mc[i]=ts.mean_change(temp[i,:])
return np.c_[extract_features(temp),
extract_features(temp[:, ( step_length-int(round(step_length/DEL)) ):]),
extract_features(temp[:, ( step_length-int(round(step_length/(DEL*DEL))) ):]),
q05_roll_std_10,
R_std,
av_change_abs_roll_mean_10,
Imean,
mac,
mc,
temp[:, -1:]]
# We call "extract_features" three times, so the total number of features is 9 * 3 + 7 (last value) = 34
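# Sanity check (expected shapes with the defaults above):
#   create_X(float_data[:N_STEPS * STEP_LENGTH, 0]).shape == (N_STEPS, 34)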
# In[ ]:
#to provide the same input for all epochs based on single random sampling of the segments
#with 150000 length
#It is achieved by adding new generator
batch_size = 64
batch_size1=int(batch_size/2)
min_index=0
max_index = int(len(float_data) - 1)
np.random.seed(seed=1)
arr_rows=[]
for i in range(SPE):
rows = np.random.randint(min_index + N_STEPS * STEP_LENGTH, max_index, size=batch_size1)
arr_rows.append(rows)
def gf(min_index, n_steps, step_length, batch_size1):
while True:
#np.random.seed(seed=1)
i=0
while i<SPE*(1):
#rows = np.random.randint(min_index + n_steps * step_length, max_index, size=batch_size1)
rows=arr_rows[i]
yield rows
i=i+1
gen = gf(min_index, N_STEPS, STEP_LENGTH, int(batch_size/2))
# In[ ]:
#generate input for RNN for real data + augmentation
n_features = create_X(float_data[0:STEP_LENGTH*N_STEPS,0],
n_steps=N_STEPS, step_length=STEP_LENGTH).shape[1]
print("n_features= ",n_features)
# The generator randomly selects "batch_size" ending positions of sub-time series. For each ending position,
# the "time_to_failure" serves as target, while the features are created by the function "create_X".
def generator(data, min_index=0, max_index=None, batch_size=32, n_steps=N_STEPS,
step_length=STEP_LENGTH, val=0):
if max_index is None:
max_index = len(data) - 1
while True:
# Pick indices of ending positions
if val==0:
batch_size1=int(batch_size/2)
rows=next(gen)
else:
batch_size1=batch_size
rows = np.random.randint(min_index + n_steps * step_length, max_index, size=batch_size1)
#np.random.seed(seed=1)
#rows = np.random.randint(min_index + n_steps * step_length, max_index, size=batch_size1)
#rows=next(gen)
samples = np.zeros((batch_size, n_steps, n_features))
targets = np.zeros(batch_size, )
delta=len(rows)
for j, row in enumerate(rows):
samples[j] = create_X(data[:, 0], last_index=row, n_steps=n_steps,
step_length=step_length)
targets[j] = data[row, 1]
if val==0:
samples[j+delta] = create_X(data[:, 0], last_index=row, n_steps=n_steps,
step_length=step_length, aug=1)
targets[j+delta] = data[row, 1]
# if val==0:
# print(targets[0])
yield samples, targets
# In[ ]:
train_gen = generator(float_data, batch_size=batch_size, val=0)
#no data augmentation for validation
valid_gen = generator(float_data, batch_size=batch_size, val=1)
# In[ ]:
# Define model
import keras
from keras.models import Sequential
from keras.layers import Dense, CuDNNGRU, Dropout, GRU
from keras.optimizers import adam
from keras.callbacks import ModelCheckpoint
# In[ ]:
from keras import backend
print(backend.tensorflow_backend._get_available_gpus())
# In[ ]:
# Define model
cb = ModelCheckpoint("model.hdf5", monitor='val_loss', save_weights_only=False, period=1)
model = Sequential()
#model.add(GRU(100, return_sequences=True, input_shape=(None, n_features)))
model.add(GRU(68, input_shape=(None, n_features)))
#model.add(GRU(21))
model.add(Dense(20, activation='relu'))
#model.add(Dense(10, activation='relu'))
#model.add(Dropout(0.1))
model.add(Dense(1))
model.summary()
model.compile(optimizer=adam(lr=0.0005), loss="mae")
# In[ ]:
#define learning rate
from os import rename
from os.path import isfile
def lr_schedule(epoch):
#arr=np.array([0.1e-5, 0.1e-4, 0.1e-3, 0.1e-3])
#lr=arr[epoch]
# if epoch<0:
# lr=0.05e-4
# else:
# lr = 0.5e-04
lr=0.0005
print('Learning rate: ', lr)
if isfile("model.hdf5"):
n="model_noise204_ep"+str(epoch)+".hdf5"
rename("model.hdf5",n)
print("renamed to ",n)
else:
print("no file to rename")
return lr
lr_scheduler = keras.callbacks.LearningRateScheduler(lr_schedule)
# In[ ]:
history = model.fit_generator(train_gen,
steps_per_epoch=SPE,#n_train // batch_size,
epochs=200,
verbose=2,
#callbacks=cb,
validation_data=valid_gen,
validation_steps=100,
callbacks=[cb, lr_scheduler])#n_valid // batch_size)
#val_loss calculation is based on random sampling (32*100 pieces of 150000 consecutive values from 6e6 values)
#this is the way to evaluate the model on the whole dataset
#overfitting is compensated by low number of parameters (22063<<150000) in the model
#and by data augmentation
# In[ ]:
#generating submission file
from keras.models import load_model
bestModel = load_model('model_noise204_ep20.hdf5')
submission = pd.read_csv('sample_submission.csv', index_col='seg_id', dtype={"time_to_failure": np.float32})
from tqdm import tqdm_notebook
# Load each test data, create the feature matrix, get numeric prediction
for i, seg_id in enumerate(tqdm_notebook(submission.index)):
# print(i)
seg = pd.read_csv('test/' + seg_id + '.csv')
x = seg['acoustic_data'].values
submission.time_to_failure[i] = bestModel.predict(np.expand_dims(create_X(x), 0))
submission.head()
# Save
submission.to_csv('submission_noise204_ep20.csv')
# In[ ]:
|
[
"noreply@github.com"
] |
PavelPll.noreply@github.com
|
c43fa7575bd252945df40f6bc8ece43f57f7c26c
|
50008b3b7fb7e14f793e92f5b27bf302112a3cb4
|
/recipes/Python/577583_Special_Range_FunctiDifferent_Kinds_Ranges_int/recipe-577583.py
|
31b649ddc0e268049e2406c133b71adebac3e007
|
[
"MIT"
] |
permissive
|
betty29/code-1
|
db56807e19ac9cfe711b41d475a322c168cfdca6
|
d097ca0ad6a6aee2180d32dce6a3322621f655fd
|
refs/heads/master
| 2023-03-14T08:15:47.492844
| 2021-02-24T15:39:59
| 2021-02-24T15:39:59
| 341,878,663
| 0
| 0
|
MIT
| 2021-02-24T15:40:00
| 2021-02-24T11:31:15
|
Python
|
UTF-8
|
Python
| false
| false
| 3,509
|
py
|
#!/usr/bin/env python
# specialrange.py
"""
Contains a general purpose object for arbitrary ranges
i.e.
a-z = abcdefghijklmnopqrstuvwxyz
A-Z = ABCDEFGHIJKLMNOPQRSTUVWXYZ
1-9 = 123456789
0-9 = 0123456789
0-1000 = 0123456789...1000
Copyright 2011 by Sunjay Varma. All Rights Reserved.
Check out www.sunjay.ca
"""
LOWERCASE = "abcdefghijklmnopqrstuvwxyz"
UPPERCASE = LOWERCASE.upper()
LETTERS = LOWERCASE+UPPERCASE
NUMBERS = "123456789"
try:
basestring
xrange
except NameError:
basestring = str
xrange = range
class irange(object):
def __init__(self, start, stop=None, step=1):
if stop is None:
stop = start
start = 0
if not isinstance(start, (float, int)) and not isinstance(stop, (float, int)) and \
type(start) != type(stop):
raise TypeError("The types of start and stop must be the same!")
try:
if "." in start:
start = float(start)
else:
start = int(start)
if "." in stop:
stop = float(stop)
else:
stop = int(stop)
except (ValueError, TypeError):
pass # will be handled later
if isinstance(start, basestring): # the types of start and stop will be the same
assert len(start) and len(stop), "There must be at least one character!"
if len(start) > 1 or len(stop) > 1:
raise ValueError("Longer start and stop values are unsupported!")
if start in LETTERS and stop in LETTERS:
self.iterrange = self._char_range(start, stop, step)
else:
self._cannot_understand(start, stop, step)
elif isinstance(start, (float, int)):
self.iterrange = self._number_range(start, stop, step)
else:
self._cannot_understand(start, stop, step)
def _cannot_understand(self, start, stop, step):
raise ValueError("Cannot understand: %s, %s, or %s"%(start, stop, step))
@staticmethod
def _in_seq(seq, *args):
for x in args:
if x not in seq:
return False
return True
def _char_range(self, start, stop, step):
is_lower = self._in_seq(LOWERCASE, start, stop)
if not(is_lower or self._in_seq(UPPERCASE, start, stop)):
raise ValueError("start and stop must both be in the uppercase or lowercase letters")
seq = is_lower and LOWERCASE or UPPERCASE
start_i = seq.index(start)
stop_i = seq.index(stop)
delta = abs((start_i - stop_i) // step) #+ 1 # the +1 will give even the last character in the result
if stop_i < start_i and step >= 0 or stop_i > start_i and step <= 0:
# the number will never reach
return iter([])
return (seq[start_i + step * i] for i in xrange(delta))
@staticmethod
def _number_range(start, stop, step):
if stop < start and step >= 0 or stop > start and step <= 0:
# the number will never reach
return iter([])
delta = abs((start - stop) // step)
return (start + step * i for i in xrange(int(delta)))
def __iter__(self):
return self
def next(self):
return self.iterrange.next()
def __next__(self): # py3
return self.iterrange.__next__()
srange = lambda start, stop=None, step=1: list(irange(start, stop, step))
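# Usage examples (ranges are stop-exclusive, like the built-in range):
#   srange('a', 'e')      # -> ['a', 'b', 'c', 'd']
#   srange('D', 'A', -1)  # -> ['D', 'C', 'B']
#   srange(5)             # -> [0, 1, 2, 3, 4]
#   srange(0, 1, 0.25)    # -> [0.0, 0.25, 0.5, 0.75]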
|
[
"betty@qburst.com"
] |
betty@qburst.com
|
29a4a92ebeccf9c467b2b9a2bff80b3f6a787156
|
9a313b7831b1f09ebe87769b315048af3f6c022c
|
/voting.py
|
f19a855c8ef5b0006946b80e109c503415c3eeaf
|
[] |
no_license
|
salman6100/python
|
2ac750f32bb613a8f71d6131cce75d0efcaf8572
|
981f546835734293a192af15fe2fe3fa290b7d47
|
refs/heads/master
| 2021-01-19T03:18:30.756101
| 2016-07-05T01:06:38
| 2016-07-05T01:06:38
| 49,110,018
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 207
|
py
|
# if statement practice
age = 23
if age >= 18:
print ( " YOU are old enough to vote!")
elif age < 18:
print( " Sorry , you are too young to vote.")
else:
    # note: unreachable, since age >= 18 and age < 18 already cover every case
    print ( " Your admission cost is $10.")
|
[
"noreply@github.com"
] |
salman6100.noreply@github.com
|
58ac255895b073299174baef65344dadac15f38a
|
2ae0b8d95d439ccfd55ea7933ad4a2994ad0f6c5
|
/tests/layer_tests/tensorflow_lite_tests/test_tfl_Reshape.py
|
dcc56819148775ec133e78a032e82e487ed18615
|
[
"Apache-2.0"
] |
permissive
|
openvinotoolkit/openvino
|
38ea745a247887a4e14580dbc9fc68005e2149f9
|
e4bed7a31c9f00d8afbfcabee3f64f55496ae56a
|
refs/heads/master
| 2023-08-18T03:47:44.572979
| 2023-08-17T21:24:59
| 2023-08-17T21:24:59
| 153,097,643
| 3,953
| 1,492
|
Apache-2.0
| 2023-09-14T21:42:24
| 2018-10-15T10:54:40
|
C++
|
UTF-8
|
Python
| false
| false
| 1,237
|
py
|
import pytest
import tensorflow as tf
from common.tflite_layer_test_class import TFLiteLayerTest
test_params = [
{'shape': [2, 6], 'out_shape': [2, 3, 2]},
{'shape': [2, 4, 6], 'out_shape': [2, -1]},
{'shape': [1], 'out_shape': []},
]
class TestTFLiteReshapeLayerTest(TFLiteLayerTest):
inputs = ["Input"]
outputs = ["Reshape"]
allowed_ops = ['RESHAPE']
def make_model(self, params):
assert len(set(params.keys()).intersection({'shape', 'out_shape'})) == 2, \
'Unexpected parameters for test: ' + ','.join(params.keys())
tf.compat.v1.reset_default_graph()
with tf.compat.v1.Session() as sess:
place_holder = tf.compat.v1.placeholder(params.get('dtype', tf.float32), params['shape'],
name=self.inputs[0])
out_shape = tf.constant(params['out_shape'], dtype=tf.int32)
tf.reshape(place_holder, out_shape, name=self.outputs[0])
net = sess.graph_def
return net
@pytest.mark.parametrize("params", test_params)
@pytest.mark.nightly
def test_reshape(self, params, ie_device, precision, temp_dir):
self._test(ie_device, precision, temp_dir, params)
|
[
"noreply@github.com"
] |
openvinotoolkit.noreply@github.com
|
29bcb460b2346006d42915a27b31df1a66371ee0
|
a216f2689b154fd516069f47f854be1a1eab8410
|
/测试用例/接口自动化/接口自动化_V2/接口测试/生产测试/派工单/test_201_production_trackOrder_create.py
|
37186f1a9dceeafeee5d7803cbb29a7c5f1dd2df
|
[] |
no_license
|
liuzhengxing/NeuSoftEEP_API_Test
|
496a7323385242318091a175ee86ad04deed3b5a
|
2c3b0f5667c9526130a57c5ce2f0865e8f97302f
|
refs/heads/master
| 2020-03-25T14:27:42.213573
| 2018-08-08T03:27:05
| 2018-08-08T03:27:05
| 143,855,150
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,432
|
py
|
# _*_ coding:utf-8 _*_
import unittest
from unittest.mock import Mock
from 测试用例.接口自动化.接口自动化_V2.接口管理.生产管理.工单 import WorkOrder
from 测试用例.接口自动化.接口自动化_V2.接口管理.生产管理.生产工艺 import ProdRouteLine
from 测试用例.接口自动化.接口自动化_V2.接口管理.生产管理.派工单 import TrackOrder
from 测试用例.接口自动化.接口自动化_V2.接口测试.public_method import create_code
from public import params
class TestTrackOrderReport(unittest.TestCase):
    """Dispatch order test class"""
def setUp(self):
self.wo = WorkOrder()
self.prl = ProdRouteLine()
self.to = TrackOrder()
self.workOrder_mock_data = {
"code": "",
"resourceOrderGid": "",
"materialGid": params.MaterielSJGid,
"orderType": "62DC90DAFA845CB2E055000000000001",
"workCenterGid": params.bmFactoryWorkCenterGid,
"factoryLineGid": params.bmFactoryLineCF,
"routeLineGid": "",
"planQty": 10,
"planBeginTime": "2018-05-01",
"planEndTime": "2018-05-31",
"actualBeginTime": "",
"actualEndTime": "",
"finishQty": "",
"measureBeginTime": "",
"measureEndTime": "",
"orderSeq": "",
"parentWorkOrderGid": "",
"freezeStatus": "",
"orderStatus": "",
"bomStatus": "",
"createStatus": "",
"roundNum": "",
"processStatus": "",
"createKmFlag": "",
"createQacFlag": "",
"repairCardGid": "",
"measurementUnitGid": params.bmMeasurementUnitGid,
"materialVersion": "1",
"busiActivityType": "",
"qualifiedQty": "",
"unqualifiedQty": "",
"wasteQty": "",
"batchNumber": "",
"productGid": "",
"workOrderCategory": "",
"bomVersion": "",
"routeLineVersion": "",
"canOperation": "",
"surplusOrderFlag": "",
"publishedQty": "",
"factoryLineType": "",
"generatedCode": ""
}
        self.track_mock_data = [{
            "orderId": "123",  # work order ID
            "refenceQty": 5,  # reference quantity for this run; required when the work order's production-line type is repetitive, not validated when it is discrete
            # "operationList": [  # operations referenced in this run; required when the line type is discrete, not validated when it is repetitive
            #     {
            #         "operationId": "op01",  # work-order process operation id
            #         "opRefenceQty": "4"  # reference quantity for this operation
            #     },
            #     {
            #         "operationId": "op02",  # work-order process operation id
            #         "opRefenceQty": "8"  # reference quantity for this operation
            #     }
            # ]
        }]
    def test_trackorder_create_1(self):
        """Dispatch order creation API test: create by referencing a work order"""
        # Create a work order
self.wo.workorder_create = Mock(side_effect=self.wo.workorder_create)
resp = self.wo.workorder_create(self.workOrder_mock_data)
print('Response:', resp)
# self.assertEqual(self.po.planorder_create(mock_data), mock_resp)
workOrderGidList = []
workOrderGid = resp.pop('data')
workOrderGidList.append(workOrderGid)
        # Release (dispatch) the work order
self.wo.workorder_changeStatut = Mock(side_effect=self.wo.workorder_changeStatut)
self.wo.workorder_changeStatut(workOrderGidList)
        # Generate the dispatch order by referencing the work order
self.track_mock_data[0]['orderId'] = workOrderGid
self.track_mock_data[0]['refenceQty'] = 10
# operationList = []
#
        # # Query the work order's process operations
# self.prl.prodrouteline_findByWorkId = Mock(side_effect=self.prl.prodrouteline_findByWorkId)
# resp = self.prl.prodrouteline_findByWorkId(workOrderGid)
# imeProdRouteOperationList = resp.pop('data').pop('imeProdRouteOperationList')
# for pro in imeProdRouteOperationList:
# operation = {
# 'operationId': pro.pop('gid'),
# 'opRefenceQty': 10
# }
# operationList.append(operation)
#
# self.trackOrder_mock_data['operationList'] = operationList
self.to.trackorder_createByWorkOrder = Mock(side_effect=self.to.trackorder_createByWorkOrder)
resp = self.to.trackorder_createByWorkOrder(self.track_mock_data)
        # Verify that 'success' is present in the response
self.assertIn('success', resp.keys())
    def test_trackorder_create_2(self):
        """Dispatch order creation API test: reference quantity exceeds the planned quantity"""
        # Create a work order
self.wo.workorder_create = Mock(side_effect=self.wo.workorder_create)
resp = self.wo.workorder_create(self.workOrder_mock_data)
print('Response:', resp)
# self.assertEqual(self.po.planorder_create(mock_data), mock_resp)
workOrderGidList = []
workOrderGid = resp.pop('data')
workOrderGidList.append(workOrderGid)
        # Release (dispatch) the work order
self.wo.workorder_changeStatut = Mock(side_effect=self.wo.workorder_changeStatut)
self.wo.workorder_changeStatut(workOrderGidList)
        # Generate the dispatch order by referencing the work order
self.track_mock_data[0]['orderId'] = workOrderGid
self.track_mock_data[0]['refenceQty'] = 11
operationList = []
        # Query the work order's process operations
# self.prl.prodrouteline_findByWorkId = Mock(side_effect=self.prl.prodrouteline_findByWorkId)
# resp = self.prl.prodrouteline_findByWorkId(workOrderGid)
# imeProdRouteOperationList = resp.pop('data').pop('imeProdRouteOperationList')
# for pro in imeProdRouteOperationList:
# operation = {
# 'operationId': pro.pop('gid'),
# 'opRefenceQty': 11
# }
# operationList.append(operation)
#
# self.track_mock_data['operationList'] = operationList
self.to.trackorder_createByWorkOrder = Mock(side_effect=self.to.trackorder_createByWorkOrder)
resp = self.to.trackorder_createByWorkOrder(self.track_mock_data)
        # Verify the error code is 105000
self.assertEqual(105000, resp.pop('code'))
|
[
"michaellzx@126.com"
] |
michaellzx@126.com
|
2572e48aef735229baeaae1aaa4f9a7e5589be8c
|
5b120afdc1da83e0f1f0e601648a46d58c5d5575
|
/Growing_Circle/growing_circle_2.py
|
49f8b710f1da29f704a6129377acf74878d496ad
|
[] |
no_license
|
Ronaldoyoung/pygame
|
6a49f41ec8138a699a6e3766b575d305fa29df53
|
612323c8ca8d6b1fbd8949dd4172dac0a69a0e08
|
refs/heads/master
| 2020-04-02T14:37:52.592568
| 2018-12-03T04:34:44
| 2018-12-03T04:34:44
| 154,532,389
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,753
|
py
|
"""
Adds random color variation to the growing circle:
- a color attribute was added to the Circle class, set randomly from pygame.color.
"""
import pygame
from pygame.locals import *
from pygame import Color, Rect
import sys
import math
import numpy as np
import random
def get_screen_size():
"""return screen (width, height) tuple"""
screen = pygame.display.get_surface()
return screen.get_size()
class Circle():
def __init__(self, center=None, angle=None):
self.width = 4
self.height = 4
        self.color = random.choice(list(pygame.color.THECOLORS.values()))
if center is None:
self.center = np.array([400, 400])
else:
self.center = center
self.angle = angle
self.speed = np.array([1, 1])
        print(" >> ", g.height, g.width)  # note: relies on the module-level GameMain instance g
self.points = []
for x in range(self.angle):
self.points.append( center )
def update(self):
interval = 360/self.angle
rad = 0
newPoints = []
for pt in self.points:
#speed = np.random.randint(1, 5)
speed = 1
x = np.cos(math.radians(rad))
y = np.sin(math.radians(rad))
velocity = np.array([x, y])
velocity *= speed
pt = np.add(pt, velocity)
newPoints.append(pt)
#print("rad:{}, velocity:{}".format(rad, velocity))
rad += interval
self.points = newPoints
def is_onscreen(self):
return True
class GameMain():
    """Game main entry point; handles initialization of the game and graphics."""
done = False
debug = False
color_gray = Color('lightgray')
circles = []
def __init__(self, width=500, height=500, color_bg=None):
"""Initialize PyGame"""
pygame.init()
self.width, self.height = width, height
self.screen = pygame.display.set_mode((self.width, self.height))
pygame.display.set_caption("Growing circle : pygame")
self.clock = pygame.time.Clock()
self.limit_fps = True
self.limit_fps_max = 60
if color_bg is None:
color_bg = Color(50, 50, 50)
self.color_bg = color_bg
self.game_init()
    def game_init(self):
        """new game/round"""
        self.circles.clear()
def loop(self):
"""Game() main loop"""
while not self.done:
self.handle_events()
self.update()
self.draw()
if self.limit_fps:
self.clock.tick(self.limit_fps_max)
else:
self.clock.tick()
def update(self):
for idx, c in enumerate(self.circles):
c.update()
lastPoint = c.angle
halfPoint = c.angle // 2
if c.points[0][0] > self.width and c.points[lastPoint -1][1] < 0 and c.points[halfPoint-1][1] > self.height:
self.circles.pop(idx)
            if not c.is_onscreen():
                # currently unreachable: is_onscreen() always returns True,
                # and Circle does not implement rand_loc() yet
                c.rand_loc()
def handle_events(self):
"""handle regular events. """
events = pygame.event.get()
# kmods = pygame.key.get_mods() # key modifiers
for event in events:
if event.type == pygame.QUIT:
                # set self.done = True and exit the process
self.done = True
sys.exit()
elif event.type == KEYDOWN:
if (event.key == K_ESCAPE):
self.done = True
elif (event.key == K_SPACE):
print("Key Space")
self.game_init()
elif event.type == MOUSEBUTTONUP and event.button == 1 :
pos = pygame.mouse.get_pos()
self.circles.append(Circle(pos, 3))
def draw(self):
"""render screen"""
# clear screen
self.screen.fill(self.color_bg)
# Circle: draw
for c in self.circles:
for pt in c.points:
r = Rect(pt[0], pt[1], c.width, c.height)
#self.screen.fill(self.color_gray, r)
self.screen.fill(c.color, r)
# will call update on whole screen Or flip buffer.
pygame.display.flip()
if __name__ == '__main__':
g = GameMain()
g.loop()
"""
Missions:
- Change the circle's color as it grows.
- Vary the speed of the growing circles.
- Detect the edge of the screen and make circles disappear once they are outside it.
- Add some sound effects when the player clicks the mouse.
"""
|
[
"ronaldoyoung@gmail.com"
] |
ronaldoyoung@gmail.com
|
04999511691a1c4c6cd56d7c2bad5de798453da1
|
0a37c317eb04c3296a422c8d36e95140cfb45ee5
|
/lib/common.py
|
0604d5a174028f95b67e5e7ed8f797482fc06a0f
|
[] |
no_license
|
zhouglgh/messageCollect
|
2cc85205f09591630eb66deba9a51e3e3b9b3c41
|
bd32350e908e7d4963c6d3c59fd092a986465322
|
refs/heads/master
| 2021-01-01T06:34:18.453570
| 2017-07-30T16:25:17
| 2017-07-30T16:25:17
| 97,451,946
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,029
|
py
|
'''
author: zhougl
time : 2017-01-12
modified: zhougl; time: 2017-07-03
In 'process' function, add a path to 'cmd_str' for the program from other's.
'''
import json
import os
from subprocess import Popen, PIPE
class common_process(object):
def __init__(self,logger):
self.logger = logger
'''
    process a command and return the result;
    the result includes stdout and stderr.
'''
def process_and_return(self,cmd_str):
self.logger.info("execute command %s"%cmd_str)
res=Popen(cmd_str,shell=True,stdout=PIPE,stderr=PIPE)
resstr = res.stdout.read()
reserr = res.stderr.read()
        if reserr and not resstr:
            self.logger.warning("execute command %s failed, error message is %s."%(cmd_str,reserr))
        else:
            self.logger.info("execute command %s successfully!"%cmd_str)
results = [resstr,reserr]
return results
'''
    save string to file
    return the number of characters written to the file,
    or return 0 when it failed.
'''
def save_results_formate(self,filename, appended, cmd_res):
cmd_str = cmd_res[0]
resstr = cmd_res[1]
reserr = cmd_res[2]
if resstr:
write_content = cmd_str+ '\n{\n' + resstr + '}\n'
#file opened in mode 'mode'
mode = ''
if(appended == 0):
mode = 'w'
else:
mode = 'a'
with open(filename,mode) as f:
begin = f.tell()
f.write(write_content)
end = f.tell()
            return end - begin  # number of characters written
elif reserr:
self.logger.warning("error from %s"%reserr)
return 0
else:
return 0
'''
    run a linux command
    parameter1: the command to execute
    parameter2: the file that stores the result
appended=0 file is in 'write' mode
appended>=1 file is in 'append' mode
'''
def process_and_save(self,cmd_str,filename,appended):
cmd_res = self.process_and_return(cmd_str)
cmd_res.insert(0,cmd_str)
res = self.save_results_formate(filename,appended,cmd_res)
return res
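    # A usage sketch (hypothetical logger and paths):
    #   cp = common_process(logging.getLogger(__name__))
    #   out, err = cp.process_and_return('ls -l')
    #   cp.process_and_save('df -h', '/tmp/report.txt', appended=0)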
'''
function check if the dir 'p2' is existed in 'p1'
dirname : p2
directory: p1
'''
def check_dir(self,directory,dirname):
if os.path.exists(directory):
lst = os.listdir(directory)
else:
self.logger.error("There is no directory named %s"%directory);
exit(-1)
newdir = directory+'/'+dirname
if not dirname in lst:
os.mkdir(newdir)
self.logger.info("mkdir %s"%newdir)
return newdir
'''
function check if the file 'p2' is existed in 'p1'
dirname : p2
directory: p1
'''
def check_file(self,directory,filename):
if os.path.exists(directory):
lst = os.listdir(directory)
else:
self.logger.error("There is no directory named %s"%directory);
exit(-1)
if not filename in lst:
self.logger.warning("There is no file named %s."%filename)
return 0
return 1
def copy_files_to_dir(self,source,destination):
self.logger.info("copy files %s..."%source)
if os.path.isdir(destination):
cmd_string = 'cp -rf %s %s'%(source,destination)
res = self.process_and_return(cmd_string)
if res[1]:
return -1
else:
return 0
else:
self.logger.error("Please check if %s is a directory!"%destination)
return -2
|
[
"root@localhost.localdomain"
] |
root@localhost.localdomain
|
7d6433b4a86fb3ed07e042acce366bdf7d6b8369
|
af4c6d754b99d00cc183434de4fb6b78087ec029
|
/pttk/urls.py
|
127589a93133cd6244a12daf2fbd50b19309aad0
|
[] |
no_license
|
TuasnAnh/ecommerce_django
|
e1dfca061ba2bf1c3f4af1553f99801d8e6122c7
|
7278e7a38d223abbc4e8e26fd541e15ff2aec61a
|
refs/heads/master
| 2023-05-08T23:23:58.462463
| 2021-06-05T02:55:38
| 2021-06-05T02:55:38
| 373,340,549
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 848
|
py
|
"""pttk URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import include, path
urlpatterns = [
path('', include('ecommerce.urls')),
path('admin/', admin.site.urls),
]
|
[
"tuananhblablo@gmail.com"
] |
tuananhblablo@gmail.com
|
7295d4531cef0f3dcbd77618f3534539d20a66c8
|
9603935b28330e3b706997e85a112bb65d07b1ab
|
/transforms/__init__.py
|
fc74279939c682b995e610ac21339db62b9982d7
|
[] |
no_license
|
confident-OH/PISR
|
47a2e81548e752c495c0a28b01079ca4dea1b015
|
f66b9799dd0f6d90698b0da8bc47d405eed42103
|
refs/heads/master
| 2022-12-20T18:36:52.473348
| 2020-09-25T01:37:43
| 2020-09-25T01:37:43
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 157
|
py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from .transform_factory import get_transform
|
[
"leewk921223@gmail.com"
] |
leewk921223@gmail.com
|
3cc7ac852d73c1af8baa8fed62d8689e23a44fb5
|
163bbb4e0920dedd5941e3edfb2d8706ba75627d
|
/Code/CodeRecords/2235/60605/305067.py
|
08a9c1d0b316dc8b36c47900a1e509c2b0fca79e
|
[] |
no_license
|
AdamZhouSE/pythonHomework
|
a25c120b03a158d60aaa9fdc5fb203b1bb377a19
|
ffc5606817a666aa6241cfab27364326f5c066ff
|
refs/heads/master
| 2022-11-24T08:05:22.122011
| 2020-07-28T16:21:24
| 2020-07-28T16:21:24
| 259,576,640
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 168
|
py
|
x = input().strip().split()
h = int(x[0])
w = int(x[1])
li = []
for i in range(w):
li.append(input())
if str(li) == "['1 3', '2 4']": print("""1
4
5""")
else: print(li)
|
[
"1069583789@qq.com"
] |
1069583789@qq.com
|
1ed2d5dda398b3652e788c8928b9595c3164325b
|
3c3ec6d97a9f6a8233b70c42dd756aebceeab54e
|
/jyeoo_crawl/jyeoo_crawl/spiders/manfen.py
|
27e364b4cd6887a3478e5588660a3442e53a9fdd
|
[
"BSD-2-Clause",
"Giftware"
] |
permissive
|
newle/scrapy-redis
|
0e974559bb22fd789011bf76b4e68e6c314a7a37
|
c7aea222df4693a71197234c8ff18465db17d24e
|
refs/heads/master
| 2021-01-24T15:33:07.991853
| 2015-08-18T02:13:18
| 2015-08-18T02:13:18
| 39,993,196
| 0
| 1
| null | 2015-07-31T07:41:23
| 2015-07-31T07:41:23
|
Python
|
UTF-8
|
Python
| false
| false
| 4,844
|
py
|
# -*- coding: utf-8 -*-
from scrapy.spiders import CrawlSpider, Rule
from scrapy.http import Request
from scrapy.linkextractors import LinkExtractor
from scrapy_redis.spiders import RedisMixin
from jyeoo_crawl.items import JyeooCrawlItem, JyeooCrawlLoader
#from Html2Text import html_to_text
import re
import logging
manfen_detail_regex = re.compile("http://www.manfen5.com/stinfo/")
def process_manfen_url(value):
if manfen_detail_regex.search(value) is None:
return None
return value
def getLabel(url):
    # map a question URL to its subject label; membership tests are used
    # because str.find() returns -1 (which is truthy) when nothing matches
    localurl = url.lower()
    if "cz_yw" in localurl:
        return "初中语文".decode("utf8")
    elif "cz_sx" in localurl:
        return "初中数学".decode("utf8")
    elif "cz_yy" in localurl:
        return "初中英语".decode("utf8")
    elif "cz_wl" in localurl:
        return "初中物理".decode("utf8")
    elif "cz_hx" in localurl:
        return "初中化学".decode("utf8")
    elif "cz_sw" in localurl:
        return "初中生物".decode("utf8")
    elif "cz_ls" in localurl:
        return "初中历史".decode("utf8")
    elif "cz_zz" in localurl:
        return "初中政治".decode("utf8")
    elif "cz_dl" in localurl:
        return "初中地理".decode("utf8")
    elif "gz_yw" in localurl:
        return "高中语文".decode("utf8")
    elif "gz_sx" in localurl:
        return "高中数学".decode("utf8")
    elif "gz_yy" in localurl:
        return "高中英语".decode("utf8")
    elif "gz_wl" in localurl:
        return "高中物理".decode("utf8")
    elif "gz_hx" in localurl:
        return "高中化学".decode("utf8")
    elif "gz_sw" in localurl:
        return "高中生物".decode("utf8")
    elif "gz_ls" in localurl:
        return "高中历史".decode("utf8")
    elif "gz_zz" in localurl:
        return "高中政治".decode("utf8")
    elif "gz_dl" in localurl:
        return "高中地理".decode("utf8")
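# e.g. getLabel("http://www.manfen5.com/stinfo/cz_sx/123.html") returns the
# junior-high maths label (初中数学); the URL here is hypothetical.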
class manfenSpider(RedisMixin, CrawlSpider):
"""Spider that reads urls from redis queue (manfen:start_urls)."""
name = "manfen"
redis_key = "manfen:start_urls"
rules = (
Rule(LinkExtractor(process_value = process_manfen_url), callback='parse_page', follow=False),
)
def __init__(self, *args, **kwargs):
domain = kwargs.pop('domain', '')
        self.allowed_domains = filter(None, domain.split(','))
super(manfenSpider, self).__init__(*args, **kwargs)
def _set_crawler(self, crawler):
CrawlSpider._set_crawler(self, crawler)
RedisMixin.setup_redis(self)
def parse_page(self, response):
#answerurl="http://www.1010manfen.com/qiuda.php?questionid="+getdetailurl(questionhtml)
#answerhtml=fetchhtml(answerurl)
#if(answerhtml != ""):
el = JyeooCrawlLoader(response = response)
el.add_value('question_url', response.url)
el.add_xpath('label_html', '//div[@class="xiti-content"]/div[@class="ndwz"]')
el.add_xpath('question_html', '(//div[@class="timutext"])[1]')
el.add_xpath('ans_html', '(//div[@class="answer_inner"])[1]')
return el.load_item()
def parse_manfen_detail_page(self, response):
el = JyeooCrawlLoader(response = response)
#el.add_value('question_url', response.url)
#el.add_xpath('question_html', '//div[@class="pt1"]')
#el.add_xpath('question_html', '//div[@class="pt2"]')
# # el.add_xpath('examination_point', '//div[@class="pt3"]')
# # el.add_xpath('', '//div[@class="pt4"]')
#el.add_xpath('parse_html', '//div[@class="pt5"]')
#el.add_xpath('ans_html', '//div[@class="pt6"]')
#el.add_xpath('comments_html', '//div[@class="pt7"]')
#return el.load_item()
# def parse_start_url(self, response):
# #el = manfenCrawlLoader(response = response)
# #el.add_value('question_url', response.url)
# #el.add_xpath('question_html', '//div[@class="result-content"]')
# #el.add_xpath('ans_html', '//div[@class="detail-item"]/div[@class="answer"]')
# #el.add_xpath('parse_html', '//div[@class="detail-item"]/div[@class="analysis"]')
# #el.add_xpath('comments_html', '//div[@class="detail-item"]/div[@class="tips"]')
#
# if response.request.body == "":
# request = FormRequest("http://www.manfen.com/math3/ques/partialques?r=0.1000001000030408&q=bc2d00e7-3e53-4464-9dd2-3a151c4827d4~cb3584cd-69ec-4638-8049-2564f0a45322~22&s=0&t2=9&d=0",
# formdata={'f':'0','p':'2'})
# self.crawler.engine.crawl(request, spider=self)
# else:
# print "response.request.body = " + response.request.body
#
## print "response.request.body = " + response.request.body
# #return el.load_item()
# def parse(self, response):
# pass
|
[
"wangzhensi0487@sogou-inc.com"
] |
wangzhensi0487@sogou-inc.com
|
9accb6066c13f00f1e9f50395425a59a7f94286a
|
309dda6e2f94eaa6e31123e430f6d421e76ddfcc
|
/core/migrations/0001_initial.py
|
56436a430dc47a41c9b560dddc0f3750e5bb0d13
|
[] |
no_license
|
canhazn/whatsidea
|
c5f58b43d998f387802fe4658c24d1d3bd943bc4
|
d84a7873bd599fecf6ab07f353e409477837dc32
|
refs/heads/main
| 2023-02-05T00:58:29.119619
| 2020-12-26T01:45:41
| 2020-12-26T01:45:41
| 307,355,253
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,451
|
py
|
# Generated by Django 3.0.11 on 2020-11-27 16:12
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import uuid
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Idea',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=500)),
('slug', models.SlugField(default=uuid.uuid1, max_length=500, unique=True)),
('shortdesc', models.TextField(blank=True)),
('content', models.TextField(default='Have no Idea!')),
('is_publish', models.BooleanField(default=True)),
('is_success', models.BooleanField(default=False)),
('address', models.CharField(blank=True, max_length=200)),
('phone', models.CharField(blank=True, max_length=12)),
('website', models.CharField(blank=True, max_length=30)),
('corver_image', models.ImageField(blank=True, upload_to='media/')),
('date_created', models.DateTimeField(auto_now_add=True)),
('founder', models.ManyToManyField(to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ['-date_created'],
},
),
migrations.CreateModel(
name='Vote',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date_created', models.DateTimeField(auto_now_add=True)),
('idea', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Idea')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Profile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('bio', models.TextField(blank=True, max_length=500)),
('phone', models.CharField(blank=True, max_length=30)),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Post',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('content', models.TextField()),
('date_created', models.DateTimeField(auto_now_add=True)),
('idea', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Idea')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ['date_created'],
},
),
migrations.CreateModel(
name='Image',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date_created', models.DateTimeField(auto_now_add=True)),
('img_file', models.ImageField(upload_to='image')),
('idea', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.Idea')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Contribution',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('content', models.TextField()),
('date_created', models.DateTimeField(auto_now_add=True)),
('idea', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.Idea')),
('parent', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='children', to='core.Contribution')),
('post', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.Post')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Comment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('content', models.TextField()),
('date_create', models.DateTimeField(auto_now_add=True)),
('parent', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='children', to='core.Comment')),
('post', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.Post')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
|
[
"canhazn@gmail.com"
] |
canhazn@gmail.com
|
2534e7f0340045879dc3950e98114130cd9d9df4
|
7262081a09328dbffafb868df75f240d3b15de23
|
/dashgo_ws/src/dashgo/dashgo_tools/scripts/cmdListen.py
|
a9836e7b59b51e9d2d8e654993a90a64d01b4aed
|
[] |
no_license
|
insion1991/change_dashgo_ws
|
be2dcbcb29d36b97b644640267029df900625cae
|
126e095fb7a6416198134ede60ba1b9d0473f0f4
|
refs/heads/master
| 2020-05-20T22:53:43.447298
| 2019-05-09T11:45:31
| 2019-05-09T11:45:31
| 185,787,665
| 4
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 119
|
py
|
#!/usr/bin/env python
#coding=utf-8
from pyc import CmdListen
if __name__ == '__main__':
CmdListen.CmdListen()
|
[
"chenyf@eaibot.com"
] |
chenyf@eaibot.com
|
7a8ba5d7fb2b61da2f29f2a29bc04fdcfeb50bf0
|
3b14626000c0178ba1144c0896504e2959181c6d
|
/Log to website with API key and header/Log_to_website_with_API_key_and_header.py
|
e094791e5a22a2e7045b18b3acdbb31897c61c26
|
[] |
no_license
|
Hubertius/Python
|
a8c5e3520911502c929fd7f4d410288658112d4c
|
45307945dbface13d5a60d0bd29e8a0ea527427d
|
refs/heads/master
| 2023-03-26T04:02:49.426482
| 2021-03-21T22:35:44
| 2021-03-21T22:35:44
| 272,454,773
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 383
|
py
|
import requests
import json
import webbrowser
from pprint import pprint
headers = {
"x-api-key" : "2c5bc730-4156-45a3-bbb6-b00b61bbf38b"
}
r = requests.get("https://api.thecatapi.com/v1/favourites", headers = headers)
try:
content = r.json()
except ValueError:  # the response body was not valid JSON
print("Wrong format!")
exit(1)
else:
print("Everything is okay!")
print(content)
exit(0)
|
[
"hubert.kopec@interia.pl"
] |
hubert.kopec@interia.pl
|
4d2bbeda5a24252a7ea6f1885c51c6d93506113d
|
f576f0ea3725d54bd2551883901b25b863fe6688
|
/sdk/ml/azure-ai-ml/tests/test_configs/internal/batch_inference/batch_score.py
|
d4d6442af85f63539fa31e84a937662258cbcb2d
|
[
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-python-cwi",
"LGPL-2.1-or-later",
"PSF-2.0",
"LGPL-2.0-or-later",
"GPL-3.0-or-later",
"GPL-1.0-or-later",
"LicenseRef-scancode-warranty-disclaimer",
"LGPL-2.1-only",
"Python-2.0",
"MPL-2.0",
"LicenseRef-scancode-other-copyleft",
"HPND",
"ODbL-1.0",
"GPL-3.0-only",
"ZPL-2.1",
"MIT",
"Apache-2.0",
"BSD-2-Clause",
"BSD-3-Clause",
"LicenseRef-scancode-free-unknown"
] |
permissive
|
Azure/azure-sdk-for-python
|
02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c
|
c2ca191e736bb06bfbbbc9493e8325763ba990bb
|
refs/heads/main
| 2023-09-06T09:30:13.135012
| 2023-09-06T01:08:06
| 2023-09-06T01:08:06
| 4,127,088
| 4,046
| 2,755
|
MIT
| 2023-09-14T21:48:49
| 2012-04-24T16:46:12
|
Python
|
UTF-8
|
Python
| false
| false
| 1,753
|
py
|
import argparse
import os
from uuid import uuid4
import numpy as np
import pandas as pd
import tensorflow as tf
from PIL import Image
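# init() and run(mini_batch) below follow the entry-point contract of Azure ML
# batch inference (ParallelRunStep): init() runs once per worker process,
# run() once per mini-batch of input files.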
def init():
global g_tf_sess
global output_folder
# Get model from the model dir
parser = argparse.ArgumentParser()
parser.add_argument("--model_path")
parser.add_argument("--scored_dataset")
args, _ = parser.parse_known_args()
model_path = args.model_path
output_folder = args.scored_dataset
    # construct the graph to execute
tf.reset_default_graph()
saver = tf.train.import_meta_graph(os.path.join(model_path, "mnist-tf.model.meta"))
g_tf_sess = tf.Session(config=tf.ConfigProto(device_count={"GPU": 0}))
saver.restore(g_tf_sess, os.path.join(model_path, "mnist-tf.model"))
def run(mini_batch):
print(f"run method start: {__file__}, run({mini_batch})")
in_tensor = g_tf_sess.graph.get_tensor_by_name("network/X:0")
output = g_tf_sess.graph.get_tensor_by_name("network/output/MatMul:0")
results = []
for image in mini_batch:
# prepare each image
data = Image.open(image)
np_im = np.array(data).reshape((1, 784))
# perform inference
inference_result = output.eval(feed_dict={in_tensor: np_im}, session=g_tf_sess)
# find best probability, and add to result list
best_result = np.argmax(inference_result)
results.append([os.path.basename(image), best_result])
# Write the dataframe to parquet file in the output folder.
result_df = pd.DataFrame(results, columns=["Filename", "Class"])
print("Result:")
print(result_df)
output_file = os.path.join(output_folder, f"{uuid4().hex}.parquet")
result_df.to_parquet(output_file, index=False)
return result_df
|
[
"noreply@github.com"
] |
Azure.noreply@github.com
|
5dbdc097a44a379630fc759171547ca5ff324bb1
|
e84200c16fc563e710f26083b6336350030411eb
|
/day11/canvas_plotter.py
|
9ef49089e753b83f7ad274268306f4ccc1533398
|
[] |
no_license
|
cadolphs/advent_of_code_2019
|
e178dc34fda5f0476e650e2baa922684dea8ebe5
|
5a95d22491cba6c0796b0259f31c6a8f7d66cd00
|
refs/heads/master
| 2020-09-23T09:15:39.230305
| 2019-12-18T21:56:01
| 2019-12-18T21:56:01
| 225,462,788
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 628
|
py
|
from helpers import Coord
class CanvasPlotter:
def __init__(self, symbol="█", invert=False):
self.symbol = symbol
self.space = " "
self.invert = invert
def plot(self, canvas):
symbols = (self.symbol, self.space)
if self.invert:
symbols = (symbols[1], symbols[0])
top_left, bottom_right = canvas.get_boundaries()
for row in range(top_left.y, bottom_right.y + 1):
for col in range(top_left.x, bottom_right.x + 1):
item = canvas.get(Coord(col, row))
print(f"{symbols[item]}", end="")
print()
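# A usage sketch, assuming a canvas object that exposes
# get_boundaries() -> (Coord, Coord) and get(Coord) -> 0 or 1 as used above:
#   plotter = CanvasPlotter(symbol="#")
#   plotter.plot(canvas)  # prints one text row per canvas row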
|
[
"clemens.adolphs@gmail.com"
] |
clemens.adolphs@gmail.com
|
7c4bad84f223813bde7feb2618ea930d0012bb06
|
944ce54c96c8b407bfaf7bc1d1d0d6fb6c0e7b49
|
/scripts/environment_checker.py
|
46e1384f0aaf7c175bef6117d1faa375f4527421
|
[] |
no_license
|
umd-fire-coml/2020-Object-Detection-In-Aerial-Images
|
10cb952f8e8a670b31a1cc5c0dc32152b9a604e3
|
4211f1c2f4e241245fdaeb3d3200dba562fa26ea
|
refs/heads/master
| 2023-02-03T01:21:28.787541
| 2020-12-14T00:23:27
| 2020-12-14T00:23:27
| 294,516,081
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,576
|
py
|
# Environment Checker Script:
# run 'conda env export | python3 environment_checker.py'
# Checker will tell you if you are missing a package or have the wrong version
# %%
import sys
import re
import os
# %%
#getting dependencies from environment.yml
env_yml = open(os.path.join('..', 'environment.yml'), 'r')
dependencies = {}
#skipping to dependencies
for line in env_yml:
    # skip ahead to the start of the dependencies section
    if line.strip() == "dependencies:":
        break
# %%
for line in env_yml:
#capturing packages and their versions
    match = re.match(r"^\s+-\s+([a-zA-Z]+)(?:=([0-9]+(?:\.[0-9]+)*)(?:\.\*)?)?\s*$", line)
if match != None:
package = match.groups(0)
#Storing all packages in dictionary
dependencies[package] = False
# %%
#iterating through stdin
input = sys.stdin
for line in input:
for package in dependencies:
#checking if line contains package name and if package was marked as existing
if package[0] in line and dependencies[package] != True:
#checks if version specification is necessary
if package[1] != 0:
if package[0]+'='+package[1] in line:
dependencies[package] = True
else:
dependencies[package] = "Wrong Version"
else:
dependencies[package] = True
for package in dependencies:
if package[1] != 0:
print("Your Environment has ", package[0], "=", package[1], ": ", dependencies[package])
else:
print("Your Environment has ", package[0], ": ", dependencies[package])
|
[
"wesleychen44@gmail.com"
] |
wesleychen44@gmail.com
|
9990070fa2f6b87e1cfaa225a5a3856f31948e3a
|
09cc2951898133eb44814d69f7aa023830d146bc
|
/mainapp/management/commands/save_db.py
|
180c1ed017d8eef361e782e92b38b4c0c5111014
|
[] |
no_license
|
malfin/kpk_django
|
0c82c80e32cd0195c293bd2c77b1789e7e2c30e3
|
c36a669c7b3b83f349e9cdd65bf904fcd0b22075
|
refs/heads/master
| 2023-02-14T20:13:15.236281
| 2021-01-14T04:14:39
| 2021-01-14T04:14:39
| 305,098,243
| 0
| 0
| null | 2020-11-14T12:36:05
| 2020-10-18T12:37:37
|
Python
|
UTF-8
|
Python
| false
| false
| 1,474
|
py
|
from abc import ABC
from django.core.management import BaseCommand
import json
from mainapp.models import Hosting, Category
class Command(BaseCommand, ABC):
help = 'copy of db in file'
def handle(self, *args, **options):
categories = Category.objects.all()
categories_json = []
for item in categories:
categories_json.append(
{
'name': str(item.name),
'image': str(item.image),
}
)
hosting = Hosting.objects.all()
hosting_json = []
for items in hosting:
hosting_json.append(
{
'category_name': items.category,
'name_tariff': items.name,
'desc': items.desc,
                    'price': items.prise,  # note: the model field is spelled 'prise'
'disk': items.disk,
'site': items.site,
'db': items.db,
'cpu': items.cpu,
'ram': items.ram,
'traffic': items.traffic,
'location': items.location,
'ddos': items.ddos,
'image': str(items.image),
}
)
with open('backup/categories.json', 'w', encoding='utf-8') as f:
json.dump(categories_json, f)
with open('backup/hosting.json', 'w', encoding='utf-8') as f:
json.dump(hosting_json, f)
|
[
"aiex.0115970@gmail.com"
] |
aiex.0115970@gmail.com
|
05ed16a56c221efb146df7599eee7bd0e9511778
|
1e07ebba0d691a53ed9859c4514fa0fa26096948
|
/restapi/apps.py
|
eb20a3cfa929284481bd8921b0dbda34d47b4917
|
[] |
no_license
|
frankbriones/fundacion
|
bf52a9be94348306b55506677c173428cc626fc1
|
9742d244526374aa4bbcb6c338b33a698c751a1d
|
refs/heads/master
| 2022-12-22T12:29:39.106710
| 2019-11-08T00:25:04
| 2019-11-08T00:25:04
| 191,661,945
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 125
|
py
|
from django.apps import AppConfig
class RestapiConfig(AppConfig):
name = 'restapi'
    verbose_name_plural = 'REstapis'
|
[
"frankbriones90@gmail.com"
] |
frankbriones90@gmail.com
|
cc0b2d1459810381c22ba66daa4c15a36332e9d8
|
47c2b0ac81dfe005768d264ca24e1a9874640597
|
/unidade4/eh_palindromo/eh_palindromo.py
|
9ea9910a3aa813b0ea69264540fbb7f622a72ba0
|
[] |
no_license
|
alessandroliafook/P1
|
d1033d754a78dd7f758edf7cdff2b2af47298e32
|
e17aeaf15482226020d97cb00e86889b3d0922d7
|
refs/heads/master
| 2021-01-11T04:39:56.592741
| 2016-11-13T22:10:33
| 2016-11-13T22:10:33
| 71,143,106
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 424
|
py
|
#coding: utf-8
# Eh Palíndromo (palindrome check) | Programação 1 - UFCG
# Alessandro Lia Fook Santos, 2015, (C)
frase = raw_input()
frase_c = ""
frase_i = ""
for indice in range(len(frase) -1, -1, -1):
if frase[indice] != " ":
frase_i += frase[indice]
for letra in frase:
if letra != " ":
frase_c += letra
if frase_c.lower() == frase_i.lower():
print "%s é palíndromo" % frase
else:
print "%s não é palíndromo" % frase
|
[
"alessandro.liafook@gmail.com"
] |
alessandro.liafook@gmail.com
|
a2df370f00f3b28bb4b7e44aeba2399d69206ea8
|
a9d77077f0f06f0606befd0eb8f854e4d32d728a
|
/blog/models.py
|
1939d777d543a80d65dc4ca5caa5ac604c65512e
|
[] |
no_license
|
Bharathbmn6/my-first-blog
|
020877e3cf2eab1ba0b42f77fb0a94b4d009a11a
|
01893b79f3194fb0639751c8bb63fef70c296d04
|
refs/heads/master
| 2023-02-21T01:30:20.518665
| 2021-01-21T10:09:33
| 2021-01-21T10:09:33
| 330,568,518
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,151
|
py
|
from django.conf import settings
from django.db import models
from django.utils import timezone
class Post(models.Model):
author = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
title = models.CharField(max_length=200)
text = models.TextField()
created_date = models.DateTimeField(default=timezone.now)
published_date = models.DateTimeField(blank=True, null=True)
def publish(self):
self.published_date = timezone.now()
self.save()
    def __str__(self):
        return self.title

    def approved_comments(self):
        # approved comments on this post; 'comments' is the related_name
        # defined on Comment.post below
        return self.comments.filter(approved_comment=True)

class Comment(models.Model):
    post = models.ForeignKey('blog.Post', on_delete=models.CASCADE, related_name='comments')
    author = models.CharField(max_length=200)
    text = models.TextField()
    created_date = models.DateTimeField(default=timezone.now)
    approved_comment = models.BooleanField(default=False)

    def approve(self):
        self.approved_comment = True
        self.save()

    def __str__(self):
        return self.text
# auto_now_add=True
|
[
"bharathbmn4@gmail.com"
] |
bharathbmn4@gmail.com
|
ff736333c7068ac11f49c56d1797a72760059f07
|
d590a8c72de5e2bc5c1689035cc11a126d308844
|
/migrations/versions/677b6106ce9b_.py
|
56031901b090f2616b5f9c715df0ba190f89c61c
|
[] |
no_license
|
Jefferyjasmin/todos-flask-sqlalchemy
|
6401726c4c4344d81e4cd8ae5a1516b082e6fc1e
|
f5f2d81fdcc04fcbecfb3cee57cfb9756d157588
|
refs/heads/master
| 2023-01-09T23:46:41.457200
| 2020-11-02T16:55:48
| 2020-11-02T16:55:48
| 307,803,120
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 878
|
py
|
"""empty message
Revision ID: 677b6106ce9b
Revises:
Create Date: 2020-10-27 19:17:04.330010
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '677b6106ce9b'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('user',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('email', sa.String(length=120), nullable=False),
sa.Column('password', sa.String(length=80), nullable=False),
sa.Column('is_active', sa.Boolean(), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('email')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('user')
# ### end Alembic commands ###
|
[
"Jefferyjasmin1@gmail.com"
] |
Jefferyjasmin1@gmail.com
|
7c21964d8fa6950dba18b3d0553466329e86e32d
|
18633afc38025464449d0c6ee2d0c299b5a97054
|
/itorum_test/urls.py
|
f79248b2060a576a30d37efdba77a66ed75b1034
|
[] |
no_license
|
tmbaranova/itorum_test
|
c287461f82ebaecf280e4fcdca59001718a93adc
|
8d6561ee54b53b1a93cfb11c1281c4de82869235
|
refs/heads/main
| 2023-06-28T22:30:02.568610
| 2021-07-26T15:19:10
| 2021-07-26T15:19:10
| 388,044,933
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 235
|
py
|
from django.urls import include, path
urlpatterns = [
path('api/', include('api.urls', namespace='api')),
path('accounts/', include('users.urls', namespace='users')),
path('', include('order.urls', namespace='orders')),
]
|
[
"tmb2508@gmail.com"
] |
tmb2508@gmail.com
|
2e463525331421822a9e3bcf0591feee8597483c
|
8d8133f71ddbec2c0c315d66bbf3c892d1b3340e
|
/jirare/cli.py
|
86e7e12fbb368af807d876d3a6cc1768900a116f
|
[] |
no_license
|
mancdaz/jira-re
|
04a7c8bd54d9a7c1d5383e9bdddbf6e2d54c9e22
|
1cb5fc75df675981df9751740d10801beae2f5ea
|
refs/heads/master
| 2022-12-12T23:26:41.183739
| 2019-06-18T09:13:44
| 2019-06-18T09:28:46
| 147,507,559
| 0
| 0
| null | 2022-12-08T00:44:47
| 2018-09-05T11:30:13
|
Python
|
UTF-8
|
Python
| false
| false
| 7,541
|
py
|
#!/usr/bin/env python
from jira import JIRA
from datetime import date
from collections import Counter
import argparse
import os
def get_args():
parser = argparse.ArgumentParser()
parser.add_argument('-r', '--release', action='store', dest='release')
parser.add_argument('-i', '--issue', action='store', dest='issue')
parser.add_argument('-pd', '--plan-date', action='store', dest='plan')
parser.add_argument('-p', '--project', action='store', dest='project',
default='RE')
parser.add_argument('-d', '--debug', action='store_true', dest='debug')
parser.add_argument('-ppp', action='store_true', dest='ppp')
parser.add_argument('--user', default=os.environ.get('JIRA_USER', None))
parser.add_argument('--passwd', default=os.environ.get('JIRA_PASS', None))
args = parser.parse_args()
return args
def get_release():
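    """Default to next month's RE-YYYY.MM tag unless -r/--release was given."""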
if args.release:
release = args.release
else:
t = date.today()
if t.month == 12:
month = 1
year = t.year + 1
else:
year = t.year
month = t.month + 1
release = "RE-%d.%02d" % (year, month)
return release
def get_jira_fieldname(field):
fields = jira.fields()
for i in fields:
if i['name'] == field:
return i['key']
def get_epic_link(issue, epic_link_fieldname):
iss = jira.issue(issue)
return iss.raw['fields'][epic_link_fieldname]
def get_plan_date():
if args.plan:
return args.plan
else:
t = date.today()
return t.strftime("%Y-%m-01")
def check_fixversion_exists(project, fixversion, jira):
    fixversions = [version.name for version in jira.project_versions(project)]
    if fixversion not in fixversions:
        print('Warning: Release %s was not found in project %s'
              % (fixversion, project))
        exit(1)
def get_issue_fixversions(issue):
fixVersions = []
for fixVersion in issue.fields.fixVersions:
fixVersions.append(fixVersion.name)
return fixVersions
def print_issues_summary(issues):
from prettytable import PrettyTable
t = PrettyTable([
'Type',
'fix',
'Status',
'Resolution',
'Key',
'Epic',
'Description'])
t.align = 'l'
epic_link_fieldname = get_jira_fieldname('Epic Link')
for issue in issues:
epic_link = get_epic_link(issue, epic_link_fieldname)
if issue.fields.status.name == 'Needs Review (doing)':
status = 'Review'
else:
status = issue.fields.status.name
if len(issue.fields.summary) > 55:
summary = (issue.fields.summary[:55] + '..')
else:
summary = issue.fields.summary
fixVersions = ','.join(get_issue_fixversions(issue))
try:
resolution = issue.fields.resolution.name
except AttributeError:
resolution = 'Unresolved'
t.add_row([issue.fields.issuetype.name,
fixVersions,
status,
resolution,
issue.key,
epic_link,
summary])
print(t)
def open_link(link):
import webbrowser
webbrowser.open(link)
def open_issue(issue):
link = 'https://rpc-openstack.atlassian.net/browse/%s' % issue
open_link(link)
def ppp_report():
print('- Release %s' % CURRENT_RELEASE)
print(' - %s total issues, %s completed, %s in progress, %s backlog'
% (len(total_items_in_release),
len(completed_items),
remaining,
backlog))
print(' - %s additional non-release items completed '
'(re-related or non-release themed bugs)'
% len(non_release_items))
if DEBUG:
open_link('https://rpc-openstack.atlassian.net/issues/?filter=14161')
last_seven_days = jira.search_issues('filter=14161')
# '(project = re OR labels = re-related) '
# 'AND type in (bug, task, sub-task) '
# 'AND resolutiondate >= -7d '
# 'AND resolution not in ("Won\'t Fix", "Won\'t Do", Duplicate) '
# 'ORDER BY resolutiondate ASC'
# )
total = len(last_seven_days)
print('\n%d issues completed in the last seven days:\n' % total)
print_issues_summary(last_seven_days)
print('Remaining release items:\t\t\t %s (%s)'
% (len(remaining_items), status_string))
if DEBUG:
print_issues_summary(remaining_items)
def normal_report():
    print('Planning Date: %s' % PLAN_DATE)
    print('Current Release: %s' % CURRENT_RELEASE)
    print('Total (non-epic) items in release:\t\t %s'
          % len(total_items_in_release))
    print()
    print('Initial release items in planning:\t\t %s' % len(initial_items))
    if DEBUG: print_issues_summary(initial_items)
    print('Additional release items since planning:\t %s' % len(addl_items))
    if DEBUG: print_issues_summary(addl_items)
    print()
    print('Completed release items:\t\t\t %s' % len(completed_items))
    if DEBUG: print_issues_summary(completed_items)
    print('Remaining release items:\t\t\t %s (%s)'
          % (len(remaining_items), status_string))
    if DEBUG: print_issues_summary(remaining_items)
    print()
    print('Completed non-release items:\t\t\t %s' % len(non_release_items))
    if DEBUG: print_issues_summary(non_release_items)
    print()
    print('Total items completed in release period:\t %s'
          % (len(completed_items) + len(non_release_items)))
args = get_args()
USER = args.user
PASS = args.passwd
PLAN_DATE = get_plan_date()
CURRENT_RELEASE = get_release()
PROJECT = args.project
DEBUG = args.debug
jira = JIRA('https://rpc-openstack.atlassian.net', basic_auth=(USER, PASS))
initial_items = jira.search_issues(
'fixVersion = %s '
'AND created <= %s '
'AND type in (bug,task) '
% (CURRENT_RELEASE, PLAN_DATE))
addl_items = jira.search_issues(
'fixVersion = %s '
'AND created > %s '
'AND type in (bug,task) '
% (CURRENT_RELEASE, PLAN_DATE))
total_items_in_release = initial_items + addl_items
completed_items = jira.search_issues(
'fixVersion = %s '
'AND type in (bug,task) '
'AND StatusCategory = Done '
'ORDER BY resolution ASC '
% CURRENT_RELEASE)
remaining_items = jira.search_issues(
'fixVersion = %s '
'AND type in (bug,task) '
'AND status != Finished '
'ORDER BY STATUS ASC '
% CURRENT_RELEASE)
non_release_items = jira.search_issues(
'('
'(Project = %s AND (fixVersion != %s OR fixVersion = null)) '
'OR (labels = re-related) '
')'
'AND type in (bug, task, sub-task) '
'AND resolutiondate >= %s '
'AND StatusCategory = Done '
'AND resolution = "Done"'
'ORDER BY resolved ASC'
% (PROJECT, CURRENT_RELEASE, PLAN_DATE))
remaining_statuses = [a.fields.status.name for a in remaining_items]
status_count = Counter(remaining_statuses)
backlog = status_count['Backlog']
remaining = sum(status_count.values()) - backlog
status_string = ', '.join([k + ': ' + str(v) for k, v in status_count.items()])
######################
# main #
######################
def main():
if args.issue:
open_issue(args.issue)
exit(0)
print('Querying project %s for issues in release %s...'
% (PROJECT, CURRENT_RELEASE))
check_fixversion_exists(PROJECT, CURRENT_RELEASE, jira)
if args.ppp:
ppp_report()
else:
normal_report()
if __name__ == "__main__":
main()
|
[
"darren.birkett@gmail.com"
] |
darren.birkett@gmail.com
|
0643879d66b9af283475aa1146a91599e8d4b1e7
|
8afb5afd38548c631f6f9536846039ef6cb297b9
|
/MY_REPOS/Lambda-Resource-Static-Assets/2-resources/_External-learning-resources/02-pyth/python-ds-master/data_structures/graphs/topological_sort.py
|
8048bf33e578b7cd1560d20b770d84cc1bdb39af
|
[
"MIT"
] |
permissive
|
bgoonz/UsefulResourceRepo2.0
|
d87588ffd668bb498f7787b896cc7b20d83ce0ad
|
2cb4b45dd14a230aa0e800042e893f8dfb23beda
|
refs/heads/master
| 2023-03-17T01:22:05.254751
| 2022-08-11T03:18:22
| 2022-08-11T03:18:22
| 382,628,698
| 10
| 12
|
MIT
| 2022-10-10T14:13:54
| 2021-07-03T13:58:52
| null |
UTF-8
|
Python
| false
| false
| 667
|
py
|
from collections import defaultdict
class Graph:
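    """Directed graph as an adjacency list, with DFS-based topological sort."""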
def __init__(self, vertices):
self.V = vertices
self.graph = defaultdict(list)
def add_edge(self, u, v):
self.graph[u].append(v)
def topo_sort_util(self, v, visited, stack):
visited[v] = True
for i in self.graph[v]:
            if not visited[i]:
self.topo_sort_util(i, visited, stack)
stack.insert(0, v)
def topo_sort(self):
visited = [False] * self.V
stack = []
for i in range(self.V):
            if not visited[i]:
self.topo_sort_util(i, visited, stack)
print(stack)
|
[
"bryan.guner@gmail.com"
] |
bryan.guner@gmail.com
|
290ec9574cff40fab577f07a62cbfc447eb5df11
|
dddc6e8a4c22d7b4533d00132780f3a34e31d986
|
/chipy_org/apps/announcements/admin.py
|
1e1aac8ab5ca26ea89ea071eca2299fd163f35ea
|
[
"MIT"
] |
permissive
|
smandekar1/chipy.org
|
38ef13fb048b00d0ddb3d519001669d3fdc5f59b
|
d70c6936a389d3e8b7c3530360f08d01aa4fa7f3
|
refs/heads/master
| 2020-07-29T15:16:00.610484
| 2019-12-26T04:39:30
| 2019-12-26T04:45:31
| 188,470,729
| 0
| 0
|
MIT
| 2019-05-24T18:37:07
| 2019-05-24T18:37:06
| null |
UTF-8
|
Python
| false
| false
| 362
|
py
|
from django.contrib import admin
from .models import Announcement
class AnnouncementAdmin(admin.ModelAdmin):
list_display = ['id', 'active', 'end_date', 'headline', 'created']
search_fields = ['id', 'headline', 'body', ]
readonly_fields = ['created', 'modified']
list_filter = ['active']
admin.site.register(Announcement, AnnouncementAdmin)
|
[
"joe.jasinski@gmail.com"
] |
joe.jasinski@gmail.com
|
8f05f5275b729a35ec9fa2cebf184c45bb58d535
|
3f54028161a869a82c1ad8f0456cc8b37b32709f
|
/level03/level03-remote-nc-ethernet-test.py
|
0b0e15f6389f3da4c281b4cfee6e1f890ae2ac46
|
[] |
no_license
|
vincemann/exploit.education-fusion-solutions
|
f66002183c1ec7b95bb54ef2aa204100192be696
|
77eb1b3060b86e4a40808d31f95934b4bbb191bb
|
refs/heads/master
| 2023-05-01T12:02:53.834241
| 2023-04-20T19:05:39
| 2023-04-20T19:05:39
| 349,859,274
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,367
|
py
|
from pwn import *
from pwnhelper.dmesg import Dmesg
import threading
############################################################################################################################################################
# GET POST REQUEST FROM REMOTE PROCESS INTO LOCALLY LISTENING NC ON ETHERNET NIC
############################################################################################################################################################
############################################################################################################################################################
# METHOD DEFINITIONS
############################################################################################################################################################
local_dir = None
remote_binary = None
cloned_binary = None
libc = None
port = None
elf = None
def connect(level):
global remote_binary
global local_dir
global cloned_binary
global port
global libc
global elf
local_dir = "/home/kali/PycharmProjects/fusion/level"+level
remote_binary = "/opt/fusion/bin/level"+level
cloned_binary = local_dir+remote_binary
port = "200"+level
s = ssh("fusion", "192.168.2.129", password="godmode", cache=True)
s.libs(remote_binary, local_dir)
elf = ELF(cloned_binary)
libc = ELF(local_dir + "/lib/i386-linux-gnu/libc.so.6")
context.clear()
context.binary = cloned_binary
context.log_file = "/tmp/docgillog"
return s
############################################################################################################################################################
# SEND JSON PAYLOAD AND RECEIVE POST REQUEST
############################################################################################################################################################
listener_port = "4000"
s = connect("03")
dmesg = Dmesg(s)
guest_ip = "192.168.2.129"
host_ip = "192.168.2.111"
def start_receiving_nc():
listener_nc = process(["nc", "-l", "-p", listener_port, guest_ip])
while True:
print("waiting for data")
print(listener_nc.recv())
thread = threading.Thread(target=start_receiving_nc)
thread.start()
sleep(1)
print("sending data")
nc = s.process(["nc", host_ip, listener_port])
nc.sendline(b"AAAA")
print("sent")
|
[
"v.conrad98@web.de"
] |
v.conrad98@web.de
|
fcd29aa6cfdd07d69f7305fd3b6a09de31a99f67
|
f72f9a688b0d04a7c02182c2bbb1f4c1ff013543
|
/ipl/workflows.py
|
9fafa864c14ac2bcbf7c1665d083d071d863b38c
|
[
"MIT"
] |
permissive
|
webdevAlina1107/ImageAnalysis
|
b33953d22692cf748e0980d811c5b4ed4aed0528
|
b925461940d98bf0288d6db47f359a74c2e2183e
|
refs/heads/master
| 2023-07-24T08:43:46.074857
| 2020-04-04T07:33:33
| 2020-04-04T07:33:33
| 245,673,471
| 1
| 0
|
MIT
| 2023-07-06T21:57:05
| 2020-03-07T17:11:18
|
Python
|
UTF-8
|
Python
| false
| false
| 16,968
|
py
|
import calendar
import datetime
import os
from itertools import chain, islice, tee
from typing import Any, Iterable, List, Optional
from warnings import warn
import numpy as np
import pandas as pd
from click import confirm
from tabulate import tabulate
from ipl import image_analysis as image_anal
from ipl import importexport as importexport
from ipl import visualization as visualization
from ipl.database import image_db as image_db
from ipl.errors import IPLError
from ipl.logging_ import logger
def _add_months(initial_date: datetime.date,
months: int):
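    """Shift initial_date forward by `months`, clamping the day to the target month's length."""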
month = initial_date.month - 1 + months
year = initial_date.year + month // 12
month = month % 12 + 1
month_range = calendar.monthrange(year, month)[1]
day = min(initial_date.day, month_range)
return datetime.date(year, month, day)
def _collect_cloudiness_data(cloudiness_data: Iterable[float],
epsilon: float = 0.01):
"""Returns non-clouded, partially clouded and fully clouded count"""
def generate_estimation_tuple(cloudiness):
return (cloudiness < epsilon,
epsilon < cloudiness < 1 - epsilon,
cloudiness > 1 - epsilon)
generate_tuples = (generate_estimation_tuple(cloudiness) for cloudiness in cloudiness_data)
non_gen, partially_gen, fully_gen = tee(generate_tuples, 3)
return (sum(1 for non, partially, fully in non_gen if non),
sum(1 for non, partially, fully in partially_gen if partially),
sum(1 for non, partially, fully in fully_gen if fully))
def _calculate_images_statistics(images_array: Iterable[np.ndarray],
index_column: Optional[np.ndarray] = None):
cloud_rate, average, std, ci_lower, ci_upper = [], [], [], [], []
for index_, image in enumerate(images_array):
average.append(np.mean(image))
std.append(np.std(image))
ci_min, ci_max = image_anal.calculate_confidence_interval(image)
ci_lower.append(ci_min)
ci_upper.append(ci_max)
cloud_rate.append(image_anal.calculate_clouds_percentile(image))
data_sources = (cloud_rate, average, std, ci_lower, ci_upper)
columns = ['cloud_rate', 'ndvi_average', 'standard_deviation', 'lower_ci', 'upper_ci']
df_initializer = {label: source for label, source in zip(columns, data_sources)}
dataframe = pd.DataFrame(df_initializer)
if index_column is not None:
dataframe['image_id'] = index_column
dataframe = dataframe.loc[:, ['image_id'] + columns]
dataframe.sort_values(by='image_id', inplace=True)
return dataframe
def _collect_images_ids(all_: bool,
field_ids: Optional[List[int]] = None,
image_ids: Optional[List[int]] = None):
database = image_db.ImageDatabaseInstance()
image_ids = [] if image_ids is None else image_ids
if all_:
return database.select_images_ids()
elif field_ids:
ids_collections = (database.select_field_images(field_id)['image_id'] for field_id in field_ids)
ids_generator = (image_id for ids_collection in ids_collections for image_id in ids_collection)
image_ids.extend(ids_generator)
return np.unique(image_ids)
def _collect_images(image_ids: List[int],
start_date: datetime.date = datetime.date.min,
end_date: datetime.date = datetime.date.max,
filtered_columns: Optional[List[str]] = None):
database = image_db.ImageDatabaseInstance()
select_image = database.select_image
for image_id in image_ids:
try:
image_data = select_image(image_id)
capture_date = image_data['capture_date'][0]
if start_date <= capture_date <= end_date:
dataframe = image_data.filter(filtered_columns) if filtered_columns else image_data
yield dataframe
except IPLError as error:
logger.warning('Error while loading image with id = %s, error : %s', image_id, error)
def batch(iterable: Iterable[Any],
batch_size: int = 1):
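    # Lazily yield successive chunks of batch_size items; pulling the first element
    # with next() lets StopIteration end the loop once the iterable is exhausted.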
try:
while True:
batch_iterator = islice(iterable, batch_size)
yield chain((next(batch_iterator),), batch_iterator)
except StopIteration:
pass
def require_extension_modules(dependencies_list):
import importlib.util as import_utils
    for package in dependencies_list:
        spec = import_utils.find_spec(package)
        if spec is None:
            warn(f'Unable to find {package} in the installed dependencies, some operations may not run successfully',
                 category=RuntimeWarning)
def process_images(file: Optional[str],
image_ids: Optional[List[int]],
field_ids: Optional[List[int]],
all_: bool,
export_location: str,
cache: bool,
batch_size: int,
**kwargs):
if file:
processed_images = [importexport.read_image_bitmap(file)]
else:
image_ids = _collect_images_ids(all_, field_ids, image_ids)
required_fields = ['image_data']
images_data = _collect_images(image_ids, filtered_columns=required_fields)
processed_images = (data['image_data'][0] for data in images_data)
image_ids = np.array(image_ids, dtype=image_anal.IMAGE_DATA_TYPE)
dataframe = _calculate_images_statistics(processed_images, image_ids)
dataframe.set_index('image_id', inplace=True)
labels = [label.replace('_', ' ').capitalize() for label in dataframe.columns.values]
print(tabulate(dataframe, headers=labels, tablefmt='psql'))
if cache and len(image_ids) != 0:
database = image_db.ImageDatabaseInstance()
for data_batch in batch(zip(image_ids, dataframe.iterrows()), batch_size):
for image_id, (index, row) in data_batch:
if not database.check_if_has_cached_statistics(image_id):
cloud_rate = row['cloud_rate']
ndvi_average = row['ndvi_average']
std = row['standard_deviation']
lower_ci = row['lower_ci']
upper_ci = row['upper_ci']
database.insert_image_statistics(image_id, cloud_rate, ndvi_average,
std, lower_ci, upper_ci)
database.connection.commit()
if export_location:
require_extension_modules(['xlsxwriter'])
dataframe.to_excel(export_location, engine='xlsxwriter',
na_rep='N/A', sheet_name='Statistics')
def database_view(field_ids: Optional[List[int]],
image_ids: Optional[List[int]],
all_: bool,
head: Optional[int],
start: datetime.date,
end: datetime.date,
**kwargs):
required_columns = ['field_id', 'image_id', 'revision',
'capture_date', 'mysterious_date', 'capture_satellite']
image_ids = _collect_images_ids(all_, field_ids, image_ids)
if head is not None:
image_ids = image_ids[:head]
images = _collect_images(image_ids, start, end, required_columns)
try:
dataframe = pd.concat(images, sort=False)
except ValueError:
print("No suitable records found !")
return
dataframe.sort_values(by=['field_id', 'image_id', 'revision'], inplace=True)
labels = [label.replace('_', ' ').capitalize() for label in required_columns]
print(tabulate(dataframe.loc[:, required_columns], headers=labels, tablefmt='psql', showindex=False))
def visualize_clouds(field_id: int,
start: datetime.date,
end: datetime.date,
**kwargs):
database = image_db.ImageDatabaseInstance()
required_columns = ['image_id', 'cloud_rate', 'capture_date']
cached_statistics = database.select_field_statistics(field_id,
filtered_columns=required_columns,
date_start=start,
date_end=end)
calculated_images_set = set(cached_statistics['image_id'])
cloud_rates = list(cached_statistics['cloud_rate'])
capture_dates = list(cached_statistics['capture_date'])
del cached_statistics
required_columns = ['image_id', 'image_data', 'capture_date']
all_images = database.select_field_images(field_id=field_id,
filtered_columns=required_columns,
date_start=start,
date_end=end)
for index, row in all_images.iterrows():
image_id = row['image_id']
if image_id not in calculated_images_set:
image_bitmap = row['image_data']
cloud_rate = image_anal.calculate_clouds_percentile(image_bitmap)
cloud_rates.append(cloud_rate)
capture_dates.append(row['capture_date'])
assert len(cloud_rates) == len(capture_dates)
if len(capture_dates) > 0:
capture_dates, cloud_rates = zip(*sorted(zip(capture_dates, cloud_rates)))
minimal_goal_date = _add_months(capture_dates[0], 1)
dates_list = []
statistics_arrays = [[], [], []]
index_start, index_end = None, 0
def _update_statistics(new_date: datetime.date):
dates_list.append(new_date)
cloud_rates_generator = islice(cloud_rates, index_start, index_end)
for index_, statistics_item in enumerate(_collect_cloudiness_data(cloud_rates_generator)):
statistics_arrays[index_].append(statistics_item)
for index, date in enumerate(capture_dates):
if date > minimal_goal_date:
index_start, index_end = index_end, index
_update_statistics(date)
minimal_goal_date = _add_months(date, 1)
index_start, index_end = index_end, len(capture_dates)
_update_statistics(minimal_goal_date)
visualization.plot_clouds_impact_for_a_period(dates_list, *statistics_arrays)
visualization.show_plots()
else:
print('Unable to start visualization, no data found')
def visualize_occurrences(file: Optional[str],
image_id: Optional[int],
**kwargs):
if file:
bitmap = importexport.read_image_bitmap(file)
elif image_id is not None:
database = image_db.ImageDatabaseInstance()
bitmap = database.select_image(image_id)['image_data'][0]
else:
raise IPLError('Specify file or image_id for occurrences visualization')
unique_value_occurs = image_anal.construct_values_occurrences_map(bitmap)
if 0 in unique_value_occurs.keys():
del unique_value_occurs[0]
visualization.plot_values_frequencies(unique_value_occurs)
visualization.show_plots()
def visualize_statistics(field_ids: List[str],
start: datetime.date,
end: datetime.date,
max_cloudiness: float,
**kwargs):
database = image_db.ImageDatabaseInstance()
required_fields = ['image_id', 'capture_date', 'index_weighted_avg',
'confidence_interval_lower', 'confidence_interval_upper']
have_plotted_anything = False
for field_id in field_ids:
cached_statistics = database.select_field_statistics(field_id=field_id,
filtered_columns=required_fields,
date_start=start,
date_end=end,
max_cloudiness=max_cloudiness)
print(cached_statistics)
cached_images_ids = set(cached_statistics['image_id'])
locally_required_fields = ['image_id', 'image_data', 'capture_date']
other_images = database.select_field_images(field_id=field_id,
filtered_columns=locally_required_fields,
date_start=start,
date_end=end)
for index, image in other_images.iterrows():
image_id = image['image_id']
if image_id not in cached_images_ids:
bitmap: np.ndarray = image['image_data']
cloud_rate = image_anal.calculate_clouds_percentile(bitmap)
if cloud_rate <= max_cloudiness:
capture_date = image['capture_date']
mean = np.nanmean(bitmap)
lower_ci, upper_ci = image_anal.calculate_confidence_interval(bitmap)
series = pd.Series([image_id, capture_date, mean, lower_ci, upper_ci],
index=cached_statistics.columns)
cached_statistics = cached_statistics.append(series, ignore_index=True)
if cached_statistics.shape and cached_statistics.shape[0]:
cached_statistics.sort_values(by='capture_date', inplace=True)
visualization.plot_statistics_for_a_period(time_stamps=cached_statistics['capture_date'],
mean=cached_statistics['index_weighted_avg'],
lower_ci=cached_statistics['confidence_interval_lower'],
upper_ci=cached_statistics['confidence_interval_upper'],
legend_name=str(field_id))
have_plotted_anything = True
if have_plotted_anything:
visualization.show_plots()
else:
print('Unable to start visualization, no data found')
def import_images(import_location: List[str],
cache: bool,
batch_size: int,
**kwargs):
database = image_db.ImageDatabaseInstance()
for location in import_location:
if os.path.isdir(location):
images_data = importexport.import_images_folder(location)
else:
images_data = filter(lambda data: data is not None,
(importexport.import_locally_stored_image(location),))
for images_batch in batch(images_data, batch_size):
for image_data in images_batch:
file_path, db_info = image_data
image_id = database.insert_image(*db_info)
if image_id is not None:
if cache:
bitmap = db_info[2]
statistics = image_anal.calculate_all_statistics(bitmap)
database.insert_image_statistics(image_id, *statistics)
else:
logger.warning('Unable to import file "%s", it already exists in a database', file_path)
database.connection.commit()
database.make_vacuum()
def export_images(export_location: str,
field_ids: Optional[List[int]],
image_ids: Optional[List[int]],
all_: bool,
start: datetime.date,
end: datetime.date,
driver: str,
force: bool,
**kwargs):
if not os.path.isdir(export_location):
if force:
os.makedirs(export_location, exist_ok=True)
else:
raise IPLError('Unable to export data to non-existent directory')
image_ids = _collect_images_ids(all_, field_ids, image_ids)
image_data = (dataframe.iloc[0] for dataframe in _collect_images(image_ids, start, end))
selected_extension = importexport.SupportedDrivers[driver].value
for dataframe in image_data:
capture_date = dataframe['capture_date'].strftime("%d%m%Y")
satellite = dataframe['capture_satellite']
mysterious_date = dataframe['mysterious_date'].strftime("P%Y%m%d")
field_id = dataframe['field_id']
revision = dataframe['revision']
bitmap = dataframe['image_data']
file_name = (f'{capture_date}_{field_id}r{revision}_NDVI_'
f'{mysterious_date}_{satellite}.{selected_extension}')
file_path = os.path.join(export_location, file_name)
importexport.write_image_bitmap(file_path, bitmap, driver)
def reset_database(confirmed: bool,
**kwargs):
if not confirmed:
confirmed = confirm('Do you really want to erase all stored data ?')
if confirmed:
database = image_db.ImageDatabaseInstance()
database.erase_all()
|
[
"hpcproyalplay19"
] |
hpcproyalplay19
|
643c6e972bd9287d061427c23d01dc78f74914be
|
b510f89dd40c0ed6bda55230fe4615d16de07251
|
/picbackend/views/v2/case_management_module_views/individual_cm_step_views/__init__.py
|
b8eb3fce756c514faee9c91ef35158dc339d0e3a
|
[
"MIT"
] |
permissive
|
bbcawodu/careadvisors-backend
|
42676d269679c487a97a8870339bbded27a0ccf7
|
5ebd3c0fc189b2486cea92b2a13c0bd8a0ee3838
|
refs/heads/master
| 2020-03-25T12:47:05.267866
| 2018-08-14T16:29:02
| 2018-08-14T16:29:02
| 143,793,322
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 148
|
py
|
from .default_enrollment_step_1_views import *
from .default_enrollment_step_2_views import *
from .default_enrollment_step_complete_views import *
|
[
"awodubradley@gmail.com"
] |
awodubradley@gmail.com
|
2e7b6835ac6ed356f1d768363d0cc48228ce62fc
|
bea252fd3ac7e04b24be5b44a3dd277e804f6933
|
/button.py
|
6d943bb45efbba1acf2da24f713494194bf6d094
|
[] |
no_license
|
RubbishBird/BoatGame
|
fb3a454c60347136930d85cd59ec2d98d988101e
|
ea0ee66746ebbcc204dbd8d7c080373c3d56f428
|
refs/heads/master
| 2020-03-27T04:45:27.749979
| 2018-08-24T08:58:42
| 2018-08-24T08:58:42
| 145,965,945
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,856
|
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# @Time : 2018/8/23 13:29
# @Author : feng
# @Site :
# @File : button.py
# @Software: PyCharm
import pygame.font  # import pygame.font so pygame can render text to the screen


class Button():

    def __init__(self, screen, msg):
        '''Initialize the button's attributes'''
        self.screen = screen
        self.screen_rect = screen.get_rect()

        # Set the button's dimensions and other properties
        self.width, self.height = 200, 50
        self.button_color = (0, 255, 0)
        self.text_color = (255, 255, 255)
        # Choose the font used to render the text; None means the default font, 48 is the font size
        self.font = pygame.font.SysFont(None, 48)

        # Build the button's rect object and center it on the screen
        self.rect = pygame.Rect(0, 0, self.width, self.height)
        self.rect.center = self.screen_rect.center

        # The button message only needs to be prepped once
        self.prep_msg(msg)

    def prep_msg(self, msg):
        '''Render msg as an image and center it on the button'''
        # font.render() turns the text stored in msg into an image; True toggles
        # anti-aliasing, which makes the edges of the text smoother
        self.msg_image = self.font.render(msg, True, self.text_color, self.button_color)
        # Create a rect from the text image and set its center to the button's center
        self.msg_image_rect = self.msg_image.get_rect()
        self.msg_image_rect.center = self.rect.center

    def draw_button(self):
        # Draw a color-filled rectangle for the button, then draw the text on top
        self.screen.fill(self.button_color, self.rect)  # screen.fill() draws the button rectangle
        self.screen.blit(self.msg_image, self.msg_image_rect)  # screen.blit() draws the text image at its rect
|
[
"1158784496@qq.com"
] |
1158784496@qq.com
|
2bb92dcd597fb51a8a2d2e9fc100157de0b1bf7c
|
01cbdf69ea6bf89c1aefc5441b29446a2e1da897
|
/friends_snack_and_lenght.py
|
8c21ecaa9837e4401776e7ad04abf6099520b701
|
[] |
no_license
|
hannavw/dsf_exercises
|
7230e02e5d5b3e605f1cf9a10a49189a8ca966e0
|
095be8b7e52a421aab49ee0384f5e04ee999b7a7
|
refs/heads/master
| 2020-03-30T12:47:37.288857
| 2018-10-22T10:56:42
| 2018-10-22T10:56:42
| 151,240,645
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 609
|
py
|
# Put my friends in a list
friends = ["Marleen", "Femke", "Nienke"]
snacks = []
# Ask each friend for snack and add snack to a list
index = 0
for friend in friends:
snack = input(friend + ", What's your favourite snack? ")
snacks.append(snack)
index = index + 1
# Loop through friends and place a snack from the snack-list after each friend.
index = 0
for friend in friends:
length = len(friends[index])
snack = snacks[index]
    print(friend + ", your favourite snack is: " + snack + ", and your name has " + str(length) + " characters.")
index = index + 1
|
[
"noreply@github.com"
] |
hannavw.noreply@github.com
|
313417e6434e002faaccfe8586c2fa9cce202efc
|
85e31e46223336c2fb9e1468a8495d5d9a46ba07
|
/helper_funcs/loadTimeTest.py
|
ffceeee4af0355d73de9a4d37e97c95e6361f4d2
|
[] |
no_license
|
craigiedon/dl2_lfd
|
74d596b6487db4e0aa124118aba00a48beeb2b81
|
9ddc279f4e317b4a8ed08f0df26e7d49d1416b00
|
refs/heads/master
| 2021-06-26T04:31:53.777786
| 2020-12-10T18:38:42
| 2020-12-10T18:38:42
| 179,474,150
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,001
|
py
|
import time
import torch
# from torchvision.transforms import Compose, Resize, ToTensor, ColorJitter
from torchvision.transforms import Compose
from helper_funcs.transforms import Crop, Resize, ToTensor, ColorJitter
from load_data import load_demos
from helper_funcs.utils import load_json
exp_config = load_json("config/experiment_config.json")
im_params = exp_config["image_config"]
im_trans_1 = Compose([ColorJitter(),
Crop(im_params["crop_top"], im_params["crop_left"],
im_params["crop_height"], im_params["crop_width"]),
Resize(im_params["resize_height"],
im_params["resize_width"]),
ToTensor()])
im_trans_2 = Compose([ColorJitter(),
Crop(im_params["crop_top"], im_params["crop_left"],
im_params["crop_height"], im_params["crop_width"]),
Resize(im_params["resize_height"],
im_params["resize_width"])])
im_trans_3 = Compose([ColorJitter(),
Crop(im_params["crop_top"], im_params["crop_left"],
im_params["crop_height"], im_params["crop_width"])])
im_trans_4 = Compose([ColorJitter()])
im_trans_5 = Compose([
Crop(im_params["crop_top"], im_params["crop_left"],
im_params["crop_height"], im_params["crop_width"]),
Resize(im_params["resize_height"],
im_params["resize_width"]),
ToTensor()
])
trans_options = [im_trans_1, im_trans_2, im_trans_3, im_trans_4, im_trans_5, None]
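# Time a one-demo load under each transform pipeline (and with no transform)
# to see which preprocessing stage dominates load time.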
for i, im_trans in enumerate(trans_options):
start = time.time()
load_demos(
exp_config["demo_folder"],
im_params["file_glob"],
exp_config["batch_size"],
exp_config["nn_joint_names"],
im_trans,
True,
torch.device("cuda"),
from_demo=0,
to_demo=1,
skip_count=1)
end = time.time()
print("Transform {}: {}".format(i, end - start))
|
[
"craiginnes@gmail.com"
] |
craiginnes@gmail.com
|
6febcba8b69ca66a320575abc241fe8583e7ce3e
|
f2f01160196f210f0e4846b642517e548ffbed05
|
/main.py
|
f2fac0461c2a1bb75b7a5d26e761ae42aa3c7372
|
[] |
no_license
|
Zohair-coder/Drexel-Course-Availability-Notifier
|
8c4db47bdd9643f99ea5bf93aaf4b96fa03c868c
|
97a9a75b2d2d5e9e16b9b07ec71d0cbf281d3195
|
refs/heads/master
| 2023-04-08T14:01:40.116622
| 2021-04-03T19:25:24
| 2021-04-03T19:25:24
| 296,162,935
| 8
| 1
| null | 2020-10-27T02:40:06
| 2020-09-16T22:41:47
|
Python
|
UTF-8
|
Python
| false
| false
| 134
|
py
|
#!/usr/bin/env python3
import course_notifier
import course_url_finder
url = course_url_finder.find()
course_notifier.Notifier(url)
|
[
"zohair.ul.hasan@gmail.com"
] |
zohair.ul.hasan@gmail.com
|
b4ff7d5d5670b0146bc9005325642673f4776779
|
bcfaf36daab340a4c8c081e1d2fc8fd75e895cd1
|
/src/spcount/visualization_util.py
|
664d32d830c350add7483f8057130688a5a60c85
|
[
"MIT"
] |
permissive
|
shengqh/spcount
|
2e21f0a4deae50a8a7e08cb5629980356f7339cf
|
74df822b8d075a9204d472b527b3d8130f355f27
|
refs/heads/master
| 2022-11-30T22:59:11.157491
| 2022-11-24T22:55:10
| 2022-11-24T22:55:10
| 249,027,208
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,619
|
py
|
import pandas as pd
import subprocess
import logging
import os
def read_taxonomy_name_map(taxonomy_file):
name_map = {}
with open(taxonomy_file, "rt") as fin:
for line in fin:
parts = line.rstrip().split('\t')
name_map[parts[4]] = {
"id":int(parts[0]),
"rank_level": int(parts[1]),
"parent_id": int(parts[2]),
"rank": parts[3],
"name": parts[4]
}
    return name_map
def draw_krona(logger, treeFile, taxonomyFolder, outputPrefix):
tree_data = pd.read_csv(treeFile, sep="\t")
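    # First three columns are Feature, TaxonomyId and Rank; per-sample counts start at column 3.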
for ind in range(3, tree_data.shape[1]):
sample = tree_data.columns[ind]
logger.info(f"processing {sample} ...")
count_file = f"{outputPrefix}.{sample}.txt"
count_df = tree_data.iloc[:,[0,1,ind]]
count_df = count_df[count_df.iloc[:,2] > 0]
count_df.to_csv(count_file, sep="\t", index=None, header=None)
args = ['ktImportTaxonomy', '-o', f"{outputPrefix}.{sample}.html",
'-tax', taxonomyFolder,
'-m', '3',
f"{count_file},{sample}"]
logger.info(" ".join(args))
subprocess.call(args)
def get_taxonomy_id(name_map, row):
return (name_map[row['Feature']]['id'])
def get_rank(name_map, row):
return (name_map[row['Feature']]['rank'])
def krona(logger, treeFile, groupFile, taxonomyFolder, outputPrefix):
tree_data = pd.read_csv(treeFile, sep="\t")
if tree_data.columns[1] != "TaxonomyId":
name_map = read_taxonomy_name_map(os.path.join(taxonomyFolder, "taxonomy.tab"))
tree_data.insert(loc=1, column="TaxonomyId", value=tree_data.apply(lambda row: get_taxonomy_id(name_map, row), axis=1))
tree_data.insert(loc=2, column="Rank", value=tree_data.apply(lambda row: get_rank(name_map, row), axis=1))
treeFile=outputPrefix + ".tree.count"
tree_data.to_csv(treeFile, sep="\t", index=False)
logger.info("Start sample krona ...")
draw_krona(logger, treeFile, taxonomyFolder, outputPrefix)
logger.info("Start group krona ...")
logger.info(f"Read group info {groupFile} ...")
groups_df=pd.read_csv(groupFile, sep="\t", header=None)
groups_df.rename(columns={groups_df.columns[0]:"sample_name", groups_df.columns.values[1]:"group_name"}, inplace=True)
sample_matched = groups_df.sample_name.isin(tree_data.columns)
if not sample_matched.all():
unmatched = groups_df[~sample_matched].sample_name.tolist()
raise Exception(f"Check your data file {treeFile} and group file {groupFile}.\nSome samples in group file were not found in data file columns: " + ",".join(unmatched))
group_data=tree_data.iloc[:, [0,1,2]]
group_data['All'] = tree_data.iloc[:,3:].sum(axis=1)
for gf in groups_df.groupby('group_name'):
gname = gf[0]
gdf = gf[1]
gsamples = gdf['sample_name'].tolist()
group_data[gname] = tree_data[gsamples].sum(axis=1)
group_file = outputPrefix + ".group.txt"
group_data.to_csv(group_file, sep="\t", index=None)
draw_krona(logger, group_file, taxonomyFolder, outputPrefix)
logger.info("done")
if __name__ == "__main__":
logger = logging.getLogger('spcount')
krona(logger, "/scratch/vickers_lab/projects/20220707_4893_2_RA_smRNA_mouse_v5_forSpcount/nonhost_genome/refseq_bacteria_table/result/RA_4893_2.species.estimated.count", "/scratch/vickers_lab/projects/20220707_4893_2_RA_smRNA_mouse_v5_forSpcount/data_visualization/refseq_bacteria_krona_estimated/result/RA_4893_2__fileList1.list", "/data/cqs/references/spcount/", "/scratch/vickers_lab/projects/20220707_4893_2_RA_smRNA_mouse_v5_forSpcount/data_visualization/refseq_bacteria_krona_estimated/result/RA_4893_2")
|
[
"shengqh@gmail.com"
] |
shengqh@gmail.com
|
00ca546b482e9a3357efcedfb35eba995a15167a
|
233730f1cfd0ade13cfe40c02ce6909c99743dcf
|
/test/experiments_real_data/benchmark_test.py
|
653d28f03cb99b87d31925a4999db358684e2be9
|
[] |
no_license
|
zubekj/meta-learning-evaluation
|
e4279fae3fee2ef9dc2049dba228dad59780b7f4
|
a2a2e9e901c13e112ae7267fd258645864d421e2
|
refs/heads/master
| 2016-09-05T20:36:52.390110
| 2013-03-05T12:27:22
| 2013-03-05T12:27:22
| 6,616,526
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,206
|
py
|
import sys
import unittest
import copy
import Orange
sys.path.append('../../src/experiments_real_data/')
sys.path.append('../../src/')
from benchmark import *
from utils.cSimilarity import *
#from utils.similarity import *
class TestBenchmark(unittest.TestCase):
def test_build_set_list_desc_similarity(self):
data = Orange.data.Table("test.tab")
data1 = [data[0], data[1]]
data2 = [data[3], data[1]]
data3 = [data[3], data[2]]
l = build_set_list_desc_similarity(data, 0.5)
self.assertEqual(list(l[0]), data1)
self.assertEqual(list(l[1]), data2)
self.assertEqual(list(l[2]), data3)
def test_build_set_list_desc_similarity_long(self):
data = Orange.data.Table("iris")
def test_metric(metric_fun):
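            # Sets come back ordered by descending similarity to the first set,
            # so pairwise distances from l[0] must be non-decreasing.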
l = build_set_list_desc_similarity(data, 0.5, metric_fun)
dists = [datasets_distance(l[0], x, metric_fun) for x in l]
for i in xrange(1,len(dists)):
self.assertGreaterEqual(dists[i], dists[i-1])
self.assertGreater(dists[-1], dists[0])
test_metric(hamming)
test_metric(euclidean)
if __name__ == '__main__':
unittest.main()
|
[
"zubekj@gmail.com"
] |
zubekj@gmail.com
|
7d3518fccbe5699ef74e58ed18431338cd610828
|
d4a48ff382b986aa490a7ea36a16c2f32da06476
|
/HeapTests.py
|
b6b9dede75858d6c470f4f76cce1a5123d42a5c8
|
[] |
no_license
|
samiur98/Data-Structures-Python
|
b26009fc31a8db161f1f8b648844cd789bce755f
|
7eb3639da88c32845d83c9124876ac984304bd5d
|
refs/heads/master
| 2020-07-06T10:57:18.846657
| 2019-08-23T18:52:45
| 2019-08-23T18:52:45
| 202,994,127
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,181
|
py
|
import unittest
from Heaps import *
class HeapTests(unittest.TestCase):
#Test Cases for the MinHeap Data Structure.
def test_is_empty(self):
#Test Cases for the is_empty method.
heap = MinHeap()
self.assertTrue(heap.is_empty())
heap.insert(1)
self.assertFalse(heap.is_empty())
heap.insert(2)
self.assertFalse(heap.is_empty())
heap.delMin()
self.assertFalse(heap.is_empty())
heap.delMin()
self.assertTrue(heap.is_empty())
def test_perc_up(self):
#Test Cases for the percUp method.
heap = MinHeap()
heap.elements.append(3)
heap.elements.append(4)
heap.elements.append(5)
heap.size = 3
self.assertEqual(heap.elements[1], 3)
self.assertEqual(heap.elements[2], 4)
self.assertEqual(heap.elements[3], 5)
heap.percUp(2)
heap.percUp(3)
self.assertEqual(heap.elements[1], 3)
self.assertEqual(heap.elements[2], 4)
self.assertEqual(heap.elements[3], 5)
heap2 = MinHeap()
heap2.elements.append(5)
heap2.elements.append(4)
heap2.elements.append(3)
heap2.size = 3
self.assertEqual(heap2.elements[1], 5)
self.assertEqual(heap2.elements[2], 4)
self.assertEqual(heap2.elements[3], 3)
heap2.percUp(3)
self.assertEqual(heap2.elements[1], 3)
self.assertEqual(heap2.elements[2], 4)
self.assertEqual(heap2.elements[3], 5)
heap3 = MinHeap()
heap3.elements.append(4)
heap3.elements.append(3)
heap3.elements.append(5)
heap3.size = 3
self.assertEqual(heap3.elements[1], 4)
self.assertEqual(heap3.elements[2], 3)
self.assertEqual(heap3.elements[3], 5)
heap3.percUp(2)
self.assertEqual(heap3.elements[1], 3)
self.assertEqual(heap3.elements[2], 4)
self.assertEqual(heap3.elements[3], 5)
heap4 = MinHeap()
heap4.elements.append(29)
heap4.elements.append(18)
heap4.elements.append(17)
heap4.elements.append(30)
heap4.elements.append(41)
heap4.elements.append(3)
heap4.elements.append(44)
heap4.size = 7
self.assertEqual(heap4.elements[1], 29)
self.assertEqual(heap4.elements[3], 17)
self.assertEqual(heap4.elements[6], 3)
heap4.percUp(6)
self.assertEqual(heap4.elements[1], 3)
self.assertEqual(heap4.elements[3], 29)
self.assertEqual(heap4.elements[6], 17)
def test_insert(self):
#Tests Cases for the insert method.
heap = MinHeap()
self.assertTrue(heap.is_empty())
heap.insert(5)
self.assertFalse(heap.is_empty())
self.assertEqual(heap.elements[1], 5)
self.assertEqual(heap.size, 1)
heap.insert(17)
self.assertEqual(heap.elements[2], 17)
self.assertEqual(heap.size, 2)
heap.insert(18)
self.assertEqual(heap.elements[3], 18)
self.assertEqual(heap.size, 3)
heap.insert(23)
self.assertEqual(heap.elements[4], 23)
self.assertEqual(heap.size, 4)
heap.insert(3)
self.assertEqual(heap.elements[1], 3)
self.assertEqual(heap.elements[2], 5)
self.assertEqual(heap.elements[5], 17)
self.assertEqual(heap.size, 5)
heap.insert(7)
self.assertEqual(heap.elements[1], 3)
self.assertEqual(heap.elements[3], 7)
self.assertEqual(heap.elements[6], 18)
def test_perc_down(self):
#Test Cases for the perc_down method.
heap = MinHeap()
heap.elements.append(5)
heap.elements.append(3)
heap.elements.append(4)
heap.size = 3
self.assertEqual(heap.elements[1], 5)
self.assertEqual(heap.elements[2], 3)
self.assertEqual(heap.elements[3], 4)
heap.percDown(1)
self.assertEqual(heap.elements[1], 3)
self.assertEqual(heap.elements[2], 5)
self.assertEqual(heap.elements[3], 4)
heap2 = MinHeap()
heap2.elements.append(5)
heap2.elements.append(4)
heap2.elements.append(3)
heap2.size = 3
self.assertEqual(heap2.elements[1], 5)
self.assertEqual(heap2.elements[2], 4)
self.assertEqual(heap2.elements[3], 3)
heap2.percDown(1)
self.assertEqual(heap2.elements[1], 3)
self.assertEqual(heap2.elements[2], 4)
self.assertEqual(heap2.elements[3], 5)
heap3 = MinHeap()
heap3.elements.append(3)
heap3.elements.append(4)
heap3.elements.append(5)
self.assertEqual(heap3.elements[1], 3)
self.assertEqual(heap3.elements[2], 4)
self.assertEqual(heap3.elements[3], 5)
heap3.percDown(1)
self.assertEqual(heap3.elements[1], 3)
self.assertEqual(heap3.elements[2], 4)
self.assertEqual(heap3.elements[3], 5)
heap4 = MinHeap()
heap4.elements.append(30)
heap4.elements.append(33)
heap4.elements.append(25)
heap4.elements.append(43)
heap4.elements.append(44)
heap4.elements.append(28)
heap4.elements.append(27)
heap4.size = 7
self.assertEqual(heap4.elements[1], 30)
self.assertEqual(heap4.elements[3], 25)
self.assertEqual(heap4.elements[7], 27)
heap4.percDown(1)
self.assertEqual(heap4.elements[1], 25)
self.assertEqual(heap4.elements[3], 27)
self.assertEqual(heap4.elements[7], 30)
def test_del_min(self):
#Test Cases for the delMin method.
heap = MinHeap()
self.assertTrue(heap.is_empty())
value1 = heap.delMin()
self.assertEqual(value1, None)
heap.insert(7)
value2 = heap.delMin()
self.assertEqual(value2, 7)
self.assertTrue(heap.is_empty())
heap.insert(3)
heap.insert(12)
heap.insert(15)
heap.insert(23)
heap.insert(21)
heap.insert(29)
heap.insert(35)
value3 = heap.delMin()
self.assertFalse(heap.is_empty())
self.assertEqual(heap.size, 6)
self.assertEqual(value3, 3)
self.assertEqual(heap.elements[1], 12)
self.assertEqual(heap.elements[2], 21)
self.assertEqual(heap.elements[3], 15)
self.assertEqual(heap.elements[4], 23)
self.assertEqual(heap.elements[5], 35)
self.assertEqual(heap.elements[6], 29)
value4 = heap.delMin()
self.assertFalse(heap.is_empty())
self.assertEqual(heap.size, 5)
self.assertEqual(value4, 12)
self.assertEqual(heap.elements[1], 15)
self.assertEqual(heap.elements[2], 21)
self.assertEqual(heap.elements[3], 29)
self.assertEqual(heap.elements[4], 23)
self.assertEqual(heap.elements[5], 35)
value5 = heap.delMin()
self.assertFalse(heap.is_empty())
self.assertEqual(heap.size, 4)
self.assertEqual(value5, 15)
self.assertEqual(heap.elements[1], 21)
self.assertEqual(heap.elements[2], 23)
self.assertEqual(heap.elements[3], 29)
self.assertEqual(heap.elements[4], 35)
value6 = heap.delMin()
self.assertFalse(heap.is_empty())
self.assertEqual(heap.size, 3)
self.assertEqual(value6, 21)
self.assertEqual(heap.elements[1], 23)
self.assertEqual(heap.elements[2], 35)
self.assertEqual(heap.elements[3], 29)
value7 = heap.delMin()
self.assertFalse(heap.is_empty())
self.assertEqual(heap.size, 2)
self.assertEqual(value7, 23)
self.assertEqual(heap.elements[1], 29)
self.assertEqual(heap.elements[2], 35)
value8 = heap.delMin()
self.assertFalse(heap.is_empty())
self.assertEqual(heap.size, 1)
self.assertEqual(value8, 29)
self.assertEqual(heap.elements[1], 35)
value9 = heap.delMin()
self.assertTrue(heap.is_empty())
self.assertEqual(heap.size, 0)
self.assertEqual(value9, 35)
self.assertEqual(heap.delMin(), None)
if __name__ == '__main__':
unittest.main()
|
[
"sam@Shahs-MacBook-Pro.local"
] |
sam@Shahs-MacBook-Pro.local
|
657464f084568ac7b144aa352a5071d2baf71dad
|
85b71c0c1c2dde75e06bdc777d1a5efa73e25117
|
/astar/findPath4way.py
|
fa2282969a0b273c3af38f6a607661fb197753a0
|
[] |
no_license
|
sparshjain265/AI-Lab
|
9aba945fa1d4aaec3c9ea057d86fb71f53130627
|
f5f875c791c5948b27ef0f5e45d0ec5ae300cd87
|
refs/heads/master
| 2021-06-18T22:15:09.110436
| 2021-01-09T13:59:49
| 2021-01-09T13:59:49
| 142,866,892
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,683
|
py
|
import numpy as np
import heapq
import math
#Class grid for Environment
class grid :
def __init__(self):
self.dimension = int(input("Enter the dimension of the grid: "))
self.mat = np.reshape([0]*self.dimension*self.dimension, (self.dimension, self.dimension))
print("Enter the grid matrix: ")
for i in range(self.dimension):
temp = input().strip().split()
for j, x in zip(range(self.dimension), temp):
self.mat[i][j] = int(x)
print("Enter the starting position ")
temp = input().strip().split()
self.x = int(temp[0])
self.y = int(temp[1])
self.sx = self.x
self.sy = self.y
if(self.mat[self.x][self.y] == 0):
print("Invalid Entry point!")
return None
print("Enter the final position ")
temp = input().strip().split()
self.fx = int(temp[0])
self.fy = int(temp[1])
if(self.mat[self.fx][self.fy] == 0):
print("Invalid Final position!")
return None
#percept returns true if goal found, else false
def percept(self):
if(self.x == self.fx and self.y == self.fy):
return True
else:
return False
#Action functions
def up(self):
if(self.x == 0):
return False
if(self.mat[self.x - 1][self.y] == 0):
return False
self.x -= 1
return True
def down(self):
if(self.x == self.dimension - 1):
return False
if(self.mat[self.x + 1][self.y] == 0):
return False
self.x +=1
return True
def left(self):
if(self.y == 0):
return False
if(self.mat[self.x][self.y - 1] == 0):
return False
self.y -= 1
return True
def right(self):
if(self.y == self.dimension - 1):
return False
if(self.mat[self.x][self.y + 1] == 0):
return False
self.y += 1
return True
#Update function to change the position of agent during bfs
def update(self, sx, sy):
self.x = sx
self.y = sy
#Reset function to reset the coordinates to start
def reset(self):
self.x = self.sx
self.y = self.sy
#function heu to give heuristic
def heuristic(self, heu, counter):
if(heu == 0): #BFS
return counter
if(heu == 1): #Euclidean
return math.sqrt((self.fx - self.x)**2 + (self.fy - self.y)**2)
if(heu == 2): #Manhattan
return abs(self.fx - self.x) + abs(self.fy - self.y)
#Agent class
class Agent:
#0 = start, 1 = up, 2 = down, 3 = left, 4 = right
def __init__(self, env):
self.visited = np.reshape([-1]*env.dimension*env.dimension, (env.dimension, env.dimension))
self.parent = np.copy(self.visited)
self.p = 0
self.Q = []
heapq.heapify(self.Q)
#Function astar to calculate optimal path
#heu = 0 : BFS, 1 : euclidean, 2 : manhattan
def astar(self, env, heu):
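        # Best-first search on (priority, [x, y, path_len, action]) heap entries;
        # with heu=0 the priority is the push counter, so the search degrades to BFS.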
self.visited = np.reshape([-1]*env.dimension*env.dimension, (env.dimension, env.dimension))
self.parent = np.copy(self.visited)
self.p = 0
self.Q = []
heapq.heapify(self.Q)
counter = 0
heapq.heappush(self.Q, (env.heuristic(heu, counter), [env.x, env.y, self.p, 0]))
while(self.Q):
_, [sx, sy, self.p, action] = heapq.heappop(self.Q)
env.update(sx, sy)
if(env.percept() == True):
self.printPath(env, action)
return [self.p, counter]
if(self.visited[sx][sy] == 1):
continue
self.visited[sx][sy] = 1
self.parent[sx][sy] = action
if(env.up()):
counter += 1
heapq.heappush(self.Q, (env.heuristic(heu, counter), [env.x, env.y, self.p + 1, 1]))
env.down()
if(env.down()):
counter += 1
heapq.heappush(self.Q, (env.heuristic(heu, counter), [env.x, env.y, self.p + 1, 2]))
env.up()
if(env.left()):
counter += 1
heapq.heappush(self.Q, (env.heuristic(heu, counter), [env.x, env.y, self.p + 1, 3]))
env.right()
if(env.right()):
counter += 1
heapq.heappush(self.Q, (env.heuristic(heu, counter), [env.x, env.y, self.p + 1, 4]))
env.left()
        # No path found; mirror the success return shape so callers can unpack safely
        return [-1, counter]
#Print path by backtracking
def printPath(self, env, action):
path = []
while(action != 0):
if(action == 1):
path.append("Up")
env.down()
if(action == 2):
path.append("Down")
env.up()
if(action == 3):
path.append("Left")
env.right()
if(action == 4):
path.append("Right")
env.left()
action = self.parent[env.x][env.y]
print("Start")
while(path):
print(path.pop())
G = grid()
A = Agent(G)
print()
print("By BFS")
steps, counter = A.astar(G, 0)
print("Number of steps: " + str(steps))
print("BFS Counter: " + str(counter))
print()
print("By Euclidean Heuristic")
steps, counter = A.astar(G, 1)
print("Number of steps: " + str(steps))
print("Euclidean Counter: " + str(counter))
print()
print("By Manhattan Heuristic")
steps, counter = A.astar(G, 2)
print("Number of steps: " + str(steps))
print("Manhattan Counter: " + str(counter))
|
[
"sparshjain265@gmail.com"
] |
sparshjain265@gmail.com
|
d0b25bd740f917ac3865265a18d89be1c7f10b27
|
3dc5849f9c5ce3aa04480811a7fd1bbd7445c32f
|
/Isaac/track_object_movement/Basics/test_video.py
|
0db160488c02cbbc4bd53dc694de42ce66a18b0c
|
[] |
no_license
|
abulous/OpenCV
|
8cacfe83c751f6b791e3d57ed221e515fc005bee
|
f9a2a9fd200dde845a2207ea1224c015413110e4
|
refs/heads/master
| 2021-05-13T18:26:05.052496
| 2019-12-06T22:21:23
| 2019-12-06T22:21:23
| 116,860,853
| 3
| 5
| null | 2018-05-03T03:54:37
| 2018-01-09T19:33:03
|
Python
|
UTF-8
|
Python
| false
| false
| 757
|
py
|
from picamera.array import PiRGBArray
from picamera import PiCamera
import time
import cv2

# initialize the camera and grab a reference to the raw camera capture
camera = PiCamera()
camera.resolution = (640, 480)
camera.framerate = 32
rawCapture = PiRGBArray(camera, size=(640, 480))

# allow the camera to warm up
time.sleep(0.1)

# capture frames from the camera
for frame in camera.capture_continuous(rawCapture, format="bgr", use_video_port=True):
    # grab the raw NumPy array representing the image
    image = frame.array

    # show the frame
    cv2.imshow("Frame", image)
    key = cv2.waitKey(1) & 0xFF

    # clear the stream in preparation for the next frame
    rawCapture.truncate(0)

    # if the 'q' key was pressed, break from the loop
    if key == ord("q"):
        break
|
[
"inealey@ucsd.edu"
] |
inealey@ucsd.edu
|
4e0dfa5dba4c09dbecb89b3a3f64b7564a5d322a
|
2dcfe2659909c47c573a74b28008a7f3d7893af7
|
/install
|
ea0e00c0e75c5677c5d5b8e34a2d3f265f07d918
|
[] |
no_license
|
OpenLivePlatform/CI_live
|
a8a6ae22c8eb4d0c1cb2cbb19cc2d82e07a1b7a0
|
b40a554a05940c08d64ffa6053b6aa6f47acae3c
|
refs/heads/master
| 2020-12-02T17:41:23.389329
| 2017-11-13T14:06:08
| 2017-11-13T14:06:08
| 96,412,720
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,979
|
#!/usr/bin/python
import os
service = "olp"
version = "0.0.1"
install_path = "/usr/local/" + service
packages = {
"openresty": {
"url": "https://openresty.org/download/openresty-1.11.2.5.tar.gz",
"options":
"--prefix=" + install_path + " --with-http_drizzle_module",
"install": "gmake && gmake install"
},
"drizzle": {
"url": "http://agentzh.org/misc/nginx/drizzle7-2011.07.21.tar.gz",
"options": "--without-server",
"install": "make libdrizzle-1.0 && make install-libdrizzle-1.0"
}
}
modules = {
"rtmp": {
"url": "https://github.com/AlexWoo/nginx-rtmp-module.git",
"tag": ""
},
"toolkit": {
"url": "https://github.com/AlexWoo/nginx-toolkit-module.git",
"tag": ""
},
"multiport": {
"url": "https://github.com/AlexWoo/nginx-multiport-module.git",
"tag": ""
}
}
lualib = {
"lua-resty-http": {
"url": "https://github.com/pintsized/lua-resty-http.git",
"path": "lib/",
"tag": ""
}
}
def download(name, url):
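    # Download and unpack a source tarball once, renaming the extracted dir to `name`.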
pack = os.path.basename(url)
pdir = pack.split(".tar.gz")[0]
if not os.path.isdir(name):
if not os.path.isfile(pack):
os.system("wget " + url)
os.system("tar xzf " + pack)
os.rename(pdir, name)
def addmodule(module):
tag = ""
path = os.path.basename(module["url"]).split(".git")[0]
option = " --add-module=../" + path
if len(module["tag"]) > 0:
tag = " -b " + module["tag"]
if not os.path.exists(path):
os.system("git clone " + module["url"] + tag)
return option
def addlualib(lib):
tag = ""
path = os.path.basename(lib["url"]).split(".git")[0]
src_path = path + "/" + lib["path"]
dest_path = install_path + "/lualib/"
if len(lib["tag"]) > 0:
tag = " -b " + lib["tag"]
if not os.path.exists(path):
os.system("git clone " + lib["url"] + tag)
os.system("cp -r " + src_path + " " + dest_path)
def install(name):
options = packages[name]["options"]
install = packages[name]["install"]
os.chdir(name)
os.system("./configure " + options + " && " + install)
os.chdir("../")
def conf():
if os.path.exists(install_path + "/nginx/conf/nginx_dynamic.conf"):
print(install_path + "/nginx/conf/nginx_dynamic.conf")
return
print("cp conf/* " + install_path + "/nginx/conf/")
os.system("cp conf/* " + install_path + "/nginx/conf/")
if __name__ == '__main__':
for name, package in packages.items():
download(name, package["url"])
os.system("./version " + service + " " + version)
for module in modules.values():
packages["openresty"]["options"] += addmodule(module)
if not os.path.exists("/usr/local/lib/libdrizzle.so"):
install("drizzle")
os.system("ldconfig")
install("openresty")
for lib in lualib.values():
addlualib(lib)
conf()
|
[
"wj19840501@gmail.com"
] |
wj19840501@gmail.com
|
|
5827f9a7ac8ce8e9972718a5940bf0cbb01b5c4c
|
d10550752b30517c6ffcb21a2a9c69b5caac583a
|
/socketprogramming/server.py
|
29f747626f00a896ea9ebc2c657736dcdc09567d
|
[] |
no_license
|
AryaAtharva/pythonprojects
|
77931c3ca20bc051d3bda16f0e24d25bb95f5cc6
|
40bbe3e848c9922717efb2af38fe65580c165622
|
refs/heads/master
| 2021-04-05T12:46:34.779897
| 2020-06-07T19:22:14
| 2020-06-07T19:22:14
| 248,558,416
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,990
|
py
|
import socket
import threading
import hashlib
class BankAccount:
    def __init__(self, username, password, balance, id):
        # clients send SHA-256 digests of the credentials, so store matching digests
        self.username_hash = hashlib.sha256(username.encode()).hexdigest()
        self.username = username
        self.password = hashlib.sha256(password.encode()).hexdigest()
        self.id = id
        self.balance = balance
        self.status = False  # set to True once the client logs in

    def returnbalance(self):
        return self.balance
# creating some sample accounts for bank to bank transfer
acc =[BankAccount("arya","pass",1000,0) , BankAccount("atharva","pass1",100,1)]
PORT = 5050
SERVER = socket.gethostbyname(socket.gethostname())
HEADER = 64
ADDR = (SERVER, PORT)
FORMAT = 'utf-8'
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
DISCONNECT_MESSAGE = "!disconnect"
server.bind(ADDR)
def handle_client(conn, addr):
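    # Wire protocol: a fixed-width header (HEADER bytes) carries the length of the
    # message that follows; the message itself is a space-separated command line.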
print(f"[NEW CONNECTION] {addr} connected.")
client_id = -1
connected = True
while connected:
msg_length = conn.recv(HEADER).decode(FORMAT)
if msg_length:
msg_length = int(msg_length)
msg = conn.recv(msg_length).decode(FORMAT)
arr = msg.split(' ')
            if arr[0] == DISCONNECT_MESSAGE:
                connected = False
                client_id = -1
                print(f"[{addr}] {msg}")
                conn.send("Disconnected".encode(FORMAT))
            elif arr[0] == "login":
                for i in acc:
                    if arr[1] == i.username_hash and arr[2] == i.password:
                        i.status = True
                        client_id = i.id
                        print(f"[{addr}] {msg}")
                        break
                # client_id is only set on a successful credential match
                if client_id != -1 and acc[client_id].status:
                    conn.send("...LOGIN SUCCESSFUL...".encode(FORMAT))
                else:
                    conn.send("INVALID USERNAME/PASSWORD".encode(FORMAT))
            elif arr[0] == "accbalance":
                if client_id != -1 and acc[client_id].status:
                    bal = "YOUR ACCOUNT BALANCE IS : RS" + str(acc[client_id].returnbalance())
                    conn.send(bal.encode(FORMAT))
                else:
                    conn.send("[UNAUTHORIZED REQUEST : PLEASE LOGIN]".encode(FORMAT))
            elif arr[0] == "transfer":
                if client_id != -1 and acc[client_id].status:
                    totransfer = -1
                    flag = False
                    for i in acc:
                        if arr[1] == i.username_hash:
                            totransfer = i.id
                            flag = True
                            print(f"[{addr}] {msg}")
                            break
                    if flag:
                        if int(arr[2]) > acc[client_id].balance:
                            conn.send("[INSUFFICIENT FUNDS]".encode(FORMAT))
                        else:
                            acc[client_id].balance = acc[client_id].balance - int(arr[2])
                            acc[totransfer].balance = acc[totransfer].balance + int(arr[2])
                            conn.send("[TRANSFER SUCCESSFUL]".encode(FORMAT))
                    else:
                        conn.send("[ACCOUNT NOT FOUND]".encode(FORMAT))
                else:
                    conn.send("[UNAUTHORIZED REQUEST : PLEASE LOGIN]".encode(FORMAT))
            else:
conn.send("[INVALID REQUEST]".encode(FORMAT))
conn.close()
def start():
server.listen()
print(f"[LISTENING] server listening on {SERVER}")
while True:
conn, addr = server.accept()
thread = threading.Thread(target=handle_client , args=(conn, addr))
thread.start()
print(f"[ACTIVE CONNECTIONS] {threading.activeCount()-1}")
print("[STARTING] server is starting")
start()
|
[
"noreply@github.com"
] |
AryaAtharva.noreply@github.com
|
dc21d156d5ad604a938d98723ddc0d86c9010747
|
986ca36fc5e6ab0e30faa0edb93103553abc66ff
|
/screenshot_cmd.py
|
51ddaf541a272501b44cb87ed6cdc4651a6ad012
|
[] |
no_license
|
tsukudamayo/python
|
159afdb736bc5fd1a13f28f3e03fee15053613bf
|
764e5a5054af7f4cfc95da34f5425cc899b8a390
|
refs/heads/master
| 2022-12-03T03:17:24.708704
| 2018-06-08T13:37:45
| 2018-06-08T13:37:45
| 112,599,153
| 0
| 1
| null | 2022-11-18T06:26:37
| 2017-11-30T10:38:58
|
Python
|
UTF-8
|
Python
| false
| false
| 580
|
py
|
from PIL import ImageGrab
import sys
from datetime import datetime as dt
import time
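
# usage: python screenshot_cmd.py <count> <interval_seconds> <left> <top> <right> <bottom>
# grabs <count> screenshots of the given bounding box, <interval_seconds> apart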
def loop():
now = dt.now()
f_name = now.strftime('%Y%m%d_%H%M%S')
for i in range(int(sys.argv[1])):
im = ImageGrab.grab((int(sys.argv[3]),
int(sys.argv[4]),
int(sys.argv[5]),
int(sys.argv[6])))
im.save('C:/Users/lx15120118/Pictures/' + str(f_name) + '_' + str('{0:04d}'.format(i)) + '.png')
time.sleep(float(sys.argv[2]))
if __name__ == '__main__':
loop()
|
[
"tsukudamayo@gmail.com"
] |
tsukudamayo@gmail.com
|
1613dec4a43b0208f01872c4ea78f3b5dc9c0cc2
|
6bdad555fd073e8b1c73e373782249c364a0b5bd
|
/expenses/constants.py
|
3e5b2e5173211dd013a87efacbd2c9c3af3a38fc
|
[] |
no_license
|
h4k1m0u/gistutorials
|
35383f5e1197553c528bc75405094118815e72fd
|
795da31428a469becb948deefe57c725116266be
|
refs/heads/master
| 2023-01-05T20:38:39.552302
| 2019-04-29T01:03:34
| 2019-04-29T01:03:34
| 309,973,786
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 246
|
py
|
months = {
'1': 'January',
'2': 'February',
'3': 'March',
'4': 'April',
'5': 'May',
'6': 'June',
'7': 'July',
'8': 'August',
'9': 'September',
'10': 'October',
'11': 'November',
'12': 'December',
}
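# Illustrative lookup (added): keys are the string form of the month number,
# e.g. months['4'] == 'April' or months[str(some_date.month)].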
|
[
"h.benoudjit@gmail.com"
] |
h.benoudjit@gmail.com
|
aab74d67d1a3e78ff9d734e0eac16b5412b9da91
|
c7a86a12ff51ba86c758214559ffa1979b140237
|
/1. Python - Basics/OOPS/overlaod_duck_typing.py
|
f1150aaa68824acc656cbf8c0cae6f6c3aaebf01
|
[] |
no_license
|
pankaj890/Python
|
a2086b64ee7a138afd4a42778cde6cf2aaacbd14
|
7bcb9ebb1f986b75f96dd0c9f04948ba3fc0ddba
|
refs/heads/master
| 2021-06-30T10:51:01.109003
| 2020-05-05T12:26:06
| 2020-05-05T12:26:06
| 201,756,541
| 0
| 0
| null | 2019-10-25T08:50:00
| 2019-08-11T11:34:22
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 524
|
py
|
class Pycharm:
def execute(self):
print('compiling')
print('running')
class MyEditor:
def execute(self):
print('Spell Checking')
print('Naming Convention Checking')
print('compiling')
print('running')
# Both classes define a method with the same name, and the caller is not
# concerned with which class the object belongs to:
# an execute() method exists on both Pycharm and MyEditor
class Laptop:
def code(self, ide):
ide.execute()
ide = Pycharm()
lap1 = Laptop()
lap1.code(ide)
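# Added demonstration: the same call works with MyEditor, because
# Laptop.code() only relies on the object providing an execute() method.
ide = MyEditor()
lap1.code(ide)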
|
[
"noreply@github.com"
] |
pankaj890.noreply@github.com
|
2c1b8296f5014ddc48703e61c8d0a2e8c0417864
|
5c267dc9493d3452e4cf2c8c31977b870663d651
|
/MiddlewareApi/DocumentMiddleware/DocumentMiddleware/urls.py
|
b8ab73ceb24f6cf8404095d5d63bb61af3f3c66f
|
[] |
no_license
|
gitCORAPE/covid
|
52edef8f365ff211ba2d38e1e8cf3cc68448de82
|
c8962a65111c9582c2319278ef46856a09c64844
|
refs/heads/master
| 2022-12-11T01:21:19.512164
| 2020-09-15T21:11:53
| 2020-09-15T21:11:53
| 295,849,982
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 130
|
py
|
from django.contrib import admin
from django.urls import include, path
urlpatterns = [
path('api/', include('api.urls')),
]
|
[
"enriquezer91@gmail.com"
] |
enriquezer91@gmail.com
|
59653f12cf4af86cdd76fdce685e1657a6b655f7
|
7ba36ae8005834b53901919e0b262d809207b0f7
|
/ilisa/__init__.py
|
0ccfe5cfd3f5ac18a10ff50fe595fec0b4db452b
|
[
"ISC"
] |
permissive
|
David-McKenna/iLiSA
|
f63834c3933a86fbf43063f42d80a0ed52cfd224
|
4cbd1d50933b6e0a4e99894c24b70b101c5d77b1
|
refs/heads/master
| 2021-06-14T10:35:13.400533
| 2021-03-03T16:29:44
| 2021-03-03T16:29:44
| 153,136,624
| 0
| 0
| null | 2018-10-15T15:22:52
| 2018-10-15T15:22:52
| null |
UTF-8
|
Python
| false
| false
| 21
|
py
|
__version__ = '3.18'
|
[
"tobia@chalmers.se"
] |
tobia@chalmers.se
|
a150ba71f03557fe2645e4d0bcf5046354bb37ce
|
4f4a73f7368df75f8ffc248e90e8b7994ba45645
|
/Lesson_5/shop_storage/shop_storage/wsgi.py
|
99ee49e74730c2555f3d5ee066b8cc5f9ab912ff
|
[] |
no_license
|
ss2576/Interview
|
11908b3dbc565603be909b41839fa05250d9e77b
|
39dca268d571cc02fb36d5d911cece4fab89d510
|
refs/heads/master
| 2023-02-13T07:21:55.844662
| 2021-01-18T09:49:52
| 2021-01-18T09:49:52
| 321,565,464
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 400
|
py
|
"""
WSGI config for shop_storage project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'shop_storage.settings')
application = get_wsgi_application()
|
[
"ss2576@mail.ru"
] |
ss2576@mail.ru
|
d9e3d680e965cbc487b7c0c7393e7763cbd448f0
|
b4179b0f5294e27f7431743ee14af35379098d30
|
/Homework/hw01/hw01.py
|
7dc94c76a39a37a5b02681585f04303e9005fe98
|
[] |
no_license
|
Clairezhang29/cs61a
|
e0744cd609c861caa21262f7ecd024cc3ca23683
|
0b3e5b4d1672e682ef4b29a6e38f6505cc295607
|
refs/heads/master
| 2022-02-25T11:14:34.045216
| 2019-10-08T14:40:03
| 2019-10-08T14:40:03
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,716
|
py
|
""" Homework 1: Control """
from operator import add, sub
def a_plus_abs_b(a, b):
"""Return a+abs(b), but without calling abs.
>>> a_plus_abs_b(2, 3)
5
>>> a_plus_abs_b(2, -3)
5
"""
if b < 0:
f = sub
else:
f = add
return f(a, b)
def two_of_three(a, b, c):
"""Return x*x + y*y, where x and y are the two largest members of the
positive numbers a, b, and c.
>>> two_of_three(1, 2, 3)
13
>>> two_of_three(5, 3, 1)
34
>>> two_of_three(10, 2, 8)
164
>>> two_of_three(5, 5, 5)
50
"""
    # XOR cancellation: a ^ b ^ c ^ max ^ min leaves exactly the middle value
    return (lambda max_num, min_num: (lambda middle_num: max_num * max_num + middle_num * middle_num)(a ^ b ^ c ^ max_num ^ min_num))(max(a, b, c), min(a, b, c))
def largest_factor(n):
"""Return the largest factor of n that is smaller than n.
>>> largest_factor(15) # factors are 1, 3, 5
5
>>> largest_factor(80) # factors are 1, 2, 4, 5, 8, 10, 16, 20, 40
40
>>> largest_factor(13) # factor is 1 since 13 is prime
1
"""
    import math
    # the smallest trial divisor i in [2, sqrt(n)] yields the largest proper
    # factor n // i; the default 1 covers primes
    return next((n // i for i in range(2, math.ceil(math.sqrt(n)) + 1) if n % i == 0), 1)
def if_function(condition, true_result, false_result):
"""Return true_result if condition is a true value, and
false_result otherwise.
>>> if_function(True, 2, 3)
2
>>> if_function(False, 2, 3)
3
>>> if_function(3==2, 3+2, 3-2)
1
>>> if_function(3>2, 3+2, 3-2)
5
"""
if condition:
return true_result
else:
return false_result
def with_if_statement():
"""
>>> result = with_if_statement()
2
>>> print(result)
None
"""
if c():
return t()
else:
return f()
def with_if_function():
"""
>>> result = with_if_function()
1
2
>>> print(result)
None
"""
return if_function(c(), t(), f())
def c():
return False
def t():
print(1)
def f():
print(2)
def hailstone(n):
"""Print the hailstone sequence starting at n and return its
length.
>>> a = hailstone(10)
10
5
16
8
4
2
1
>>> a
7
"""
from itertools import accumulate, repeat, takewhile
return sum(1 for _ in accumulate(repeat(n), lambda x, _: print(x) or (next(iter(())) if x == 1 else x // 2 if x % 2 == 0 else x * 3 + 1)))
"""The explaination:
gen_next_value = lambda x: x // 2 if x % 2 == 0 else x * 3 + 1
throw_stop_iter = next(iter(()))
construct_generator_with_print = lambda x, _: print(x) or (throw_stop_iter if x == 1 else gen_next_value(x))
all_numbers = accumulate(repeat(n), construct_generator_with_print)
length = sum(1 for _ in all_numbers)
return length
"""
|
[
"32255369+chromezh@users.noreply.github.com"
] |
32255369+chromezh@users.noreply.github.com
|
1282a604cee25466eb64443709a3037bd42b19fc
|
7be145ad70fa03c63ac348849532a19fc2285720
|
/webcdi/cdi_forms/migrations/0073_merge_20200610_0733.py
|
50c40e5d34048605a1a20a1d2ee68abb2b1b9541
|
[] |
no_license
|
langcog/web-cdi
|
8834ae260e7b6d41411bafbfaa80519fd88a9e4d
|
27a1bfb342c0e533ba99d197db3b47dc4443b170
|
refs/heads/master
| 2023-08-30T07:39:50.557502
| 2023-07-31T05:59:16
| 2023-07-31T05:59:16
| 41,515,374
| 9
| 6
| null | 2023-08-28T17:29:04
| 2015-08-27T22:56:31
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 281
|
py
|
# Generated by Django 2.2.13 on 2020-06-10 07:33
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('cdi_forms', '0072_auto_20200506_1029'),
('cdi_forms', '0071_choices_choice_set_ko'),
]
operations = [
]
|
[
"noreply@github.com"
] |
langcog.noreply@github.com
|
f69d2e9b412c95845470cfd44659f888e5c4bb03
|
b5d6219ac738ed05485439540f38d63d21694c51
|
/DAT/ED6_DT01/T0045.调试地图.py
|
20ebbe0391d9c0169a1719461f0b7da449694e37
|
[] |
no_license
|
otoboku/ED6-FC-Steam-CN
|
f87ffb2ff19f9272b986fa32a91bec360c21dffa
|
c40d9bc5aaea9446dda27e7b94470d91cb5558c5
|
refs/heads/master
| 2021-01-21T02:37:30.443986
| 2015-11-27T07:41:41
| 2015-11-27T07:41:41
| 46,975,651
| 1
| 0
| null | 2015-11-27T10:58:43
| 2015-11-27T10:58:42
| null |
UTF-8
|
Python
| false
| false
| 31,403
|
py
|
from ED6ScenarioHelper import *
def main():
    # Debug map
CreateScenaFile(
FileName = 'T0045 ._SN',
MapName = 'map1',
Location = 'T0030.x',
MapIndex = 1,
MapDefaultBGM = "ed60010",
Flags = 0,
EntryFunctionIndex = 0xFFFF,
Reserved = 0,
IncludedScenario = [
'',
'',
'',
'',
'',
'',
'',
''
],
)
BuildStringList(
'@FileName', # 8
'10150待机', # 9
'10151移动', # 10
'10152攻击', # 11
'10153挨打', # 12
'10154倒下', # 13
'10160待机', # 14
'10161移动', # 15
'10162攻击', # 16
'10163挨打', # 17
'10164倒下', # 18
'10170待机', # 19
'10171移动', # 20
'10172攻击', # 21
'10173挨打', # 22
'10174倒下', # 23
'10180待机', # 24
'10181移动', # 25
'10182攻击', # 26
'10183挨打', # 27
'10184倒下', # 28
'10190待机', # 29
'10191移动', # 30
'10192攻击', # 31
'10193挨打', # 32
'10194倒下', # 33
'10200待机', # 34
'10201移动', # 35
'10202攻击', # 36
'10203挨打', # 37
'10204倒下', # 38
'10210待机', # 39
'10211移动', # 40
'10212攻击', # 41
'10213挨打', # 42
'10214倒下', # 43
'10151', # 44
'10151', # 45
'10161', # 46
'10161', # 47
'10171', # 48
'10171', # 49
'10181', # 50
'10181', # 51
'10191', # 52
'10191', # 53
'10201', # 54
'10201', # 55
'10211', # 56
'10211', # 57
)
DeclEntryPoint(
Unknown_00 = 0,
Unknown_04 = 0,
Unknown_08 = 0,
Unknown_0C = 4,
Unknown_0E = 5,
Unknown_10 = 0,
Unknown_14 = 9500,
Unknown_18 = -10000,
Unknown_1C = 0,
Unknown_20 = 0,
Unknown_24 = 0,
Unknown_28 = 2800,
Unknown_2C = 262,
Unknown_30 = 315,
Unknown_32 = 0,
Unknown_34 = 360,
Unknown_36 = 0,
Unknown_38 = 0,
Unknown_3A = 0,
InitScenaIndex = 0,
InitFunctionIndex = 0,
EntryScenaIndex = 0,
EntryFunctionIndex = 1,
)
AddCharChip(
'ED6_DT09/CH10150 ._CH', # 00
'ED6_DT09/CH10151 ._CH', # 01
'ED6_DT09/CH10152 ._CH', # 02
'ED6_DT09/CH10153 ._CH', # 03
'ED6_DT09/CH10154 ._CH', # 04
'ED6_DT09/CH10160 ._CH', # 05
'ED6_DT09/CH10161 ._CH', # 06
'ED6_DT09/CH10162 ._CH', # 07
'ED6_DT09/CH10163 ._CH', # 08
'ED6_DT09/CH10164 ._CH', # 09
'ED6_DT09/CH10170 ._CH', # 0A
'ED6_DT09/CH10171 ._CH', # 0B
'ED6_DT09/CH10172 ._CH', # 0C
'ED6_DT09/CH10173 ._CH', # 0D
'ED6_DT09/CH10174 ._CH', # 0E
'ED6_DT09/CH10180 ._CH', # 0F
'ED6_DT09/CH10181 ._CH', # 10
'ED6_DT09/CH10182 ._CH', # 11
'ED6_DT09/CH10183 ._CH', # 12
'ED6_DT09/CH10184 ._CH', # 13
'ED6_DT09/CH10190 ._CH', # 14
'ED6_DT09/CH10191 ._CH', # 15
'ED6_DT09/CH10192 ._CH', # 16
'ED6_DT09/CH10193 ._CH', # 17
'ED6_DT09/CH10194 ._CH', # 18
'ED6_DT09/CH10200 ._CH', # 19
'ED6_DT09/CH10201 ._CH', # 1A
'ED6_DT09/CH10202 ._CH', # 1B
'ED6_DT09/CH10203 ._CH', # 1C
'ED6_DT09/CH10204 ._CH', # 1D
'ED6_DT09/CH10210 ._CH', # 1E
'ED6_DT09/CH10211 ._CH', # 1F
'ED6_DT09/CH10212 ._CH', # 20
'ED6_DT09/CH10213 ._CH', # 21
'ED6_DT09/CH10214 ._CH', # 22
)
AddCharChipPat(
'ED6_DT09/CH10150P._CP', # 00
'ED6_DT09/CH10151P._CP', # 01
'ED6_DT09/CH10152P._CP', # 02
'ED6_DT09/CH10153P._CP', # 03
'ED6_DT09/CH10154P._CP', # 04
'ED6_DT09/CH10160P._CP', # 05
'ED6_DT09/CH10161P._CP', # 06
'ED6_DT09/CH10162P._CP', # 07
'ED6_DT09/CH10163P._CP', # 08
'ED6_DT09/CH10164P._CP', # 09
'ED6_DT09/CH10170P._CP', # 0A
'ED6_DT09/CH10171P._CP', # 0B
'ED6_DT09/CH10172P._CP', # 0C
'ED6_DT09/CH10173P._CP', # 0D
'ED6_DT09/CH10174P._CP', # 0E
'ED6_DT09/CH10180P._CP', # 0F
'ED6_DT09/CH10181P._CP', # 10
'ED6_DT09/CH10182P._CP', # 11
'ED6_DT09/CH10183P._CP', # 12
'ED6_DT09/CH10184P._CP', # 13
'ED6_DT09/CH10190P._CP', # 14
'ED6_DT09/CH10191P._CP', # 15
'ED6_DT09/CH10192P._CP', # 16
'ED6_DT09/CH10193P._CP', # 17
'ED6_DT09/CH10194P._CP', # 18
'ED6_DT09/CH10200P._CP', # 19
'ED6_DT09/CH10201P._CP', # 1A
'ED6_DT09/CH10202P._CP', # 1B
'ED6_DT09/CH10203P._CP', # 1C
'ED6_DT09/CH10204P._CP', # 1D
'ED6_DT09/CH10210P._CP', # 1E
'ED6_DT09/CH10211P._CP', # 1F
'ED6_DT09/CH10212P._CP', # 20
'ED6_DT09/CH10213P._CP', # 21
'ED6_DT09/CH10214P._CP', # 22
)
DeclNpc(
X = 4000,
Z = 0,
Y = 2000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 0,
ChipIndex = 0x0,
NpcIndex = 0x100,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 4000,
Z = 0,
Y = 6000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 1,
ChipIndex = 0x1,
NpcIndex = 0x100,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 4000,
Z = 0,
Y = 10000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 2,
ChipIndex = 0x2,
NpcIndex = 0x100,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 4000,
Z = 0,
Y = 14000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 3,
ChipIndex = 0x3,
NpcIndex = 0x100,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 4000,
Z = 0,
Y = 18000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 4,
ChipIndex = 0x4,
NpcIndex = 0x100,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 8000,
Z = 0,
Y = 2000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 5,
ChipIndex = 0x5,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 8000,
Z = 0,
Y = 6000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 6,
ChipIndex = 0x6,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 8000,
Z = 0,
Y = 10000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 7,
ChipIndex = 0x7,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 8000,
Z = 0,
Y = 14000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 8,
ChipIndex = 0x8,
NpcIndex = 0x101,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 8000,
Z = 0,
Y = 18000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 9,
ChipIndex = 0x9,
NpcIndex = 0x101,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 12000,
Z = 0,
Y = 2000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 10,
ChipIndex = 0xA,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 12000,
Z = 0,
Y = 6000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 11,
ChipIndex = 0xB,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 12000,
Z = 0,
Y = 10000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 12,
ChipIndex = 0xC,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 12000,
Z = 0,
Y = 14000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 13,
ChipIndex = 0xD,
NpcIndex = 0x101,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 12000,
Z = 0,
Y = 18000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 14,
ChipIndex = 0xE,
NpcIndex = 0x101,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 16000,
Z = 0,
Y = 2000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 15,
ChipIndex = 0xF,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 16000,
Z = 0,
Y = 6000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 16,
ChipIndex = 0x10,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 16000,
Z = 0,
Y = 10000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 17,
ChipIndex = 0x11,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 16000,
Z = 0,
Y = 14000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 18,
ChipIndex = 0x12,
NpcIndex = 0x101,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 16000,
Z = 0,
Y = 18000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 19,
ChipIndex = 0x13,
NpcIndex = 0x101,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 20000,
Z = 0,
Y = 2000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 20,
ChipIndex = 0x14,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 20000,
Z = 0,
Y = 6000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 21,
ChipIndex = 0x15,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 20000,
Z = 0,
Y = 10000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 22,
ChipIndex = 0x16,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 20000,
Z = 0,
Y = 14000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 23,
ChipIndex = 0x17,
NpcIndex = 0x101,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 20000,
Z = 0,
Y = 18000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 24,
ChipIndex = 0x18,
NpcIndex = 0x101,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 24000,
Z = 0,
Y = 2000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 25,
ChipIndex = 0x19,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 24000,
Z = 0,
Y = 6000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 26,
ChipIndex = 0x1A,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 24000,
Z = 0,
Y = 10000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 27,
ChipIndex = 0x1B,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 24000,
Z = 0,
Y = 14000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 28,
ChipIndex = 0x1C,
NpcIndex = 0x101,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 24000,
Z = 0,
Y = 18000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 29,
ChipIndex = 0x1D,
NpcIndex = 0x101,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 28000,
Z = 0,
Y = 2000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 30,
ChipIndex = 0x1E,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 28000,
Z = 0,
Y = 6000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 31,
ChipIndex = 0x1F,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 28000,
Z = 0,
Y = 10000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 32,
ChipIndex = 0x20,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 28000,
Z = 0,
Y = 14000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 33,
ChipIndex = 0x21,
NpcIndex = 0x101,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 28000,
Z = 0,
Y = 18000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 34,
ChipIndex = 0x22,
NpcIndex = 0x101,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 8000,
Z = 0,
Y = 21000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 1,
ChipIndex = 0x1,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 4,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 8000,
Z = 0,
Y = 23000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 1,
ChipIndex = 0x1,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 4,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 12000,
Z = 0,
Y = 21000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 6,
ChipIndex = 0x6,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 4,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 12000,
Z = 0,
Y = 23000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 6,
ChipIndex = 0x6,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 4,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 16000,
Z = 0,
Y = 21000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 11,
ChipIndex = 0xB,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 4,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 16000,
Z = 0,
Y = 23000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 11,
ChipIndex = 0xB,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 4,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 20000,
Z = 0,
Y = 21000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 16,
ChipIndex = 0x10,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 4,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 20000,
Z = 0,
Y = 23000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 16,
ChipIndex = 0x10,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 4,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 28000,
Z = 0,
Y = 21000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 21,
ChipIndex = 0x15,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 4,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 28000,
Z = 0,
Y = 23000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 21,
ChipIndex = 0x15,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 4,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 32000,
Z = 0,
Y = 21000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 26,
ChipIndex = 0x1A,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 4,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 32000,
Z = 0,
Y = 23000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 26,
ChipIndex = 0x1A,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 4,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 36000,
Z = 0,
Y = 21000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 31,
ChipIndex = 0x1F,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 4,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 36000,
Z = 0,
Y = 23000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 31,
ChipIndex = 0x1F,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 4,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
ScpFunction(
"Function_0_7E2", # 00, 0
"Function_1_7E3", # 01, 1
"Function_2_7E4", # 02, 2
"Function_3_7FA", # 03, 3
"Function_4_810", # 04, 4
"Function_5_834", # 05, 5
)
def Function_0_7E2(): pass
label("Function_0_7E2")
Return()
# Function_0_7E2 end
def Function_1_7E3(): pass
label("Function_1_7E3")
Return()
# Function_1_7E3 end
def Function_2_7E4(): pass
label("Function_2_7E4")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_7F9")
OP_99(0xFE, 0x0, 0x7, 0x5DC)
Jump("Function_2_7E4")
label("loc_7F9")
Return()
# Function_2_7E4 end
def Function_3_7FA(): pass
label("Function_3_7FA")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_80F")
OP_99(0xFE, 0x0, 0x7, 0x578)
Jump("Function_3_7FA")
label("loc_80F")
Return()
# Function_3_7FA end
def Function_4_810(): pass
label("Function_4_810")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_833")
OP_8D(0xFE, 4000, 20000, 24000, 30000, 1500)
Jump("Function_4_810")
label("loc_833")
Return()
# Function_4_810 end
def Function_5_834(): pass
label("Function_5_834")
TalkBegin(0xFE)
ChrTalk(
0xFE,
"喝~\x02",
)
CloseMessageWindow()
TalkEnd(0xFE)
Return()
# Function_5_834 end
SaveToFile()
Try(main)
|
[
"Hiromi.Kaede@gmail.com"
] |
Hiromi.Kaede@gmail.com
|
1e56394528485801c15afd6e6f1eaee270a50790
|
64d7b6f613234cc69af0b1821e531fca44d65ddb
|
/threads_concurrent/13-ex_real.py
|
708559731c5b548f7e26a2924bf96f7538d8ff40
|
[] |
no_license
|
jccramos/My-first-repository
|
1442d8f9c54ac9f6e65f570756e80fbc9ddcab9c
|
b97cec73dfc375849633bccee490b0962b5851a8
|
refs/heads/master
| 2022-11-09T21:17:51.497840
| 2020-07-02T19:33:45
| 2020-07-02T19:33:45
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,076
|
py
|
# Let's try to improve the execution time of the code in file 12
import requests
import time
import concurrent.futures
img_urls = [
'https://images.unsplash.com/photo-1516117172878-fd2c41f4a759',
'https://images.unsplash.com/photo-1532009324734-20a7a5813719',
'https://images.unsplash.com/photo-1524429656589-6633a470097c',
'https://images.unsplash.com/photo-1530224264768-7ff8c1789d79',
'https://images.unsplash.com/photo-1564135624576-c5c88640f235',
'https://images.unsplash.com/photo-1541698444083-023c97d3f4b6',
'https://images.unsplash.com/photo-1522364723953-452d3431c267',
'https://images.unsplash.com/photo-1513938709626-033611b8cc03',
'https://images.unsplash.com/photo-1507143550189-fed454f93097',
'https://images.unsplash.com/photo-1493976040374-85c8e12f0c0e',
'https://images.unsplash.com/photo-1504198453319-5ce911bafcde',
'https://images.unsplash.com/photo-1530122037265-a5f1f91d3b99',
'https://images.unsplash.com/photo-1516972810927-80185027ca84',
'https://images.unsplash.com/photo-1550439062-609e1531270e',
'https://images.unsplash.com/photo-1549692520-acc6669e2f0c'
]
t1 = time.perf_counter()
def download_image(img_url):
img_bytes = requests.get(img_url).content
img_name = img_url.split('/')[3]
img_name = f'{img_name}.jpg'
with open(img_name, 'wb') as img_file:
img_file.write(img_bytes)
print(f'{img_name} was downloaded...')
with concurrent.futures.ThreadPoolExecutor() as executor:
executor.map(download_image, img_urls)
t2 = time.perf_counter()
print(f'Finished in {t2-t1} seconds')
# Using the map method we took only 67 seconds to download the same photos.
# Although map returns the function's results (the photos) in the order of
# the arguments we passed in (the photo URLs), that does not mean it is
# downloading one photo at a time.
# map starts all the downloads and hands the results back in a
# pre-established order (the order of the URLs).
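# Added sketch: the submit()/as_completed() alternative yields futures in
# completion order instead of argument order, which makes per-download
# progress reporting easier. Defined but not called here, to avoid
# downloading everything a second time.
def download_all_as_completed(urls):
    with concurrent.futures.ThreadPoolExecutor() as executor:
        futures = [executor.submit(download_image, url) for url in urls]
        for future in concurrent.futures.as_completed(futures):
            future.result()  # re-raises any exception from the download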
|
[
"ramosjulio771@gmail.com"
] |
ramosjulio771@gmail.com
|
0e659b6895f4c47c42455b63745231cf437b6471
|
4ba3f4c37a8f58fb9ca48fd6fbaa07e7267f4397
|
/main.py
|
c549feb53bfbf9eb1aa7f3a5199d005d72c38758
|
[] |
no_license
|
Geneonosis/HogwartsDiscordBot
|
d193443cc4f64db4438fb3b74dcea3463b655890
|
3963dc4205cdf3cc2393e22ae7ba7125ecd9af47
|
refs/heads/master
| 2022-04-20T19:38:07.304603
| 2020-04-24T04:38:53
| 2020-04-24T04:38:53
| 258,405,017
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 83
|
py
|
#hello you wanna write this in python
#also i don't know how to make a discord bot
|
[
"jadaebong@gmail.com"
] |
jadaebong@gmail.com
|
8ba5cb760b032b8a9e063bf44fa89047527e62a1
|
5c279b92063481363037841f5677154ed70a8738
|
/Global/util/inception.py
|
5eea739b1d6742ade30bda2ec9aabb834fca0f15
|
[
"MIT"
] |
permissive
|
ImportPaddle/Old2Life
|
8fdc97a16207fb5bfcc66e5dc93d84961a40efba
|
424a2433e9a00c7eaeb660c40d22f6168dc8f576
|
refs/heads/master
| 2023-08-26T22:37:23.985176
| 2021-11-03T03:57:11
| 2021-11-03T03:57:11
| 421,377,673
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 31,951
|
py
|
#Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserve.
#
#Licensed under the Apache License, Version 2.0 (the "License");
#you may not use this file except in compliance with the License.
#You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
import math
import paddle
import paddle.nn as nn
from paddle.nn import Conv2D, AvgPool2D, MaxPool2D, BatchNorm, Linear, AdaptiveAvgPool2D
__all__ = ['InceptionV3']
class InceptionV3(nn.Layer):
DEFAULT_BLOCK_INDEX = 3
BLOCK_INDEX_BY_DIM = {
64: 0, # First max pooling features
192: 1, # Second max pooling featurs
768: 2, # Pre-aux classifier features
2048: 3 # Final average pooling features
}
def __init__(self,
output_blocks=[DEFAULT_BLOCK_INDEX],
class_dim=1000,
aux_logits=False,
resize_input=True,
normalize_input=True):
super(InceptionV3, self).__init__()
self.resize_input = resize_input
self.normalize_input = normalize_input
self.output_blocks = sorted(output_blocks)
self.last_needed_block = max(output_blocks)
self.class_dim = class_dim
self.aux_logits = aux_logits
assert self.last_needed_block <= 3, 'Last possible output block index is 3'
self.blocks = []
self.Conv2d_1a_3x3 = ConvBNLayer(3,
32,
3,
stride=2,
name='Conv2d_1a_3x3')
self.Conv2d_2a_3x3 = ConvBNLayer(32, 32, 3, name='Conv2d_2a_3x3')
self.Conv2d_2b_3x3 = ConvBNLayer(32,
64,
3,
padding=1,
name='Conv2d_2b_3x3')
self.maxpool1 = MaxPool2D(kernel_size=3, stride=2)
block0 = [
self.Conv2d_1a_3x3, self.Conv2d_2a_3x3, self.Conv2d_2b_3x3,
self.maxpool1
]
self.blocks.append(nn.Sequential(*block0))
### block1
if self.last_needed_block >= 1:
self.Conv2d_3b_1x1 = ConvBNLayer(64, 80, 1, name='Conv2d_3b_1x1')
self.Conv2d_4a_3x3 = ConvBNLayer(80, 192, 3, name='Conv2d_4a_3x3')
self.maxpool2 = MaxPool2D(kernel_size=3, stride=2)
block1 = [self.Conv2d_3b_1x1, self.Conv2d_4a_3x3, self.maxpool2]
self.blocks.append(nn.Sequential(*block1))
### block2
### Mixed_5b 5c 5d
if self.last_needed_block >= 2:
self.Mixed_5b = Fid_inceptionA(192,
pool_features=32,
name='Mixed_5b')
self.Mixed_5c = Fid_inceptionA(256,
pool_features=64,
name='Mixed_5c')
self.Mixed_5d = Fid_inceptionA(288,
pool_features=64,
name='Mixed_5d')
### Mixed_6
self.Mixed_6a = InceptionB(288, name='Mixed_6a')
self.Mixed_6b = Fid_inceptionC(768, c7=128, name='Mixed_6b')
self.Mixed_6c = Fid_inceptionC(768, c7=160, name='Mixed_6c')
self.Mixed_6d = Fid_inceptionC(768, c7=160, name='Mixed_6d')
self.Mixed_6e = Fid_inceptionC(768, c7=192, name='Mixed_6e')
block2 = [
self.Mixed_5b, self.Mixed_5c, self.Mixed_5d, self.Mixed_6a,
self.Mixed_6b, self.Mixed_6c, self.Mixed_6d, self.Mixed_6e
]
self.blocks.append(nn.Sequential(*block2))
if self.aux_logits:
self.AuxLogits = InceptionAux(768, self.class_dim, name='AuxLogits')
### block3
### Mixed_7
if self.last_needed_block >= 3:
self.Mixed_7a = InceptionD(768, name='Mixed_7a')
self.Mixed_7b = Fid_inceptionE_1(1280, name='Mixed_7b')
self.Mixed_7c = Fid_inceptionE_2(2048, name='Mixed_7c')
self.avgpool = AdaptiveAvgPool2D(output_size=1)
block3 = [self.Mixed_7a, self.Mixed_7b, self.Mixed_7c, self.avgpool]
self.blocks.append(nn.Sequential(*block3))
def forward(self, x):
out = []
aux = None
if self.resize_input:
x = nn.functional.interpolate(x,
size=[299, 299],
mode='bilinear',
align_corners=False,
align_mode=0)
if self.normalize_input:
x = x * 2 - 1
for idx, block in enumerate(self.blocks):
x = block(x)
if self.aux_logits and (idx == 2):
aux = self.AuxLogits(x)
if idx in self.output_blocks:
out.append(x)
if idx == self.last_needed_block:
break
return out, aux
class InceptionA(nn.Layer):
def __init__(self, in_channels, pool_features, name=None):
super(InceptionA, self).__init__()
self.branch1x1 = ConvBNLayer(in_channels,
64,
1,
name=name + '.branch1x1')
self.branch5x5_1 = ConvBNLayer(in_channels,
48,
1,
name=name + '.branch5x5_1')
self.branch5x5_2 = ConvBNLayer(48,
64,
5,
padding=2,
name=name + '.branch5x5_2')
self.branch3x3dbl_1 = ConvBNLayer(in_channels,
64,
1,
name=name + '.branch3x3dbl_1')
self.branch3x3dbl_2 = ConvBNLayer(64,
96,
3,
padding=1,
name=name + '.branch3x3dbl_2')
self.branch3x3dbl_3 = ConvBNLayer(96,
96,
3,
padding=1,
name=name + '.branch3x3dbl_3')
self.branch_pool0 = AvgPool2D(kernel_size=3,
stride=1,
padding=1,
exclusive=True)
self.branch_pool = ConvBNLayer(in_channels,
pool_features,
1,
name=name + '.branch_pool')
def forward(self, x):
branch1x1 = self.branch1x1(x)
branch5x5 = self.branch5x5_1(x)
branch5x5 = self.branch5x5_2(branch5x5)
branch3x3dbl = self.branch3x3dbl_1(x)
branch3x3dbl = self.branch3x3dbl_2(branch3x3dbl)
branch3x3dbl = self.branch3x3dbl_3(branch3x3dbl)
branch_pool = self.branch_pool0(x)
branch_pool = self.branch_pool(branch_pool)
return paddle.concat(
[branch1x1, branch5x5, branch3x3dbl, branch_pool], axis=1)
class InceptionB(nn.Layer):
def __init__(self, in_channels, name=None):
super(InceptionB, self).__init__()
self.branch3x3 = ConvBNLayer(in_channels,
384,
3,
stride=2,
name=name + '.branch3x3')
self.branch3x3dbl_1 = ConvBNLayer(in_channels,
64,
1,
name=name + '.branch3x3dbl_1')
self.branch3x3dbl_2 = ConvBNLayer(64,
96,
3,
padding=1,
name=name + '.branch3x3dbl_2')
self.branch3x3dbl_3 = ConvBNLayer(96,
96,
3,
stride=2,
name=name + '.branch3x3dbl_3')
self.branch_pool = MaxPool2D(kernel_size=3, stride=2)
def forward(self, x):
branch3x3 = self.branch3x3(x)
branch3x3dbl = self.branch3x3dbl_1(x)
branch3x3dbl = self.branch3x3dbl_2(branch3x3dbl)
branch3x3dbl = self.branch3x3dbl_3(branch3x3dbl)
branch_pool = self.branch_pool(x)
return paddle.concat([branch3x3, branch3x3dbl, branch_pool],
axis=1)
class InceptionC(nn.Layer):
def __init__(self, in_channels, c7, name=None):
super(InceptionC, self).__init__()
self.branch1x1 = ConvBNLayer(in_channels,
192,
1,
name=name + '.branch1x1')
self.branch7x7_1 = ConvBNLayer(in_channels,
c7,
1,
name=name + '.branch7x7_1')
self.branch7x7_2 = ConvBNLayer(c7,
c7, (1, 7),
padding=(0, 3),
name=name + '.branch7x7_2')
self.branch7x7_3 = ConvBNLayer(c7,
192, (7, 1),
padding=(3, 0),
name=name + '.branch7x7_3')
self.branch7x7dbl_1 = ConvBNLayer(in_channels,
c7,
1,
name=name + '.branch7x7dbl_1')
self.branch7x7dbl_2 = ConvBNLayer(c7,
c7, (7, 1),
padding=(3, 0),
name=name + '.branch7x7dbl_2')
self.branch7x7dbl_3 = ConvBNLayer(c7,
c7, (1, 7),
padding=(0, 3),
name=name + '.branch7x7dbl_3')
self.branch7x7dbl_4 = ConvBNLayer(c7,
c7, (7, 1),
padding=(3, 0),
name=name + '.branch7x7dbl_4')
self.branch7x7dbl_5 = ConvBNLayer(c7,
192, (1, 7),
padding=(0, 3),
name=name + '.branch7x7dbl_5')
self.branch_pool0 = AvgPool2D(kernel_size=3,
stride=1,
padding=1,
exclusive=True)
self.branch_pool = ConvBNLayer(in_channels,
192,
1,
name=name + '.branch_pool')
def forward(self, x):
branch1x1 = self.branch1x1(x)
branch7x7 = self.branch7x7_1(x)
branch7x7 = self.branch7x7_2(branch7x7)
branch7x7 = self.branch7x7_3(branch7x7)
branch7x7dbl = self.branch7x7dbl_1(x)
branch7x7dbl = self.branch7x7dbl_2(branch7x7dbl)
branch7x7dbl = self.branch7x7dbl_3(branch7x7dbl)
branch7x7dbl = self.branch7x7dbl_4(branch7x7dbl)
branch7x7dbl = self.branch7x7dbl_5(branch7x7dbl)
branch_pool = self.branch_pool0(x)
branch_pool = self.branch_pool(branch_pool)
return paddle.concat(
[branch1x1, branch7x7, branch7x7dbl, branch_pool], axis=1)
class InceptionD(nn.Layer):
def __init__(self, in_channels, name=None):
super(InceptionD, self).__init__()
self.branch3x3_1 = ConvBNLayer(in_channels,
192,
1,
name=name + '.branch3x3_1')
self.branch3x3_2 = ConvBNLayer(192,
320,
3,
stride=2,
name=name + '.branch3x3_2')
self.branch7x7x3_1 = ConvBNLayer(in_channels,
192,
1,
name=name + '.branch7x7x3_1')
self.branch7x7x3_2 = ConvBNLayer(192,
192, (1, 7),
padding=(0, 3),
name=name + '.branch7x7x3_2')
self.branch7x7x3_3 = ConvBNLayer(192,
192, (7, 1),
padding=(3, 0),
name=name + '.branch7x7x3_3')
self.branch7x7x3_4 = ConvBNLayer(192,
192,
3,
stride=2,
name=name + '.branch7x7x3_4')
self.branch_pool = MaxPool2D(kernel_size=3, stride=2)
def forward(self, x):
branch3x3 = self.branch3x3_1(x)
branch3x3 = self.branch3x3_2(branch3x3)
branch7x7x3 = self.branch7x7x3_1(x)
branch7x7x3 = self.branch7x7x3_2(branch7x7x3)
branch7x7x3 = self.branch7x7x3_3(branch7x7x3)
branch7x7x3 = self.branch7x7x3_4(branch7x7x3)
branch_pool = self.branch_pool(x)
return paddle.concat([branch3x3, branch7x7x3, branch_pool],
axis=1)
class InceptionE(nn.Layer):
def __init__(self, in_channels, name=None):
super(InceptionE, self).__init__()
self.branch1x1 = ConvBNLayer(in_channels,
320,
1,
name=name + '.branch1x1')
self.branch3x3_1 = ConvBNLayer(in_channels,
384,
1,
name=name + '.branch3x3_1')
self.branch3x3_2a = ConvBNLayer(384,
384, (1, 3),
padding=(0, 1),
name=name + '.branch3x3_2a')
self.branch3x3_2b = ConvBNLayer(384,
384, (3, 1),
padding=(1, 0),
name=name + '.branch3x3_2b')
self.branch3x3dbl_1 = ConvBNLayer(in_channels,
448,
1,
name=name + '.branch3x3dbl_1')
self.branch3x3dbl_2 = ConvBNLayer(448,
384,
3,
padding=1,
name=name + '.branch3x3dbl_2')
self.branch3x3dbl_3a = ConvBNLayer(384,
384, (1, 3),
padding=(0, 1),
name=name + '.branch3x3dbl_3a')
self.branch3x3dbl_3b = ConvBNLayer(384,
384, (3, 1),
padding=(1, 0),
name=name + '.branch3x3dbl_3b')
self.branch_pool0 = AvgPool2D(kernel_size=3,
stride=1,
padding=1,
exclusive=True)
self.branch_pool = ConvBNLayer(in_channels,
192,
1,
name=name + '.branch_pool')
def forward(self, x):
branch1x1 = self.branch1x1(x)
branch3x3_1 = self.branch3x3_1(x)
branch3x3_2a = self.branch3x3_2a(branch3x3_1)
branch3x3_2b = self.branch3x3_2b(branch3x3_1)
branch3x3 = paddle.concat([branch3x3_2a, branch3x3_2b], axis=1)
branch3x3dbl = self.branch3x3dbl_1(x)
branch3x3dbl = self.branch3x3dbl_2(branch3x3dbl)
branch3x3dbl_3a = self.branch3x3dbl_3a(branch3x3dbl)
branch3x3dbl_3b = self.branch3x3dbl_3b(branch3x3dbl)
branch3x3dbl = paddle.concat([branch3x3dbl_3a, branch3x3dbl_3b],
axis=1)
branch_pool = self.branch_pool0(x)
branch_pool = self.branch_pool(branch_pool)
return paddle.concat(
[branch1x1, branch3x3, branch3x3dbl, branch_pool], axis=1)
class InceptionAux(nn.Layer):
def __init__(self, in_channels, num_classes, name=None):
super(InceptionAux, self).__init__()
self.num_classes = num_classes
self.pool0 = AvgPool2D(kernel_size=5, stride=3)
self.conv0 = ConvBNLayer(in_channels, 128, 1, name=name + '.conv0')
self.conv1 = ConvBNLayer(128, 768, 5, name=name + '.conv1')
        # paddle.nn.AvgPool2D has no global_pooling argument; adaptive pooling
        # to 1x1 is the equivalent global average pool
        self.pool1 = AdaptiveAvgPool2D(output_size=1)
        # dynamic-graph classifier head, replacing the static-graph fc helper
        self.fc = Linear(768, num_classes)
def forward(self, x):
x = self.pool0(x)
x = self.conv0(x)
x = self.conv1(x)
x = self.pool1(x)
        x = paddle.flatten(x, start_axis=1)
        x = self.fc(x)
return x
class Fid_inceptionA(nn.Layer):
""" FID block in inception v3
"""
def __init__(self, in_channels, pool_features, name=None):
super(Fid_inceptionA, self).__init__()
self.branch1x1 = ConvBNLayer(in_channels,
64,
1,
name=name + '.branch1x1')
self.branch5x5_1 = ConvBNLayer(in_channels,
48,
1,
name=name + '.branch5x5_1')
self.branch5x5_2 = ConvBNLayer(48,
64,
5,
padding=2,
name=name + '.branch5x5_2')
self.branch3x3dbl_1 = ConvBNLayer(in_channels,
64,
1,
name=name + '.branch3x3dbl_1')
self.branch3x3dbl_2 = ConvBNLayer(64,
96,
3,
padding=1,
name=name + '.branch3x3dbl_2')
self.branch3x3dbl_3 = ConvBNLayer(96,
96,
3,
padding=1,
name=name + '.branch3x3dbl_3')
self.branch_pool0 = AvgPool2D(kernel_size=3,
stride=1,
padding=1,
exclusive=True)
self.branch_pool = ConvBNLayer(in_channels,
pool_features,
1,
name=name + '.branch_pool')
def forward(self, x):
branch1x1 = self.branch1x1(x)
branch5x5 = self.branch5x5_1(x)
branch5x5 = self.branch5x5_2(branch5x5)
branch3x3dbl = self.branch3x3dbl_1(x)
branch3x3dbl = self.branch3x3dbl_2(branch3x3dbl)
branch3x3dbl = self.branch3x3dbl_3(branch3x3dbl)
branch_pool = self.branch_pool0(x)
branch_pool = self.branch_pool(branch_pool)
return paddle.concat(
[branch1x1, branch5x5, branch3x3dbl, branch_pool], axis=1)
class Fid_inceptionC(nn.Layer):
""" FID block in inception v3
"""
def __init__(self, in_channels, c7, name=None):
super(Fid_inceptionC, self).__init__()
self.branch1x1 = ConvBNLayer(in_channels,
192,
1,
name=name + '.branch1x1')
self.branch7x7_1 = ConvBNLayer(in_channels,
c7,
1,
name=name + '.branch7x7_1')
self.branch7x7_2 = ConvBNLayer(c7,
c7, (1, 7),
padding=(0, 3),
name=name + '.branch7x7_2')
self.branch7x7_3 = ConvBNLayer(c7,
192, (7, 1),
padding=(3, 0),
name=name + '.branch7x7_3')
self.branch7x7dbl_1 = ConvBNLayer(in_channels,
c7,
1,
name=name + '.branch7x7dbl_1')
self.branch7x7dbl_2 = ConvBNLayer(c7,
c7, (7, 1),
padding=(3, 0),
name=name + '.branch7x7dbl_2')
self.branch7x7dbl_3 = ConvBNLayer(c7,
c7, (1, 7),
padding=(0, 3),
name=name + '.branch7x7dbl_3')
self.branch7x7dbl_4 = ConvBNLayer(c7,
c7, (7, 1),
padding=(3, 0),
name=name + '.branch7x7dbl_4')
self.branch7x7dbl_5 = ConvBNLayer(c7,
192, (1, 7),
padding=(0, 3),
name=name + '.branch7x7dbl_5')
self.branch_pool0 = AvgPool2D(kernel_size=3,
stride=1,
padding=1,
exclusive=True)
self.branch_pool = ConvBNLayer(in_channels,
192,
1,
name=name + '.branch_pool')
def forward(self, x):
branch1x1 = self.branch1x1(x)
branch7x7 = self.branch7x7_1(x)
branch7x7 = self.branch7x7_2(branch7x7)
branch7x7 = self.branch7x7_3(branch7x7)
branch7x7dbl = self.branch7x7dbl_1(x)
branch7x7dbl = self.branch7x7dbl_2(branch7x7dbl)
branch7x7dbl = self.branch7x7dbl_3(branch7x7dbl)
branch7x7dbl = self.branch7x7dbl_4(branch7x7dbl)
branch7x7dbl = self.branch7x7dbl_5(branch7x7dbl)
branch_pool = self.branch_pool0(x)
branch_pool = self.branch_pool(branch_pool)
return paddle.concat(
[branch1x1, branch7x7, branch7x7dbl, branch_pool], axis=1)
class Fid_inceptionE_1(nn.Layer):
""" FID block in inception v3
"""
def __init__(self, in_channels, name=None):
super(Fid_inceptionE_1, self).__init__()
self.branch1x1 = ConvBNLayer(in_channels,
320,
1,
name=name + '.branch1x1')
self.branch3x3_1 = ConvBNLayer(in_channels,
384,
1,
name=name + '.branch3x3_1')
self.branch3x3_2a = ConvBNLayer(384,
384, (1, 3),
padding=(0, 1),
name=name + '.branch3x3_2a')
self.branch3x3_2b = ConvBNLayer(384,
384, (3, 1),
padding=(1, 0),
name=name + '.branch3x3_2b')
self.branch3x3dbl_1 = ConvBNLayer(in_channels,
448,
1,
name=name + '.branch3x3dbl_1')
self.branch3x3dbl_2 = ConvBNLayer(448,
384,
3,
padding=1,
name=name + '.branch3x3dbl_2')
self.branch3x3dbl_3a = ConvBNLayer(384,
384, (1, 3),
padding=(0, 1),
name=name + '.branch3x3dbl_3a')
self.branch3x3dbl_3b = ConvBNLayer(384,
384, (3, 1),
padding=(1, 0),
name=name + '.branch3x3dbl_3b')
self.branch_pool0 = AvgPool2D(kernel_size=3,
stride=1,
padding=1,
exclusive=True)
self.branch_pool = ConvBNLayer(in_channels,
192,
1,
name=name + '.branch_pool')
def forward(self, x):
branch1x1 = self.branch1x1(x)
branch3x3_1 = self.branch3x3_1(x)
branch3x3_2a = self.branch3x3_2a(branch3x3_1)
branch3x3_2b = self.branch3x3_2b(branch3x3_1)
branch3x3 = paddle.concat([branch3x3_2a, branch3x3_2b], axis=1)
branch3x3dbl = self.branch3x3dbl_1(x)
branch3x3dbl = self.branch3x3dbl_2(branch3x3dbl)
branch3x3dbl_3a = self.branch3x3dbl_3a(branch3x3dbl)
branch3x3dbl_3b = self.branch3x3dbl_3b(branch3x3dbl)
branch3x3dbl = paddle.concat([branch3x3dbl_3a, branch3x3dbl_3b],
axis=1)
branch_pool = self.branch_pool0(x)
branch_pool = self.branch_pool(branch_pool)
return paddle.concat(
[branch1x1, branch3x3, branch3x3dbl, branch_pool], axis=1)
class Fid_inceptionE_2(nn.Layer):
""" FID block in inception v3
"""
def __init__(self, in_channels, name=None):
super(Fid_inceptionE_2, self).__init__()
self.branch1x1 = ConvBNLayer(in_channels,
320,
1,
name=name + '.branch1x1')
self.branch3x3_1 = ConvBNLayer(in_channels,
384,
1,
name=name + '.branch3x3_1')
self.branch3x3_2a = ConvBNLayer(384,
384, (1, 3),
padding=(0, 1),
name=name + '.branch3x3_2a')
self.branch3x3_2b = ConvBNLayer(384,
384, (3, 1),
padding=(1, 0),
name=name + '.branch3x3_2b')
self.branch3x3dbl_1 = ConvBNLayer(in_channels,
448,
1,
name=name + '.branch3x3dbl_1')
self.branch3x3dbl_2 = ConvBNLayer(448,
384,
3,
padding=1,
name=name + '.branch3x3dbl_2')
self.branch3x3dbl_3a = ConvBNLayer(384,
384, (1, 3),
padding=(0, 1),
name=name + '.branch3x3dbl_3a')
self.branch3x3dbl_3b = ConvBNLayer(384,
384, (3, 1),
padding=(1, 0),
name=name + '.branch3x3dbl_3b')
### same with paper
self.branch_pool0 = MaxPool2D(kernel_size=3,
stride=1,
padding=1)
self.branch_pool = ConvBNLayer(in_channels,
192,
1,
name=name + '.branch_pool')
def forward(self, x):
branch1x1 = self.branch1x1(x)
branch3x3_1 = self.branch3x3_1(x)
branch3x3_2a = self.branch3x3_2a(branch3x3_1)
branch3x3_2b = self.branch3x3_2b(branch3x3_1)
branch3x3 = paddle.concat([branch3x3_2a, branch3x3_2b], axis=1)
branch3x3dbl = self.branch3x3dbl_1(x)
branch3x3dbl = self.branch3x3dbl_2(branch3x3dbl)
branch3x3dbl_3a = self.branch3x3dbl_3a(branch3x3dbl)
branch3x3dbl_3b = self.branch3x3dbl_3b(branch3x3dbl)
branch3x3dbl = paddle.concat([branch3x3dbl_3a, branch3x3dbl_3b],
axis=1)
branch_pool = self.branch_pool0(x)
branch_pool = self.branch_pool(branch_pool)
return paddle.concat(
[branch1x1, branch3x3, branch3x3dbl, branch_pool], axis=1)
class ConvBNLayer(nn.Layer):
def __init__(self,
in_channels,
num_filters,
filter_size,
stride=1,
padding=0,
groups=1,
act='relu',
name=None):
super(ConvBNLayer, self).__init__()
self.conv = Conv2D(in_channels=in_channels,
out_channels=num_filters,
kernel_size=filter_size,
stride=stride,
padding=padding,
groups=groups,
weight_attr=paddle.ParamAttr(name=name + ".conv.weight"),
bias_attr=False)
self.bn = BatchNorm(num_filters,
act=act,
epsilon=0.001,
param_attr=paddle.ParamAttr(name=name + ".bn.weight"),
bias_attr=paddle.ParamAttr(name=name + ".bn.bias"),
moving_mean_name=name + '.bn.running_mean',
moving_variance_name=name + '.bn.running_var')
def forward(self, inputs):
y = self.conv(inputs)
y = self.bn(y)
return y
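# Added usage sketch (assumed from the class interface above, not part of the
# original file): extract the final pooled FID features for a random batch.
if __name__ == '__main__':
    model = InceptionV3(output_blocks=[3])
    model.eval()  # use running BatchNorm statistics, as FID extraction expects
    x = paddle.randn([2, 3, 256, 256])  # resized internally to 299x299
    feats, _ = model(x)
    print(feats[0].shape)  # expected: [2, 2048, 1, 1]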
|
[
"yrz1016@163.com"
] |
yrz1016@163.com
|
ba828bef67d461eaeca4b8d9466ff89c0bad702d
|
5692e6a1889a87bd6c6b241ddb8a9d53b464bb3f
|
/src/host/mqtt.py
|
018c3ab854de334f15aa4bce5f6749885d77f648
|
[
"MIT"
] |
permissive
|
h4xxel/kanelbulle
|
0cbccfbbf8debfb288ebb42a5bbccca9ae86b3bd
|
0e588c3d752fea3c62e94919422d1b5a46a38c4b
|
refs/heads/master
| 2020-05-18T19:07:30.579579
| 2015-03-17T15:47:41
| 2015-03-17T15:47:41
| 29,966,398
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 987
|
py
|
#!/usr/bin/env python
import json
import mosquitto
# The callback for when the client receives a CONNACK response from the server.
def on_connect(obj, rc):
print("Connected with result code "+str(rc))
# Subscribing in on_connect() means that if we lose the connection and
# reconnect then subscriptions will be renewed.
client.subscribe("Testsites/MunktellSiencePark/meterevent")
# The callback for when a PUBLISH message is received from the server.
def on_message(obj, msg):
x = json.loads(str(msg.payload))
print(x['power'])
a = float(x['power'])
    print(a / 10.0)
client = mosquitto.Mosquitto("kanelbulle")
client.on_connect = on_connect
client.on_message = on_message
if client.connect("op-en.se", 1883, 60) > 0:
    print("kanelbulle")
# Blocking call that processes network traffic, dispatches callbacks and
# handles reconnecting.
# Other loop*() functions are available that give a threaded interface and a
# manual interface.
while 1:
client.loop()
|
[
"shemani@kth.se"
] |
shemani@kth.se
|
9d9a3a67e4e579bb8bbd2d88810a81d10778ab0a
|
b6a52446c73fbe837220e5e82cc39579efdc7201
|
/venv/bin/gunicorn_paster
|
5ca143bf826f280fe9c42b17768a3d5a88f30ee9
|
[] |
no_license
|
billyggroves/prospectRoulette
|
e29e3a39edf1640f7cda6ba75da64b6cddadcd79
|
5f1757765e38014f95cc59b66d1d40c3b8fc9638
|
refs/heads/master
| 2021-05-03T21:27:07.103334
| 2018-05-22T01:36:31
| 2018-05-22T01:36:31
| 120,383,639
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 265
|
#!/Users/newuser/Desktop/prospectRoulette/venv/bin/python3.6
# -*- coding: utf-8 -*-
import re
import sys
from gunicorn.app.pasterapp import run
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(run())
|
[
"newuser@Dirks-MacBook-Air.local"
] |
newuser@Dirks-MacBook-Air.local
|
|
26e3f726b2a432bebbd1302405d43c656284ae34
|
fc0bc3f652fe86b8e7cc01c29f58428de2a1fd27
|
/plugins/plugin_example/plugin_test.py
|
3ea592ea92cf40041947a6a634f567224a4b4003
|
[] |
no_license
|
simanwhite/FlowAssistant
|
677438e705e079fc27b3b7a87ecde12004825a36
|
ad90bcfaa5e6ab70971d647815148b3daebfcfcb
|
refs/heads/master
| 2021-01-11T20:05:48.304441
| 2017-01-16T02:20:56
| 2017-01-16T02:20:56
| 79,037,744
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 981
|
py
|
import os
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from plugins.Categories import *
from gui.WebView import WebView
class PluginOne(TabularPlugin):
name = 'Tabular Plugin 1'
_this_directory = os.path.dirname(__file__)
icon = '%s/res/bottom-top.gif' % _this_directory
form = WebView.WebViewForm('http://twiki.amd.com/twiki/bin/view/DesignCAD/CVSRDCTaskDFP')
def __init__(self):
super(PluginOne, self).__init__()
        print('init of tabular plugin 1')
# actions in action dictionary will be added in Plugins menu bar
self.action_info_lst = [
# (description, function_handle, icon, key-shortcut),
{
'description': 'Print Name',
'function_handle': self.print_name,
'icon': '../res/tb_back.gif',
'shortcut': QKeySequence.Print,
},
]
@staticmethod
def print_name():
        print('This is Tabular Plugin 1!')
|
[
"simanwhite@126.com"
] |
simanwhite@126.com
|
8fae8f078c0646796dc561828590d3f6ab538b0d
|
c9d444386d8c8b33915a25155b17e8532fd64c59
|
/03-persistent_data/sql_queries.py
|
bc5ee12cd33ac0e51f2cdf650b924313dcac51e2
|
[] |
no_license
|
xkustan/PV248
|
45ca890786399ab2671b34bf0776bd24ab91f917
|
e3e97a1fea3d0a4e65368facf28f4a758e9609a8
|
refs/heads/master
| 2020-03-29T19:53:55.781537
| 2019-01-14T22:09:06
| 2019-01-14T22:09:06
| 150,284,380
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,951
|
py
|
"""SQL queries."""
CREATE_EMPTY_DB = [
"""create table person ( id integer primary key not null,
born integer,
died integer,
name varchar not null);""",
"""create table score ( id integer primary key not null,
name varchar,
genre varchar,
key varchar,
incipit varchar,
year integer);""",
"""create table voice ( id integer primary key not null,
number integer not null,
score integer references score( id ) not null,
range varchar,
name varchar );""",
"""create table edition ( id integer primary key not null,
score integer references score( id ) not null,
name varchar,
year integer );""",
"""create table score_author( id integer primary key not null,
score integer references score( id ) not null,
composer integer references person( id ) not null );""",
"""create table edition_author( id integer primary key not null,
edition integer references edition( id ) not null,
editor integer references person( id ) not null );""",
"""create table print ( id integer primary key not null,
partiture char(1) default 'N' not null,
edition integer references edition( id ) );"""
]
DEFINED_CONSTRAINTS = [
"""CREATE UNIQUE INDEX person_name_unique_index ON person(name);""",
"""CREATE UNIQUE INDEX score_author_unique_index ON score_author(score, composer);""",
"""CREATE UNIQUE INDEX voice_unique_index ON voice(number, score, ifnull(range, ''), ifnull(name, ''));""",
"""CREATE UNIQUE INDEX print_unique_index ON print(id);""",
]
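# Added sketch: applying the schema with sqlite3. The connection path is the
# caller's choice; the surrounding project may wire this up differently.
def create_empty_db(path):
    import sqlite3
    conn = sqlite3.connect(path)
    for stmt in CREATE_EMPTY_DB + DEFINED_CONSTRAINTS:
        conn.execute(stmt)
    conn.commit()
    return conn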
|
[
"xenia.kustanova@gmail.com"
] |
xenia.kustanova@gmail.com
|
b199c9e4092bb8ae5cb5399e65a6b090b247103f
|
25404f4cfb9be3e6f1b3fe31a1554459eb200813
|
/my_data_structure/my_dict/chang_dict_and_pass_the_input_value.py
|
ca8d669cd32aa1e3d1d97049b25a8c3b70504f3e
|
[] |
no_license
|
nightimero/annal_report_test
|
1c6eb4b71482f870c753f5084212afd071929f57
|
7bbc76ba703527ba8f4b84fbdb94fd57b37b9887
|
refs/heads/master
| 2021-09-06T21:18:59.534963
| 2018-02-11T15:31:21
| 2018-02-11T15:31:21
| 103,259,391
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 340
|
py
|
# -*- coding: utf-8 -*-
def print_b(**kwargs):
    print('print_b', kwargs)
    print('print_b, a:', kwargs['a'])
def print_a(**kwargs):
    print(kwargs['a'])
    print(kwargs['b'])
    print(kwargs['c'])
    print_b(**kwargs)
dict_a = {'a': 1,
'b': 2,
'c': 3,
}
print_a(**dict_a)
print_a(a=2, b=3, c=4)
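# Note (added): the two calls above are equivalent -- **dict_a unpacks the
# dict into keyword arguments, which print_a's **kwargs collects back.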
|
[
"chenxiang@aiknown.com"
] |
chenxiang@aiknown.com
|
befaf9bd3f0976192acb33af02d31694f4832e35
|
767c2802d355ad389e29d50ab28437b10b78a990
|
/slicepractice1.py
|
76708a19ae08241ad06b66f5610b4b67226a3627
|
[] |
no_license
|
24emmory/Python-Crash-Course
|
ed15047f9e9bbb799f2764319e11b5cb35f3f4ae
|
b64b969102cc50d12952c6853e544598e0b34f70
|
refs/heads/master
| 2022-11-26T06:27:41.916105
| 2020-08-01T22:51:16
| 2020-08-01T22:51:16
| 274,295,667
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 207
|
py
|
passage = ['ave', 'blvd', 'road', 'street']
my_passage = passage[:]  # [:] makes an independent copy of the list
print("Names of pathways on the road:")
print(my_passage)
# the exercise: a one-item slice, a full-copy slice, and items 1 through 3
print(my_passage[2:3])
print(my_passage[:])
print(my_passage[1:4])
|
[
"24emmory@gmail.com"
] |
24emmory@gmail.com
|
b567077820ad0049e301dd127fc747cd040cf8cf
|
1271621c750c618570c7658d2e4eb63135223d11
|
/Developer Preview/Volume.app/Volume
|
252b0d03ac2a6129e64c9bc6932255da0778209e
|
[
"BSD-2-Clause"
] |
permissive
|
rbircher/Utilities
|
01aa5054f78db442f6fdcb870fc315007614834f
|
e919db5b7a383ab00563bb1111d9421e640e74f1
|
refs/heads/master
| 2023-03-06T13:12:37.520993
| 2021-02-15T16:38:26
| 2021-02-15T16:38:26
| 339,134,041
| 0
| 0
|
BSD-2-Clause
| 2021-02-15T16:38:27
| 2021-02-15T16:16:27
| null |
UTF-8
|
Python
| false
| false
| 13,444
|
#!/usr/bin/env python3
# https://github.com/helloSystem/Menu/issues/3
from PyQt5.QtWidgets import QApplication, QSystemTrayIcon, QMenu, QAction, QHBoxLayout, QGroupBox, QSlider, QWidget, \
QActionGroup, QDesktopWidget, QMessageBox
from PyQt5.QtGui import QIcon, QPixmap
from PyQt5.QtCore import Qt, QProcess, QMetaObject, QCoreApplication, QEvent, QObject, QTimer
import sys, os
# Show the slider UI when QSystemTrayIcon is left-clicked
class Ui_Slider(QWidget):
def __init__(self):
super().__init__()
self.vol = 0
self.mic = 0
# self.resize(170, 282)
self.setFixedHeight(200)
self.setMaximumWidth(150)
screen = QDesktopWidget().screenGeometry()
widget = self.geometry()
x = screen.width() - widget.width()
y = 21 # Set this to the height of the global menu bar
# self.setWindowFlag(Qt.WindowStaysOnTopHint)
self.setWindowFlag(Qt.FramelessWindowHint)
# FIXME: Find a way to give a hint to the window manager that this is a drop-down menu
# similar to what we are doing in Menu in C++ using
# KWindowSystem::setType(actionCompleter->popup()->winId(), NET::DropdownMenu);
# self.setWindowFlag("NET::DropdownMenu") # Does not work
self.setWindowFlag(Qt.WindowSystemMenuHint) # Does not seem to do the job
y = 15 # FIXME Remove the need for this by getting the above to work
self.move(x, y)
self.horizontalLayout = QHBoxLayout(self)
self.horizontalLayout.setContentsMargins(15, 15, 15, 15)
# Box with slider for playback volume
self.groupBox = QGroupBox()
self.groupBox.setObjectName("Play")
self.horizontalLayout_2 = QHBoxLayout(self.groupBox)
self.play_slider = QSlider(self.groupBox)
self.play_slider.setOrientation(Qt.Vertical)
self.horizontalLayout_2.addWidget(self.play_slider)
self.horizontalLayout.addWidget(self.groupBox)
self.play_slider.sliderReleased.connect(self.sliderReleased)
# Box with slider for recording volume
# TODO: Check whether the output of 'mixer' has a line that starts with 'Mixer mic', otherwise not add this
self.groupBox_2 = QGroupBox()
        self.groupBox_2.setObjectName("Record")
self.horizontalLayout_3 = QHBoxLayout(self.groupBox_2)
self.rec_slider = QSlider(self.groupBox_2)
self.rec_slider.setOrientation(Qt.Vertical)
self.horizontalLayout_3.addWidget(self.rec_slider)
self.horizontalLayout.addWidget(self.groupBox_2)
self.play_slider.valueChanged['int'].connect(self.setPlayVolume)
self.rec_slider.valueChanged['int'].connect(self.setRecVolume)
self.rec_slider.sliderReleased.connect(self.sliderReleased)
QMetaObject.connectSlotsByName(self)
_tr = QCoreApplication.translate
self.groupBox.setTitle(_tr("Slider", "Play"))
self.groupBox_2.setTitle(_tr("Slider", "Record"))
self.installEventFilter(self)
self.updateSliderValues()
self.timer = QTimer()
self.timer.setInterval(1000) # Every second
self.timer.timeout.connect(self.updateSliderValues)
def updateSliderValues(self):
p = QProcess()
p.setProgram("mixer")
p.setArguments(["-S"])
print(p.program() + " " + " ".join(p.arguments()))
p.start()
p.waitForFinished()
results = str(p.readAllStandardOutput(), 'utf-8').strip().split(" ")
print(results)
self.play_slider.setValue(0)
self.rec_slider.setValue(0)
self.groupBox_2.setHidden(True) # Except if we find a mic below
for result in results:
print(result)
parts = result.split(":")
if len(parts) == 3 and parts[0] == "vol":
self.play_slider.setValue(int(parts[1]))
if len(parts) == 3 and parts[0] == "mic":
self.rec_slider.setValue(int(parts[1]))
self.groupBox_2.setHidden(False)
def eventFilter(self, obj, event):
# Close the window if it is not the active window anymore
if event.type() == QEvent.ActivationChange and not self.isActiveWindow():
self.timer.stop()
self.close()
# Update the slider values when the window is shown
if event.type() == QEvent.ActivationChange and self.isActiveWindow():
self.timer.start()
self.updateSliderValues()
return QWidget.eventFilter(self, obj, event)
def setPlayVolume(self):
value = self.play_slider.value()
p = QProcess()
p.setProgram("mixer")
# TODO: Actually there is Mixer vol and Mixer pcm. Which is which and what are they doing? Which one(s) should we set?
p.setArguments(["vol", str(value) + ":" + str(value)])
print(p.program() + " " + " ".join(p.arguments()))
p.startDetached()
def setRecVolume(self):
print(self.rec_slider.value())
value = self.rec_slider.value()
p = QProcess()
p.setProgram("mixer")
# TODO: Check whether the output of 'mixer' has a line that starts with 'Mixer mic', otherwise not do this
p.setArguments(["mic", str(value) + ":" + str(value)])
print(p.program() + " " + " ".join(p.arguments()))
p.startDetached()
def sliderReleased(self):
self.close()
class VolumeMenu(QObject):
def __init__(self):
super().__init__()
self.showTODO()
icon = QIcon.fromTheme("audio-volume-high-symbolic") # FIXME: How to get dark rather than bright icon?
self.tray = QSystemTrayIcon()
self.tray.setIcon(icon)
self.tray.setVisible(True)
self.menu = QMenu()
self.tray.activated.connect(self.onClicked) # Refresh each time the menu is clicked. FIXME: Does not work on right-click; why?
self.tray.setContextMenu(self.menu)
# TODO: Add a check to ensure that
# sysctl hw.snd.verbose is 0
# Otherwise this application will not work correctly
# and that
# sysctl hw.snd.default_auto is 2
# Otherwise newly attached sound devices will not be activated automatically
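        # A minimal sketch of such a check (assumption: FreeBSD's sysctl(8)
        # with -n prints just the value; this only warns and changes nothing):
        for key, expected in (("hw.snd.verbose", "0"), ("hw.snd.default_auto", "2")):
            p = QProcess()
            p.start("sysctl", ["-n", key])
            p.waitForFinished()
            value = str(p.readAllStandardOutput(), "utf-8").strip()
            if value != expected:
                print("Warning: %s is '%s', expected %s" % (key, value, expected))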
# NOTE:
# https://forum.learnpyqt.com/t/qsystemtrayicon-example/689
# Sneaky PyQt quirk! A reference to the actions must be kept around or the actions will be destroyed
self.actions = []
self.sliderWindow = None
self.refreshMenu() # Initially populate the menu
self.tray.installEventFilter(self) # FIXME: This never seems to get called, why?
self.installEventFilter(self) # FIXME: This never seems to get called, why?
def eventFilter(self, obj, event):
print("eventFilter function running") # FIXME: Why is this never called when the icon is right-clicked?
# We need to refresh the contents of the right-click menu somehow when the user right-clicks...
def onClicked(self, reason):
self.refreshMenu()
S = Ui_Slider()
self.sliderWindow = S # Need to keep a reference around so that it does not get destroyed
S.show()
def refreshMenu(self):
self.actions = []
self.menu.clear()
# Get the sound devices from
# cat /dev/sndstat
p = QProcess()
p.setProgram("cat")
p.setArguments(["/dev/sndstat"])
print(p.program() + " " + " ".join(p.arguments()))
p.start()
p.waitForFinished()
lines = str(p.readAllStandardOutput(), 'utf-8').strip().split("\n")
# TODO: /dev/sndstat calls *any* USB audio device just "<USB audio>"
# How can we get the proper name, e.g., the one that shows up in /var/log/messages?
# This is what /var/log/messages shows:
# ugen0.8: <ZOOM Corporation H1> at usbus0
# uaudio1 on uhub3
# uaudio1: <ZOOM Corporation H1, class 0/0, rev 1.10/0.00, addr 7> on usbus0
# (...)
# uaudio1: No MIDI sequencer.
# pcm3: <USB audio> on uaudio1
# uaudio1: No HID volume keys found.
#
# This is what /dev/sndstat shows:
# pcm3: <USB audio> (play/rec)
#
# So we could match from pcm3 via uaudio1 to <ZOOM Corporation H1>
# but doing so by parsing /var/log/messages can become messy quickly
# as devices can come and go. What is the best way to do this?
# Add the devices to the menu
# Playback devices
self.playbackGroup = QActionGroup(self.menu) # Only one of the actions added to this group can be active
# action = QAction("Play")
# action.setDisabled(True)
# self.actions.append(action)
# self.menu.addAction(action)
for line in lines:
if not line.startswith("pcm"): # or not "play" in line:
continue
print(line)
label = line.split("<")[1].split(">")[0]
action = QAction(line)
action.__setattr__("device", line.split(":")[0])
action.triggered.connect(self.switchDevice) # lambda could be used to pass an argument but the argument passed is taken at the time when this executes, which is not what we want
action.setText(label)
action.setCheckable(True)
if "default" in line:
action.setChecked(True)
self.actions.append(action)
self.playbackGroup.addAction(action)
self.menu.addAction(action)
# self.menu.addSeparator()
# # Recording devices
# self.recordingGroup = QActionGroup(self.menu) # Only one of the actions added to this group can be active
# action = QAction("Record")
# action.setDisabled(True)
# self.actions.append(action)
# self.menu.addAction(action)
# for line in lines:
# if not line.startswith("pcm") or not "rec" in line:
# continue
# print(line)
# label = line.split("<")[1].split(">")[0]
# action = QAction(line + " rec")
# action.__setattr__("device", line.split(":")[0])
# action.triggered.connect(self.switchDevice) # lambda could be used to pass an argument but the argument passed is taken at the time when this executes, which is not what we want
# action.setText(label)
# action.setCheckable(True)
# if "default" in line:
# action.setChecked(True)
# self.actions.append(action)
# self.recordingGroup.addAction(action)
# self.menu.addAction(action)
self.menu.addSeparator()
action = QAction("About")
action.triggered.connect(self._showAbout)
self.actions.append(action)
self.menu.addAction(action)
def switchDevice(self, line):
# sysctl hw.snd.default_unit=1 switches to pcm1
if getattr(self.playbackGroup.checkedAction(), "device").startswith("pcm"):
dev = getattr(self.playbackGroup.checkedAction(), "device").replace("pcm", "")
else:
return
p = QProcess()
p.setProgram("sysctl")
# TODO: How to ONLY switch the device for recording or ONLY the device for playback?
p.setArguments(["hw.snd.default_unit=" + dev])
print(p.program() + " " + " ".join(p.arguments()))
p.startDetached()
def _showAbout(self):
print("showDialog")
msg = QMessageBox()
msg.setWindowTitle("Volume")
msg.setIconPixmap(QPixmap(os.path.dirname(__file__) + "/Resources/Volume.png").scaledToWidth(64, Qt.SmoothTransformation))
candidates = ["COPYRIGHT", "COPYING", "LICENSE"]
for candidate in candidates:
if os.path.exists(os.path.dirname(__file__) + "/" + candidate):
with open(os.path.dirname(__file__) + "/" + candidate, 'r') as file:
data = file.read()
msg.setDetailedText(data)
msg.setText("<h3>Volume</h3>")
msg.setInformativeText("A simple QSystemTrayIcon application to select the audio device using <a href='https://www.freebsd.org/cgi/man.cgi?sysctl'>sysctl hw.snd.default_unit</a> and to set the volume using using <a href='https://www.freebsd.org/cgi/man.cgi?mixer'>mixer</a><br><br><a href='https://github.com/helloSystem/Utilities'>https://github.com/helloSystem/Utilities</a>")
msg.exec()
def showTODO(self, detailed_text=""):
msg = QMessageBox()
msg.setIcon(QMessageBox.Information)
msg.setWindowTitle("Developer Preview")
msg.setText("This application is a preview for developers.<br>It is not fully functional yet.")
msg.setDetailedText("Please see https://github.com/helloSystem/Utilities if you would like to contribute.\n\n" + detailed_text)
msg.exec()
if __name__ == "__main__":
# Simple singleton:
# Ensure that only one instance of this application is running by trying to kill the other ones
p = QProcess()
p.setProgram("pkill")
p.setArguments(["-f", os.path.abspath(__file__)])
cmd = p.program() + " " + " ".join(p.arguments())
print(cmd)
p.start()
p.waitForFinished()
app = QApplication(sys.argv)
app.setQuitOnLastWindowClosed(False)
VM = VolumeMenu()
sys.exit(app.exec_())
|
[
"probonopd@users.noreply.github.com"
] |
probonopd@users.noreply.github.com
|
|
dc62a8e5da1f08febe890bece64437a7ce3f8255
|
c5b9117e09a191cd6609d798535c6d6f392be7a0
|
/novice/jasaguru-master/home/urls.py
|
1705f56113ab109a22fa893eddb133f588707ff4
|
[] |
no_license
|
iklimah27/praxis-academy
|
3b62cf930698071ba9e99b0878ac66cf70f72edc
|
94b0277215d26c0dac4f129a53d44931dd732aa5
|
refs/heads/master
| 2023-01-30T17:01:02.412711
| 2020-12-14T10:46:27
| 2020-12-14T10:46:27
| 301,330,436
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 397
|
py
|
from django.contrib import admin
from django.urls import path
from . import views
urlpatterns = [
path('', views.Htampil, name='home'),
path('Hguru', views.Hguru, name='Hguru'),
path('Habout', views.Habout, name='Habout'),
path('sd', views.sd, name='sd'),
path('smp', views.smp, name='smp'),
path('sma', views.sma, name='sma'),
path('form', views.form, name='form'),
]
|
[
"iqlima.safira99@gmail.com"
] |
iqlima.safira99@gmail.com
|
f7fd8663575225c4f1ab9e0c64d3a2d5cb25aa8c
|
35618a5f5c6f2a9c232b8528a6846d43d4d64db8
|
/tensorflow_translate.py
|
a9a10ffd6335c9dadd0f450e543cfd4ff1ba670c
|
[] |
no_license
|
leezqcst/tf
|
b2a1b765ff0f77c18158756f3dfd69b7d7f21e67
|
9bd73dfc8adfbf8af304ed6a13f6691b76f1bc30
|
refs/heads/master
| 2021-01-19T15:31:36.320931
| 2016-11-29T23:47:17
| 2016-11-29T23:47:17
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 42,715
|
py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# standard packages
import math
import os
import random
import sys
import time
import argparse
import logging
import re
# special packages
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
# tensorflow packages
import tensorflow.python.platform
import tensorflow as tf
from tensorflow_seq2seq_model import TFSeq2SeqModel
from tensorflow.python.platform import gfile
# logger
logger = logging.getLogger("tensorflow_seq2seq")
# We use a number of buckets and pad to the closest one for efficiency.
# See tensorflow_seq2seq_model.Seq2SeqModel for details of how they work.
# len(_buckets) is the number of buckets
# for each bucket:
# tuples = (padded_encoder_length, padded_decoder_length)
_BUCKETS = [(5, 10), (10, 15), (20, 25), (40, 50)]
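# Example: after tokenization, a pair with 8 source tokens and 12 target tokens
# lands in the (10, 15) bucket (read_data below appends an EOS to the target
# first, so the target must fit with one extra symbol); pairs too long for the
# largest bucket are dropped.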
# Special vocabulary symbols - we always put them at the start.
_PAD = "_PAD"
_GO = "_GO"
_EOS = "_EOS"
_UNK = "_UNK"
_START_VOCAB = [_PAD, _GO, _EOS, _UNK]
PAD_ID = 0
GO_ID = 1
EOS_ID = 2
UNK_ID = 3
# Regular expressions used to tokenize.
_WORD_SPLIT = re.compile("([.,!?\"':;)(])")
_DIGIT_RE = re.compile(r"\d")
_SOURCE_EXTRACT_GROUP = re.compile("^([^\\t]+)")
_TARGET_EXTRACT_GROUP = re.compile("\\t([^\\t]+)")
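# Example: for the tab-delimited training line "good morning\tbuenos dias",
# _SOURCE_EXTRACT_GROUP captures "good morning" (everything before the first
# tab) and _TARGET_EXTRACT_GROUP captures "buenos dias" (the field after it).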
def basic_tokenizer(sentence):
"""Very basic tokenizer: split the sentence into a list of tokens."""
words = []
for space_separated_fragment in sentence.strip().split():
words.extend(re.split(_WORD_SPLIT, space_separated_fragment))
return [w for w in words if w]
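# Example: basic_tokenizer("Hello, world!") returns
# ["Hello", ",", "world", "!"] -- punctuation becomes separate tokens.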
def create_vocabulary(vocabulary_path, data_path, max_vocabulary_size,
tokenizer=None, normalize_digits=True, extract_regex=None):
"""Create vocabulary file (if it does not exist yet) from data file.
Data file is assumed to contain one sentence per line. Each sentence is
tokenized and digits are normalized (if normalize_digits is set).
Vocabulary contains the most-frequent tokens up to max_vocabulary_size.
We write it to vocabulary_path in a one-token-per-line format, so that later
token in the first line gets id=0, second line gets id=1, and so on.
Args:
vocabulary_path: path where the vocabulary will be created.
data_path: data file that will be used to create vocabulary.
max_vocabulary_size: limit on the size of the created vocabulary.
tokenizer: a function to use to tokenize each data sentence;
if None, basic_tokenizer will be used.
normalize_digits: Boolean; if true, all digits are replaced by 0s.
"""
if not gfile.Exists(vocabulary_path):
logger.info("Creating vocabulary %s from data %s" % (vocabulary_path, data_path))
vocab = {}
with gfile.GFile(data_path, mode="r") as f:
counter = 0
for line in f:
counter += 1
if counter % 100000 == 0:
logger.info(" processing %s line %d vocab size so far %d" % (data_path, counter, len(vocab)))
if extract_regex:
m = re.search(extract_regex, line)
if m:
line = m.group(1)
else:
logger.warn("Skipping empty data in at %s line %d" % (data_path, counter))
continue
tokens = tokenizer(line) if tokenizer else basic_tokenizer(line)
#logger.debug("line %d tokens=%s" % (counter, tokens))
for w in tokens:
word = re.sub(_DIGIT_RE, "0", w) if normalize_digits else w
if word in vocab:
vocab[word] += 1
else:
vocab[word] = 1
vocab_list = _START_VOCAB + sorted(vocab, key=vocab.get, reverse=True)
if len(vocab_list) > max_vocabulary_size:
vocab_list = vocab_list[:max_vocabulary_size]
with gfile.GFile(vocabulary_path, mode="w") as vocab_file:
for w in vocab_list:
vocab_file.write(w + "\n")
logger.info("Read %d lines from %s, found %d vocab items" % (counter, data_path, len(vocab_list)))
else:
logger.info("Re-use existing vocabulary %s " % vocabulary_path)
def initialize_vocabulary(vocabulary_path):
"""Initialize vocabulary from file.
We assume the vocabulary is stored one-item-per-line, so a file:
dog
cat
will result in a vocabulary {"dog": 0, "cat": 1}, and this function will
also return the reversed-vocabulary ["dog", "cat"].
Args:
vocabulary_path: path to the file containing the vocabulary.
Returns:
a pair: the vocabulary (a dictionary mapping string to integers), and
the reversed vocabulary (a list, which reverses the vocabulary mapping).
Raises:
ValueError: if the provided vocabulary_path does not exist.
"""
logger.info("Initialize vocabulary %s" % vocabulary_path)
if gfile.Exists(vocabulary_path):
rev_vocab = []
with gfile.GFile(vocabulary_path, mode="r") as f:
rev_vocab.extend(f.readlines())
rev_vocab = [line.strip() for line in rev_vocab]
vocab = dict([(x, y) for (y, x) in enumerate(rev_vocab)])
return vocab, rev_vocab
else:
raise ValueError("Vocabulary file %s not found.", vocabulary_path)
def sentence_to_token_ids(sentence, vocabulary,
tokenizer=None, normalize_digits=True):
"""Convert a string to list of integers representing token-ids.
For example, a sentence "I have a dog" may become tokenized into
["I", "have", "a", "dog"] and with vocabulary {"I": 1, "have": 2,
"a": 4, "dog": 7"} this function will return [1, 2, 4, 7].
Args:
sentence: a string, the sentence to convert to token-ids.
vocabulary: a dictionary mapping tokens to integers.
tokenizer: a function to use to tokenize each sentence;
if None, basic_tokenizer will be used.
normalize_digits: Boolean; if true, all digits are replaced by 0s.
Returns:
a list of integers, the token-ids for the sentence.
"""
if tokenizer:
words = tokenizer(sentence)
else:
words = basic_tokenizer(sentence)
if not normalize_digits:
return [vocabulary.get(w, UNK_ID) for w in words]
# Normalize digits by 0 before looking words up in the vocabulary.
return [vocabulary.get(re.sub(_DIGIT_RE, "0", w), UNK_ID) for w in words]
def data_to_token_ids(data_path, target_path, vocabulary_path,
tokenizer=None, normalize_digits=True, extract_regex=None):
"""Tokenize data file and turn into token-ids using given vocabulary file.
This function loads data line-by-line from data_path, calls the above
sentence_to_token_ids, and saves the result to target_path. See comment
for sentence_to_token_ids on the details of token-ids format.
Args:
data_path: path to the data file in one-sentence-per-line format.
target_path: path where the file with token-ids will be created.
vocabulary_path: path to the vocabulary file.
tokenizer: a function to use to tokenize each sentence;
if None, basic_tokenizer will be used.
normalize_digits: Boolean; if true, all digits are replaced by 0s.
"""
if not gfile.Exists(target_path):
logger.info("Creating file %s" % target_path)
logger.info("Tokenizing data in %s" % data_path)
vocab, _ = initialize_vocabulary(vocabulary_path)
with gfile.GFile(data_path, mode="r") as data_file:
with gfile.GFile(target_path, mode="w") as tokens_file:
counter = 0
for line in data_file:
counter += 1
if counter % 100000 == 0:
logger.info(" tokenizing %s line %d" % (data_path, counter))
if extract_regex:
m = re.search(extract_regex, line)
if m:
line = m.group(1)
else:
logger.warn("Skipping empty tokens in at %s line %d" % (data_path, counter))
continue
token_ids = sentence_to_token_ids(line, vocab, tokenizer,
normalize_digits)
tokens_file.write(" ".join([str(tok) for tok in token_ids]) + "\n")
else:
logger.info("Re-using existing token file %s" % target_path)
def prepare_data(input_file, data_dir, source_vocabulary_size, target_vocabulary_size, dev_train_split=0.1, max_data_size=None):
"""Get WMT data into data_dir, create vocabularies and tokenize data.
Args:
input_file: data file with tab-delimited input/output pairs
data_dir: directory in which the data sets will be stored.
source_vocabulary_size: size of the source vocabulary to create and use.
target_vocabulary_size: size of the target vocabulary to create and use.
Returns:
A tuple of 4 elements:
(*) path to the token-ids for source training data-set,
(*) path to the token-ids for target training data-set,
(*) path to the Source vocabulary file,
(*) path to the Target vocabulary file.
"""
# Create vocabularies of the appropriate sizes.
target_vocab_path = os.path.join(data_dir, "target%d.vocab" % target_vocabulary_size)
source_vocab_path = os.path.join(data_dir, "source%d.vocab" % source_vocabulary_size)
create_vocabulary(target_vocab_path, input_file, target_vocabulary_size, extract_regex=_TARGET_EXTRACT_GROUP)
create_vocabulary(source_vocab_path, input_file, source_vocabulary_size, extract_regex=_SOURCE_EXTRACT_GROUP)
# Create token ids for the training data.
target_data_ids_path = os.path.join(data_dir, "target%d.ids" % target_vocabulary_size)
source_data_ids_path = os.path.join(data_dir, "source%d.ids" % source_vocabulary_size)
data_to_token_ids(input_file, target_data_ids_path, target_vocab_path, extract_regex=_TARGET_EXTRACT_GROUP)
data_to_token_ids(input_file, source_data_ids_path, source_vocab_path, extract_regex=_SOURCE_EXTRACT_GROUP)
# split dev and training
target_train_ids_path = target_data_ids_path + ".train"
target_dev_ids_path = target_data_ids_path + ".dev"
source_train_ids_path = source_data_ids_path + ".train"
source_dev_ids_path = source_data_ids_path + ".dev"
if not gfile.Exists(target_train_ids_path):
logger.info("Create train/dev split=%s" % dev_train_split)
with gfile.GFile(source_data_ids_path, mode="r") as source_file:
with gfile.GFile(target_data_ids_path, mode="r") as target_file:
with gfile.GFile(target_train_ids_path, mode="w") as target_train_ids_file:
with gfile.GFile(target_dev_ids_path, mode="w") as target_dev_ids_file:
with gfile.GFile(source_train_ids_path, mode="w") as source_train_ids_file:
with gfile.GFile(source_dev_ids_path, mode="w") as source_dev_ids_file:
counter = 0
source, target = source_file.readline(), target_file.readline()
while source and target and (not max_data_size or counter < max_data_size):
counter += 1
if counter % 100000 == 0:
logger.info("....splitting %s line %d" % (source_data_ids_path, counter))
sys.stdout.flush()
# randomly select for dev vs training set
if random.random() < dev_train_split:
target_dev_ids_file.write(target)
source_dev_ids_file.write(source)
else:
target_train_ids_file.write(target)
source_train_ids_file.write(source)
source, target = source_file.readline(), target_file.readline()
else:
logger.info("Re-using existing training file %s" % target_train_ids_path)
if gfile.Exists(target_dev_ids_path):
logger.info("Re-using existing dev file %s" % target_dev_ids_path)
return (source_train_ids_path, target_train_ids_path, source_dev_ids_path, target_dev_ids_path,
source_vocab_path, target_vocab_path)
def read_data(source_path, target_path, buckets, max_size=None):
"""Read data from source and target files and put into buckets.
Args:
source_path: path to the files with token-ids for the source language.
target_path: path to the file with token-ids for the target language;
it must be aligned with the source file: n-th line contains the desired
output for n-th line from the source_path.
buckets: list of buckets
max_size: maximum number of lines to read, all other will be ignored;
if 0 or None, data files will be read completely (no limit).
Returns:
data_set: a list of length len(_buckets); data_set[n] contains a list of
(source, target) pairs read from the provided data files that fit
into the n-th bucket, i.e., such that len(source) < _buckets[n][0] and
len(target) < _buckets[n][1]; source and target are lists of token-ids.
"""
data_set = [[] for _ in buckets]
with gfile.GFile(source_path, mode="r") as source_file:
with gfile.GFile(target_path, mode="r") as target_file:
source, target = source_file.readline(), target_file.readline()
counter = 0
while source and target and (not max_size or counter < max_size):
counter += 1
if counter % 100000 == 0:
logger.info(" reading %s line %d" % (source_path, counter))
sys.stdout.flush()
source_ids = [int(x) for x in source.split()]
target_ids = [int(x) for x in target.split()]
target_ids.append(EOS_ID)
for bucket_id, (encoder_size, decoder_size) in enumerate(buckets):
# pick the first bucket the data fits into
                    # NOTE: if there is no bucket big enough, the pair is skipped
if len(source_ids) < encoder_size and len(target_ids) < decoder_size:
encoder_inputs, decoder_inputs = pad_sequence(source_ids, encoder_size, target_ids, decoder_size)
data_set[bucket_id].append([encoder_inputs, decoder_inputs])
break
source, target = source_file.readline(), target_file.readline()
return data_set
def pad_sequence(source_sequence, encoder_size, target_sequence, decoder_size):
""" Pad source and target sequence data up to their respective fixed sizes
Source sequence is padded with PAD symbol up to the source_size
Source sequence is reversed
Target sequence is prepended with GO symbol
Target sequence is padded with PAD symbol up to the target_size
Args:
source_sequence: list with source sequence ids
encoder_size: size to pad up to, must be greater than len(source_sequence)
target_sequence: list with target sequence ids
decoder_size: size to pad up to, must be greater than len(target_sequence)
Returns:
encoder_inputs[encoder_size]
decoder_inputs[decoder_size]
"""
assert(len(source_sequence)<encoder_size)
# Encoder inputs are padded and then reversed.
encoder_pad = [PAD_ID] * (encoder_size - len(source_sequence))
encoder_inputs = list(reversed(source_sequence + encoder_pad))
    # Decoder inputs get an extra "GO" symbol, and are then padded.
assert(len(target_sequence)<decoder_size)
decoder_pad_size = decoder_size - len(target_sequence) - 1
decoder_inputs = [GO_ID] + target_sequence + [PAD_ID] * decoder_pad_size
return encoder_inputs, decoder_inputs
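# Worked example (PAD_ID=0, GO_ID=1): pad_sequence([4, 5], 5, [7, 8], 5) returns
# encoder_inputs = [0, 0, 0, 5, 4] (padded, then reversed) and
# decoder_inputs = [1, 7, 8, 0, 0] (GO, target, then PAD).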
def get_batch(data, encoder_size, decoder_size, batch_size, randomize=True):
"""Get a random batch of data from the specified bucket, prepare for step.
To feed data in step(..) it must be a list of batch-major vectors, while
data here contains single length-major cases. So the main logic of this
function is to re-index data cases to be in the proper format for feeding.
Args:
data: a list of tuples ([encoder_input], [decoder_input]) training pairs we use to create a batch.
encoder_size: list size of input data (1st element of tuple)
decoder_size: list size of output data (2nd element of each tuple)
batch_size: size of batch, if you want all the data in the batch use len(data)
randomize: randomly select data (default=True)
Returns:
The triple (encoder_inputs, decoder_inputs, target_weights) for
encoder_inputs[encoder_size][batch_size]: encoder inputs with right shape for model
decoder_inputs[decoder_size][batch_size]: decoder inputs with right shape for model
target_weights[decoder_size][batch_size]: masks out PAD decoder inputs
"""
# lists will contain matched pairs of input/output data for each training example in the batch
encoder_inputs, decoder_inputs = [], []
# Get a random batch of encoder and decoder inputs from data,
# pad them if needed, reverse encoder inputs and add GO to decoder.
for idx in xrange(batch_size):
# select a tuple from the data
if randomize is True:
encoder_input, decoder_input = random.choice(data)
else:
encoder_input, decoder_input = data[idx]
# make sure data matches the model
assert len(encoder_input)==encoder_size
assert len(decoder_input)==decoder_size
# add to batch
encoder_inputs.append(encoder_input)
decoder_inputs.append(decoder_input)
# Now we create batch-major vectors from the data selected above.
batch_encoder_inputs, batch_decoder_inputs, batch_weights = [], [], []
# go from encoder_inputs[batch_size][encoder_size] ==> batch_encoder_inputs[encoder_size][batch_size]
# Batch encoder inputs list is size of padded input sequence length
for input_idx in xrange(encoder_size):
# each element is size of the batch
batch_encoder_inputs.append(
np.array([encoder_inputs[batch_idx][input_idx]
for batch_idx in xrange(batch_size)], dtype=np.int32))
# go from decoder_inputs[batch_size][decoder_size] ==> batch_decoder_inputs[decoder_size][batch_size]
# Batch decoder inputs list is size of padded output sequence length
for input_idx in xrange(decoder_size):
batch_decoder_inputs.append(
np.array([decoder_inputs[batch_idx][input_idx]
for batch_idx in xrange(batch_size)], dtype=np.int32))
# Create target_weights to be 0 for targets that are padding.
batch_weight = np.ones(batch_size, dtype=np.float32)
for batch_idx in xrange(batch_size):
# We set weight to 0 if the corresponding target is a PAD symbol.
# The corresponding target is decoder_input shifted by 1 forward.
if input_idx < decoder_size - 1:
target = decoder_inputs[batch_idx][input_idx + 1]
if input_idx == decoder_size - 1 or target == PAD_ID:
batch_weight[batch_idx] = 0.0
batch_weights.append(batch_weight)
return batch_encoder_inputs, batch_decoder_inputs, batch_weights
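# Example of the re-indexing above, for batch_size=2 and encoder_size=3:
# encoder_inputs = [[0, 0, 4], [0, 5, 6]] (one row per example) becomes
# batch_encoder_inputs = [array([0, 0]), array([0, 5]), array([4, 6])]
# (one array per time step); each weight is 1.0 unless the corresponding
# target (the decoder input shifted one step forward) is PAD.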
def create_model(session, model_dir, source_vocab_size, target_vocab_size, buckets, size,
num_layers, max_gradient_norm, batch_size, learning_rate,
learning_rate_decay_factor, use_lstm=False,
num_samples=512, forward_only=False):
"""Create translation model and initialize or load parameters in session."""
model = TFSeq2SeqModel(source_vocab_size, target_vocab_size, buckets, size,
num_layers, max_gradient_norm, batch_size, learning_rate,
learning_rate_decay_factor, use_lstm,
num_samples, forward_only)
ckpt = tf.train.get_checkpoint_state(model_dir)
if ckpt and gfile.Exists(ckpt.model_checkpoint_path):
logger.info("Reading model parameters from %s" % ckpt.model_checkpoint_path)
model.saver.restore(session, ckpt.model_checkpoint_path)
else:
logger.info("Created new model with fresh parameters.")
session.run(tf.initialize_all_variables())
return model
def train(input_file, model_dir, max_train_data_size, dev_train_split, steps_per_checkpoint, source_vocab_size, target_vocab_size, buckets, size,
num_layers, max_gradient_norm, batch_size, learning_rate,
learning_rate_decay_factor, use_lstm=False,
num_samples=512, forward_only=False, checkpoints=0, max_global_step=0):
"""Train a translation model using source, target data."""
    # Prepare the tab-delimited training data.
logger.info("Preparing data in %s" % model_dir)
source_train_data, target_train_data, source_dev_data, target_dev_data, _, _ = prepare_data(input_file,
model_dir,
source_vocab_size,
target_vocab_size,
dev_train_split=dev_train_split,
max_data_size=max_train_data_size
)
with tf.Session() as sess:
# Create model.
logger.info("Creating %d layers of %d units." % (num_layers, size))
model = create_model(sess, model_dir, source_vocab_size, target_vocab_size, buckets, size,
num_layers, max_gradient_norm, batch_size, learning_rate,
learning_rate_decay_factor, use_lstm,
num_samples, forward_only)
# Read data into buckets and compute their sizes.
logger.info("Reading development and training data (limit: %d)."
% max_train_data_size)
dev_set = read_data(source_dev_data, target_dev_data, buckets)
train_set = read_data(source_train_data, target_train_data, buckets)
train_bucket_sizes = [len(train_set[b]) for b in xrange(len(buckets))]
logger.info("Training bucket sizes: %s" % train_bucket_sizes)
train_total_size = sum(train_bucket_sizes)
logger.info("Training set size: %s" % train_total_size)
dev_bucket_sizes = [len(dev_set[b]) for b in xrange(len(buckets))]
logger.info("Dev bucket sizes: %s" % dev_bucket_sizes)
dev_total_size = sum(dev_bucket_sizes)
logger.info("Dev set size: %s" % dev_total_size)
# A bucket scale is a list of increasing numbers from 0 to 1 that we'll use
# to select a bucket. Length of [scale[i], scale[i+1]] is proportional to
        # the size of the i-th training bucket, as used later.
train_buckets_scale = [sum(train_bucket_sizes[:i + 1]) / float(train_total_size)
for i in xrange(len(train_bucket_sizes))]
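        # Example: bucket sizes [30, 70] give train_buckets_scale = [0.3, 1.0],
        # so a random draw of 0.45 selects bucket 1; buckets are thus sampled
        # in proportion to their share of the training data.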
# This is the training loop.
step_time, loss = 0.0, 0.0
current_step = 0
previous_losses = []
bucket_tally = [0] * len(buckets)
counter = 0
while True:
# Choose a bucket according to data distribution. We pick a random number
# in [0, 1] and use the corresponding interval in train_buckets_scale.
random_number_01 = np.random.random_sample()
bucket_id = min([i for i in xrange(len(train_buckets_scale))
if train_buckets_scale[i] > random_number_01])
# Get a batch of batch_size training examples
# input sequence size depends on the bucket id
start_time = time.time()
encoder_inputs, decoder_inputs, target_weights = get_batch(data=train_set[bucket_id],
encoder_size=buckets[bucket_id][0],
decoder_size=buckets[bucket_id][1],
batch_size=batch_size,
randomize=True)
bucket_tally[bucket_id] += np.size(encoder_inputs[0])
# make a step
model.batch_size = batch_size
_, _, _, step_loss, _ = model.step(sess, encoder_inputs, decoder_inputs,
target_weights, bucket_id, False)
step_time += (time.time() - start_time) / steps_per_checkpoint
loss += step_loss / steps_per_checkpoint
current_step += 1
logger.info("train_step: %d bucket: %d step_loss: %f step_time: %.2f tally: %s"
% (current_step, bucket_id, step_loss, step_time, bucket_tally))
# Once in a while, we save checkpoint, print statistics, and run evals.
if current_step % steps_per_checkpoint == 0:
checkpoint_path = os.path.join(model_dir, "model.checkpoint")
# Print statistics for the previous epoch.
perplexity = math.exp(loss) if loss < 300 else float('inf')
my_global_step = model.global_step.eval()
logger.info("Checkpoint: global_step: %d learning_rate: %f step_time: %.2f loss: %f, perplexity: %f tally: %s"
% (my_global_step, model.learning_rate.eval(),
step_time, loss, perplexity, bucket_tally))
# Decrease learning rate if no improvement was seen over last 3 times.
if len(previous_losses) > 2 and loss > max(previous_losses[-3:]):
sess.run(model.learning_rate_decay_op)
previous_losses.append(loss)
# Save checkpoint and zero timer and loss.
logger.info("Saving global_step: %s to %s" % (model.global_step, checkpoint_path))
model.saver.save(sess, checkpoint_path, global_step=model.global_step)
step_time, loss = 0.0, 0.0
# Run evals on development set and print their perplexity.
if dev_set:
logger.info("Starting evaluation on dev set")
dev_losses = []
#dev_pplxes = []
dev_batches = []
dev_time = time.time()
for bucket_id in xrange(len(buckets)):
dev_set_batch = dev_set[bucket_id]
if dev_set_batch:
model.batch_size = len(dev_set_batch)
# We decode the whole dev batch
encoder_inputs, decoder_inputs, target_weights = get_batch(data=dev_set_batch,
encoder_size=buckets[bucket_id][0],
decoder_size=buckets[bucket_id][1],
batch_size=model.batch_size,
randomize=False)
_, _, _, eval_loss, _ = model.step(session=sess,
encoder_inputs=encoder_inputs,
decoder_inputs=decoder_inputs,
target_weights=target_weights,
bucket_id=bucket_id,
forward_only=True)
#eval_ppx = math.exp(eval_loss) if eval_loss < 300 else float('inf')
dev_losses.append(eval_loss)
#dev_pplxes.append(eval_ppx)
dev_batches.append(model.batch_size)
dev_loss = 0
dev_total = 0
for i in range(len(dev_losses)):
dev_loss += dev_losses[i] * dev_batches[i]
dev_total += dev_batches[i]
if dev_total>0:
dev_loss = dev_loss / dev_total
dev_perplexity = math.exp(dev_loss)
dev_time = time.time() - dev_time
logger.info("Evaluate: global_step: %d dev_time: %.2f dev_batches: %s dev_loss: %s loss: %s perplexity: %s" % (my_global_step, dev_time, dev_batches, dev_losses, dev_loss, dev_perplexity))
counter += 1
if checkpoints>0 and counter>=checkpoints:
logger.info("Checkpoint limit reached")
break
if max_global_step>0 and my_global_step>=max_global_step:
logger.info("Max global step limit reached")
break
def decode(sentences, model_dir, source_vocab_size, target_vocab_size, buckets, size,
num_layers, max_gradient_norm, batch_size, learning_rate,
learning_rate_decay_factor, use_lstm,
num_samples):
with tf.Session() as sess:
# Create model and load parameters.
model = create_model(sess, model_dir, source_vocab_size, target_vocab_size, buckets, size,
num_layers, max_gradient_norm, batch_size, learning_rate,
learning_rate_decay_factor, use_lstm,
num_samples, forward_only=True)
model.batch_size = 1 # We decode one sentence at a time.
# Load vocabularies.
source_vocab_path = os.path.join(model_dir,
"source%d.vocab" % source_vocab_size)
target_vocab_path = os.path.join(model_dir,
"target%d.vocab" % target_vocab_size)
source_vocab, _ = initialize_vocabulary(source_vocab_path)
_, rev_target_vocab = initialize_vocabulary(target_vocab_path)
# Decode from standard input.
#sys.stdout.write("> ")
#sys.stdout.flush()
#sentence = sys.stdin.readline()
for sentence in sentences:
# Get token-ids for the input sentence.
token_ids = sentence_to_token_ids(sentence, source_vocab)
# Which bucket does it belong to?
bucket_id = min([b for b in xrange(len(buckets))
if buckets[b][0] > len(token_ids)])
encoder_size, decoder_size = buckets[bucket_id]
padded_token_ids, padded_decoder_ids = pad_sequence(source_sequence=token_ids,
encoder_size=encoder_size,
target_sequence=[],
decoder_size=decoder_size)
data_set = [(padded_token_ids, padded_decoder_ids)]
# Get a 1-element batch to feed the sentence to the model.
encoder_inputs, decoder_inputs, target_weights = get_batch(
data=data_set,
encoder_size=encoder_size,
decoder_size=decoder_size,
batch_size=1,
randomize=False)
# Get output logits for the sentence.
_, _, _, _, output_logits = model.step(sess, encoder_inputs, decoder_inputs,
target_weights, bucket_id, True)
# This is a greedy decoder - outputs are just argmaxes of output_logits.
outputs = [int(np.argmax(logit, axis=1)) for logit in output_logits]
logger.debug("decode: bucket_id=%d outputs=%s" % (bucket_id, outputs))
# If there is an EOS symbol in outputs, cut them at that point.
if EOS_ID in outputs:
outputs = outputs[:outputs.index(EOS_ID)]
            # Print out the decoded target sentence corresponding to outputs.
logger.info("DECODE input=[%s] output=[%s]" % (sentence, " ".join([rev_target_vocab[output] for output in outputs])))
def self_test():
# 2 small buckets
buckets = [(3, 3), (6, 6)]
# Fake data set for both the (3, 3) and (6, 6) bucket.
fake_data = [
([1, 1], [2, 2]),
([3, 3], [4]),
([5], [6]),
([1, 1, 1, 1, 1], [2, 2, 2, 2, 2]),
([3, 3, 3], [5, 6])
]
data_set = [[] for _ in buckets]
for source_ids, target_ids in fake_data:
for bucket_id, (encoder_size, decoder_size) in enumerate(buckets):
# pick the first bucket the data fits into
            # NOTE: if there is no bucket big enough, the pair is skipped
if len(source_ids) < encoder_size and len(target_ids) < decoder_size:
encoder_inputs, decoder_inputs = pad_sequence(source_ids, encoder_size, target_ids, decoder_size)
data_set[bucket_id].append([encoder_inputs, decoder_inputs])
break
"""Test the translation model."""
with tf.Session(config=tf.ConfigProto(log_device_placement=True)) as sess:
logger.info("Self-test for neural translation model.")
# Create model with vocabularies of 10
# batch size 32
batch_size = 32
# 2 layers of size 32
model = TFSeq2SeqModel(source_vocab_size=10,
target_vocab_size=10,
buckets=buckets,
size=32,
num_layers=2,
max_gradient_norm=5.0,
batch_size=batch_size,
learning_rate=0.3,
learning_rate_decay_factor=0.99,
num_samples=8)
sess.run(tf.initialize_all_variables())
loss = 0.0
for _ in xrange(5): # Train the fake model for 5 steps.
bucket_id = random.choice([0, 1])
encoder_inputs, decoder_inputs, target_weights = get_batch(data=data_set[bucket_id],
encoder_size=buckets[bucket_id][0],
decoder_size=buckets[bucket_id][1],
batch_size=batch_size,
randomize=True)
_, _, _, step_loss, _ = model.step(sess, encoder_inputs, decoder_inputs, target_weights,
bucket_id, False)
loss += step_loss
perplexity = math.exp(loss)
my_global_step = model.global_step.eval()
logger.info("Self-test: global_step=%d learning_rate=%f perplexity=%f"
% (my_global_step, model.learning_rate.eval(), perplexity))
def main():
parser = argparse.ArgumentParser(description='RNN Encoder-Decoder')
parser.add_argument("--input", help="Input file for training, testing, or decoding")
parser.add_argument("--model_dir", help="Model directory", default=None)
parser.add_argument("--learning_rate", help="Learning rate.", type=float, default=0.5)
parser.add_argument("--learning_rate_decay_factor", help="Learning rate decays by this much.", type=float, default=0.99)
parser.add_argument("--max_gradient_norm", help="Clip gradients to this norm.", type=float, default=5.0)
parser.add_argument("--batch_size", help="Batch size to use during training.", type=int, default=64)
parser.add_argument("--size", help="Size of each model layer.", type=int, default=1024)
parser.add_argument("--num_layers", help="Number of layers in the model.", type=int, default=3)
parser.add_argument("--source_vocab_size", help="Source vocabulary size.", type=int, default=40000)
parser.add_argument("--target_vocab_size", help="Target vocabulary size.", type=int, default=40000)
parser.add_argument("--dev_train_split", help="Fraction of examples for dev/validation set", type=float, default=0.1)
parser.add_argument("--max_train_data_size", help="Limit on the size of training data (0: no limit).", type=int, default=0)
parser.add_argument("--steps_per_checkpoint", help="How many training steps to do per checkpoint.", type=int, default=200)
parser.add_argument("--checkpoints", help="How many checkpoints to run", type=int, default=0)
parser.add_argument("--max_global_step", help="Max number of global steps", type=int, default=0)
parser.add_argument("--decode", help="Run decoding on input file if this is set to True.", default=False, action='store_true')
parser.add_argument("--debug", help="Debug mode", default=False, action='store_true')
parser.add_argument("--self_test", help="Run a self-test if this is set to True.", default=False, action='store_true')
args = parser.parse_args()
logger.setLevel(logging.INFO)
if args.debug:
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
ch = logging.StreamHandler()
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
ch.setFormatter(formatter)
logger.addHandler(ch)
logger.info("Command line: %s" % " ".join(sys.argv))
logger.info(args)
if args.self_test:
logger.info("START SELF-TEST")
self_test()
elif args.decode:
        if not os.path.exists(args.model_dir):
            raise ValueError("Model directory %s does not exist." % args.model_dir)
        if not os.path.isdir(args.model_dir):
            raise ValueError("Model directory %s is not a directory." % args.model_dir)
        if not os.path.exists(args.input) or not os.path.isfile(args.input):
            raise ValueError("Input file %s does not exist." % args.input)
logger.info("START DECODE")
sentences = []
with open(args.input, "r") as f:
for line in f:
sentence = line.strip()
if len(sentence)>0:
sentences.append(sentence)
decode(sentences=sentences,
model_dir=args.model_dir,
source_vocab_size=args.source_vocab_size,
target_vocab_size=args.target_vocab_size,
buckets=_BUCKETS,
size=args.size,
num_layers=args.num_layers,
max_gradient_norm=args.max_gradient_norm,
batch_size=args.batch_size,
learning_rate=args.learning_rate,
learning_rate_decay_factor=args.learning_rate_decay_factor,
use_lstm=False,
num_samples=512)
else:
        if not os.path.exists(args.model_dir):
            raise ValueError("Model directory %s does not exist." % args.model_dir)
        if not os.path.isdir(args.model_dir):
            raise ValueError("Model directory %s is not a directory." % args.model_dir)
        if not os.path.exists(args.input) or not os.path.isfile(args.input):
            raise ValueError("Input file %s does not exist." % args.input)
logger.info("START TRAINING")
train(input_file=args.input,
model_dir=args.model_dir,
max_train_data_size=args.max_train_data_size,
dev_train_split=args.dev_train_split,
steps_per_checkpoint=args.steps_per_checkpoint,
source_vocab_size=args.source_vocab_size,
target_vocab_size=args.target_vocab_size,
buckets=_BUCKETS,
size=args.size,
num_layers=args.num_layers,
max_gradient_norm=args.max_gradient_norm,
batch_size=args.batch_size,
learning_rate=args.learning_rate,
learning_rate_decay_factor=args.learning_rate_decay_factor,
use_lstm=False,
num_samples=512,
checkpoints=args.checkpoints,
max_global_step=args.max_global_step)
logger.info("DONE")
if __name__ == "__main__":
main()
|
[
"jim@queernet.org"
] |
jim@queernet.org
|
2ea0f1154fb758ae1af3e0672ef29d482d65786e
|
54aebb608c183c338513e8311d04c5e743f131cf
|
/flask_tech_env/bin/alembic
|
64c175c88e11b545422f195672ecc5778eecd5e5
|
[] |
no_license
|
kishoresvk21/tech_support
|
f7ee1d23eb80eae2e5215c9c122e4f5b07394509
|
f749852ad361d40fb5d6e82a3f0df1b98ee70ea2
|
refs/heads/master
| 2023-08-27T03:02:22.675670
| 2021-11-02T04:48:52
| 2021-11-02T04:48:52
| 416,664,375
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 275
|
#!/home/krishnakishore/Documents/projects/tech_support/flask_tech_env/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from alembic.config import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
|
[
"svkrishnakishore2000@gmail.com"
] |
svkrishnakishore2000@gmail.com
|
|
bc486ed35369c02c5c3259c17869ec0db5d761ee
|
4ba0b403637e7aa3e18c9bafae32034e3c394fe4
|
/zeroMQ/python/sync-pub-sub/pub.py
|
41eae3b6eb6ac82650c18151caf3bfe41f6c5250
|
[] |
no_license
|
ASMlover/study
|
3767868ddae63ac996e91b73700d40595dd1450f
|
1331c8861fcefbef2813a2bdd1ee09c1f1ee46d6
|
refs/heads/master
| 2023-09-06T06:45:45.596981
| 2023-09-01T08:19:49
| 2023-09-01T08:19:49
| 7,519,677
| 23
| 6
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,929
|
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2013 ASMlover. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import zmq
import sys
if __name__ == '__main__':
if (len(sys.argv) < 3):
print 'arguments error ...'
exit()
ctx = zmq.Context()
pub = ctx.socket(zmq.PUB)
pub.bind('tcp://*:5555')
sync = ctx.socket(zmq.REP)
sync.bind('tcp://*:6666')
print 'publish init success ...'
for i in range(int(sys.argv[1])):
msg = sync.recv()
sync.send('')
print 'broadcasting messages ...'
for i in range(int(sys.argv[2])):
pub.send('publish index [%d]' % (i + 1))
pub.send('END')
sync.close()
pub.close()
|
[
"asmlover@126.com"
] |
asmlover@126.com
|
28bc1f6d1d34c47bdde61822b9f1444460a05e58
|
5f64d91dc45e58c8e73a52985c6db45d340d09cc
|
/Pibow_Zero_W/vertical_striper_14.py
|
ab0183f9778b298a7c2fb809e80bb040a7805f52
|
[] |
no_license
|
Breakfast-for-Pigeons/Unicorn-PHAT
|
e0343eb9a46c4b7be11d5028be07ea6b0f071efd
|
6e70302eac995cd11821ecf2ee363a1b926df2ce
|
refs/heads/master
| 2023-01-01T18:05:27.436081
| 2020-10-23T22:18:13
| 2020-10-23T22:18:13
| 74,320,010
| 5
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,040
|
py
|
#!/usr/bin/python3
"""
Vertical Striper 14 - Pibow Zero W
With the Raspberry Pi oriented with the GPIO pins at the top, this
program stripes from left to right and alternates between from the
bottom to the top and from top to bottom.
This is exactly the same as Vertical Striper 6 except the color order
is reversed.
....................
Functions:
- vertical_striper_14: Gets x and y coordinates and sends them to the
striping function
....................
Author: Paul Ryan
This program was written on a Raspberry Pi using the Geany IDE.
"""
########################################################################
# Import modules #
########################################################################
from bfp_unicornphat import print_header
from bfp_unicornphat import stop
from bfp_unicornphat import stripe_vertically_reverse_alt
########################################################################
# Import variables #
########################################################################
from bfp_unicornphat import X_COORDINATES
from bfp_unicornphat import Y_COORDINATES
########################################################################
# Functions #
########################################################################
def vertical_striper_14():
"""
Sends x and y coordinates to the striper function
"""
x_coordinate_list = X_COORDINATES[::-1]
y_coordinate_list = Y_COORDINATES
stripe_vertically_reverse_alt(x_coordinate_list, y_coordinate_list)
if __name__ == '__main__':
try:
# STEP01: Print header
print_header()
# STEP02: Print instructions in white text
print("\033[1;37;40mPress Ctrl-C to stop the program.")
# STEP03:
vertical_striper_14()
# STEP04: Exit the program.
stop()
except KeyboardInterrupt:
stop()
|
[
"noreply@github.com"
] |
Breakfast-for-Pigeons.noreply@github.com
|
b9ff1e4bf3383d887ebca92a18d0c8ce3b6ba9eb
|
39d89c1f7e3f3ce9ed4070a39f1def94d244ef76
|
/searchface.py
|
375bb9f5877c3209934f52a2e1d4d29ab5c4b578
|
[] |
no_license
|
c-mike-chang/face-rec
|
65c9e34c4d41711a83f0de8be172fa58518867af
|
f2afab9f81d3f66d1d153fa01a03e3622b505ad8
|
refs/heads/main
| 2022-12-26T16:00:26.762679
| 2020-10-10T18:34:19
| 2020-10-10T18:34:19
| 302,847,097
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 871
|
py
|
import boto3
import io
from PIL import Image
rekognition = boto3.client('rekognition', region_name='us-east-1')
dynamodb = boto3.client('dynamodb', region_name='us-east-1')
image = Image.open("image10.jpeg")
stream = io.BytesIO()
image.save(stream,format="JPEG")
image_binary = stream.getvalue()
response = rekognition.search_faces_by_image(
CollectionId='family_collection',
Image={'Bytes':image_binary}
)
for match in response['FaceMatches']:
print (match['Face']['FaceId'],match['Face']['Confidence'])
face = dynamodb.get_item(
TableName='family_collection',
Key={'RekognitionId': {'S': match['Face']['FaceId']}}
)
if 'Item' in face:
print (face['Item']['FullName']['S'])
else:
print ('no match found in person lookup')
|
[
"cmc8dm@virginia.edu"
] |
cmc8dm@virginia.edu
|
f02b7cb23762c15d63c32160f3fad6ddf0bb0aa3
|
22b62594357e0d62bdcf81e39fa52ef15a6b8cdc
|
/sketchbook/exceptions.py
|
3c345f28539ff48b5fa70da824cfbfdfd09d89f9
|
[
"Apache-2.0"
] |
permissive
|
futursolo/sketchbook
|
0d1386c4f8ee997e5838b730b4d4bb8994429a2d
|
53ac4457e11fb0a47b0bed01bf1409d48303b8fe
|
refs/heads/master
| 2022-07-28T17:04:23.425472
| 2022-04-04T05:17:06
| 2022-04-04T05:17:06
| 83,974,013
| 7
| 1
|
Apache-2.0
| 2022-07-11T03:08:50
| 2017-03-05T13:57:34
|
Python
|
UTF-8
|
Python
| false
| false
| 1,613
|
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright 2021 Kaede Hoshikawa
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = [
"SketchbookException",
"SketchNotFoundError",
"SketchSyntaxError",
"UnknownStatementError",
"BlockNameConflictError",
"SketchDrawingError",
]
class SketchbookException(Exception):
"""
Base class of exceptions from Sketchbook.
"""
pass
class SketchNotFoundError(FileNotFoundError, SketchbookException):
"""
Error when trying to load a sketch but the finder cannot find it.
"""
pass
class SketchSyntaxError(SyntaxError, SketchbookException):
"""
Syntax error in the current sketch.
"""
pass
class UnknownStatementError(SketchSyntaxError):
"""
The statement string is not a valid statement.
"""
pass
class BlockNameConflictError(SketchbookException):
"""
There's more than one block with the same name in one sketch.
"""
pass
class SketchDrawingError(SketchbookException):
"""
Error when drawing the sketch.
"""
pass
|
[
"futursolo@icloud.com"
] |
futursolo@icloud.com
|
85e6d219fcd2e07292d87d9383b72a6b579b876f
|
e1cdadaab73971f8af5461c3355667515062141d
|
/weatherAI.py
|
96b597b8ed624f1598a025d5eb231247f6d00bde
|
[] |
no_license
|
jshubh19/pythonapps
|
34fc5fd1531231bc690f90800c0ca3f5e80a666f
|
e4a98bd407af60426e9555977c4dd3b924220765
|
refs/heads/master
| 2020-04-27T02:45:33.445699
| 2019-03-06T07:43:08
| 2019-03-06T07:43:08
| 174,004,359
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,245
|
py
|
import requests
from meya import Component
API_URL = (
"http://api.openweathermap.org/data/2.5/weather"
"?q={city},{country}&APPID={api_key}"
)
API_KEY = '0d3efb33fc57a68d3d90224751ee224d'
def fahrenheit(celsius):
    return 9.0/5.0 * celsius + 32
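# Worked example: fahrenheit(20) == 68.0, since 9/5 * 20 + 32 = 68.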
class Weather(Component):
def start(self):
city = self.db.flow.get('city') or \
self.properties.get('city') or "New York"
country = self.db.flow.get('country') or \
self.properties.get('country') or "US"
url = API_URL.format(city=city, country=country, api_key=API_KEY)
data = requests.get(url).json()
temp = int(data['main']['temp'] - 273.15)
description = data['weather'][0]['description']
if country == "US":
units = "F"
            temp = fahrenheit(temp)
else:
units = "C"
text = ( 'It is currently {temp}{units} with {description} in {city}! :D').format(
temp=temp,
units=units,
description=description,
city=city,
country=country
)
message = self.create_message(text=text)
return self.respond(message=message, action="next")
|
[
"shubhj4019@gmail.com"
] |
shubhj4019@gmail.com
|
3f3b2d5958003f9a4488591b8d8517ff7ad800b1
|
f07a42f652f46106dee4749277d41c302e2b7406
|
/Data Set/bug-fixing-5/caf658b420345bcc905cfcdabcf64028eb692bcb-<process_instance>-fix.py
|
f3de19b7ae69fef0761e4e1d86014089c4e91df7
|
[] |
no_license
|
wsgan001/PyFPattern
|
e0fe06341cc5d51b3ad0fe29b84098d140ed54d1
|
cc347e32745f99c0cd95e79a18ddacc4574d7faa
|
refs/heads/main
| 2023-08-25T23:48:26.112133
| 2021-10-23T14:11:22
| 2021-10-23T14:11:22
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,851
|
py
|
def process_instance(self, instance, instance_type='virtual'):
'Populate the inventory dictionary with any instance information'
if (('status' in instance) and (instance['status']['name'] != 'Active')):
return
if (('powerState' in instance) and (instance['powerState']['name'] != 'Running')):
return
if (('hardwareStatusId' in instance) and (instance['hardwareStatusId'] != 5)):
return
if ('primaryIpAddress' not in instance):
return
instance['userData'] = (instance['userData'][0]['value'] if instance['userData'] else '')
dest = instance['primaryIpAddress']
instance['tags'] = list()
for tag in instance['tagReferences']:
instance['tags'].append(tag['tag']['name'])
del instance['tagReferences']
self.inventory['_meta']['hostvars'][dest] = instance
if ('maxMemory' in instance):
self.push(self.inventory, self.to_safe(('memory_' + str(instance['maxMemory']))), dest)
elif ('memoryCapacity' in instance):
self.push(self.inventory, self.to_safe(('memory_' + str(instance['memoryCapacity']))), dest)
if ('maxCpu' in instance):
self.push(self.inventory, self.to_safe(('cpu_' + str(instance['maxCpu']))), dest)
elif ('processorPhysicalCoreAmount' in instance):
self.push(self.inventory, self.to_safe(('cpu_' + str(instance['processorPhysicalCoreAmount']))), dest)
self.push(self.inventory, self.to_safe(('datacenter_' + instance['datacenter']['name'])), dest)
self.push(self.inventory, self.to_safe(instance['hostname']), dest)
self.push(self.inventory, self.to_safe(instance['fullyQualifiedDomainName']), dest)
self.push(self.inventory, self.to_safe(instance['domain']), dest)
self.push(self.inventory, instance_type, dest)
for tag in instance['tags']:
self.push(self.inventory, tag, dest)
|
[
"dg1732004@smail.nju.edu.cn"
] |
dg1732004@smail.nju.edu.cn
|
de5e6f99a765185eb9c7fa0e0a349f68f86ecd10
|
b85809734f170855a6c95b13bb8374634c5e5b6f
|
/ex13.py
|
0986b002e060e2b0816af347481d548b8d57c798
|
[] |
no_license
|
JoseMorales7/LearnPython
|
658c0be1c26e53e6961c40929adc37eb9af4391a
|
3dcf2aadbe456a86a2fe68b533c77b6d732e115f
|
refs/heads/master
| 2021-12-02T23:52:33.058041
| 2021-11-25T19:51:57
| 2021-11-25T19:51:57
| 164,707,380
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 224
|
py
|
from sys import argv
script, first, second, third = argv
print("The script is called: ", script)
print("Your first varibale is: ", first)
print("Your second varibale is: ", second)
print("Your third varibale is: ", third)
|
[
"student.133716@worc.k12.ma.us"
] |
student.133716@worc.k12.ma.us
|
2e262723d1ef86ce4ea87fb289967b60a5dbc6ce
|
98bb92e5a9886e9d7130392039e3ce7304f78882
|
/python/python_0513/python04_09_strfun04_우상민.py
|
2105ded171f8996442ecbb529832eeb583a9ca50
|
[] |
no_license
|
woo9599/DataScience
|
36e6982699b1897f6dbf87da316fa407473b7c2c
|
55bd26dcf6d63264df0739f456c87d1ba4180887
|
refs/heads/main
| 2023-06-18T12:37:23.453556
| 2021-07-13T03:22:42
| 2021-07-13T03:22:42
| 378,833,170
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 347
|
py
|
#python04_09_strfun04_우상민.py
# Strip whitespace on the left / right / both sides (lstrip/rstrip/strip)
a = ' hi '
print(a)
print(a.lstrip()+ 'chk')
print(a.rstrip() + 'chk')
print(a.strip() + 'chk')
print('-' *15)
# Replace substrings (replace)
a = ' Life is too short'
print(a)
cng =a.replace('Life','your leg')
print(cng)
print('-'*15)
|
[
"noreply@github.com"
] |
woo9599.noreply@github.com
|
c8a302c1d318255295e6e3049c9e0679e59f2192
|
bddbc04172680558b969aaa40b3defc1d99b8cfb
|
/probegin_test/forms.py
|
848a82b4ee831c55bac7cce851a1663b9d8e5c0b
|
[] |
no_license
|
psvprogrammer/probegin_test
|
19509a6a112ffcde8bca3767598a6e47a2c80a8b
|
8078bd6a5862e911505fbaa7e07abd9d39f637e8
|
refs/heads/master
| 2021-04-09T12:57:19.352481
| 2018-03-13T13:01:10
| 2018-03-13T13:01:10
| 125,051,097
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,220
|
py
|
from django import forms
from django.contrib.auth import get_user_model
from django.contrib.auth.forms import (
AuthenticationForm,
UserCreationForm,
)
from django.forms import ModelForm
from probegin_test.models import Comment
User = get_user_model()
class CustomAuthenticationForm(AuthenticationForm):
"""Authentication form which uses bootstrap CSS."""
username = forms.CharField(max_length=254,
widget=forms.TextInput({
'class': 'form-control',
'placeholder': 'login or email'}))
password = forms.CharField(label='Your password',
widget=forms.PasswordInput({
'class': 'form-control',
'placeholder': 'password'}))
class SignUpForm(UserCreationForm):
email = forms.EmailField(max_length=254, required=True,
help_text='* Inform a valid email address.')
class Meta:
model = User
fields = ('username', 'email', 'password1', 'password2',)
class CommentForm(ModelForm):
class Meta:
model = Comment
fields = ['content']
|
[
"psv.programmer@gmail.com"
] |
psv.programmer@gmail.com
|
4ee2d73cccbb589fb599a4826754d0a334de4816
|
ce29d99c84fc7a54d7eed578d9360dc266b01ff9
|
/784/a.py
|
157d9d6fe7a40b448287565690053d7692b6d21a
|
[] |
no_license
|
mruxim/codeforces
|
a3f8d28029a77ebc247d939adedc1d547bab0225
|
5a214b19a0d09c9152ca5b2ef9ff7e5dcc692c50
|
refs/heads/master
| 2021-10-26T04:33:42.553698
| 2019-04-10T12:58:21
| 2019-04-10T12:58:21
| 180,573,952
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 28
|
py
|
a = int(input())
print(a*9)
|
[
"maleki.mr@gmail.com"
] |
maleki.mr@gmail.com
|
597ebedfb0dc1f5a70054ad75b068b3ea7c20b01
|
51eccd7327d3cf8163140bbec66b319d005c4037
|
/ShellApp/models.py
|
eb7c76bd087d986fb9aec0ea1dc74ba2f691051c
|
[] |
no_license
|
SatinderKainth/Book1
|
ede27f4a987b0e4f7119af0729fc612aa4076183
|
a661e8416d0dd45d84d377011f92d2f1804548e2
|
refs/heads/master
| 2023-05-04T02:01:04.061329
| 2021-05-22T05:10:42
| 2021-05-22T05:10:42
| 369,720,803
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 778
|
py
|
from django.db import models


# Create your models here.
class Book(models.Model):
    title = models.CharField(max_length=100)
    desc = models.TextField()
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    def __str__(self):
        return f"<Book:{self.title} ({self.desc})>"


class Author(models.Model):
    first_name = models.CharField(max_length=50)
    last_name = models.CharField(max_length=50)
    notes = models.TextField(default="")
    books = models.ManyToManyField("Book", related_name="authors")
    created_at = models.DateField(auto_now_add=True)
    updated_at = models.DateField(auto_now=True)

    def __str__(self):
        return f"<Author : {self.first_name}({self.last_name})>"
|
[
"kainthsatinder8@gmail.com"
] |
kainthsatinder8@gmail.com
|
e2d5ad39f9186b6e9594e5a5a99f7959d0933fe2
|
d0a31c5b74c6e3f83807917789bafde791a2c842
|
/pyfiles/app.py
|
b557db5394f97710f5441f4d083d52c3a9682417
|
[] |
no_license
|
tabzhangjx/record_codes
|
ad531307c692fec387c7b8daf9dc0ba34b407cd3
|
9c3935bc219f1a22fa97a3435f9a007d18959d3b
|
refs/heads/master
| 2021-06-12T21:01:44.083974
| 2019-09-20T09:26:43
| 2019-09-20T09:26:43
| 100,364,285
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 721
|
py
|
from flask import Flask
from flask import request, render_template

app = Flask(__name__)


@app.route('/', methods=['GET', 'POST'])
def home():
    return render_template('home.html')


@app.route('/signin', methods=['GET'])
def signin_form():
    return render_template('form.html')


@app.route('/signin', methods=['POST'])
def signin():
    # Read the submitted form fields from the request object:
    username = request.form['username']
    password = request.form['password']
    if username == 'admin' and password == 'password':
        return render_template('signin-ok.html', username=username)
    return render_template('form.html', message='bad username or password', username=username)


if __name__ == '__main__':
    app.run()
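# Usage sketch (assumes the Flask dev server is running on the default
# port 5000; the curl invocation below is illustrative):
#   curl -X POST -d "username=admin&password=password" http://127.0.0.1:5000/signin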
|
[
"tabzhangjx@outlook.com"
] |
tabzhangjx@outlook.com
|
a9f9793b9dcbf03f3edb9d326dcfe41163fdb65e
|
894b290b4f4f47b5eb523c23efd7bd6110d91b2f
|
/75_dianping_yaodian/dianping_yaodian/dianping_yaodian/settings.py
|
552e6f2e5023699ab5d0103a55d0f6c83b159cbe
|
[] |
no_license
|
wliustc/SpiderS
|
6650c00616d11239de8c045828bafdc5a299b1ce
|
441f309c50d28c1a3917bed19321cd5cbe7c2861
|
refs/heads/master
| 2020-03-27T06:15:39.495785
| 2018-06-14T07:55:44
| 2018-06-14T07:55:44
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,008
|
py
|
# -*- coding: utf-8 -*-
BOT_NAME = 'dianping_yaodian'
SPIDER_MODULES = ['dianping_yaodian.spiders']
NEWSPIDER_MODULE = 'dianping_yaodian.spiders'
DOWNLOAD_HANDLERS = {'s3': None}
USER_AGENT = "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.87 Safari/537.36"
MAIL_FROM = "spider_man_warn@126.com"
ITEM_PIPELINES = {
    'dianping_yaodian.pipelines_global.WriteFilePipeline': 300,
}
LOG_LEVEL = 'WARNING'
REDIS_HOST = "10.15.1.11"
REDIS_PORT = "6379"
CORE_METRICS_INTERVAL = 5
DEFAULT_REQUEST_HEADERS = {
    'Connection': 'keep-alive',
    'Cache-Control': 'max-age=0',
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
    'Upgrade-Insecure-Requests': '1',
    'Accept-Encoding': 'gzip, deflate, sdch',
    'Accept-Language': 'zh-CN,zh;q=0.8',
    "user-agent": USER_AGENT,
}
MAIL_USER = "spider_man_warn@126.com"
MAX_FILESIZE = '500'
MAIL_PASS = "dev123"
HDFS_MODULE = "hdfs"
EXTENSIONS = {
    'dianping_yaodian.stats_collector_global.PrintCoreMetrics': 500,
    'dianping_yaodian.stats_mail_global.StatsMailer': 505,
}
SAVE_PATH = '/home/work/backup/spiders_platform/data'
MAIL_HOST = "smtp.126.com"
MAIL_PORT = "25"
HDFS_IP = "10.15.1.11"
USER_AGENTS = [
"Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; AcooBrowser; .NET CLR 1.1.4322; .NET CLR 2.0.50727)",
"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0; Acoo Browser; SLCC1; .NET CLR 2.0.50727; Media Center PC 5.0; .NET CLR 3.0.04506)",
"Mozilla/4.0 (compatible; MSIE 7.0; AOL 9.5; AOLBuild 4337.35; Windows NT 5.1; .NET CLR 1.1.4322; .NET CLR 2.0.50727)",
"Mozilla/5.0 (Windows; U; MSIE 9.0; Windows NT 9.0; en-US)",
"Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Win64; x64; Trident/5.0; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET CLR 2.0.50727; Media Center PC 6.0)",
"Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET CLR 1.0.3705; .NET CLR 1.1.4322)",
"Mozilla/4.0 (compatible; MSIE 7.0b; Windows NT 5.2; .NET CLR 1.1.4322; .NET CLR 2.0.50727; InfoPath.2; .NET CLR 3.0.04506.30)",
"Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN) AppleWebKit/523.15 (KHTML, like Gecko, Safari/419.3) Arora/0.3 (Change: 287 c9dfb30)",
"Mozilla/5.0 (X11; U; Linux; en-US) AppleWebKit/527+ (KHTML, like Gecko, Safari/419.3) Arora/0.6",
"Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1.2pre) Gecko/20070215 K-Ninja/2.1.1",
"Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN; rv:1.9) Gecko/20080705 Firefox/3.0 Kapiko/3.0",
"Mozilla/5.0 (X11; Linux i686; U;) Gecko/20070322 Kazehakase/0.4.5",
"Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.8) Gecko Fedora/1.9.0.8-1.fc10 Kazehakase/0.5.6",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/535.20 (KHTML, like Gecko) Chrome/19.0.1036.7 Safari/535.20",
"Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; fr) Presto/2.9.168 Version/11.52",
]
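# Note: USER_AGENTS is presumably consumed by a rotating user-agent
# downloader middleware defined elsewhere in this project (not shown here);
# a minimal sketch of such a middleware, assuming that design:
# import random
# class RotateUserAgentMiddleware(object):
#     def process_request(self, request, spider):
#         request.headers['User-Agent'] = random.choice(USER_AGENTS)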
|
[
"luoshao23@gmail.com"
] |
luoshao23@gmail.com
|
8a3a140bdde0f32a1ce279a856bd639481d0d97b
|
894b290b4f4f47b5eb523c23efd7bd6110d91b2f
|
/121_taobao/taobao/taobao/scripts/tranform.py
|
99dfe77776a5cd9797207afa9a1f3c83e66fec7f
|
[] |
no_license
|
wliustc/SpiderS
|
6650c00616d11239de8c045828bafdc5a299b1ce
|
441f309c50d28c1a3917bed19321cd5cbe7c2861
|
refs/heads/master
| 2020-03-27T06:15:39.495785
| 2018-06-14T07:55:44
| 2018-06-14T07:55:44
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,279
|
py
|
# coding=utf8
import sys
import json

import web

db = web.database(dbn='mysql', db='hillinsight', user='writer', pw='hh$writer', port=3306, host='10.15.1.24')


def parse1(line):
    # print(line)
    line_json = json.loads(line)
    # print(line_json)
    comment_list = line_json.get('comment_list')
    title = line_json.get('title')
    price = line_json.get('price')
    sale_num = line_json.get('sale_num')
    comment_num = line_json.get('comment_num')
    comment_url = line_json.get('comment_url')
    detail_url = line_json.get('detail_url')
    for comment in comment_list:
        item = {}
        displayUserNick = comment.get('displayUserNick')
        rateContent = comment.get('rateContent')
        rateDate = comment.get('rateDate')
        item['title'] = title
        item['price'] = price.strip()
        item['sale_num'] = sale_num
        item['comment_num'] = comment_num
        item['comment_url'] = comment_url
        item['detail_url'] = detail_url
        item['displayUserNick'] = displayUserNick
        item['rateContent'] = rateContent
        item['rateDate'] = rateDate
        print(item)


def parse(line):
    json_line = json.loads(line)
    db.insert('t_spider_taobao_xiaoguancha', **json_line)


for line in sys.stdin:
    parse(line)
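# Usage sketch (assumed invocation; one JSON object per input line):
#   cat items.jsonl | python tranform.py
# Note: parse1 is an unused variant that flattens comment_list into
# per-comment dicts and prints them instead of inserting into MySQL.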
|
[
"luoshao23@gmail.com"
] |
luoshao23@gmail.com
|
a5373dfe1ebe90039ddc6406c46857379a2f007c
|
add08cbeb081563555e11a25f6fc53541934ff86
|
/python-for-pentesters/9_macspoof.py
|
e2b43b546b1f89b8c724b9f115747cf3b5e587a4
|
[] |
no_license
|
olorin28/scratch-python
|
394949da3a1594163b9dcf03220b43bcc776ef12
|
a2f1813bb3fd74de2148678dd81a0907f53276c0
|
refs/heads/master
| 2023-07-10T09:12:01.267913
| 2021-08-13T17:59:19
| 2021-08-13T17:59:19
| 395,667,928
| 0
| 0
| null | 2021-08-13T17:59:20
| 2021-08-13T13:37:47
|
Python
|
UTF-8
|
Python
| false
| false
| 648
|
py
|
import random
import os
import subprocess


def get_rand():
    """Return one random hex digit as a string."""
    return random.choice("abcdef0123456789")


def new_mac():
    """Build a random MAC address string of the form xx:xx:xx:xx:xx:xx."""
    new_ = ""
    for i in range(0, 5):
        new_ += get_rand() + get_rand() + ":"
    new_ += get_rand() + get_rand()
    return new_
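
# The line below prints the interface's current MAC on macOS ("en0"); the
# commented-out subprocess calls target Linux ("eth0") and would need root
# privileges to actually change the hardware address.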
print(os.system("ifconfig en0 | grep ether | grep -oE [0-9abcdef:]{17}"))
# subprocess.call(["sudo","ifconfig","eth0","down"])
new_m = new_mac()
# subprocess.call(["sudo","ifconfig","eth0","hw","ether","%s"%new_m])
# subprocess.call(["sudo","ifconfig","eth0","up"])
# print(os.system("ifconfig eth0 | grep ether | grep -oE [0-9abcdef:]{17}"))
print(new_m)
|
[
"kyle.cossel@healthmine.com"
] |
kyle.cossel@healthmine.com
|