text
stringlengths 8
6.05M
|
|---|
# created by Ryan Spies
# 3/2/2015
# Python 2.7
# Description: parse through a individual data files from IEM website
# (e.g. hourly ASOS) and generate formatted cardfile. Also creates a summary csv file
# with calculated valid data points and percent of total. Used to display in arcmap
# datacard format: http://www.nws.noaa.gov/oh/hrl/nwsrfs/users_manual/part7/_pdf/72datacard.pdf
import os
import datetime as dt
from datetime import datetime
from dateutil.relativedelta import relativedelta
import dateutil
import numpy as np
import glob
maindir = os.getcwd()
workingdir = maindir[:-16] + 'Calibration_NWS'+ os.sep +'APRFC_FY2015'+ os.sep +'raw_data'
################### user input #########################
variable = 'ptpx' # choices: 'ptpx' or 'temp'
timestep = 'hourly' # choices: 'hourly' or 'daily'
state = 'AK'
data_files = workingdir + os.sep + 'asos_' + timestep +os.sep
out_dir = workingdir + os.sep + 'asos_' + timestep +os.sep + 'cardfiles_' + variable + os.sep
########################################################
summary_file = open(workingdir + os.sep + 'asos_summary_' + variable + '_' + timestep + '.csv','w')
summary_file.write('NAME,SITE_ID,LAT,LON,ELEV,MISSING_DATA,VALID_DATA,YEARS_VALID,PCT_AVAIL\n')
if variable == 'temp':
data_type = variable.upper(); dim = 'D'; unit = 'F'; inter = '1'; ext = '.tpt'
if variable == 'ptpx':
data_type = variable.upper(); dim = 'L'; unit = 'IN'; inter = '1'; ext = '.ptp'
if timestep == 'hourly':
year_factor = float(24*365)
if timestep == 'daily':
year_factor = float(365)
# loop through data files
for data_file in glob.glob(data_files+'/*.txt'):
print os.path.basename(data_file)
name = os.path.basename(data_file)[5:-4] # get the actual file name from the path
read_data = open(data_file,'r')
count_all = 0; count_missing = 0
site_data = {}
print 'Parsing raw data file...'
for each in read_data:
if each[:1] != '#' and each[:7] != 'station':
line = each.split(',')
site_id = line[0]; lon = line[2]; lat = line[3]; elev=''
if variable == 'temp':
data = line[4]
if variable == 'ptpx':
data = line[5]
date_time = dateutil.parser.parse(line[1])
changemin = date_time.minute
# round sub-hourly data points up to nearest hour
if int(changemin) != 0:
changet = dt.timedelta(minutes=(60-int(changemin)))
round_dt = date_time + changet
else:
round_dt = date_time
if str(data) != 'M' and str(data) != 'M\n': # ignore missing data -> filled in below (-999)
if variable == 'ptpx' and float(data) < 12.0 and float(data) >= 0.0: # QA/QC remove unrealistic precip values
if round_dt in site_data:
site_data[round_dt].append(float(data))
else:
site_data[round_dt] = [float(data)]
if variable == 'temp':
if round_dt in site_data:
site_data[round_dt].append(float(data))
else:
site_data[round_dt] = [float(data)]
read_data.close()
min_date = min(site_data); max_date = max(site_data); iter_date = min_date
# need to be sure that the first data point starts on day 1 hour 1
if iter_date.day != 1 or iter_date.hour != 1:
iter_date = iter_date + relativedelta(months=+1)
iter_date = dt.datetime(iter_date.year,iter_date.month,1,1,0)
min_date = iter_date
month_count = 0; previous_month = 13 # use these for calculating line number for month/year lines
if timestep == 'hourly':
site_label = state + '-' + site_id + '-HLY'
print 'Writing data to cardfile...'
cardfile = open(out_dir + site_label + '.' + str(min_date.month) + str(min_date.year) + '.' + str(max_date.month) + str(max_date.year) + ext,'wb')
###### header info ######
cardfile.write('$ Data downloaded from Iowa Environmental Mesonet (ASOS/AWOS)\n')
cardfile.write('$ Data processed from hourly/sub-hourly text files\n')
cardfile.write('$ Ryan Spies ryan.spies@amecfw.com\n')
cardfile.write('$ Data Generated: ' + str(datetime.now())[:19] + '\n')
cardfile.write('$ Symbol for missing data = -999\n')
cardfile.write('{:12s} {:4s} {:4s} {:4s} {:2d} {:12s} {:12s}'.format('datacard', variable.upper(), dim,unit,int(inter),site_label,name))
cardfile.write('\n')
cardfile.write('{:2d} {:4d} {:2d} {:4d} {:2d} {:8s}'.format(int(min_date.month), int(min_date.year), int(max_date.month),int(max_date.year),1,'F9.2'))
cardfile.write('\n')
###### write formatted data #####
valid_count = 0; miss_count = 0
while iter_date <= max_date:
if int(iter_date.month) == previous_month:
month_count += 1
else:
month_count = 1
if iter_date in site_data:
valid_count += 1
if variable == 'ptpx':
out_data = max(site_data[iter_date]) # sub-hourly precip accumulates up to end of hour???
if variable == 'temp':
out_data = np.mean(site_data[iter_date])
else:
out_data = -999
miss_count += 1
cardfile.write('{:12s}{:2d}{:02d}{:4d}{:9.2f}'.format(site_label,int(iter_date.month),int(str(iter_date.year)[-2:]),month_count,float(out_data)))
cardfile.write('\n')
previous_month = int(iter_date.month)
iter_date = iter_date + dt.timedelta(hours=1)
cardfile.close()
summary_file.write(str(name)+','+str(site_id)+','+str(lat)+','+str(lon)+','+str(elev)+','+str(miss_count)+','+str(valid_count)+','+str(round((valid_count/year_factor),2))+','+str((float(valid_count)/(miss_count+valid_count))*100)+'\n')
summary_file.close()
print 'Completed!'
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'advanced.ui'
#
# Created by: PyQt5 UI code generator 5.13.0
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Dialog(object):
    """Auto-generated (PyQt5 uic) UI definition for the 'advanced' dialog.

    setupUi() builds the widget tree and wires signals; retranslateUi()
    installs all translatable display strings (Chinese in the .ui file).
    Do not edit by hand beyond regeneration.
    """

    def setupUi(self, Dialog):
        """Create widgets, lay them out, and connect signals to Dialog slots."""
        Dialog.setObjectName("Dialog")
        Dialog.setWindowModality(QtCore.Qt.ApplicationModal)
        Dialog.setEnabled(True)
        Dialog.resize(304, 650)
        Dialog.setToolTip("")
        Dialog.setModal(False)
        self.verticalLayout = QtWidgets.QVBoxLayout(Dialog)
        self.verticalLayout.setObjectName("verticalLayout")
        # --- login section: account id / password / secret key form ---
        self.label_login = QtWidgets.QLabel(Dialog)
        self.label_login.setObjectName("label_login")
        self.verticalLayout.addWidget(self.label_login)
        self.frame_login = QtWidgets.QFrame(Dialog)
        self.frame_login.setFrameShape(QtWidgets.QFrame.Panel)
        self.frame_login.setFrameShadow(QtWidgets.QFrame.Raised)
        self.frame_login.setObjectName("frame_login")
        self.formLayout = QtWidgets.QFormLayout(self.frame_login)
        self.formLayout.setObjectName("formLayout")
        self.login_label_id = QtWidgets.QLabel(self.frame_login)
        self.login_label_id.setObjectName("login_label_id")
        self.formLayout.setWidget(1, QtWidgets.QFormLayout.LabelRole, self.login_label_id)
        self.login_lineEdit_id = QtWidgets.QLineEdit(self.frame_login)
        self.login_lineEdit_id.setObjectName("login_lineEdit_id")
        self.formLayout.setWidget(1, QtWidgets.QFormLayout.FieldRole, self.login_lineEdit_id)
        self.login_label_key = QtWidgets.QLabel(self.frame_login)
        self.login_label_key.setObjectName("login_label_key")
        self.formLayout.setWidget(2, QtWidgets.QFormLayout.LabelRole, self.login_label_key)
        self.login_lineEdit_key = QtWidgets.QLineEdit(self.frame_login)
        self.login_lineEdit_key.setObjectName("login_lineEdit_key")
        self.formLayout.setWidget(2, QtWidgets.QFormLayout.FieldRole, self.login_lineEdit_key)
        self.login_label_secret = QtWidgets.QLabel(self.frame_login)
        self.login_label_secret.setObjectName("login_label_secret")
        self.formLayout.setWidget(3, QtWidgets.QFormLayout.LabelRole, self.login_label_secret)
        self.login_lineEdit_secret = QtWidgets.QLineEdit(self.frame_login)
        # Secret field masks its input.
        self.login_lineEdit_secret.setEchoMode(QtWidgets.QLineEdit.Password)
        self.login_lineEdit_secret.setObjectName("login_lineEdit_secret")
        self.formLayout.setWidget(3, QtWidgets.QFormLayout.FieldRole, self.login_lineEdit_secret)
        self.label = QtWidgets.QLabel(self.frame_login)
        self.label.setObjectName("label")
        self.formLayout.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.label)
        self.comboBox_2 = QtWidgets.QComboBox(self.frame_login)
        self.comboBox_2.setEditable(True)
        self.comboBox_2.setObjectName("comboBox_2")
        self.formLayout.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.comboBox_2)
        self.verticalLayout.addWidget(self.frame_login)
        # --- recognition section: interval, count, mode ---
        self.label_reg = QtWidgets.QLabel(Dialog)
        self.label_reg.setObjectName("label_reg")
        self.verticalLayout.addWidget(self.label_reg)
        self.frame_reg = QtWidgets.QFrame(Dialog)
        self.frame_reg.setFrameShape(QtWidgets.QFrame.Panel)
        self.frame_reg.setFrameShadow(QtWidgets.QFrame.Raised)
        self.frame_reg.setObjectName("frame_reg")
        self.gridLayout_3 = QtWidgets.QGridLayout(self.frame_reg)
        self.gridLayout_3.setObjectName("gridLayout_3")
        self.reg_label_lay = QtWidgets.QLabel(self.frame_reg)
        self.reg_label_lay.setObjectName("reg_label_lay")
        self.gridLayout_3.addWidget(self.reg_label_lay, 0, 0, 1, 1)
        self.reg_timeEdit = QtWidgets.QTimeEdit(self.frame_reg)
        self.reg_timeEdit.setCurrentSection(QtWidgets.QDateTimeEdit.SecondSection)
        # Default recognition interval: 2 seconds.
        self.reg_timeEdit.setTime(QtCore.QTime(0, 0, 2))
        self.reg_timeEdit.setObjectName("reg_timeEdit")
        self.gridLayout_3.addWidget(self.reg_timeEdit, 0, 1, 1, 2)
        self.reg_label_num = QtWidgets.QLabel(self.frame_reg)
        self.reg_label_num.setObjectName("reg_label_num")
        self.gridLayout_3.addWidget(self.reg_label_num, 1, 0, 1, 1)
        self.reg_comboBox = QtWidgets.QComboBox(self.frame_reg)
        self.reg_comboBox.setEditable(True)
        self.reg_comboBox.setObjectName("reg_comboBox")
        # Item texts are set in retranslateUi (-1, 1, 2, 3).
        self.reg_comboBox.addItem("")
        self.reg_comboBox.addItem("")
        self.reg_comboBox.addItem("")
        self.reg_comboBox.addItem("")
        self.gridLayout_3.addWidget(self.reg_comboBox, 1, 1, 1, 1)
        self.reg_pushButton = QtWidgets.QPushButton(self.frame_reg)
        self.reg_pushButton.setObjectName("reg_pushButton")
        self.gridLayout_3.addWidget(self.reg_pushButton, 1, 2, 1, 1)
        self.reg_label_debug = QtWidgets.QLabel(self.frame_reg)
        self.reg_label_debug.setObjectName("reg_label_debug")
        self.gridLayout_3.addWidget(self.reg_label_debug, 2, 0, 1, 1)
        self.comboBox = QtWidgets.QComboBox(self.frame_reg)
        self.comboBox.setObjectName("comboBox")
        self.comboBox.addItem("")
        self.comboBox.addItem("")
        self.comboBox.addItem("")
        self.gridLayout_3.addWidget(self.comboBox, 2, 1, 1, 1)
        self.verticalLayout.addWidget(self.frame_reg)
        # --- output section: export format, directory, title ---
        self.label_out = QtWidgets.QLabel(Dialog)
        self.label_out.setObjectName("label_out")
        self.verticalLayout.addWidget(self.label_out)
        self.frame_out = QtWidgets.QFrame(Dialog)
        self.frame_out.setFrameShape(QtWidgets.QFrame.Panel)
        self.frame_out.setFrameShadow(QtWidgets.QFrame.Raised)
        self.frame_out.setObjectName("frame_out")
        self.gridLayout = QtWidgets.QGridLayout(self.frame_out)
        self.gridLayout.setObjectName("gridLayout")
        self.out_label_fmt = QtWidgets.QLabel(self.frame_out)
        self.out_label_fmt.setObjectName("out_label_fmt")
        self.gridLayout.addWidget(self.out_label_fmt, 0, 0, 1, 1)
        self.out_label_dir = QtWidgets.QLabel(self.frame_out)
        self.out_label_dir.setObjectName("out_label_dir")
        self.gridLayout.addWidget(self.out_label_dir, 1, 0, 1, 1)
        self.out_lineEdit_dir = QtWidgets.QLineEdit(self.frame_out)
        self.out_lineEdit_dir.setObjectName("out_lineEdit_dir")
        self.gridLayout.addWidget(self.out_lineEdit_dir, 1, 1, 1, 1)
        self.out_lineEdit_title = QtWidgets.QLineEdit(self.frame_out)
        self.out_lineEdit_title.setEnabled(True)
        self.out_lineEdit_title.setObjectName("out_lineEdit_title")
        self.gridLayout.addWidget(self.out_lineEdit_title, 2, 1, 1, 1)
        self.out_comboBox = QtWidgets.QComboBox(self.frame_out)
        self.out_comboBox.setObjectName("out_comboBox")
        self.out_comboBox.addItem("")
        self.out_comboBox.addItem("")
        self.gridLayout.addWidget(self.out_comboBox, 0, 1, 1, 1)
        self.out_label_title = QtWidgets.QLabel(self.frame_out)
        self.out_label_title.setToolTip("")
        self.out_label_title.setWhatsThis("")
        self.out_label_title.setObjectName("out_label_title")
        self.gridLayout.addWidget(self.out_label_title, 2, 0, 1, 1)
        self.verticalLayout.addWidget(self.frame_out)
        # --- advanced section: field cleaning, region, rule list ---
        self.label_advanced = QtWidgets.QLabel(Dialog)
        self.label_advanced.setObjectName("label_advanced")
        self.verticalLayout.addWidget(self.label_advanced)
        self.frame_advanced = QtWidgets.QFrame(Dialog)
        self.frame_advanced.setFrameShape(QtWidgets.QFrame.Panel)
        self.frame_advanced.setFrameShadow(QtWidgets.QFrame.Raised)
        self.frame_advanced.setObjectName("frame_advanced")
        self.gridLayout_2 = QtWidgets.QGridLayout(self.frame_advanced)
        self.gridLayout_2.setContentsMargins(10, 10, -1, -1)
        self.gridLayout_2.setObjectName("gridLayout_2")
        self.adv_pushButton_help = QtWidgets.QPushButton(self.frame_advanced)
        self.adv_pushButton_help.setObjectName("adv_pushButton_help")
        self.gridLayout_2.addWidget(self.adv_pushButton_help, 2, 3, 1, 1)
        self.adv_listWidget = QtWidgets.QListWidget(self.frame_advanced)
        self.adv_listWidget.setEnabled(True)
        self.adv_listWidget.setObjectName("adv_listWidget")
        self.gridLayout_2.addWidget(self.adv_listWidget, 4, 0, 1, 4)
        self.adv_checkBox = QtWidgets.QCheckBox(self.frame_advanced)
        self.adv_checkBox.setText("")
        self.adv_checkBox.setObjectName("adv_checkBox")
        self.gridLayout_2.addWidget(self.adv_checkBox, 2, 1, 1, 1)
        self.adv_label_clean = QtWidgets.QLabel(self.frame_advanced)
        self.adv_label_clean.setObjectName("adv_label_clean")
        self.gridLayout_2.addWidget(self.adv_label_clean, 2, 0, 1, 1)
        self.adv_pushButton_add = QtWidgets.QPushButton(self.frame_advanced)
        self.adv_pushButton_add.setObjectName("adv_pushButton_add")
        self.gridLayout_2.addWidget(self.adv_pushButton_add, 2, 2, 1, 1)
        self.adv_label_region = QtWidgets.QLabel(self.frame_advanced)
        self.adv_label_region.setObjectName("adv_label_region")
        self.gridLayout_2.addWidget(self.adv_label_region, 0, 0, 1, 1)
        self.adv_lineEdit = QtWidgets.QLineEdit(self.frame_advanced)
        self.adv_lineEdit.setObjectName("adv_lineEdit")
        self.gridLayout_2.addWidget(self.adv_lineEdit, 0, 2, 1, 2)
        self.verticalLayout.addWidget(self.frame_advanced)
        # --- bottom button row: reset / apply ---
        self.frame_set = QtWidgets.QFrame(Dialog)
        self.frame_set.setFrameShape(QtWidgets.QFrame.StyledPanel)
        self.frame_set.setFrameShadow(QtWidgets.QFrame.Raised)
        self.frame_set.setObjectName("frame_set")
        self.horizontalLayout = QtWidgets.QHBoxLayout(self.frame_set)
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.set_pushButton_reset = QtWidgets.QPushButton(self.frame_set)
        self.set_pushButton_reset.setObjectName("set_pushButton_reset")
        self.horizontalLayout.addWidget(self.set_pushButton_reset)
        self.set_pushButton_apply = QtWidgets.QPushButton(self.frame_set)
        self.set_pushButton_apply.setObjectName("set_pushButton_apply")
        self.horizontalLayout.addWidget(self.set_pushButton_apply)
        self.verticalLayout.addWidget(self.frame_set)
        self.retranslateUi(Dialog)
        # Signal wiring: slots are expected to exist on the Dialog instance.
        self.set_pushButton_reset.clicked.connect(Dialog.set_resetSlot)
        self.set_pushButton_apply.clicked.connect(Dialog.set_applySlot)
        self.out_comboBox.currentIndexChanged['int'].connect(Dialog.out_fmtSlot)
        self.reg_pushButton.clicked.connect(Dialog.reg_buttonSlot)
        self.adv_pushButton_add.clicked.connect(Dialog.adv_buttonAddSlot)
        self.adv_pushButton_help.clicked.connect(Dialog.adv_buttonHelpSlot)
        QtCore.QMetaObject.connectSlotsByName(Dialog)

    def retranslateUi(self, Dialog):
        """Install translatable UI strings (source language is Chinese)."""
        _translate = QtCore.QCoreApplication.translate
        Dialog.setWindowTitle(_translate("Dialog", "高级"))  # "Advanced"
        self.label_login.setText(_translate("Dialog", "登录"))  # "Login"
        self.login_label_id.setText(_translate("Dialog", "账号名"))  # "Account name"
        self.login_label_key.setText(_translate("Dialog", "账户密码"))  # "Account password"
        self.login_label_secret.setText(_translate("Dialog", "账户密钥"))  # "Account secret key"
        self.label.setText(_translate("Dialog", "用户名"))  # "Username"
        self.label_reg.setText(_translate("Dialog", "识别"))  # "Recognition"
        self.reg_label_lay.setText(_translate("Dialog", "识别间隔:"))  # "Recognition interval:"
        self.reg_timeEdit.setDisplayFormat(_translate("Dialog", "s秒"))  # seconds format
        self.reg_label_num.setText(_translate("Dialog", "识别数量:"))  # "Recognition count:"
        self.reg_comboBox.setItemText(0, _translate("Dialog", "-1"))
        self.reg_comboBox.setItemText(1, _translate("Dialog", "1"))
        self.reg_comboBox.setItemText(2, _translate("Dialog", "2"))
        self.reg_comboBox.setItemText(3, _translate("Dialog", "3"))
        self.reg_pushButton.setText(_translate("Dialog", "帮助"))  # "Help"
        self.reg_label_debug.setText(_translate("Dialog", "识别模式:"))  # "Recognition mode:"
        self.comboBox.setItemText(0, _translate("Dialog", "精准识别"))  # "Accurate"
        self.comboBox.setItemText(1, _translate("Dialog", "通用识别"))  # "General"
        self.comboBox.setItemText(2, _translate("Dialog", "手写识别"))  # "Handwriting"
        self.label_out.setText(_translate("Dialog", "导出"))  # "Export"
        self.out_label_fmt.setText(_translate("Dialog", "导出格式:"))  # "Export format:"
        self.out_label_dir.setText(_translate("Dialog", "导出目录:"))  # "Export directory:"
        self.out_comboBox.setItemText(0, _translate("Dialog", "文本 txt"))  # "Text txt"
        self.out_comboBox.setItemText(1, _translate("Dialog", "Excel xlsx"))
        self.out_label_title.setText(_translate("Dialog", "标题设置:"))  # "Title settings:"
        self.label_advanced.setText(_translate("Dialog", "高级"))  # "Advanced"
        self.adv_pushButton_help.setText(_translate("Dialog", "帮助"))  # "Help"
        self.adv_label_clean.setText(_translate("Dialog", "字段清洗:"))  # "Field cleaning:"
        self.adv_pushButton_add.setText(_translate("Dialog", "添加"))  # "Add"
        self.adv_label_region.setText(_translate("Dialog", "识别区域:"))  # "Recognition region:"
        self.set_pushButton_reset.setText(_translate("Dialog", "重置"))  # "Reset"
        self.set_pushButton_apply.setText(_translate("Dialog", "应用"))  # "Apply"
|
def geometric_sequence_elements(a, r, n):
    """Return the first n elements of the geometric sequence a, a*r, a*r**2, ...

    joined as a comma-separated string.  Uses range() instead of the
    Python-2-only xrange() (identical behavior, Python 3 compatible).
    """
    return ', '.join(str(a * r ** i) for i in range(n))
|
from enum import Enum
# Raid boss identifiers, declared with the Enum functional API; member
# order and string values match the original class-based declaration.
RaidBoss = Enum(
    "RaidBoss",
    [
        ("ASTROLAB", "astrolab"),
        ("SAMURAI", "samurai"),
        ("MAD_KING", "mad king"),
        ("GUNLORD", "gunlord"),
        ("FROSTWING", "frostwing"),
        ("TWIN_FACE", "twin face"),
    ],
)
|
# Sample data to search; renamed from `list`, which shadowed the builtin.
values = [4, 2, 3, 3]

def findIndex():
    """Return the index of the LAST occurrence of 2 in `values`.

    The original kept overwriting `index` across the whole list (last-match
    semantics, preserved here) and raised UnboundLocalError when 2 was
    absent; that case now raises an explicit ValueError.
    """
    index = -1
    for i, value in enumerate(values):
        if value == 2:
            index = i
    if index < 0:
        raise ValueError("2 not found in values")
    return index

print(findIndex())
# Print the avatar list; `with` guarantees the handle is closed even if
# read() raises (the original leaked the open file object).
with open("avatar_list.txt", "r") as fo:
    print(fo.read())
|
from pa.plugin import Plugin
class LNetworkPlugin(Plugin):
    """Registration stub for the LNetwork plugin; behavior is inherited."""

    __pluginname__ = 'LNetwork'
|
'''
sentinela.py
Copyright 2013 Andres Riancho
This file is part of w3af, http://w3af.org/ .
w3af is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation version 2 of the License.
w3af is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with w3af; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
'''
import logging
class Sentinela(object):
    """Runs a set of rule callables, dropping any rule that raises.

    Rules are zero-argument callables; a rule that raises is logged and
    removed so it never runs again.
    """

    def __init__(self, rules):
        # Mutable list of rule callables; pruned in click() on failure.
        self.rules = rules

    def click(self):
        # Iterate over a shallow copy so removing a rule mid-loop is safe.
        for rule_clicker in self.rules[:]:
            try:
                rule_clicker()
            except Exception:
                # Was a bare `except:`, which also swallowed
                # KeyboardInterrupt/SystemExit; narrowed to Exception.
                msg = 'The %s rule raised an exception.' % rule_clicker
                logging.exception(msg)
                self.rules.remove(rule_clicker)
|
import argparse
import json
import os
import re
import sys
from collections import namedtuple
from utilities import constants
class Arguments:
    """Singleton wrapper around the parsed command-line arguments.

    Construction builds the inner __Arguments parser exactly once;
    attribute access on any Arguments() instance is delegated to the
    argparse Namespace held by the singleton (see __getattr__).
    """
    class __Arguments:
        def __init__(self):
            # Build the CLI parser; parsing happens immediately, so merely
            # constructing this class reads sys.argv and may call sys.exit.
            parser = argparse.ArgumentParser(
                description="Collect OSINT for GitLab groups and, optionally, members. Search repository assets for "
                            "sensitive data.")
            # Exactly one of --group / --project is required.
            required_args = parser.add_mutually_exclusive_group(required=True)
            required_args.add_argument('-g', '--group', type=str, action='append',
                                       help="ID or HTML encoded name of a GitLab group. This option, by itself, "
                                            "will display group projects only.")
            required_args.add_argument('-p', '--project', type=str, action='append',
                                       help="ID or HTML encoded name of a GitLab project. This option, by itself, "
                                            "will display project details only.")
            parser.add_argument('-u', '--url', default='https://gitlab.com',
                                help="An optional argument to specify the base URL of your GitLab instance. If the "
                                     "argument is not supplied, its defaulted to 'https://gitlab.com'")
            parser.add_argument('-m', '--members', action='store_true',
                                help="Include group members and their personal projects and their related assets in the "
                                     "search for sensitive data.")
            parser.add_argument('-s', '--snippets', action='store_true',
                                help="Searches found projects for GitLab Snippets with sensitive data.")
            parser.add_argument('-i', '--issues', action='store_true',
                                help="Searches found projects for GitLab Issues and discussions/comments with sensitive "
                                     "data.")
            parser.add_argument('-r', '--mergerequests', action='store_true',
                                help="Searches found projects for GitLab Merge Requests and discussions/comments with "
                                     "sensitive data.")
            parser.add_argument('-j', '--jobs', action='store_true',
                                help="Searches each projects' public CI job logs for sensitive data starting with the "
                                     "most recent jobs that either succeeded or failed")
            parser.add_argument('-d', '--depth', type=int,
                                help="Limit the number of requests across ALL targeted assets including group projects")
            parser.add_argument('-t', '--timestamp', action='store_true',
                                help='Disables display of start/finish times and originating IP to the output')
            parser.add_argument('-x', '--proxy', type=str, action='store',
                                help='Proxies all requests using the provided URI matching the scheme: '
                                     'http(s)://user:pass@10.10.10.10:8000')
            # NOTE(review): this help text says "-p (--proxy)", but -p is
            # --project above; the proxy switch is -x. Confirm and fix the
            # wording in a behavior-changing pass.
            parser.add_argument('-c', '--cert', type=str, action='store',
                                help='Used in tandem with -p (--proxy), this switch provides a fully qualified path to a '
                                     'certificate to verify TLS connections. Provide a fully qualified path to the dynamic '
                                     'cert. Example: /Users/<username>/owasp_zap_root_ca.cer.')
            parser.add_argument('-l', '--logfile', type=str, action='store',
                                help='Will APPEND all output to specified file.')
            constants.Banner.render()
            # No arguments at all: print usage and exit with an error code.
            if len(sys.argv) == 1:
                parser.print_help(sys.stderr)
                sys.exit(1)
            self.parsed_args = parser.parse_args()
            # A proxy requires a certificate for TLS interception.
            if self.parsed_args.proxy and not self.parsed_args.cert:
                parser.error('If you specify a proxy address, you must also specify a dynamic certificate in order to '
                             'decrypt TLS traffic with the --cert switch.')
    # Class-level slot holding the single __Arguments instance.
    instance = None
    def __init__(self):
        # Lazily create the singleton on first construction.
        if not Arguments.instance:
            Arguments.instance = Arguments.__Arguments()
    def __getattr__(self, name):
        # Delegate unknown attribute lookups to the parsed argparse namespace.
        return getattr(self.instance.parsed_args, name)
# Lightweight record types for assets discovered during a scan.
JobLog = namedtuple('JobLog', ['ident', 'web_url', 'trace'])
Issue = namedtuple('Issue', ['ident', 'web_url', 'description'])
MergeRequest = namedtuple('MergeRequest', ['ident', 'web_url', 'description'])
Comment = namedtuple('Comment', ['comment_type', 'parent_url', 'comment_body'])
Secret = namedtuple('Secret', ['secret_type', 'secret', 'url'])
class SecretsMonitor:
    """Scans text content for sensitive data.

    Regexes are loaded from regexes.json (one named pattern per entry) and
    concatenated into a single master regex, each pattern in its own
    synthetic named group.
    """

    def __init__(self):
        regex_path = os.path.join(os.path.dirname(__file__), "../regexes.json")
        with open(regex_path) as f:
            self.regexes = json.loads(f.read())
        self.regex_names = self.__regex_names(self.regexes)
        self.master_regex = self.__compile_regexes(self.regexes)

    def __regex_names(self, regexes):
        """Map synthetic group name -> human-readable regex name."""
        names = {}
        for idx, regex_name in enumerate(regexes):
            names[self.__group(idx)] = regex_name
        return names

    def __compile_regexes(self, regexes):
        """Join all regexes into one compiled alternation of named groups."""
        combined = '|'.join(
            f'(?P<{self.__group(idx)}>{regexes[regex_name]})'
            for idx, regex_name in enumerate(regexes)
        )
        return re.compile(combined)

    def __group(self, idx):
        """Synthetic, regex-safe group name for the idx-th pattern."""
        return f'group_{idx}'

    def sniff_secrets(self, content):
        """Return a list of Secret tuples found in {web_url: raw_text}."""
        if not content:
            return []
        secrets = []
        for web_url, raw_data in content.items():
            for secret_type, secret in self.__get_secrets(raw_data).items():
                secrets.append(Secret(secret_type, secret, web_url))
        return secrets

    def __get_secrets(self, content):
        """Return {regex_name: matched_text} for the first master-regex hit."""
        if not content:
            return {}
        match = self.master_regex.search(content)
        if match is None:
            return {}
        found = {}
        for group_name, value in match.groupdict().items():
            if value is not None:
                found[self.regex_names[group_name]] = value
        return found
|
import roboclaw.py
|
'''
:Boneh-Lynn-Shacham Identity Based Signature
| From: "D. Boneh, B. Lynn, H. Shacham Short Signatures from the Weil Pairing"
| Published in: Journal of Cryptology 2004
| Available from: http://
| Notes: This is the IBE (2-level HIBE) implementation of the HIBE scheme BB_2.
* type: signature (identity-based)
* setting: bilinear groups (asymmetric)
:Authors: J. Ayo Akinyele
:Date: 1/2011
'''
from charm.toolbox.pairinggroup import PairingGroup,ZR,G1,G2,pair
from charm.core.engine.util import objectToBytes
debug = False
class IBSig():
    """
    >>> from charm.toolbox.pairinggroup import PairingGroup
    >>> group = PairingGroup('MNT224')
    >>> messages = { 'a':"hello world!!!" , 'b':"test message" }
    >>> ib = IBSig(group)
    >>> (public_key, secret_key) = ib.keygen()
    >>> signature = ib.sign(secret_key['x'], messages)
    >>> ib.verify(public_key, signature, messages)
    True
    """
    def __init__(self, groupObj):
        # NOTE: the pairing group is stored in a module-level global, not on
        # the instance; all methods below read this global, so only one
        # group can be active per process.
        global group
        group = groupObj
    def dump(self, obj):
        # Serialize an arbitrary object to bytes so it can be hashed.
        return objectToBytes(obj, group)
    def keygen(self, secparam=None):
        # Secret x is random in ZR; public key holds g and g^x in G2.
        g, x = group.random(G2), group.random()
        g_x = g ** x
        pk = { 'g^x':g_x, 'g':g, 'identity':str(g_x), 'secparam':secparam }
        sk = { 'x':x }
        return (pk, sk)
    def sign(self, x, message):
        # BLS signature: H(M)^x, with H hashing into G1.
        M = self.dump(message)
        if debug: print("Message => '%s'" % M)
        return group.hash(M, G1) ** x
    def verify(self, pk, sig, message):
        # Accept iff e(sig, g) == e(H(M), g^x) under the bilinear pairing.
        M = self.dump(message)
        h = group.hash(M, G1)
        if pair(sig, pk['g']) == pair(h, pk['g^x']):
            return True
        return False
def main():
    # Demo: generate a key pair, sign a small message dict, and verify.
    groupObj = PairingGroup('MNT224')
    m = { 'a':"hello world!!!" , 'b':"test message" }
    bls = IBSig(groupObj)
    (pk, sk) = bls.keygen()
    sig = bls.sign(sk['x'], m)
    if debug: print("Message: '%s'" % m)
    if debug: print("Signature: '%s'" % sig)
    assert bls.verify(pk, sig, m), "Failure!!!"
    if debug: print('SUCCESS!!!')
if __name__ == "__main__":
    # Enable the module-level debug flag only when run as a script.
    debug = True
    main()
|
import cv2
import numpy as np
# 读入图像并转化为float类型,用于传递给harris函数
# Read the image and convert the grayscale version to float32, as required
# by cv2.cornerHarris.
filename = './images/test_corner.jpg'
img = cv2.imread(filename)
gray_img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
gray_img = np.float32(gray_img)
# Run the Harris corner detector (blockSize=2, ksize=3, k=0.04).
Harris_detector = cv2.cornerHarris(gray_img, 2, 3, 0.04)
# Dilate the Harris response so corner regions are easier to mark.
# NOTE(review): the original comment said "erode", but cv2.dilate is used.
dst = cv2.dilate(Harris_detector, None)
# Threshold at 1% of the maximum response; mark corners in blue (BGR order).
thres = 0.01 * dst.max()
img[dst > thres] = [255, 0, 0]
# NOTE(review): output filename looks misspelled ("detectin") — confirm
# with downstream consumers before renaming.
cv2.imwrite('corner_detectin_Harris.png', img)
|
from lxml import etree
import re
# Read the previously generated XML result file into an element tree.
f = open ('result1.xml', 'r')
xml = f.read ()
f.close ()
root = etree.fromstring (xml)
c = 0  # counts articles skipped for having no <author> elements
#f = open ('data1_validurl.txt', 'r')
#urls = f.readlines ()
#f.close ()
print "<dblp>"
for article in root.xpath ('*'):
    # Skip (and count) articles without any author text nodes.
    if len (article.xpath ('author/text ()')) == 0:
        c = c + 1
        continue
    # Pattern "name(username)...(affiliation)": rewrite each <rauthor> to
    # keep only the name plus the affiliation in parentheses.
    p = re.compile (r'(?P<name>.*)\((?P<uname>.*)\).*\((?P<affn>.*)\)')
    rauthors = article.xpath ('rauthor')
    for rauthor in rauthors:
        m = p.search (rauthor.text)
        if m:
            rauthor.text = m.group ('name') + '(' + m.group ('affn') + ')'
    # Emit the (possibly rewritten) article wrapped by the <dblp> tags.
    print etree.tostring (article, pretty_print = True)
print "</dblp>"
|
from datetime import datetime
from django import forms
from django.db import models
from django.utils.translation import ugettext_lazy as _
class Subscribe(models.Model):
    """ stores the email addresses of the people who subscribed to the teaser. """
    # Subscriber email address (NOTE(review): the label is the plural
    # 'Subscribers' — confirm whether that wording is intentional).
    email = models.EmailField(_(u'Subscribers'), max_length = 100)
    # Date the subscription was created; datetime.now is evaluated per save.
    added = models.DateField(_('added'), default=datetime.now)
    class Meta:
        # Newest subscriptions first.
        ordering = ('-added',)
    def __unicode__(self):
        # Python 2 display form, e.g. "#3, foo@example.com".
        return "#%d, %s" % (self.id, self.email)
class SubscribeForm(forms.ModelForm):
    """
    Subscribe Form : takes the email address for the user who wants to subscribe.
    """
    # Overrides the model field to supply placeholder text and friendly
    # validation messages; `initial` pre-fills the input box.
    email = forms.EmailField(initial = _("please drop in your email here."),
                             max_length = 100,
                             error_messages = {
                                 'required': _(u"amigo, you need to put a real email address in there! :D"),
                                 'invalid': _(u"ahh! we'll actually need a valid e-mail address."),
                             })
    def __init__ (self, *args, **kwargs):
        super(SubscribeForm, self).__init__(*args, **kwargs)
        # Hide the label, force the field, and show a reassuring hint.
        self.fields['email'].label = ""
        self.fields['email'].required = True
        self.fields['email'].help_text = _(u"we won't spam you!")
    class Meta:
        model = Subscribe
        # `added` is set automatically by the model default, not by the user.
        exclude = ('added',)
|
from n3_camera_models_and_augmented_reality import camera
from n4_multiple_view_geometry import sfm
from PIL import Image
from pylab import *
from numpy import *
def example_plot_3d_points():
    """Scatter-plot the module-level `points3D` array as black dots in 3D."""
    # plotting 3D points
    from mpl_toolkits.mplot3d import axes3d
    fig = figure()
    ax = fig.gca(projection='3d')
    ax.plot(points3D[0],points3D[1],points3D[2],'k.')
def example_compute_and_plot_epipole():
    """Estimate the fundamental matrix from the first two views and plot
    epipolar lines (uses module globals: corr, points2D, im1, im2)."""
    # index for points in first two views
    ndx = (corr[:, 0] >= 0) & (corr[:, 1] >= 0)
    # get coordinates and make homogeneous
    x1 = points2D[0][:, corr[ndx, 0]]
    x1 = vstack((x1, ones(x1.shape[1])))
    x2 = points2D[1][:, corr[ndx, 1]]
    x2 = vstack((x2, ones(x2.shape[1])))
    # compute F
    F = sfm.compute_fundamental(x1, x2)
    # compute the epipole
    e = sfm.compute_epipole(F)
    # plotting
    figure()
    imshow(im1)
    # plot each line individually, this gives nice colors
    for i in range(5):
        sfm.plot_epipolar_line(im1, F, x2[:, i], e, False)
    axis('off')
    figure()
    imshow(im2)
    # plot each point individually, this gives same colors as the lines
    for i in range(5):
        plot(x2[0, i], x2[1, i], 'o')
    axis('off')
def example_triangulation():
    """Triangulate points from the first two views and compare the estimate
    against the ground-truth 3D points (uses module globals: corr, points2D,
    points3D, P)."""
    # index for points in first two views
    ndx = (corr[:, 0] >= 0) & (corr[:, 1] >= 0)
    # get coordinates and make homogeneous
    x1 = points2D[0][:, corr[ndx, 0]]
    x1 = vstack((x1, ones(x1.shape[1])))
    x2 = points2D[1][:, corr[ndx, 1]]
    x2 = vstack((x2, ones(x2.shape[1])))
    Xtrue = points3D[:, ndx]
    Xtrue = vstack((Xtrue, ones(Xtrue.shape[1])))
    # check first 3 points
    Xest = sfm.triangulate(x1, x2, P[0].P, P[1].P)
    print(Xest[:, :3])
    print(Xtrue[:, :3])
    # plotting: estimated points as black circles, ground truth as red dots
    from mpl_toolkits.mplot3d import axes3d
    fig = figure()
    ax = fig.gca(projection='3d')
    ax.plot(Xest[0], Xest[1], Xest[2], 'ko')
    ax.plot(Xtrue[0], Xtrue[1], Xtrue[2], 'r.')
    axis('equal')
def example_compute_P_from_points():
    """ pick out the points that are visible in the first view, compute the camera matrix from them.
    and use the estimated P to project the 3d points to see the outcome """
    corr1 = corr[:, 0] # view 1
    ndx3D = where(corr1 >= 0)[0] # missing values are -1
    ndx2D = corr1[ndx3D]
    # select visible points and make homogeneous
    x = points2D[0][:, ndx2D] # view 1
    x = vstack((x, ones(x.shape[1])))
    X = points3D[:, ndx3D]
    X = vstack((X, ones(X.shape[1])))
    # estimate P
    Pest = camera.Camera(sfm.compute_P(x, X))
    # compare! (normalize both matrices by the same entry before printing)
    print(Pest.P / Pest.P[2, 3])
    print(P[0].P / P[0].P[2, 3])
    xest = Pest.project(X)
    # plotting: measured points in blue, reprojected estimates in red
    figure()
    imshow(im1)
    plot(x[0], x[1], 'bo')
    plot(xest[0], xest[1], 'r.')
    axis('off')
# Module-level data shared by all the example functions above.
# load some images
im1 = array(Image.open('images/001.jpg'))
im2 = array(Image.open('images/002.jpg'))
# load 2D points for each view to a list
points2D = [loadtxt('2D/00'+str(i+1)+'.corners').T for i in range(3)]
# load 3D points
points3D = loadtxt('3D/p3d').T
# load correspondences ('*' in the file marks a point missing in a view)
corr = genfromtxt('2D/nview-corners',dtype='int',missing_values='*')
# load cameras to a list of Camera objects
P = [camera.Camera(loadtxt('2D/00'+str(i+1)+'.P')) for i in range(3)]
# Uncomment to run the other examples (each relies on the globals above).
# example_plot_3d_points()
# example_compute_and_plot_epipole()
# example_triangulation()
example_compute_P_from_points()
show()
|
import random as rng
import math
import entities
class Block(object):
    """Base class for map tiles.

    Subclasses are expected to set `collides`, `transparent`, `color` and
    `sprite`; the base class does not define them itself, so passable()
    only works on fully initialized subclasses.
    """
    def __init__(self, biome=-1):
        self.explored = False  # whether the player has seen this tile
        self.biome = biome     # biome id; -1 means unassigned
        self.sound = -1        # sound id to play on interaction; -1 = none
    def passable(self):
        # Relies on `collides` being set by the subclass.
        return not self.collides
    def interact(self, player):
        # Default placeholder interaction text; subclasses override.
        return "Boop."
class Null(Block):
    """Void tile: opaque, walk-through, and inert to interaction."""

    def __init__(self, biome=-1):
        Block.__init__(self, biome)
        self.sprite = 0
        self.color = (0,0,0)
        self.collides = False
        self.transparent = False

    def __str__(self):
        return "nl"

    def interact(self, player):
        # Nothing to say about the void.
        return ""
class Floor(Block):
    """Walkable floor tile with a randomly chosen decorative sprite."""

    def __init__(self, biome=-1):
        Block.__init__(self, biome)
        self.color = (200,200,200)
        self.collides = False
        self.transparent = True
        # One roll picks the decoration tier; each tier draws its sprite.
        roll = rng.random()
        if roll < 0.05:
            self.sprite = rng.choice([37,44,45])            # bones / moss / puddle
        elif roll < 0.15:
            self.sprite = rng.choice([38,39,40,41,42,43])   # misc debris
        elif roll < 0.40:
            self.sprite = rng.choice([51,50])               # light wear
        else:
            self.sprite = 1                                 # plain floor

    def __str__(self):
        return " "

    def interact(self, player):
        # Specific flavor text for the special decoration sprites.
        flavor = {
            37: "I hope those aren't human bones.",
            44: "It looks like... moss?",
            45: "It's a puddle of... probably something gross.",
        }
        if self.sprite in flavor:
            return flavor[self.sprite]
        return rng.choice(["An empty space.","Nothing to interact with here.","I wonder why there's tile down here.","The tile is a bit cracked."])
class Stone(Block):
    """Solid natural rock wall; blocks movement and sight."""

    def __init__(self, biome=-1):
        Block.__init__(self, biome)
        self.sprite = 2
        self.color = (80,80,80)
        self.collides = True
        self.transparent = False

    def __str__(self):
        return "@@"

    def interact(self, player):
        remarks = ["It looks like some kind of sandstone... or maybe ignimbrite?","You lick the rock. It tastes dirty.","The walls here are surprisingly smooth."]
        return rng.choice(remarks)
class Brick(Block):
    """Man-made brick wall; one in four bricks carries an object sprite."""

    def __init__(self, biome=-1):
        Block.__init__(self, biome)
        self.color = (70,70,70)
        self.collides = True
        self.transparent = False
        self.density = 0.25
        # Single roll decides plain brick vs. brick with an object on it.
        self.blank = rng.random() >= 0.25
        self.blankSprites = [29,30,31,32,33,34,35,36]
        self.objectSprites = [19,20,21,22,23,24,25,26,27,28]
        pool = self.blankSprites if self.blank else self.objectSprites
        self.sprite = rng.choice(pool)

    def __str__(self):
        return "##"

    def interact(self, player):
        # Fixed flavor text per object sprite; plain bricks fall through to
        # the random remarks below.
        flavor = {
            19: "You decide not to think about that.",
            20: "Huh, what purpose would that serve?",
            21: "There is a crack in the wall. It looks very artistic, don't you think?",
            22: "There is a crack in the wall.",
            23: "Yikes. It looks like something was trying to break out of here.",
            24: "Umm, is that blood?",
            25: "Gross.",
            26: "There's a spiderweb on the wall. No spider, though.",
            27: "There's a spiderweb on the wall. No spider, though.",
            28: "A lamp. How nice.",
        }
        if self.sprite in flavor:
            return flavor[self.sprite]
        if self.sprite > 28:
            return rng.choice(["These stone bricks are huge!","Clearly man-made; who built this, and why?","The bricks are cold."])
class Door(Block):
    """Toggleable door. Starts closed (colliding, opaque) but is always passable()."""
    def __init__(self, biome=-1):
        super().__init__(biome)
        self.color = (140, 140, 140)
        self.sprite = 5
        self.collides = True
        self.transparent = False
    def __str__(self):
        return "/\\"
    def passable(self):
        # Doors never block pathing, even while visually closed.
        return True
    def open(self):
        """Lower the door: walkable and see-through."""
        self.color = (160, 160, 160)
        self.sprite = 4
        self.collides = False
        self.transparent = True
    def close(self):
        """Raise the door: solid and opaque again."""
        self.color = (120, 120, 120)
        self.sprite = 5
        self.collides = True
        self.transparent = False
    def interact(self, player):
        self.sound = 2  # door sound effect
        was_closed = self.collides
        if was_closed:
            self.open()
            return rng.choice(["You push the door open.","The door slides into the ground.","The door creaks as it moves out of the way."])
        self.close()
        return rng.choice(["You close the door behind you.","The door slides out of the ground.","With a great heft, you pull the door up."])
class Lava(Block):
    """Hazard tile: visible through and non-colliding, yet not passable."""
    def __init__(self, biome=-1):
        super().__init__(biome)
        self.color = (255, 20, 0)
        self.sprite = 6
        self.collides = False
        self.transparent = True
    def __str__(self):
        return "::"
    def passable(self):
        # Unlike the default rule, lava blocks movement without colliding.
        return False
    def interact(self, player):
        lines = ["You can feel the heat from here.",
                 "I'd better not fall into that.",
                 "Magma... I must be deep!"]
        return rng.choice(lines)
class Bedrock(Block):
    """Indestructible boundary wall.

    Fix: corrected the typo "corse" -> "coarse" in the flavor text.
    """
    def __init__(self, biome=-1):
        Block.__init__(self, biome)
        self.color = (0,0,0)
        self.collides = True
        self.transparent = False
        self.sprite = 7
    def __str__(self):
        return "BB"
    def interact(self,player):
        return rng.choice(["This rock is coarse and tough.","You bite the rock. Mm, crunchy!","If you look closely, you can see minerals sparkling in the stone wall."])
class Obsidian(Block):
    """Volcanic-glass wall found near drained lava chambers.

    Fixes: corrected typos in the player-facing flavor text
    ("pourus" -> "porous", "This rooms seems" -> "This room seems").
    """
    def __init__(self, biome=-1):
        Block.__init__(self, biome)
        self.color = (80,10,100)
        self.collides = True
        self.transparent = False
        self.sprite = 8
    def __str__(self):
        return "XX"
    def interact(self,player):
        return rng.choice(["The lava rock here is shiny and purple.","The walls are porous and sharp.","This room seems to be a drained lava chamber."])
class Glass(Block):
    """Solid but see-through glass wall."""
    def __init__(self, biome=-1):
        super().__init__(biome)
        self.color = (240, 240, 240)
        self.sprite = 9
        self.collides = True
        self.transparent = True
    def __str__(self):
        return "||"
    def interact(self, player):
        options = ["I wonder how they got glass down here.",
                   "The glass is surprisingly clean.",
                   "You breathe on the glass and draw a smiley face."]
        return rng.choice(options)
class Metal(Block):
    """Seamless metal wall."""
    def __init__(self, biome=-1):
        super().__init__(biome)
        self.color = (140, 140, 140)
        self.sprite = 10
        self.collides = True
        self.transparent = False
    def __str__(self):
        return "//"
    def interact(self, player):
        options = ["The walls here are metal and hollow.",
                   "You knock on the wall, and hear a resounding clang.",
                   "There are no bolts here; the metal is fused together."]
        return rng.choice(options)
class Furniture(Block):
    """Decorative furniture tile (tables, chairs, bookshelves, couches).

    Fixes: corrected typos in the player-facing text
    ("Pomegrante" -> "Pomegranate", "cusions" -> "cushions").
    The deliberately parodic book titles are left as written.
    """
    def __init__(self, biome=-1):
        Block.__init__(self, biome)
        self.color = (200,150,120)
        self.collides = True
        self.transparent = True
        # 14 = table, 15 = chair, 16 = bookshelf, others appear to be couches — TODO confirm mapping
        self.sprite = rng.choice([14,15,16,17,18,46,47,48,49])
    def __str__(self):
        return "TT"
    def interact(self,player):
        if self.sprite == 14:
            return rng.choice(["There is nothing on this table.","You lean on the table, and it wobbles dangerously.","The surface of the table is caked in dust."])
        elif self.sprite == 15:
            return rng.choice(["You sit down, and then stand back up.","This chair has a broken leg.","This must be the time-out chair."])
        elif self.sprite == 16:
            return "You examine a random book: "+rng.choice(["Death of Pi","To Murder a Mockingbird","The Afterlife for Dummies","Basics of Pomegranate Gardening","Twilight","Bury Potter and the Dead Hallows","Dealing with Grief","Pictures of Puppies","It's Okay to be Dead"])
        else:
            return rng.choice(["What a comfy-looking couch.","You would sit, but it's filled with holes.","You reach under the cushions and find a penny."])
class Loot(Block):
    """Treasure chest. Randomly stocked on creation; empties into the player on interact.

    Fixes: replaced `== None` / `== True` comparisons with `is None` and
    plain truthiness (behaviorally identical for the bool/None values used here).
    """
    def __init__(self, value, islocked = False, isopen = False, biome=-1):
        # NOTE(review): `value` is unused — kept for caller compatibility.
        Block.__init__(self, biome)
        self.color = (255,250,0)
        self.collides = True
        self.transparent = True
        self.raised = True
        self.islocked = islocked
        self.isopen = isopen
        if self.isopen:
            self.sprite = 13
            self.contents = None
        else:
            self.sprite = 12
            self.contents = []
            # Random stocking; keep the RNG call order stable for reproducible worlds.
            if rng.random() < 0.5:
                self.contents.append(entities.Item('Frog',"a frog. It isn't moving. Is it dead?")) #contents can be a list of stuff
            if rng.random() < 0.2:
                self.contents.append(entities.MusicSheet(rng.choice([1,3,5]))) # chests can have odd songs
            if rng.random() < 0.95:
                self.contents.append(entities.Potion('heal',1))
    def __str__(self):
        return "[]"
    def interact(self, player):
        if self.isopen:
            return "This chest has been emptied."
        #Have way to unlock
        elif self.islocked:
            return "This chest is locked."
        else:
            self.color = (229,225,50)
            self.sprite = 13
            if self.contents is None:
                return "The chest is empty."
            else:
                for item in self.contents:
                    player.editinventory(item)
                self.isopen = True
                self.contents = None
                return "You loot the chest of its contents."
class Node(object): # used for my A* search
    """Search-tree node for A* on a grid, with a Manhattan-distance heuristic.

    g = path cost from the start, h = heuristic to (xf, yf), f = g + h.
    """
    def __init__(self, x, y, xf, yf, parent, cost):
        self.x = x
        self.y = y
        if parent is None:
            self.parent = None
            self.g = 0
        else:
            self.parent = parent
            self.g = parent.g + cost
        self.h = abs(x - xf) + abs(y - yf)
        self.f = self.g + self.h
    def __lt__(self, other):
        # "a < b" here means: a sits on the same cell as b with no better
        # f-score, i.e. a is redundant and can be pruned.
        return self.x == other.x and self.y == other.y and self.f >= other.f
|
from .searchstims import Searchstims
from .voc import VOCDetection
|
import time
import sys
import torch
import numpy as np
import copy
import pickle
import gzip
import hashlib
import os.path
from tqdm import tqdm
from backpack import backpack, extend
from backpack.extensions import BatchGrad
from env import WindowEnv, WindowEnvBatch
class SumLoss(torch.nn.Module):
    """Loss that simply sums its input.

    Produces the scalar needed for .backward(); used with BackPACK so that
    per-sample gradients of a sum of values can be extracted.
    """
    def __init__(self):
        super().__init__()
    def forward(self, input):
        # Total over all elements of the input tensor.
        return input.sum()
# Wrap the summing loss with BackPACK so per-sample gradients can be extracted.
sumloss = extend(SumLoss())
# Device selection: pass --cpu on the command line to force CPU, otherwise CUDA is assumed.
if '--cpu' in sys.argv:
    device = torch.device('cpu')
else:
    device = torch.device('cuda')
# Locations of the CIFAR-10 python batches and of the experiment outputs.
data_root = 'cifar-10-batches-py/'
save_root = 'results/'
if __name__ == '__main__':
    # Load the five CIFAR-10 python-pickle training batches.
    xs, ys = [], []
    for i in range(5):
        with open(f'{data_root}/data_batch_{i+1}', 'rb') as fo:
            d = pickle.load(fo, encoding='bytes')
        xs.append(d[b'data'])
        ys.append(d[b'labels'])
    # Images as (N, 3, 32, 32) floats scaled into [0, 1].
    train_x = np.concatenate(xs, 0).reshape((-1, 3, 32, 32)) / 255
    train_y = np.concatenate(ys, 0)
    with open(f'{data_root}/test_batch', 'rb') as fo:
        d = pickle.load(fo, encoding='bytes')
    test_x = d[b'data'].reshape((-1, 3, 32, 32)) / 255
    test_y = d[b'labels']
    # Move everything to the chosen device once, up front.
    _train_x = torch.tensor(train_x).float().to(device)
    _train_y = torch.tensor(train_y).long().to(device)
    test_x = torch.tensor(test_x).float().to(device)
    test_y = torch.tensor(test_y).long().to(device)
def run_exp(meta_seed, nhid, nlayers, n_train_seeds):
    """Train a window-moving Q-learning agent on CIFAR-10 and collect gradient diagnostics.

    Args:
        meta_seed: seed for torch and numpy RNGs.
        nhid: base channel width of the conv network.
        nlayers: number of extra padded conv layers in the trunk.
        n_train_seeds: number of training images/episodes sampled from.

    Returns:
        dict with per-evaluation gradient dot-product diagnostics
        ('all_dots', 'all_jdots') and performance curves
        ('train_perf', 'test_perf').
    """
    torch.manual_seed(meta_seed)
    np.random.seed(meta_seed)
    gamma = 0.9  # discount factor for the TD targets
    def init_weights(m):
        # Glorot-style uniform init for Linear and Conv2d layers.
        if isinstance(m, torch.nn.Linear):
            k = np.sqrt(6 / (np.sum(m.weight.shape)))
            m.weight.data.uniform_(-k, k)
            m.bias.data.fill_(0)
        if isinstance(m, torch.nn.Conv2d):
            u,v,w,h = m.weight.shape
            k = np.sqrt(6 / (w*h*u + w*h*v))
            m.weight.data.uniform_(-k, k)
            m.bias.data.fill_(0)
    env = WindowEnv(_train_x, _train_y)
    test_env = WindowEnvBatch()
    env.step_reward = 0.05
    test_env.step_reward = 0.05
    ##nhid = 32
    act = torch.nn.LeakyReLU()
    #act = torch.nn.Tanh()
    # Conv trunk over a 4-channel 32x32 observation; the head outputs 14
    # Q-values: 10 for the class action and 4 for the window-move action.
    model = torch.nn.Sequential(*([torch.nn.Conv2d(4, nhid, 5, stride=2), act,
                                   torch.nn.Conv2d(nhid, nhid*2, 3), act,
                                   torch.nn.Conv2d(nhid*2, nhid*4, 3), act] +
                                  sum([[torch.nn.Conv2d(nhid*4, nhid*4, 3, padding=1), act]
                                       for i in range(nlayers)], []) +
                                  [torch.nn.Flatten(),
                                   torch.nn.Linear(nhid*4*10*10, nhid*4), act,
                                   torch.nn.Linear(nhid*4, 14)]))
    model.to(device)
    model.apply(init_weights)
    target = copy.deepcopy(model)  # frozen target network, Polyak-updated below
    if 1:  # NOTE(review): always-true toggle, presumably to switch BackPACK on/off
        model = extend(model)
    opt = torch.optim.Adam(model.parameters(), 2.5e-4)#, weight_decay=1e-5)
    # Small held-out replay buffer filled during test rollouts, used only for
    # the gradient-alignment diagnostics.
    n_rp_test = 128
    rpt_s = torch.zeros((n_rp_test, 4, 32, 32), device=device)
    rpt_a = torch.zeros((n_rp_test, 2), device=device, dtype=torch.long)
    rpt_r = torch.zeros((n_rp_test,), device=device)
    rpt_z = torch.zeros((n_rp_test, 4, 32, 32), device=device)
    rpt_t = torch.zeros((n_rp_test,), device=device)
    rpt_idx = [0]  # one-element list so the nested closure can mutate it
    def run_test(X, Y, dataacc=False):
        # Greedy rollout on a batch; optionally accumulate sampled transitions
        # into the rpt_* buffers for the diagnostics.
        obs = test_env.reset(X, Y)
        if dataacc: rpt_idx[0] = 0
        for i in range(test_env.max_steps):
            Qs = model(obs)
            actions = [Qs[:, :10].argmax(1).data.cpu().numpy(),
                       Qs[:, 10:].argmax(1).data.cpu().numpy()]
            obsp, r, done, _ = test_env.step(actions)
            # NOTE(review): this inner loop shadows the step counter `i`;
            # it appears intended to sample 2 random transitions per step.
            for i in range(2):
                if dataacc and rpt_idx[0] < n_rp_test:
                    u = np.random.randint(0, len(obs))
                    rpt_s[rpt_idx[0]] = obs[u]
                    rpt_a[rpt_idx[0]] = torch.tensor([actions[0][u], actions[1][u]])
                    rpt_r[rpt_idx[0]] = r[u]
                    rpt_z[rpt_idx[0]] = obsp[u]
                    rpt_t[rpt_idx[0]] = 1 - done[u]
                    rpt_idx[0] += 1
            obs = obsp
            if done.all():
                break
        return test_env.correct_answers / len(X), test_env.acc_reward
    train_perf = []
    test_perf = []
    all_dots = []
    all_jdots = []
    # NOTE(review): all_dots_test, tds, qs and xent are never used below.
    all_dots_test = []
    tds = []
    qs = []
    xent = torch.nn.CrossEntropyLoss()
    tau = 0.1  # Polyak averaging coefficient for the target network
    n_rp = 100000  # main replay buffer capacity
    rp_s = torch.zeros((n_rp, 4, 32, 32), device=device)
    rp_a = torch.zeros((n_rp, 2), device=device, dtype=torch.long)
    rp_r = torch.zeros((n_rp,), device=device)
    rp_z = torch.zeros((n_rp, 4, 32, 32), device=device)
    rp_t = torch.zeros((n_rp,), device=device)
    rp_idx = 0
    rp_fill = 0
    obs = env.reset(np.random.randint(0, n_train_seeds))
    ntest = 128
    epsilon = 0.9
    ep_reward = 0
    ep_rewards = []
    for i in range(200001):#tqdm(range(200001)):
        # Linear epsilon decay from 0.95 to 0.05 over the first 100k steps.
        epsilon = 0.9 * (1 - min(i, 100000) / 100000) + 0.05
        if not i % 10000:
            # Periodic evaluation + gradient-alignment diagnostics.
            t0 = time.time()
            with torch.no_grad():
                train_perf.append(run_test(_train_x[:min(ntest, n_train_seeds)], _train_y[:min(ntest, n_train_seeds)]))
                test_perf.append(run_test(test_x[:ntest], test_y[:ntest], dataacc=True))
            print(train_perf[-2:], test_perf[-2:], np.mean(ep_rewards[-50:]), len(ep_rewards), file=sys.stderr)
            sys.stderr.flush()
            if i:  # skip diagnostics before any training data exists
                t1 = time.time()
                # --- per-sample gradients of max-Q (value-function landscape) ---
                mbidx = np.random.randint(0, min(len(rp_s), rp_fill), 128)
                s = rp_s[mbidx]
                loss = sumloss(model(s).max(1).values)
                with backpack(BatchGrad()):
                    loss.backward()
                train_grads = torch.cat([i.grad_batch.reshape((s.shape[0], -1)) for i in model.parameters()], 1)
                opt.zero_grad()
                s = rpt_s[:rpt_idx[0]]
                loss = sumloss(model(s).max(1).values)
                with backpack(BatchGrad()):
                    loss.backward()
                test_grads = torch.cat([i.grad_batch.reshape((s.shape[0], -1)) for i in model.parameters()], 1)
                opt.zero_grad()
                # Pairwise dot products: train-train, train-test, test-test.
                # NOTE(review): these loops clobber the outer step counter `i`
                # (affects the `if i > 5000` / epsilon-action code below on
                # diagnostic iterations), and trte entries lack `.item()`.
                trtr = []
                trte = []
                tete = []
                for i in range(128):
                    for j in range(i+1, 128):
                        trtr.append(train_grads[i].dot(train_grads[j]).item())
                    for j in range(rpt_idx[0]):
                        trte.append(train_grads[i].dot(test_grads[j]))
                for i in range(rpt_idx[0]):
                    for j in range(i+1, rpt_idx[0]):
                        tete.append(test_grads[i].dot(test_grads[j]).item())
                all_dots.append(list(map(np.float32, [trtr, trte, tete])))
                # --- same diagnostics, but for gradients of the TD loss ---
                s = rp_s[mbidx]
                a = rp_a[mbidx]
                r = rp_r[mbidx]
                z = rp_z[mbidx]
                t = rp_t[mbidx]
                with torch.no_grad():
                    Qp = target(z)
                    vp1 = Qp[:, :10].max(1).values
                    vp2 = Qp[:, 10:].max(1).values
                Q = model(s)
                v1 = Q[np.arange(len(a)), a[:, 0]]
                v2 = Q[np.arange(len(a)), a[:, 1] + 10]
                td1 = v1 - (r + gamma * vp1 * t)
                td2 = v2 - (r + gamma * vp2 * t)
                # Huber-like clipped TD loss, averaged over the 128-minibatch.
                loss = torch.min(td1**2, abs(td1)) / 128
                loss += torch.min(td2**2, abs(td2)) / 128
                loss = sumloss(loss)
                with backpack(BatchGrad()):
                    loss.backward()
                train_grads = torch.cat([i.grad_batch.reshape((s.shape[0], -1)) for i in model.parameters()], 1)
                opt.zero_grad()
                s = rpt_s[:rpt_idx[0]]
                a = rpt_a[:rpt_idx[0]]
                r = rpt_r[:rpt_idx[0]]
                z = rpt_z[:rpt_idx[0]]
                t = rpt_t[:rpt_idx[0]]
                with torch.no_grad():
                    Qp = target(z)
                    vp1 = Qp[:, :10].max(1).values
                    vp2 = Qp[:, 10:].max(1).values
                Q = model(s)
                v1 = Q[np.arange(len(a)), a[:, 0]]
                v2 = Q[np.arange(len(a)), a[:, 1] + 10]
                td1 = v1 - (r + gamma * vp1 * t)
                td2 = v2 - (r + gamma * vp2 * t)
                loss = torch.min(td1**2, abs(td1)) / 128
                loss += torch.min(td2**2, abs(td2)) / 128
                loss = sumloss(loss)
                with backpack(BatchGrad()):
                    loss.backward()
                test_grads = torch.cat([i.grad_batch.reshape((s.shape[0], -1)) for i in model.parameters()], 1)
                opt.zero_grad()
                trtr = []
                trte = []
                tete = []
                for i in range(128):
                    for j in range(i+1, 128):
                        trtr.append(train_grads[i].dot(train_grads[j]).item())
                    for j in range(rpt_idx[0]):
                        trte.append(train_grads[i].dot(test_grads[j]))
                for i in range(rpt_idx[0]):
                    for j in range(i+1, rpt_idx[0]):
                        tete.append(test_grads[i].dot(test_grads[j]).item())
                all_jdots.append(list(map(np.float32, [trtr, trte, tete])))
        # --- epsilon-greedy action selection and environment step ---
        if np.random.uniform(0,1) < epsilon:
            action = [np.random.randint(0, 10),
                      np.random.randint(0, 4)]
        else:
            Qs = model(obs[None, :])[0]
            action = [Qs[:10].argmax().item(), Qs[10:].argmax().item()]
        #if np.random.uniform(0,1) < 0.4:
        #    action = env.current_y
        obsp, r, done, _ = env.step(action)
        # Store the transition in the circular replay buffer.
        rp_s[rp_idx] = obs
        rp_a[rp_idx] = torch.tensor(action)
        rp_r[rp_idx] = r
        rp_z[rp_idx] = obsp
        rp_t[rp_idx] = 1 - done
        rp_idx = (rp_idx + 1) % rp_s.shape[0]
        rp_fill += 1
        ep_reward += r
        obs = obsp
        if done:
            ep_rewards.append(ep_reward)
            ep_reward = 0
            obs = env.reset(np.random.randint(0, n_train_seeds))
        # One TD update every second step, after a 5000-step warmup.
        if i > 5000 and not i % 2:
            mbidx = np.random.randint(0, min(len(rp_s), rp_fill), 128)
            s = rp_s[mbidx]
            a = rp_a[mbidx]
            r = rp_r[mbidx]
            z = rp_z[mbidx]
            t = rp_t[mbidx]
            with torch.no_grad():
                Qp = target(z)
                vp1 = Qp[:, :10].max(1).values
                vp2 = Qp[:, 10:].max(1).values
            Q = model(s)
            v1 = Q[np.arange(len(a)), a[:, 0]]
            v2 = Q[np.arange(len(a)), a[:, 1] + 10]
            td1 = v1 - (r + gamma * vp1 * t)
            td2 = v2 - (r + gamma * vp2 * t)
            loss = torch.min(td1**2, abs(td1)).mean()
            loss += torch.min(td2**2, abs(td2)).mean()
            loss.backward()
            opt.step()
            opt.zero_grad()
            # Polyak-average the online weights into the target network.
            for target_param, param in zip(target.parameters(), model.parameters()):
                target_param.data.copy_(tau * param + (1 - tau) * target_param)
    return {'all_dots': all_dots,
            'all_jdots': all_jdots,
            'train_perf': train_perf,
            'test_perf': test_perf,
            }
def main():
    """Enumerate all experiment configurations, skip finished ones, run and save the rest.

    Each config is hashed to a stable filename; an empty placeholder file is
    created first so that concurrently running workers skip configs already
    claimed by another process.

    Fix: the placeholder was written via `open(path, 'w').write('touch')`,
    which leaks the file handle; it is now closed via a context manager.
    """
    cfgs = []
    for nhid in [8, 16, 32]:#
        for nlayers in [1,0,2,3]:
            for n_train_seeds in [20, 100, 500, 1000, 5000, 10000, 50000]:#[4,8,16,32,64,128]:
                for meta_seed in [1]:
                    cfg = {'nhid': nhid,
                           'nlayers': nlayers,
                           'n_train_seeds': n_train_seeds,
                           'meta_seed': meta_seed,
                           'what':'cifar-agent-4'}
                    cfgs.append(cfg)
    # Shuffle so parallel workers pick up different configs first.
    idx = np.arange(len(cfgs))
    np.random.shuffle(idx)
    for i in idx:
        cfg = cfgs[i]
        # Stable hash of the (sorted) config dict names the result file.
        h = hashlib.sha1(bytes(str(sorted(cfg.items())), 'utf8')).hexdigest()
        path = f'{save_root}/{h}.pkl.gz'
        if os.path.exists(path):
            continue
        print(cfg, file=sys.stderr)
        sys.stderr.flush()
        # Claim this config immediately (placeholder file), closing the handle.
        with open(path, 'w') as f:
            f.write('touch')
        results = run_exp(cfg['meta_seed'], cfg['nhid'], cfg['nlayers'], cfg['n_train_seeds'])
        with gzip.open(path, 'wb') as f:
            pickle.dump((cfg, results), f)
if __name__ == '__main__':
    # Script entry point: sweep every configuration.
    main()
|
import os
import shutil
from typing import Any, Callable, Optional, Tuple
import numpy as np
from PIL import Image
from .utils import download_and_extract_archive, download_url, verify_str_arg
from .vision import VisionDataset
class SBDataset(VisionDataset):
    """`Semantic Boundaries Dataset <http://home.bharathh.info/pubs/codes/SBD/download.html>`_
    The SBD currently contains annotations from 11355 images taken from the PASCAL VOC 2011 dataset.
    .. note ::
        Please note that the train and val splits included with this dataset are different from
        the splits in the PASCAL VOC dataset. In particular some "train" images might be part of
        VOC2012 val.
        If you are interested in testing on VOC 2012 val, then use `image_set='train_noval'`,
        which excludes all val images.
    .. warning::
        This class needs `scipy <https://docs.scipy.org/doc/>`_ to load target files from `.mat` format.
    Args:
        root (string): Root directory of the Semantic Boundaries Dataset
        image_set (string, optional): Select the image_set to use, ``train``, ``val`` or ``train_noval``.
            Image set ``train_noval`` excludes VOC 2012 val images.
        mode (string, optional): Select target type. Possible values 'boundaries' or 'segmentation'.
            In case of 'boundaries', the target is an array of shape `[num_classes, H, W]`,
            where `num_classes=20`.
        download (bool, optional): If true, downloads the dataset from the internet and
            puts it in root directory. If dataset is already downloaded, it is not
            downloaded again.
        transforms (callable, optional): A function/transform that takes input sample and its target as entry
            and returns a transformed version. Input sample is PIL image and target is a numpy array
            if `mode='boundaries'` or PIL image if `mode='segmentation'`.
    """
    # Archive location and checksums used when download=True.
    url = "https://www2.eecs.berkeley.edu/Research/Projects/CS/vision/grouping/semantic_contours/benchmark.tgz"
    md5 = "82b4d87ceb2ed10f6038a1cba92111cb"
    filename = "benchmark.tgz"
    # Extra split file listing the SBD train images that are NOT in VOC 2012 val.
    voc_train_url = "http://home.bharathh.info/pubs/codes/SBD/train_noval.txt"
    voc_split_filename = "train_noval.txt"
    voc_split_md5 = "79bff800c5f0b1ec6b21080a3c066722"
    def __init__(
        self,
        root: str,
        image_set: str = "train",
        mode: str = "boundaries",
        download: bool = False,
        transforms: Optional[Callable] = None,
    ) -> None:
        # scipy is only needed to read the .mat annotation files.
        try:
            from scipy.io import loadmat
            self._loadmat = loadmat
        except ImportError:
            raise RuntimeError("Scipy is not found. This dataset needs to have scipy installed: pip install scipy")
        super().__init__(root, transforms)
        self.image_set = verify_str_arg(image_set, "image_set", ("train", "val", "train_noval"))
        self.mode = verify_str_arg(mode, "mode", ("segmentation", "boundaries"))
        self.num_classes = 20
        sbd_root = self.root
        image_dir = os.path.join(sbd_root, "img")
        mask_dir = os.path.join(sbd_root, "cls")
        if download:
            # Fetch + extract the archive, then flatten its layout into sbd_root.
            download_and_extract_archive(self.url, self.root, filename=self.filename, md5=self.md5)
            extracted_ds_root = os.path.join(self.root, "benchmark_RELEASE", "dataset")
            for f in ["cls", "img", "inst", "train.txt", "val.txt"]:
                old_path = os.path.join(extracted_ds_root, f)
                shutil.move(old_path, sbd_root)
            download_url(self.voc_train_url, sbd_root, self.voc_split_filename, self.voc_split_md5)
        if not os.path.isdir(sbd_root):
            raise RuntimeError("Dataset not found or corrupted. You can use download=True to download it")
        # Read the list of sample names for the selected split.
        split_f = os.path.join(sbd_root, image_set.rstrip("\n") + ".txt")
        with open(os.path.join(split_f)) as fh:
            file_names = [x.strip() for x in fh.readlines()]
        self.images = [os.path.join(image_dir, x + ".jpg") for x in file_names]
        self.masks = [os.path.join(mask_dir, x + ".mat") for x in file_names]
        # Bind the target loader once, based on the requested mode.
        self._get_target = self._get_segmentation_target if self.mode == "segmentation" else self._get_boundaries_target
    def _get_segmentation_target(self, filepath: str) -> Image.Image:
        # The .mat file stores the class segmentation as a 2D label image.
        mat = self._loadmat(filepath)
        return Image.fromarray(mat["GTcls"][0]["Segmentation"][0])
    def _get_boundaries_target(self, filepath: str) -> np.ndarray:
        # Stack the per-class sparse boundary maps into a dense [num_classes, H, W] array.
        mat = self._loadmat(filepath)
        return np.concatenate(
            [np.expand_dims(mat["GTcls"][0]["Boundaries"][0][i][0].toarray(), axis=0) for i in range(self.num_classes)],
            axis=0,
        )
    def __getitem__(self, index: int) -> Tuple[Any, Any]:
        img = Image.open(self.images[index]).convert("RGB")
        target = self._get_target(self.masks[index])
        if self.transforms is not None:
            img, target = self.transforms(img, target)
        return img, target
    def __len__(self) -> int:
        return len(self.images)
    def extra_repr(self) -> str:
        lines = ["Image set: {image_set}", "Mode: {mode}"]
        return "\n".join(lines).format(**self.__dict__)
|
# Count character frequencies in a word, two ways, then demonstrate dict.get.

# 1) Explicit membership test before updating the count.
word = 'brontosaurus'
d = dict()
for c in word:
    if c in d:
        d[c] = d[c] + 1
    else:
        d[c] = 1
print(d)
# 2) dict.get with a default of 0 collapses the branch into one line.
word1 = 'brontosaurus'
d1 = dict()
for c1 in word1:
    d1[c1] = d1.get(c1, 0) + 1
print(d1)
# get() with a default also guards lookups on a plain dict.
counts = {'chuck': 1, 'annie': 42, 'jan': 100}
print(counts.get('chuck', 0))
print(counts.get('jan', 0))
|
from PyQt5.QtWidgets import QSizePolicy
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.figure import Figure
class PlotCanvas(FigureCanvas):
    """Matplotlib canvas embedded in a Qt widget, with small plotting helpers."""
    def __init__(self, parent=None, width=8.8, height=10.5, dpi=100, title=' '):
        self.fig = Figure(figsize=(width, height), dpi=dpi)
        FigureCanvas.__init__(self, self.fig)
        # Match the default Qt window background; the figure itself stays transparent.
        self.setStyleSheet("background-color:rgb(240,240,240);")
        self.fig.set_facecolor("none")
        self.axes = self.fig.add_subplot(111)
        self.setParent(parent)
        self.title = title
        self.set_up_plot()
        # Let the canvas grow with its parent layout.
        FigureCanvas.setSizePolicy(self, QSizePolicy.Expanding, QSizePolicy.Expanding)
        FigureCanvas.updateGeometry(self)
    def set_up_plot(self):
        """Reset the axes: clear, enable the grid, linear scales, restore the title."""
        self.axes.cla()
        self.axes.grid(True, which='both', axis='both')
        self.axes.set_xscale('linear')
        self.axes.set_yscale('linear')
        self.axes.set_title(self.title)
    def plot(self, x, y, do_cla=True, **kwargs):
        """Plot x against y; when do_cla is True, previous contents are erased first."""
        if do_cla:
            self.axes.cla() # Erase everything plotted previously
        self.axes.grid(True, which='both', axis='both')
        self.axes.plot(x, y, **kwargs)
        if 'label' in kwargs:
            self.fig.legend()
        self.fig.canvas.draw_idle()
    def add_point(self, x, y, **kwargs):
        """Add points on top of the existing axes without clearing."""
        self.axes.plot(x, y, **kwargs)
        self.fig.canvas.draw_idle()
    def clear(self):
        self.axes.cla()
        #self.axes.clear()
    def set_xlim(self, left, right):
        self.axes.set_xlim(left, right)
    def setFrameShape(self, styled_panel):
        pass # Called by the auto-generated Qt Designer code, but we do not need it
    def setFrameShadow(self, raised):
        pass # Called by the auto-generated Qt Designer code, but we do not need it
    def setTitle(self, title):
        self.axes.set_title(title, fontsize=19)
|
# Generated by Django 3.0.3 on 2020-03-12 20:00
from django.db import migrations
class Migration(migrations.Migration):
    """Drops the `mail` field from the `membre` model."""
    dependencies = [
        ('main_app', '0011_auto_20200312_2056'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='membre',
            name='mail',
        ),
    ]
|
#!/usr/bin/python39
from environment import terrain_gen, error, loop
from environment import __dumb_controller, debug_draw_terrain
import numpy as np
import matplotlib.pyplot as plt
print('Draw road')
# Generate a random terrain profile and preview it.
terrain = terrain_gen()
# NOTE(review): this rebinds the module name `plt` (matplotlib.pyplot) to
# whatever debug_draw_terrain returns — presumably the same pyplot module; confirm.
plt = debug_draw_terrain(terrain)
plt.xlabel('Example road segment')
plt.legend()
plt.show()
# Run the simulation loop with the built-in naive controller.
result = loop(terrain,__dumb_controller)
if result['failed']:
    print('Car stuck, retry with better controller or smoother road')
    exit(0)
print(f'Error : {error(result["v"])}')
print('Testing dumb controller')
print('Draw velocity and terrain over time')
# Overlay velocity (red) and terrain factor (blue) per simulation step.
plt.plot(range(result['nstep']),result['v'],'r',label='velocity')
plt.plot(range(result['nstep']),result['c'],'b',label='terrain factor')
plt.ylabel('Value (Corresponding unit)')
plt.xlabel('Time')
plt.legend()
plt.show()
|
from bsm.loader import load_relative
from bsm.loader import LoadError
from bsm.handler import HandlerNotAvailableError
from bsm.logger import get_logger
_logger = get_logger()
def run(param):
    """Dispatch to the sub-command module named by param['command'].

    Raises HandlerNotAvailableError when the command is missing, cannot be
    loaded, or does not expose a callable `run`.
    """
    cmd = param['command']
    if not cmd:
        _logger.error('Command is empty')
        raise HandlerNotAvailableError
    subcommand = cmd[0]
    try:
        handler = load_relative(__name__, 'command.' + subcommand, 'run')
    except LoadError:
        _logger.error('Could not find command: {0}'.format(subcommand))
        raise HandlerNotAvailableError
    if not callable(handler):
        _logger.error('Command could not run')
        raise HandlerNotAvailableError
    return handler(param)
|
# Bài 04: Viết hàm
# def get_file_size(file)
# để lấy và trả về dung lượng của file
def get_file_size(file):
    """Print and return the size of *file* in bytes.

    Fixes: the original read the whole file in utf-8 text mode and printed
    ``tell()`` (an opaque cookie in text mode) without returning anything,
    despite the header comment promising a return value. ``os.path.getsize``
    reports the true byte size without reading or decoding the file.
    """
    import os  # local import keeps this snippet self-contained
    size = os.path.getsize(file)
    print(size)
    return size
get_file_size('text/text.txt')
|
def overlap(a, b):
    """Print and return the elements of *a* that also appear in *b*.

    Duplicates are suppressed; order follows first appearance in *a*.

    Fixes: the local accumulator shadowed the function's own name, and the
    result was only printed, never returned. List-based membership is kept
    so unhashable elements still work (a `set` would be faster but stricter).
    """
    result = []
    for item in a:
        if item not in result and item in b:
            result.append(item)
    print(result)
    return result
|
"""
This example demonstrates how to retrieve information for a channel.
"""
from pyyoutube import Client
API_KEY = "Your key" # replace this with your api key.
def get_channel_info():
    """Fetch and print id/snippet/statistics for one channel via the Data API."""
    client = Client(api_key=API_KEY)
    # Google Developers channel id.
    resp = client.channels.list(
        parts=["id", "snippet", "statistics"],
        channel_id="UC_x5XG1OV2P6uZZ5FSM9Ttw",
        return_json=True,
    )
    print(f"Channel info: {resp['items'][0]}")
if __name__ == "__main__":
get_channel_info()
|
from google.cloud import translate
from core.env import Environment
class TranslationService:
    """Thin wrapper around the Google Cloud Translation v3 API."""
    def __init__(self, env: Environment) -> None:
        self._env = env
    def translate(self, text: str, target_lang: str):
        """Translate *text* into *target_lang* and return the translated string."""
        client = translate.TranslationServiceClient()
        project_path = f"projects/{self._env.google_project_id}"
        result = client.translate_text(
            contents=[text],
            target_language_code=target_lang,
            parent=project_path,
        )
        return result.translations[0].translated_text
|
# -*- coding: utf-8 -*-
# All models are imported here in order to be accessed through the root package
from app.models.users import User
from app.models.tokens import UserToken, PasswordToken
from app.models.comments import TournamentComment, PollComment
from app.models.results import Result
from app.models.tournaments import Tournament
from app.models.seasons import Season
from app.models.sessions import Session
from app.models.news import News
from app.models.polls import PollChoice, PollVote, PollUserChoice, Poll
|
import io
import pathlib
from collections import namedtuple
from typing import Any, Dict, Iterator, List, Optional, Tuple, Union
from torchdata.datapipes.iter import IterDataPipe, Mapper, Zipper
from torchvision.prototype.datasets.utils import Dataset, GDriveResource, OnlineResource
from torchvision.prototype.datasets.utils._internal import hint_sharding, hint_shuffling
from torchvision.prototype.tv_tensors import Label
from torchvision.tv_tensors import Image
from .._api import register_dataset, register_info
NAME = "pcam"
class PCAMH5Reader(IterDataPipe[Tuple[str, io.IOBase]]):
    """Datapipe that yields individual records from PCAM HDF5 file handles."""
    def __init__(
        self,
        datapipe: IterDataPipe[Tuple[str, io.IOBase]],
        key: Optional[str] = None,  # Note: this key thing might be very specific to the PCAM dataset
    ) -> None:
        self.datapipe = datapipe
        self.key = key
    def __iter__(self) -> Iterator[Tuple[str, io.IOBase]]:
        import h5py
        for _, handle in self.datapipe:
            try:
                with h5py.File(handle) as data:
                    if self.key is not None:
                        data = data[self.key]
                    # Yield the records one by one from the (optionally keyed) dataset.
                    yield from data
            finally:
                # Close the underlying file object even if iteration is aborted.
                handle.close()
_Resource = namedtuple("_Resource", ("file_name", "gdrive_id", "sha256"))
@register_info(NAME)
def _info() -> Dict[str, Any]:
    """Static dataset metadata: PCAM is a binary (0/1) classification task."""
    return {"categories": ["0", "1"]}
@register_dataset(NAME)
class PCAM(Dataset):
    # TODO write proper docstring
    """PCAM Dataset
    homepage="https://github.com/basveeling/pcam"
    """
    def __init__(
        self, root: Union[str, pathlib.Path], split: str = "train", *, skip_integrity_check: bool = False
    ) -> None:
        self._split = self._verify_str_arg(split, "split", {"train", "val", "test"})
        self._categories = _info()["categories"]
        # h5py is required lazily when the HDF5 files are actually read.
        super().__init__(root, skip_integrity_check=skip_integrity_check, dependencies=("h5py",))
    # Per-split (images, targets) archives hosted on Google Drive, with checksums.
    _RESOURCES = {
        "train": (
            _Resource(  # Images
                file_name="camelyonpatch_level_2_split_train_x.h5.gz",
                gdrive_id="1Ka0XfEMiwgCYPdTI-vv6eUElOBnKFKQ2",
                sha256="d619e741468a7ab35c7e4a75e6821b7e7e6c9411705d45708f2a0efc8960656c",
            ),
            _Resource(  # Targets
                file_name="camelyonpatch_level_2_split_train_y.h5.gz",
                gdrive_id="1269yhu3pZDP8UYFQs-NYs3FPwuK-nGSG",
                sha256="b74126d2c01b20d3661f9b46765d29cf4e4fba6faba29c8e0d09d406331ab75a",
            ),
        ),
        "test": (
            _Resource(  # Images
                file_name="camelyonpatch_level_2_split_test_x.h5.gz",
                gdrive_id="1qV65ZqZvWzuIVthK8eVDhIwrbnsJdbg_",
                sha256="79174c2201ad521602a5888be8f36ee10875f37403dd3f2086caf2182ef87245",
            ),
            _Resource(  # Targets
                file_name="camelyonpatch_level_2_split_test_y.h5.gz",
                gdrive_id="17BHrSrwWKjYsOgTMmoqrIjDy6Fa2o_gP",
                sha256="0a522005fccc8bbd04c5a117bfaf81d8da2676f03a29d7499f71d0a0bd6068ef",
            ),
        ),
        "val": (
            _Resource(  # Images
                file_name="camelyonpatch_level_2_split_valid_x.h5.gz",
                gdrive_id="1hgshYGWK8V-eGRy8LToWJJgDU_rXWVJ3",
                sha256="f82ee1670d027b4ec388048d9eabc2186b77c009655dae76d624c0ecb053ccb2",
            ),
            _Resource(  # Targets
                file_name="camelyonpatch_level_2_split_valid_y.h5.gz",
                gdrive_id="1bH8ZRbhSVAhScTS0p9-ZzGnX91cHT3uO",
                sha256="ce1ae30f08feb468447971cfd0472e7becd0ad96d877c64120c72571439ae48c",
            ),
        ),
    }
    def _resources(self) -> List[OnlineResource]:
        return [  # = [images resource, targets resource]
            GDriveResource(file_name=file_name, id=gdrive_id, sha256=sha256, preprocess="decompress")
            for file_name, gdrive_id, sha256 in self._RESOURCES[self._split]
        ]
    def _prepare_sample(self, data: Tuple[Any, Any]) -> Dict[str, Any]:
        image, target = data  # They're both numpy arrays at this point
        # HWC -> CHW for the image; the target is a single scalar label.
        return {
            "image": Image(image.transpose(2, 0, 1)),
            "label": Label(target.item(), categories=self._categories),
        }
    def _datapipe(self, resource_dps: List[IterDataPipe]) -> IterDataPipe[Dict[str, Any]]:
        images_dp, targets_dp = resource_dps
        images_dp = PCAMH5Reader(images_dp, key="x")
        targets_dp = PCAMH5Reader(targets_dp, key="y")
        # Pair each image with its label, then apply the standard hints.
        dp = Zipper(images_dp, targets_dp)
        dp = hint_shuffling(dp)
        dp = hint_sharding(dp)
        return Mapper(dp, self._prepare_sample)
    def __len__(self) -> int:
        # Fixed, documented split sizes (2^18 train, 2^15 val/test).
        return 262_144 if self._split == "train" else 32_768
|
# Demonstrates a basic if-statement with an indented two-line body.
is_greater = 5 > 2
if is_greater:
    print("5 is greater than 2")
    print("5 is greater")
|
import os
import json
import time
import sys
# CloudSearch endpoints for the dev and prod documentation search domains.
# NOTE: this is a Python 2 script (print statements without parentheses).
search_domain_dev = 'http://search-mparticle-docs-dev-bjdn4zkr3qejlv27yt7ydobuqe.us-east-1.cloudsearch.amazonaws.com'
search_domain_prod = 'http://search-mparticle-docs-prod-6ozkfhxijk6v43sgv6wjl4kapq.us-east-1.cloudsearch.amazonaws.com'
# First CLI argument selects the environment ('prod' or anything else for dev).
search_domain = search_domain_prod if sys.argv[1] == 'prod' else search_domain_dev
current_docs_filename = 'current_documents.json'
new_docs_filename = 'latest_search_items.json'
update_command_filename = 'update_documents.json'
# Shelling out to the AWS CLI: fetch ids of all currently indexed documents,
# then later upload a combined delete+add batch.
search_command = 'aws cloudsearchdomain search --search-query "matchall" --region "us-east-1" --endpoint-url "'+search_domain+'" --query-parser "structured" --size 5000 --return "_no_fields" > ' + current_docs_filename
upload_command = 'aws cloudsearchdomain upload-documents --debug --region "us-east-1" --endpoint-url "'+search_domain+'" --content-type application/json --documents ' + update_command_filename
print 'Connecting to AWS'
os.system(search_command)
with open(current_docs_filename) as data_file:
    current_search_docs = json.load(data_file)
with open(new_docs_filename) as data_file:
    new_search_docs = json.load(data_file)
# Build one batch: delete every existing document, then add the new ones.
update_operation = []
hits = current_search_docs['hits']['hit']
print 'Found ' + str(len(hits)) + ' old documents'
for hit in hits:
    delete_op = {"type":"delete","id":hit["id"]}
    update_operation.append(delete_op)
for item in new_search_docs:
    update_operation.append(item)
with open(update_command_filename, 'w') as outfile:
    json.dump(update_operation, outfile, indent=4)
print 'Deleting ' + str(len(hits)) + ' old documents and uploading '+ str(len(new_search_docs)) +' new documents'
os.system(upload_command)
print 'Done uploading'
|
import logging
from .device_management import application
from .performance_analysis import pa
from .fault_management import fm
from .probe_monitoring import pm
from flask import Flask
from config import pms_app
# from run_pms import pms_app
from logging.config import dictConfig
# Route all log output through one stderr-style handler on the WSGI error stream.
dictConfig({
    'version': 1,
    'formatters': {'default': {
        'format': '[%(asctime)s] %(levelname)s in %(module)s: %(message)s',
    }},
    'handlers': {'wsgi': {
        'class': 'logging.StreamHandler',
        'stream': 'ext://flask.logging.wsgi_errors_stream',
        'formatter': 'default'
    }},
    'root': {
        'level': 'INFO',
        'handlers': ['wsgi']
    }
})
# Register every service blueprint on the shared application object.
with pms_app.app_context():
    pms_app.register_blueprint(pa)
    logging.info("Performance Analysis started")
    pms_app.register_blueprint(application)
    logging.info("Device Management started")
    pms_app.register_blueprint(fm)
    logging.info("Fault Management started")
    pms_app.register_blueprint(pm)
    logging.info("Probe Monitoring started")
|
"""
Adapted from: https://realpython.com/async-io-python/
"""
import re
import time
from urllib.parse import urljoin
from urllib.request import urlopen
import aiofiles
import aiohttp
from aiohttp import ClientSession
HREF_RE = re.compile(r'href="(.*?)"')
def fetch_html(url):
    """GET request wrapper to fetch page HTML as text.

    Fixes: the original returned ``str(response.read())``, i.e. the *repr* of
    the bytes ("b'...'" with literal backslash escapes) — now the body is
    properly decoded; the connection is also closed via a context manager.
    """
    with urlopen(url) as response:
        return response.read().decode("utf-8", errors="replace")
def parse_url(url):
    """Return the set of absolute URLs referenced by href attributes in
    the page at *url*."""
    page = fetch_html(url)
    return {urljoin(url, href) for href in HREF_RE.findall(page)}
def process_url(file_path, url):
    """Append one ``source<TAB>linked`` line per outgoing link of *url*.

    The file is only opened when at least one link was found.
    """
    found = parse_url(url)
    if found:
        with open(file_path, "a") as out:
            out.writelines(f"{url}\t{target}\n" for target in found)
def bulk_process_urls(file_path, urls):
    """Run process_url for every url in *urls*, appending to *file_path*."""
    for source in urls:
        process_url(file_path, source)
if __name__ == "__main__":
    # Seed URLs, one per line; the set collapses duplicates.
    with open("urls.txt") as file:
        urls = set(line.strip() for line in file)
    outpath = "linked_urls.txt"
    # Start the output file fresh with a TSV header; process_url appends.
    with open(outpath, "w") as file:
        file.write("source_url\tlinked_url\n")
    start = time.time()
    bulk_process_urls(file_path=outpath, urls=urls)
    end = time.time()
    print(f"Completed in {end - start:.02f} seconds")
|
# -*- coding: utf-8 -*-
"""
Created on Mon May 20 11:12:01 2019
@author: HP
"""
# Shared state for rod_cutting: r[i] caches the best revenue for a rod of
# length i, s records cut positions for reconstruction, c is the fixed
# cost charged when a rod is actually cut.
r=[]
s=[]
c=3
def rod_cutting(n,p):
    """Bottom-up rod cutting; p[j] is the price of a piece of length j.

    Uses the module-level r (revenue table), s (cut positions) and c
    (per-cut cost).  ind != i after the inner loop means the optimum
    involves cutting, so c is subtracted once from the revenue.
    """
    r.append(0)
    for i in range(1,n+1):
        q=-5  # sentinel below any attainable revenue
        ind=i
        for j in range(1,i+1):
            if q<(p[j]+r[i-j]):
                ind=j
                q=p[j]+r[i-j]
                # NOTE(review): s grows on *every* improvement, not once
                # per i - verify the reconstruction loop below reads the
                # intended entries.
                s.insert(i,j)
        if ind!=i:
            r.append(q-c)  # a cut was made: charge the cutting cost once
        else:
            r.append(q)
    return r[n]
# Demo: best revenue for a rod of length 5, then walk s to print the cuts.
p=[0,1,5,8,9,10,17,17,20,24,30]
print(rod_cutting(5,p))
n=5
while n>0:
    print(s[n])
    n=n-s[n]
|
# Generated by Django 3.1.7 on 2021-03-26 12:00
from django.conf import settings
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated initial migration: it only pins the dependency on
    the swappable user model and performs no schema operations itself."""
    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
    ]
|
#----------------URL and imports-----------------------
import requests #Should be install requests libary
URL = 'http://localhost:8088/services/'
#---------------REQUESTS--------------------------------
def get_users():
    """GET users/all and return the decoded JSON payload.

    In case of error, inspect response.content for details.
    """
    return requests.get(URL + 'users/all').json()
def get_users_response():
    """GET users/all and return the raw HTTP response object."""
    response = requests.get(URL + 'users/all')
    response.json()  # parse eagerly so a malformed payload fails here
    print('GET ALL USERS Request----', response)
    return response
def serach_by_email(email):
    """Return the id of the first user where any field equals *email*.

    Returns None when no user matches.  NOTE(review): this matches *any*
    field value, not only the email field - confirm that is intended.
    """
    # Improvements: no shadowing of the builtin `id`, explicit None on
    # miss, and an idiomatic membership test instead of a nested loop.
    for user in get_users():
        if email in user.values():
            return user['id']
    return None
def search_all_water_forecast():
    """GET the water-forecast list and return the raw response object."""
    # Bug fix: URL already ends with '/', so the old leading '/' produced
    # a double slash ('.../services//waterforecast/all').
    response = requests.get(URL + 'waterforecast/all')
    water_forecast = response.json()  # eager parse; fails fast on bad JSON
    print('GET Water Forecast Request----', response)
    #print(water_forecast) #Uncomment to see all water forecast list
    return response
#------------------------BY ID--------------------------------------------
def get_user_by_id(id):
    """GET a single user record by its id and return the parsed JSON."""
    response = requests.get(URL + 'users/' + str(id))
    payload = response.json()
    print('GET user by ID rquest----', response)
    return payload
def get_notifications_by_id(id):
    """GET a user's notification settings.

    Returns a (response, parsed_json) tuple like the original.
    """
    # Bug fix: drop the leading '/' to avoid a double slash after URL.
    response = requests.get(URL + 'notification/settings/' + str(id))
    user_notification = response.json()
    print('GET notifications by ID rquest----', response)
    return response, user_notification
#-------------------------------------------------------------
|
# -*- coding: utf-8 -*-
"""
Created on Mon May 14 16:02:12 2018
@author: Binish125
"""
import random
pop_size=400
Num_items=15
Num_items=Num_items-1
tot_capacity=10
sim_run=4
val=[1,4,5,7,10,5,9,4,8,11,5,10,12,4,9]
wt=[1,3,4,5,4,2,3,6,8,4,1,2,6,5,1]
generations=50
def weighted_choice(items):
    """Roulette-wheel selection over (item, weight) pairs.

    Draws one uniform number in [0, total_weight) and walks the wheel;
    if rounding leaves the cursor past the end, the last item is
    returned.
    """
    spin = random.uniform(0, sum(weight for _, weight in items))
    for candidate, weight in items:
        if spin < weight:
            return candidate
        spin -= weight
    return candidate
def random_num():
    """Return a uniformly random item index in [0, Num_items]."""
    return random.randint(0, Num_items)
def random_pop():
    """Build the initial population: pop_size individuals, each a list of
    unique random item indices (duplicate draws are simply dropped)."""
    population = []
    for _ in range(pop_size):
        individual = []
        for _ in range(Num_items):
            candidate = random_num()
            if candidate not in individual:
                individual.append(candidate)
        population.append(individual)
    return population
def fitness(dna, wt, val):
    """Total value of the items in *dna*, or 0 when their combined weight
    exceeds tot_capacity (infeasible knapsack)."""
    total_weight = sum(wt[item] for item in dna)
    total_value = sum(val[item] for item in dna)
    return 0 if total_weight > tot_capacity else total_value
def mutation(dna, wt, val):
    """Copy *dna*, occasionally (3/200 per gene) swapping a gene for a
    fresh random item index; duplicates are never appended."""
    mutated = []
    for gene in dna:
        roll = int(200 * random.random())
        if 1 < roll < 5:
            replacement = random_num()
            if replacement not in mutated:
                mutated.append(replacement)
        elif gene not in mutated:
            mutated.append(gene)
    return mutated
def crossover(dna1, dna2):
    """Single-point crossover: swap tails, dropping genes already in the
    head (the set difference keeps each child duplicate-free)."""
    pos = int(random.random() * Num_items)
    head1, head2 = dna1[:pos], dna2[:pos]
    child1 = head1 + list(set(dna2[pos:]) - set(head1))
    child2 = head2 + list(set(dna1[pos:]) - set(head2))
    return (child1, child2)
# Driver: run the GA sim_run times and report the overall best individual.
if __name__== "__main__":
    output=[]
    for run in range(sim_run):
        print("\n\n\tSimulation Run "+ str(run+1)+" :\n")
        population=random_pop()
        for generation in range(generations):
            # Track the fittest individual of the current generation.
            fittest_population=population[0]
            maximum_fitness=fitness(population[0],wt,val)
            for individual in population:
                indi_fitness=fitness(individual,wt,val)
                if(indi_fitness>=maximum_fitness):
                    maximum_fitness=indi_fitness
                    fittest_population=individual
            print("Generation : "+ str(generation) + " random sample : "+ str(population[0]) + " - fittest population: "+ str(fittest_population)+ " fitness : "+ str(maximum_fitness))
            # Weight each individual for roulette selection; infeasible
            # individuals (fitness 0) keep a tiny weight of 1.0.
            weighted_pop=[]
            for individual in population:
                fitness_val=fitness(individual,wt,val)
                if(fitness_val==0):
                    pair=(individual,1.0)
                else:
                    pair=(individual,fitness_val*1.0)
                weighted_pop.append(pair)
            population=[]
            # Elitism: the fittest individual replaces one random child.
            # NOTE(review): on Python 3 pop_size/2 is a float and
            # random.randint will raise - wrap it in int() like the
            # range() below.
            index=random.randint(0,pop_size/2)
            for i in range(int(pop_size/2)):
                ind1=weighted_choice(weighted_pop)
                ind2=weighted_choice(weighted_pop)
                ind1,ind2=crossover(ind1,ind2)
                inter=ind1+list(set(ind2)-set(ind1))
                if(i==index):
                    population.append(fittest_population)
                else:
                    population.append(mutation(ind1,wt,val))
                    population.append(mutation(ind2,wt,val))
        output_pair=(fittest_population,maximum_fitness)
        output.append(output_pair)
        print("\n")
    # Pick the best individual across all runs.
    best_pop=output[0][0]
    max_value=output[0][1]
    for out_pop, max_fit in output:
        print("Fittest Population: "+ str(out_pop)+ " Fitness : "+ str(max_fit))
        if(max_value<=max_fit):
            max_value=max_fit
            best_pop=out_pop
    print("\n\nBest Population: \t"+str(best_pop))
    print("Maximum Fitness: \t"+str(max_value))
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat Feb 3 13:48:09 2018
@author: vincent
"""
import os, sys
# CLI arguments: two read-info files, then the genome size and the
# desired coverage (both numeric).
info_files = sys.argv[1:3]
info_files = [os.path.expanduser(f) for f in info_files]
info_files = [os.path.abspath(f) for f in info_files]
genome_size = float(sys.argv[3])
coverage = float(sys.argv[4])
def eng2float(num_string):
    """Parse a number that may carry an engineering suffix (k/M/G).

    Returns the value as a float, or None (after printing a warning)
    when the suffix is not recognized.
    """
    multipliers = {'k': 1e3, 'M': 1e6, 'G': 1e9}
    last = num_string[-1]
    if last in '0123456789':
        return float(num_string)
    if last in multipliers:
        return multipliers[last] * float(num_string[:-1])
    print("That number string was not recognized")
    return None
avg_len = 0
for f in info_files:
    with open(f, 'r') as fr:
        # First line is expected to hold comma-separated totals where
        # field 1 is the sequence count and field 2 the base count -
        # TODO confirm the exact input format.
        totals = fr.readline()
        totals = totals.split(',')
        seqs = totals[1].split()[0]
        seqs = (eng2float(seqs))
        nucs = totals[2].split()[0]
        nucs = (eng2float(nucs))
        avg_len += nucs / seqs
# NOTE(review): avg_len is the *sum* of per-file averages, and seqs below
# refers only to the last file processed - confirm this is intended.
req_seqs = genome_size * coverage / avg_len
if (req_seqs > seqs):
    # print("Requested coverage is larger than the maximum possible for this isolate.")
    print(int(seqs))
else:
    print(int(req_seqs))
|
# encoding=utf8
'''
Created on 2016-08-18
@author: jingyang <jingyang@nexa-corp.com>
Usage:
fab staging deploy
fab prod deploy
'''
from fabric.api import local
from fabric.context_managers import lcd, cd
from fabric.operations import put, run
from fabric.state import env
import os
# import wingdbstub
PROJECT_NAME = "liantang"
PROJECT_DIR = "/pypro/liantang" # project dir on server
# Whether to also package and deploy the src/helpers sub-tree.
with_helpers=True
# Repository root: parent of the directory containing this fabfile.
local_dir= os.path.dirname(os.path.dirname(__file__))
def staging():
    """Target the staging host, connecting as root."""
    env.hosts = ["prototype.enjoyst.com"]
    env.user = "root"
def prod():
    """Target the production host, connecting as the develop user."""
    env.hosts = ["10.0.2.253"]
    env.user = "develop"
def archive(path, name):
    """Create deploy/<name>.tar.gz from the git HEAD of *path*."""
    tar_file = os.path.join(local_dir, 'deploy', name + '.tar.gz')
    with lcd(path):
        local("git archive -o %s HEAD" % tar_file)
def upload(name):
    """Copy <name>.tar.gz into the project directory on the server."""
    with cd(PROJECT_DIR):
        put("{}.tar.gz".format(name), ".")
def extract(name, dst):
    """Unpack <name>.tar.gz into *dst* on the server."""
    with cd(PROJECT_DIR):
        run("tar xf {}.tar.gz -C {}".format(name, dst))
def deploy():
    """Archive the repo, push it to the server and unpack it in place;
    optionally do the same for the helpers sub-repo."""
    archive(local_dir, PROJECT_NAME)
    upload(PROJECT_NAME)
    extract(PROJECT_NAME, PROJECT_DIR)
    if not with_helpers:
        return
    helpers_src = os.path.join(local_dir, 'src/helpers')
    archive(helpers_src, 'helpers')
    upload('helpers')
    extract('helpers', os.path.join(PROJECT_DIR, 'src/helpers'))
|
"""
2. Add Two Numbers
You are given two non-empty linked lists representing two non-negative integers.
The digits are stored in reverse order, and each of their nodes contains a single digit.
Add the two numbers and return the sum as a linked list.
You may assume the two numbers do not contain any leading zero, except the number 0 itself.
Example 1:
Input: l1 = [2,4,3], l2 = [5,6,4]
Output: [7,0,8]
Explanation: 342 + 465 = 807.
Example 2:
Input: l1 = [0], l2 = [0]
Output: [0]
Example 3:
Input: l1 = [9,9,9,9,9,9,9], l2 = [9,9,9,9]
Output: [8,9,9,9,0,0,0,1]
"""
# Definition for singly-linked list.
class ListNode:
    """A single node of a singly-linked list."""

    def __init__(self, val=0, next=None):
        # Same interface as the canonical LeetCode definition.
        self.next = next
        self.val = val
class Solution:
    def addTwoNumbers(self, l1: ListNode, l2: ListNode) -> ListNode:
        """Add two non-negative integers stored digit-by-digit (least
        significant first) in linked lists; the sum is returned in the
        same representation."""
        sentinel = ListNode(-1)  # dummy head simplifies appending
        tail = sentinel
        a, b = l1, l2
        carry = 0
        while a or b:
            digit_sum = (a.val if a else 0) + (b.val if b else 0) + carry
            carry, digit = divmod(digit_sum, 10)
            tail.next = ListNode(digit)
            tail = tail.next
            a = a.next if a else None
            b = b.next if b else None
        # A final carry becomes one extra most-significant digit.
        if carry:
            tail.next = ListNode(carry)
        return sentinel.next
|
from __future__ import division
import time
import numpy as np
import pandas
import csv
import itertools
from sklearn.svm import SVR
from sklearn.model_selection import KFold, cross_val_predict
from sklearn.metrics.regression import mean_squared_error, r2_score
import matplotlib.pyplot as plt
"""Read in dataset"""
# Candidate subset sizes; index 5 (50000 rows) is what gets loaded below.
set_sizes = [100,500,1000,5000,10000,50000,100000,500000,1000000,5000000,10000000,50000000,100000000]
nrows2 = set_sizes[5]
column_names = ["Instance","Feature 1","Feature 2", "Feature 3","Feature 4","Feature 5","Feature 6","Feature 7",
                "Feature 8","Feature 9","Feature 10","Target","TargetClass"]
# NOTE(review): hard-coded Windows path - parameterize for portability.
dataframe = pandas.read_csv("C:\\Users\\gordo\\Desktop\\ML\\datasets\\with-noise\\The-SUM-dataset-with-noise.csv",
                            sep=';',header=0,names=column_names,index_col=0,usecols=[0,1,2,3,4,5,6,7,8,9,10,11],
                            nrows =set_sizes[5])
# Sequential 70/30 head/tail split - no shuffling.
X_train = dataframe.head(int(nrows2 * .7))
Y_train = X_train.Target
X_train = X_train[["Feature 1","Feature 2","Feature 3","Feature 4","Feature 5","Feature 6","Feature 7",
                   "Feature 8","Feature 9","Feature 10"]]
X_test = dataframe.tail(int(nrows2 * .3))
Y_test = X_test.Target
X_test = X_test[["Feature 1", "Feature 2","Feature 3","Feature 4","Feature 5","Feature 6","Feature 7",
                 "Feature 8","Feature 9", "Feature 10"]]
svr = SVR(C=500000).fit(X_train,Y_train)
pred_test = svr.predict(X_test)
score = r2_score(Y_test,pred_test)
# NOTE(review): cross_val_predict returns per-sample predictions, so
# cv_score holds an array of predictions, not a CV score - rename?
cv_score = cross_val_predict(svr, X_train, Y_train, cv=10)
test_se = mean_squared_error(Y_test, pred_test)
print(score)
print(test_se)
print(cv_score)
|
import requests
from utils.functions import *
from colorama import init, Fore
def main():
    """Interactive password tool: brute-force attack, dictionary attack,
    or quit."""
    init()  # enable colorama colours (needed on Windows terminals)
    userInput = input(
        "::. welcome to bitband brute v1.0 .::\n1- brute force attack\n2- dictionary attack\n3- quit\n\nchoose your option: "
    )
    if userInput == "1":
        username = input("username: ")
        passwordChars = input("password includes: ")
        minPassLength = int(input("min password length: "))
        maxPassLength = int(input("max password length: "))
        url = input("url: ")
        wrongPassMessage = input("wrong pass message: ")
        usernameFormName = input("what is username name in html form: ")
        passwordFormName = input("what is password name in html form: ")
        for testingPassword in generatePassword(passwordChars, minPassLength, maxPassLength):
            print(f"testing password {testingPassword}")
            testingResult = test(url, wrongPassMessage, usernameFormName=usernameFormName, passwordFormName=passwordFormName, username=username, testingPassword=testingPassword)
            if testingResult:
                print(f"{Fore.RED}PASSWORD IS {testingPassword}")
                return
    elif userInput == "2":
        username = input("username: ")
        url = input("url: ")
        wrongPassMessage = input("wrong pass message: ")
        usernameFormName = input("what is username name in html form: ")
        passwordFormName = input("what is password name in html form: ")
        passListAddress = input(
            "password list address(fileName shouldnt contain space): ")
        # Fix: close the wordlist deterministically (the original leaked
        # the file handle).
        with open(passListAddress, 'r') as f:
            for strTestingPass in f:
                print(f"testing password: {strTestingPass}")
                data = {
                    usernameFormName: username,
                    passwordFormName: strTestingPass.replace("\n", "")
                }
                response = requests.post(url, data=data)
                if wrongPassMessage not in response.text:
                    print(f"{Fore.RED}PASSWORD IS {strTestingPass}")
                    return 0
    # Bug fix: the original tested `elif "3":`, which is always truthy,
    # so every unrecognized input looked like "quit" and the
    # "wrong option" branch was unreachable.
    elif userInput == "3":
        print("Good Bye ;)")
        return 0
    else:
        print("wrong option")
# Run interactively when executed as a script.
if __name__ == "__main__":
    main()
|
import numpy as np
from PyQt5 import QtWidgets
from PyQt5.QtWidgets import QFileDialog, QMessageBox
from ui import MainWindow_design
from PlotWindow import PlotWindow
from DREAM import DREAMIO
import AUG
import EqFile
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.figure import Figure
class MainWindow(QtWidgets.QMainWindow):
    """Main EqGet window.

    Loads a magnetic equilibrium (ASDEX Upgrade shot or file), draws its
    flux surfaces, and opens separate plot windows for the poloidal flux
    and the magnetic-field components.
    """
    def __init__(self, argv):
        """
        Constructor.
        """
        QtWidgets.QMainWindow.__init__(self)
        self.ui = MainWindow_design.Ui_EqGet()
        self.ui.setupUi(self)
        # Equilibrium data (LUKE-format dict); None until load() succeeds.
        self.equil = None
        # Set up flux surface figure
        self.canvas = FigureCanvas(Figure())
        self.fluxSurfaceLayout = QtWidgets.QVBoxLayout(self.ui.frameFluxSurfaces)
        self.fluxSurfaceLayout.addWidget(self.canvas)
        self.setupPlot()
        # List of open windows
        self.windows = {}
        if AUG.isAvailable():
            self.ui.cbTokamak.addItem('ASDEX Upgrade', AUG)
        self.ui.cbTokamak.addItem('File', EqFile)
        self.toggleEnabled(False)
        self.bindEvents()
    def bindEvents(self):
        """
        Bind control events to methods.
        """
        self.ui.actionExit.triggered.connect(self.exit)
        self.ui.btnLoad.clicked.connect(self.load)
        self.ui.btnPlotPsi.clicked.connect(self.plotPsi)
        self.ui.btnPlotB.clicked.connect(self.plotB)
        self.ui.btnPlotBpol.clicked.connect(self.plotBpol)
        self.ui.btnPlotBr.clicked.connect(self.plotBr)
        self.ui.btnPlotBz.clicked.connect(self.plotBz)
        self.ui.btnPlotBphi.clicked.connect(self.plotBphi)
        self.ui.btnSave.clicked.connect(self.save)
    def closeEvent(self, event):
        # Qt close hook: route through exit() so child windows close too.
        self.exit()
    def exit(self):
        """
        Close any child windows before exiting.
        """
        for _, w in self.windows.items():
            w.close()
        self.close()
    def toggleEnabled(self, enabled=True):
        """
        Toggle the enabled state of controls which require data to be
        available.
        """
        self.ui.btnPlotPsi.setEnabled(enabled)
        self.ui.btnPlotB.setEnabled(enabled)
        self.ui.btnPlotBpol.setEnabled(enabled)
        self.ui.btnPlotBr.setEnabled(enabled)
        self.ui.btnPlotBz.setEnabled(enabled)
        self.ui.btnPlotBphi.setEnabled(enabled)
        self.ui.btnSave.setEnabled(enabled)
    def load(self):
        """
        Load data using the selected module.
        """
        shot = self.ui.tbShot.text()
        # Try to convert to integer. If that fails, the user may
        # have provided a file name instead...
        try: shot = int(shot)
        except: pass
        try:
            mod = self.ui.cbTokamak.currentData()
            self.equil = mod.getLUKE(shot)
            print("Loaded '{}'...".format(shot))
            self.plotFluxSurfaces()
            self.toggleEnabled(True)
        except Exception as ex:
            QMessageBox.critical(self, 'Error loading shot', "The specified shot file could not be loaded:\n\n{}".format(ex))
    def plotFluxSurfaces(self):
        """
        Plot flux surfaces from loaded equilibrium data.
        """
        ax = self.fluxSurfaceAx
        ptx = self.equil['ptx']
        pty = self.equil['pty']
        Rp = self.equil['Rp']
        Zp = self.equil['Zp']
        # Interior surfaces in grey, outermost surface in red, plasma
        # centre (Rp, Zp) as a red square marker.
        ax.plot(ptx[:,:-1]+Rp, pty[:,:-1]+Zp, linewidth=0.7, color=(0.5, 0.5, 0.5))
        ax.plot(ptx[:,-1]+Rp, pty[:,-1]+Zp, linewidth=2, color='r')
        ax.plot(Rp, Zp, 's', color='r')
        ax.axis('equal')
        self.canvas.draw()
    def plotPsi(self):
        """
        Plot poloidal flux as function of minor radius.
        """
        if 'psi' in self.windows:
            self.windows['psi'].close()
        w = PlotWindow(600, 400)
        r = self.equil['ptx'][0,:]
        psi_apRp = self.equil['psi_apRp']
        Rp = self.equil['Rp']
        ap = r[-1]
        # psi_apRp appears (from its name) to be psi scaled by ap/Rp;
        # multiply back to recover the flux - TODO confirm convention.
        psi = psi_apRp * (Rp/ap)
        w.ax.plot(r, psi)
        w.ax.set_xlim([0, ap])
        w.ax.set_xlabel(r'$r$ (m)')
        w.ax.set_ylabel(r'Poloidal flux $\Psi$ (Wb)')
        w.show()
        self.windows['psi'] = w
    def plot2D(self, name, data):
        """
        Plot the given magnetic field.
        """
        # One window per quantity: close any previous window of this name.
        if name in self.windows:
            self.windows[name].close()
        w = PlotWindow()
        Rp = self.equil['Rp']
        Zp = self.equil['Zp']
        R = self.equil['ptx'] + Rp
        Z = self.equil['pty'] + Zp
        cnt = w.ax.contourf(R, Z, data, cmap='GeriMap', levels=40)
        cbar = w.figure.colorbar(cnt)
        w.ax.set_xlabel('$R$ (m)')
        w.ax.set_ylabel('$Z$ (m)')
        w.ax.axis('equal')
        cbar.set_label('{} (T)'.format(name))
        w.show()
        self.windows[name] = w
    def plotB(self):
        """
        Plot the magnetic field strength in (R, Z).
        """
        Br = self.equil['ptBx']
        Bz = self.equil['ptBy']
        Bp = self.equil['ptBPHI']
        self.plot2D('$|B|$', np.sqrt(Br**2 + Bz**2 + Bp**2))
    def plotBpol(self):
        """
        Plot the poloidal magnetic field.
        """
        Br = self.equil['ptBx']
        Bz = self.equil['ptBy']
        self.plot2D(r'$B_{\rm pol}$', np.sqrt(Br**2 + Bz**2))
    def plotBr(self):
        """
        Plot the radial magnetic field component.
        """
        self.plot2D(r'$B_r$', self.equil['ptBx'])
    def plotBz(self):
        """
        Plot the vertical magnetic field component.
        """
        self.plot2D(r'$B_z$', self.equil['ptBy'])
    def plotBphi(self):
        """
        Plot the toroidal magnetic field component.
        """
        self.plot2D(r'$B_\varphi$', self.equil['ptBPHI'])
    def save(self):
        """
        Save the loaded equilibrium to file.
        """
        filename, _ = QFileDialog.getSaveFileName(self, caption="Save LUKE equilibrium file", filter='HDF5 file (*.h5)')
        if filename:
            DREAMIO.SaveDictAsHDF5(filename, {'equil': self.equil})
            QMessageBox.information(self, "Equilibrium file saved", "The magnetic equilibrium data was saved to the file '{}'.".format(filename))
    def setupPlot(self):
        # Create the axes for the embedded flux-surface canvas.
        self.fluxSurfaceAx = self.canvas.figure.subplots()
        self.fluxSurfaceAx.set_xlabel(r'$R$ (m)')
        self.fluxSurfaceAx.set_ylabel(r'$Z$ (m)')
        self.fluxSurfaceAx.figure.tight_layout()
|
from flask import Flask, redirect, url_for, render_template, request
from time import sleep
import RPi.GPIO as GPIO # import the RPi library and its GPIO function?? PWM to control the servo motor??
app = Flask(__name__)
# Default page (Spray Power Off): a GET drives the relay pin low; a POST
# only re-renders, since the device must be switched on first via /on.
@app.route("/", methods=["GET","POST"])
def home():
    # BCM pin driving the pump relay (same pin as RelayControl in on()).
    TES_pin = 26
    if request.method =="POST":
        print("has posted")
        userValue = request.form["val"] # get the value of val from html file and store as str
        print("Please Turn On the Device First")
        return render_template("index.html") #turn On switch
    else:
        GPIO.setmode(GPIO.BCM)
        GPIO.setup(TES_pin,GPIO.OUT)
        GPIO.output(TES_pin,GPIO.LOW)
        print("Pump Off")
        return render_template("index.html") # nothing happen
# Spray Power ON: a GET energizes the relay; a POST reads the requested
# angle from the form and drives the spray servo.
@app.route('/on',methods=["GET","POST"])
def on():
    # BCM pin driving the pump relay (same pin as TES_pin in home()).
    RelayControl = 26
    print("Three")  # NOTE(review): debug leftover?
    if request.method =="POST":
        userValue = request.form["val"] # get the value of val from html file and store as str
        print(userValue)
        SMotor_Control(userValue)
        return render_template('on.html')
    else:
        GPIO.setmode(GPIO.BCM)
        GPIO.setup(RelayControl,GPIO.OUT)
        GPIO.output(RelayControl,GPIO.HIGH)
        return render_template('on.html')
def SMotor_Control(degree):
    """Drive the spray servo: start PWM, hold duty 12, then move to the
    duty cycle computed by SetAngle(degree), and release the GPIO."""
    print("Spray Motor Control")
    val1=SetAngle(degree)
    servoM1_pin = 6 # PWM pin
    GPIO.setmode(GPIO.BCM)
    GPIO.setup(servoM1_pin,GPIO.OUT)
    m1=GPIO.PWM(servoM1_pin,50)  # 50 Hz PWM carrier
    m1.start(0)
    m1.ChangeDutyCycle(12)
    sleep(4)
    m1.ChangeDutyCycle(val1)
    sleep(2)
    GPIO.cleanup()
def SetAngle(angle):
    """Map a requested angle (string or number, nominally -90..90) to a
    servo PWM duty-cycle value.

    The angle is shifted into 0..180 and linearly mapped onto duty
    12..2, then truncated to an int (matching the original behaviour).
    """
    degrees = float(angle)
    print(degrees)
    degrees += 90
    print(degrees)
    duty = 12 - (degrees / 180) * 10
    result = int(duty)
    print(duty)
    print('Result')
    print(result)
    return result
# initialize the calling
# Bind to all interfaces so the Pi is reachable from the LAN.
if __name__ == "__main__":
    app.run(host='0.0.0.0',port = 5000)
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-02-25 18:04
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration for the qa app: adds default orderings to
    answer/category/question and a help text on ratequestion.rating."""
    dependencies = [
        ('qa', '0011_ratequestion'),
    ]
    operations = [
        migrations.AlterModelOptions(
            name='answer',
            options={'ordering': ['-date']},
        ),
        migrations.AlterModelOptions(
            name='category',
            options={'ordering': ['-name'], 'verbose_name_plural': 'Categories'},
        ),
        migrations.AlterModelOptions(
            name='question',
            options={'ordering': ['-date']},
        ),
        migrations.AlterField(
            model_name='ratequestion',
            name='rating',
            field=models.BooleanField(help_text='Rate the question'),
        ),
    ]
|
import os
from build_database import build_database
# Resolve dataset locations relative to this script.  os.path.join makes
# the paths portable (the original hard-coded Windows '\\' separators);
# the stray trailing semicolons are gone too.
_here = os.path.dirname(os.path.abspath(__file__))
ruta1 = os.path.join(_here, 'TerrassaBuildings900', 'val', 'images')
ruta2 = os.path.join(_here, 'TerrassaBuildings900', 'train', 'images')
savepath1 = os.path.join(_here, 'TerrassaBuildings900', 'val')
savepath2 = os.path.join(_here, 'TerrassaBuildings900', 'train')
build_database(ruta1, savepath1)
build_database(ruta2, savepath2)
|
# Runner package
# Re-export so that train, infer and logging helpers can be imported from here
from runner.runner import Runner
from runner.infer import Infer
from utils import *
from dataset import *
from models import *
|
#!/usr/bin/python3
''' I/O module '''
def append_write(filename="", text=""):
    """Append *text* to a UTF-8 text file and return the number of
    characters written."""
    with open(filename, mode='a', encoding='utf-8') as handle:
        written = handle.write(text)
    return written
|
import json
import os
from collections import UserDict
from .logger import get_logger
logger = get_logger('config')
# Per-environment config file names.  NOTE(review): global_config_paths
# is not referenced below - env_defaults is what Config actually uses.
global_config_paths = {"LOCAL": "local_config.json",
                       "TESTNET": "testnet_config.json",
                       "MAINNET": "mainnet_config.json"}
# Fallback config file per SWAP_ENV value (defaults to LOCAL).
env_defaults = {'LOCAL': './config/local_config.json',
                'TESTNET': './config/testnet_config.json',
                'MAINNET': './config/mainnet_config.json'}
__all__ = ['Config']
class Config(UserDict):
    """Configuration manager -- loads global parameters.

    Key lookup prefers environment variables, then the values loaded
    from the JSON config file.  A list of *required* keys may be given
    and is validated right after loading so missing parameters are
    caught early.
    """
    def __init__(self, required: list = None, config_file: str = None):
        # avoid a shared mutable default; keep the caller's list object
        self.required = required if required is not None else []
        super().__init__()
        if not config_file:
            config_file = env_defaults[os.getenv('SWAP_ENV', 'LOCAL')]
        logger.info(f'Loading custom configuration: {config_file}')
        try:
            with open(config_file) as fh:
                self.update(json.load(fh))
        except IOError:
            logger.critical("there was a problem opening the config file")
            raise
        except json.JSONDecodeError as e:
            logger.critical("config file isn't valid json")
            raise ValueError from e
        self.check_required()
    def check_required(self):
        """Raise EnvironmentError for the first required key that is
        missing from both the environment and the loaded file."""
        for key in self.required:
            if key in self:
                continue
            raise EnvironmentError(f'Missing key {key} in configuration file or environment variables')
    def __contains__(self, key):
        # Environment variables count as present.
        return key in os.environ or super().__contains__(key)
    def __getitem__(self, item):
        """Search the environment first (exact name, then upper-cased),
        falling back to the stored mapping."""
        if isinstance(item, str):
            for candidate in (item, item.upper()):
                if candidate in os.environ:
                    return os.getenv(candidate)
        return super().__getitem__(item)
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import urllib2
import hashlib
import json
import time
import datetime
import datamodel as dm
from apiconfig import APIConfig
from logger import Logger
class OpenWeatherMap:
    """Fetches current weather and a multi-day forecast from the
    OpenWeatherMap API and maps them onto datamodel sample objects.

    Python 2 module (urllib2, print statements).
    """
    def getDateStr(self, delta_days):
        #create time string of the following format: "yyyy-mm-dd"
        today = datetime.date.today()
        target = today + datetime.timedelta(days=delta_days)
        return target.strftime("%Y-%m-%d")
    def __init__(self, config):
        # set up API data
        self.api_key = config.api_key
        self.city_code = config.city_code
        self.url = "http://api.openweathermap.org/data/2.5/forecast/city?id={0}&APPID={1}&units=metric".format(self.city_code, self.api_key)
        self.current_url = "http://api.openweathermap.org/data/2.5/weather?id={0}&APPID={1}&units=metric".format(self.city_code, self.api_key)
##        print "OWM url: " + self.url
        # get date and hour to read API results
        nr_days_forecast = 3
        self.api_dates = []
        for i in range(0, nr_days_forecast):
            self.api_dates.append( self.getDateStr(i) )
        # Forecast sampling times matched against the API's slots.
        self.api_hours = ["06:00", "12:00", "18:00", "21:00"]
        self.api_hours_num = [6,12,18,21]
        self.sampled_days = []
        self.current_weather = None
        self.logfile = "logOWM.txt"
        with Logger(self.logfile) as log:
            log.lprint("--- NEW ---")
    def getCurrentData(self):
        #get current weather data
        data_status = None
        # read data as JSON and transform it this way to a dict
        try:
            with Logger(self.logfile) as log:
                log.lprint("fetching data from API... OWM")
            print "fetching data from API... OWM"
##            print "URL - Request",
            req = urllib2.Request(self.current_url)
##            print " - Open",
            response = urllib2.urlopen(req)
##            print " - Read Response"
            output = response.read()
            data = json.loads(output)
            #req = urllib2.urlopen(self.url)
            #data = json.load(req)
            data_status = True
        except urllib2.URLError, e:
            with Logger(self.logfile) as log:
                log.lprint("URLError: " + str(e.reason))
            print "Could not connect to the openweathermap server - sorry, please \
check your internet connection and possible server down times: URLError:" + str(e.reason)
            data_status = False
        if data_status:
            date = self.getDateStr(0)
            time = datetime.datetime.now().strftime('%H:00')
            self.current_weather = dm.WeatherSamplePoint(date, time)
            rain_mm = 0.0
            # 'rain'/'3h' keys are only present when rain was reported;
            # the 3-hour accumulation is averaged down to mm per hour.
            if 'rain' not in data:
                rain_mm = 0.0
            else:
                if '3h' not in data['rain']:
                    rain_mm = 0.0
                else:
                    rain_mm = float(data['rain']['3h'])/3.0
            # NOTE(review): the single current temperature fills both the
            # first and second temperature slots of the sample point.
            self.current_weather.setValues(data['main']['temp'],
                                           data['main']['temp'],
                                           rain_mm,
                                           data['wind']['speed'])
    def get_forecast(self, wind_named=False):
        forecast_status = None
        # read data as JSON and transform it this way to a dict
        try:
            with Logger(self.logfile) as log:
                log.lprint("fetching data")
            print "fetching data from API... OWM"
##            print "URL - Request",
            req = urllib2.Request(self.url)
##            print " - Open",
            response = urllib2.urlopen(req)
##            print " - Read Response"
            output = response.read()
            data = json.loads(output)
            #req = urllib2.urlopen(self.url)
            #data = json.load(req)
            forecast_status = True
        except urllib2.URLError, e:
            with Logger(self.logfile) as log:
                log.lprint("URLError: " + str(e.reason))
            print "Could not connect to the openweathermap server - sorry, please \
check your internet connection and possible server down times: URLError:" + str(e.reason)
            forecast_status = False
        # read the forecast basis in the variable
        if forecast_status:
            #print data
            count = data['cnt']
            del self.sampled_days[:]
            # Match each (date, hour) of interest against the API slots.
            for d in self.api_dates:
                wds = dm.WeatherDaySample(d, self.api_hours)
                for h in self.api_hours:
                    wsp = dm.WeatherSamplePoint(d, h)
                    for x in range(count):
                        f = data['list'][x]
                        fstamp = f['dt_txt']
                        fdate = fstamp[0:10]
                        ftime = fstamp[11:16]
                        if fdate == d and ftime == h:
                            # high temp, low temp, rain chance, wind speed
                            rain_mm = 0.0
                            if 'rain' not in f:
                                rain_mm = 0.0
                            else:
                                if '3h' not in f['rain']:
                                    rain_mm = 0.0
                                else:
                                    rain_mm = float(f['rain']['3h']) / 3.0
                            # NOTE(review): `rain` is computed but never used.
                            rain = 0
                            if(rain_mm > 0):
                                rain = 1
                            wsp.setValues(f['main']['temp_max'],
                                          f['main']['temp_min'],
                                          rain_mm,
                                          f['wind']['speed'])
                    wds.setValuesDayTimeData(wsp)
                self.sampled_days.append(wds)
            with Logger(self.logfile) as log:
                log.lprint("--- end ---\n")
        else:
            print "WARNING: could not get forecast!"
|
'''
| From: "Digitalized Signatures and Public-Key Functions as Intractable as Factorization".
| Published in: 1979
| Security Assumption: Integer Factorization
* type: public-key encryption
* setting: Integer
:Authors: Christina Garman
:Date: 09/2011
'''
from charm.core.math.integer import integer
from charm.toolbox.PKEnc import PKEnc
from charm.toolbox.PKSig import PKSig
from charm.toolbox.paddingschemes import OAEPEncryptionPadding,SAEPEncryptionPadding
from charm.toolbox.redundancyschemes import InMessageRedundancy
from charm.toolbox.conversion import Conversion
from charm.toolbox.bitstring import Bytes
from charm.toolbox.specialprimes import BlumWilliamsInteger
from math import ceil
debug = False
class Rabin():
    """Rabin cryptosystem base: key-material generation over a
    Blum-Williams modulus N = p*q."""
    def __init__(self, modulus=BlumWilliamsInteger()):
        # NOTE(review): the default generator instance is shared across
        # Rabin() objects (mutable default argument).
        self.modulustype = modulus
    # generate p,q and n
    def paramgen(self, secparam):
        (p, q, N) = self.modulustype.generateBlumWilliamsInteger(secparam)
        # charm integer semantics: `** -1` on a reduced value presumably
        # computes the modular inverse (yp = (p mod q)^-1) - verify
        # against the charm.core.math.integer docs.
        yp = (p % q) ** -1
        yq = (q % p) ** -1
        return (p, yp, q, yq, N)
    def keygen(self, s0, secparam=1024, params=None):
        """Generate (pk, sk); s0 is the SAEP padding parameter carried in pk."""
        if params:
            # NOTE(review): convert() takes five positional arguments, so
            # this call as written would raise TypeError - likely needs
            # self.convert(*params).
            (N, p, q, yp, yq) = self.convert(params)
            pk = { 'N':N, 'n':secparam, 's0':s0 }
            sk = { 'p':p, 'q':q, 'N':N , 'yp':yp, 'yq':yq }
            return (pk, sk)
        (p, yp, q, yq, N) = self.paramgen(secparam)
        pk = { 'N':N, 'n':secparam, 's0':s0 }
        sk = { 'p':p, 'q':q, 'N':N , 'yp':yp, 'yq':yq }
        return (pk, sk)
    def convert(self, N, p, q, yp, yq):
        # Wrap raw values in charm's modular-integer type.
        return (integer(N), integer(p), integer(q), integer(yp), integer(yq))
class Rabin_Enc(Rabin,PKEnc):
    """
    Rabin public-key encryption (encrypt = squaring mod N; decrypt picks
    the right square root among the four CRT candidates).

    >>> rabin = Rabin_Enc()
    >>> (public_key, secret_key) = rabin.keygen(128, 1024)
    >>> msg = b'This is a test'
    >>> cipher_text = rabin.encrypt(public_key, msg)
    >>> decrypted_msg = rabin.decrypt(public_key, secret_key, cipher_text)
    >>> decrypted_msg == msg
    True
    """
    def __init__(self, padding=SAEPEncryptionPadding(), redundancy=InMessageRedundancy(), params=None):
        # NOTE(review): default padding/redundancy instances are shared
        # across Rabin_Enc objects (mutable default arguments).
        Rabin.__init__(self)
        PKEnc.__init__(self)
        self.paddingscheme = padding
        self.redundancyscheme = redundancy
    # m : Bytes
    def encrypt(self, pk, m, salt=None):
        """Pad/encode m, map it into Z_N, and return its square mod N."""
        if(self.paddingscheme.name == "SAEPEncryptionPadding"):
            EM = self.paddingscheme.encode(m, pk['n'], pk['s0'])
        else:
            # Non-SAEP padding relies on in-message redundancy so decrypt
            # can recognize the correct root among the four candidates.
            m = self.redundancyscheme.encode(m)
            octetlen = int(ceil(int(pk['N']).bit_length() / 8.0))
            EM = self.paddingscheme.encode(m, octetlen, "", salt)
        if debug: print("EM == >", EM)
        i = Conversion.OS2IP(EM)
        ip = integer(i) % pk['N'] #Convert to modular integer
        return (ip ** 2) % pk['N']
    def decrypt(self, pk, sk, c):
        """Compute the four square roots of c via CRT and return the one
        whose padding/redundancy check succeeds; asserts on failure."""
        p = sk['p']
        q = sk['q']
        yp = sk['yp']
        yq = sk['yq']
        # Square roots mod p and mod q (valid since p,q = 3 mod 4).
        mp = (c ** ((p+1)/4)) % p
        mq = (c ** ((q+1)/4)) % q
        if(not(((c % p) == (mp ** 2)) and ((c % q) == (mq ** 2)))):
            assert False, "invalid ciphertext"
        # CRT-combine into the four candidate roots r1, r2, s1, s2.
        r1 = ((int(yp)*int(p)*int(mq)) + ((int(yq)*int(q)*int(mp)))) % int(sk['N'])
        r2 = int(sk['N']) - int(r1)
        s1 = (int(yp)*int(p)*int(mq) - int(yq)*int(q)*int(mp)) % int(sk['N'])
        s2 = int(sk['N']) - int(s1)
        m1 = r1 % int(sk['N'])
        m2 = r2 % int(sk['N'])
        m3 = s1 % int(sk['N'])
        m4 = s2 % int(sk['N'])
        if(self.paddingscheme.name == "SAEPEncryptionPadding"):
            # Exactly two of the four roots lie below N/2; decode both
            # and let the padding tag (t1/t2) identify the real message.
            if(m1 < int(sk['N']/2)):
                os1 = Conversion.IP2OS(int(m1))
                if(m2 < int(sk['N']/2)):
                    os2 = Conversion.IP2OS(int(m2))
                else:
                    if(m3 < int(sk['N']/2)):
                        os2 = Conversion.IP2OS(int(m3))
                    else:
                        os2 = Conversion.IP2OS(int(m4))
            else:
                if(m2 < int(sk['N']/2)):
                    os1 = Conversion.IP2OS(int(m2))
                    if(m3 < int(sk['N']/2)):
                        os2 = Conversion.IP2OS(int(m3))
                    else:
                        os2 = Conversion.IP2OS(int(m4))
                else:
                    os1 = Conversion.IP2OS(int(m3))
                    os2 = Conversion.IP2OS(int(m4))
            if debug:
                print("OS1 =>", os1)
                print("OS2 =>", os2)
            (m1, t1) = self.paddingscheme.decode(os1, pk['n'], pk['s0'])
            (m2, t2) = self.paddingscheme.decode(os2, pk['n'], pk['s0'])
            if((t1 == Bytes.fill(b'\x00', pk['s0']/8)) and (t2 == Bytes.fill(b'\x00', pk['s0']/8))):
                assert False, "invalid ciphertext"
            if(t1 == Bytes.fill(b'\x00', pk['s0']/8)):
                return m1
            else:
                if(t2 == Bytes.fill(b'\x00', pk['s0']/8)):
                    return m2
                else:
                    assert False, "invalid ciphertext"
        else:
            # Redundancy-based selection: decode all four candidates and
            # return the first that passes the redundancy check.
            # NOTE(review): falls off the end (returns None) when no
            # candidate validates.
            octetlen = int(ceil(int(pk['N']).bit_length() / 8.0))
            os1 = Conversion.IP2OS(int(m1), octetlen)
            os2 = Conversion.IP2OS(int(m2), octetlen)
            os3 = Conversion.IP2OS(int(m3), octetlen)
            os4 = Conversion.IP2OS(int(m4), octetlen)
            if debug:
                print("OS1 =>", os1)
                print("OS2 =>", os2)
                print("OS3 =>", os3)
                print("OS4 =>", os4)
            for i in [os1, os2, os3, os4]:
                (isMessage, message) = self.redundancyscheme.decode(self.paddingscheme.decode(i))
                if(isMessage):
                    return message
class Rabin_Sig(Rabin, PKSig):
    """
    Rabin signature: the signature is a square root of the padded
    message digest mod N.

    >>> msg = b'This is a test message.'
    >>> rabin = Rabin_Sig()
    >>> (public_key, secret_key) = rabin.keygen(1024)
    >>> signature = rabin.sign(secret_key, msg)
    >>> rabin.verify(public_key, msg, signature)
    True
    """
    def __init__(self, padding=OAEPEncryptionPadding()):
        # NOTE(review): default padding instance is shared across
        # Rabin_Sig objects (mutable default argument).
        Rabin.__init__(self)
        PKSig.__init__(self)
        self.paddingscheme = padding
    def sign(self,sk, M, salt=None):
        #apply encoding
        # Loop until the encoded message is a quadratic residue, i.e. one
        # of the four CRT roots squares back to m.  NOTE(review): with a
        # fixed salt the encoding never changes, so a non-residue would
        # loop forever - confirm the padding draws fresh randomness.
        while True:
            octetlen = int(ceil(int(sk['N']).bit_length() / 8.0))
            em = self.paddingscheme.encode(M, octetlen, "", salt)
            m = Conversion.OS2IP(em)
            m = integer(m) % sk['N'] #ERRROR m is larger than N
            p = sk['p']
            q = sk['q']
            yp = sk['yp']
            yq = sk['yq']
            mp = (m ** ((p+1)/4)) % p
            mq = (m ** ((q+1)/4)) % q
            r1 = ((int(yp)*int(p)*int(mq)) + ((int(yq)*int(q)*int(mp)))) % int(sk['N'])
            r2 = int(sk['N']) - int(r1)
            s1 = (int(yp)*int(p)*int(mq) - int(yq)*int(q)*int(mp)) % int(sk['N'])
            s2 = int(sk['N']) - int(s1)
            if(((int((integer(r1) ** 2) % sk['N'] - m)) == 0) or ((int((integer(r2) ** 2) % sk['N'] - m)) == 0) or ((int((integer(s1) ** 2) % sk['N'] - m)) == 0) or ((int((integer(s2) ** 2) % sk['N'] - m)) == 0)):
                break
        S = { 's1':r1, 's2':r2, 's3':s1, 's4':s2 }
        if debug:
            print("Signing")
            print("m =>", m)
            print("em =>", em)
            print("S =>", S)
        return S
    def verify(self, pk, M, S, salt=None):
        """Square S['s1'] mod N, strip the padding and compare with M.

        NOTE(review): only the s1 component of the signature is checked.
        """
        #M = b'This is a malicious message'
        octetlen = int(ceil(int(pk['N']).bit_length() / 8.0))
        sig_mess = (integer(S['s1']) ** 2) % pk['N']
        sig_mess = Conversion.IP2OS(int(sig_mess), octetlen)
        if debug: print("OS1 =>", sig_mess)
        dec_mess = self.paddingscheme.decode(sig_mess)
        if debug:
            print("Verifying")
            print("sig_mess =>", sig_mess)
            print("dec_mess =>", dec_mess)
            print("S =>", S)
        return (dec_mess == M)
|
# encoding: utf-8
import os
import io
import sys
from setuptools import setup, find_packages, Command
from shutil import rmtree
# --- Package metadata -------------------------------------------------
NAME = 'reelog'
DESCRIPTION = 'python log best practice.'
URL = ''
EMAIL = 'samrui0129@gmail.com'
AUTHOR = 'Sam Rui'
REQUIRES_PYTHON = '>=2.7.0'
VERSION = '1.6.7'
# Runtime dependencies / optional extras (none at present).
REQUIRED = [
]
EXTRAS = {
}
here = os.path.abspath(os.path.dirname(__file__))
# Use README.rst as the long description when it exists, otherwise fall
# back to the short description. Catch IOError rather than
# FileNotFoundError: the latter does not exist on Python 2, which this
# package claims to support (REQUIRES_PYTHON '>=2.7.0'); on Python 3,
# IOError is an alias of OSError and still covers a missing file.
try:
    with io.open(os.path.join(here, 'README.rst'), encoding='utf-8') as f:
        long_description = '\n' + f.read()
except IOError:
    long_description = DESCRIPTION
class UploadCommand(Command):
    """Support setup.py upload.

    Builds sdist + universal wheel and pushes them to PyPI via twine.
    Invoked as ``python setup.py upload``.
    """
    description = 'Build and publish the package.'
    user_options = []
    def initialize_options(self):
        # Required by the distutils Command interface; nothing to set up.
        pass
    def finalize_options(self):
        # Required by the distutils Command interface; nothing to finalize.
        pass
    def run(self):
        try:
            print('Removing previous builds…')
            rmtree(os.path.join(here, 'dist'))
        except OSError:
            # dist/ does not exist yet — nothing to clean.
            pass
        print('Building Source and Wheel (universal) distribution…')
        os.system('{0} setup.py sdist bdist_wheel --universal'.format(sys.executable))
        print('Uploading the package to PyPI via Twine…')
        os.system('twine upload dist/*')
        # print('Pushing git tags…')
        # os.system('git tag v{0}'.format(VERSION))
        # os.system('git push --tags')
        sys.exit()
# Wire the metadata above into setuptools; `python setup.py upload`
# dispatches to UploadCommand.
setup(
    name=NAME,
    version=VERSION,
    description=DESCRIPTION,
    long_description=long_description,
    long_description_content_type='text/x-rst',
    author=AUTHOR,
    author_email=EMAIL,
    python_requires=REQUIRES_PYTHON,
    url=URL,
    packages=find_packages(exclude=('tests',)),
    install_requires=REQUIRED,
    extras_require=EXTRAS,
    include_package_data=True,
    zip_safe=False,
    license='Apache License',
    classifiers=[
        'License :: OSI Approved :: Apache Software License',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: Implementation :: CPython',
    ],
    cmdclass={
        'upload': UploadCommand,
    }
)
|
import pandas as pd
import os
import numpy as np
from manatee.preprocess import parse_weekly_timestamps
from manatee.shapelet_train import train_shapelets, batch_events_to_rates
import pickle
# --- Hyperparameters for converting event timestamps into rate series ---
series_size = 240 * 60      # series length in seconds (240 minutes)
num_bins = 300              # number of samples per rate series
min_points = 5              # minimum events needed to emit a series
filter_bandwidth = 2        # smoothing bandwidth passed to batch_events_to_rates
density = True              # normalize rates to a density
data = pd.read_pickle('../all_emails_kmeans_clustered.pkl')
data = parse_weekly_timestamps(data) # add weekly timestamps
index = data['file']
# Reuse cached rate series if they were already computed for this exact
# hyperparameter combination; otherwise build them from the raw events.
if os.path.isfile("manatee/rate_values/kmeans/sz_{}_hr_bins_{}_min_pts_{}_filter_width_{}_density_{}/series_values.npy".format(series_size / 60 / 60, num_bins, min_points, filter_bandwidth, density)):
    dir_path = "manatee/rate_values/kmeans/sz_{}_hr_bins_{}_min_pts_{}_filter_width_{}_density_{}".format(series_size / 60 / 60, num_bins, min_points, filter_bandwidth, density)
    series_values = np.load(dir_path + "/series_values.npy")
    # change this line from 'labels.npy' to 'labels_multi.npy' for binary vs. multiclass
    labels = np.load(dir_path + "/labels.npy")
    '''
    pkl_file = open(dir_path + "/val_series_count.pkl", 'rb')
    val_series_count = pickle.load(pkl_file)
    pkl_file.close()
    series_count = 0
    for val in index.unique():
        series_count += val_series_count[val]
    print("\nDataset Summary: {} total time series, length = {} hr, sampled {} times\n".format(series_count, series_size / 60 / 60, num_bins))
    for val in index.unique():
        ct = val_series_count[val]
        print("{} time series ({} % of total) were added from cluster: {}".format(ct, round(ct / series_count * 100, 1), val))
    '''
else:
    # uncomment for binary shapelet classifier
    '''
    labels_dict = {}
    for val in data['file'].unique():
        if val == 'enron.jsonl':
            labels_dict[val] = 0
        else:
            labels_dict[val] = 1
    '''
    # uncomment for multiclass shapelet classifier
    # Each source file (cluster) becomes its own class label.
    labels_dict = {}
    for val in data['file'].unique():
        labels_dict[val] = val
    ## TODO - BATCH EVENTS TO RATES hp optimization / fidelity - series_size, num_bins, min_points, filter_bandwidth
    # train multiclass shapelet classifier without transfer learning
    series_values, series_times, labels, val_series_count = \
        batch_events_to_rates(data['Weekly Timestamp'], index, labels_dict, series_size = series_size, min_points = min_points,
                              num_bins = num_bins, filter_bandwidth = filter_bandwidth, density = density)
# randomly shuffle before splitting into training / test / val
np.random.seed(0)  # fixed seed so the split is reproducible
randomize = np.arange(len(series_values))
np.random.shuffle(randomize)
series_values = series_values[randomize]
labels = labels[randomize]
# train on the first 90 % of series
train_split = int(0.9 * series_values.shape[0])
train_shapelets(series_values[:train_split].reshape(-1, series_values.shape[1], 1), labels[:train_split])
# test eval: retrain with the held-out 10 % supplied as validation data
train_shapelets(series_values[:train_split].reshape(-1, series_values.shape[1], 1), labels[:train_split],
                val_data = (series_values[train_split:].reshape(-1, series_values.shape[1], 1), labels[train_split:]))
# CHANGES FOR MULTICLASS
# 1. change p_threshold
# 2. uncomment target_names in shapelet_train.py
# 3. change labels to labels_multi
# (transfer) 4. add transfer = True flag
|
# Question 2 - conditionals: read an initial balance plus total debits and
# credits, then report whether the final balance is positive, negative or zero.
saldoInicial = float(input('Insira seu saldo inicial: '))
debitos = float(input('Insira o total de debitos: '))
creditos = float(input('Insira o total de creditos: '))
# Final balance = initial balance plus net movement.
saldoFinal = saldoInicial + (creditos - debitos)
if saldoFinal > 0:
    # Fixed typo in the output: "porsitivo" -> "positivo".
    print("Saldo positivo em R$", saldoFinal)
elif saldoFinal < 0:
    # Report the magnitude of a negative balance as a positive number.
    print("Saldo negativo em R$", -saldoFinal)
else:
    print("Saldo zero")
|
from django.db import models
# Field positions offered as choices on Player.position.
# The original tuple listed ("DF", "Defender") twice; Django renders every
# entry in form widgets, so the duplicate appeared twice in dropdowns.
POSITIONS = (
    ("GK", "Goal Keeper"),
    ("DF", "Defender"),
    ("MF", "Midfielder"),
    ("FW", "Forward"),
)
class League(models.Model):
    """A football league; clubs join via Club.leagues (many-to-many)."""
    name = models.CharField(max_length = 100)
    start_date = models.DateField()
    # end_date is optional: an ongoing league has no end date yet.
    end_date = models.DateField(null = True, blank = True)
    edition = models.IntegerField()
    #TODO: add more fields
    def __unicode__(self):
        return self.name
class Club(models.Model):
    """A football club that may participate in several leagues."""
    name = models.CharField(max_length = 100)
    leagues = models.ManyToManyField(League)
    #TODO: add more fields
    def coach(self):
        """
        Return the full name of this club's coach, or a placeholder when
        the club has no coach.
        """
        try:
            return Coach.objects.get(club = self.id).full_name()
        except Coach.DoesNotExist:
            # Only "no coach" is expected here; the original bare except
            # also hid real errors (e.g. multiple coaches, DB failures),
            # which now propagate.
            return "No coach yet"
    def __unicode__(self):
        return self.name
class Person(models.Model):
    """
    This model will be used for reusing code for Coach and Player
    (Coach and Player subclass it via multi-table inheritance).
    """
    first_name = models.CharField(max_length = 50)
    last_name = models.CharField(max_length = 50)
    country = models.CharField(max_length = 50)
    # Every person belongs to exactly one club.
    club = models.ForeignKey(Club)
    #TODO: add more fields
    def full_name(self):
        """Return 'first_name last_name'."""
        return "%s %s" % (self.first_name, self.last_name)
    def __unicode__(self):
        return self.full_name()
class Coach(Person):
    """A club's coach; inherits name/country/club from Person."""
    start_date = models.DateField()
    # Optional: a sitting coach has no end date yet.
    end_date = models.DateField(null = True, blank = True)
    #TODO: add more fields
class Player(Person):
    """A player with FIFA-style attribute ratings (0-based integers)."""
    rating = models.IntegerField(default = 0)
    # Preferred foot: right or left.
    foot = models.CharField(choices = (("R", "Right"), ("L", "Left")), max_length = 2)
    pace = models.IntegerField(default = 0)
    shooting = models.IntegerField(default = 0)
    passing = models.IntegerField(default = 0)
    dribbling = models.IntegerField(default = 0)
    defending = models.IntegerField(default = 0)
    heading = models.IntegerField(default = 0)
    height = models.IntegerField(default = 0)
    age = models.IntegerField(default = 0)
    position = models.CharField(help_text = "Select the best position of this player", choices = POSITIONS, max_length = 2)
    #TODO: add more fields
|
def cast_params_to_ufloat(params, stdev=0.1):
    """Recursively convert the float values of a (possibly nested) mapping
    into `uncertainties.ufloat` values with a relative standard deviation.

    Parameters
    ----------
    params : dict
        Mapping whose float values are converted. Nested dicts are
        processed recursively; non-float, non-dict values pass through
        unchanged.
    stdev : float
        Relative standard deviation applied to every float (default 0.1).

    Returns
    -------
    dict
        A new dict mirroring ``params`` with each float ``v`` replaced by
        ``ufloat(v, v * stdev, tag=key)``.
    """
    from uncertainties import ufloat
    u = {}
    for p, val in params.items():
        if isinstance(val, dict):
            # Recurse, forwarding stdev (the original dropped it and then,
            # because this was not an elif chain, the following else branch
            # overwrote the recursed result with the raw sub-dict).
            u[p] = cast_params_to_ufloat(val, stdev)
        elif isinstance(val, float):
            u[p] = ufloat(val, val * stdev, tag=p)
        else:
            u[p] = val
    return u
|
"""Noticeable difference model for CIE Lab color space."""
def cieLabJND(markSize):
    """Compute the just-noticeable difference along CIE L, a, and b.

    Returns the minimum separation on each CIE Lab axis needed for two
    colors rendered at the given mark size to be noticeably different,
    following the engineering model of Stone, Szafir, and Setlur:
    http://www.danielleszafir.com/2014CIC_48_Stone_v3.pdf.
    """
    # Each axis follows nd(size) = intercept + slope / size.
    jnd_l = 5.079 + 0.751 / markSize
    jnd_a = 5.339 + 1.541 / markSize
    jnd_b = 5.349 + 2.871 / markSize
    return (jnd_l, jnd_a, jnd_b)
|
import pytest
from LayerClient import LayerClient
class MockRequestResponse(object):
    """Minimal stand-in for a ``requests`` response object in unit tests."""

    def __init__(self, ok, json=None, text=None, status_code=200):
        # Mirror the attributes client code reads off a real response.
        self.ok = ok
        self.text = text
        self.status_code = status_code
        self._json = json

    def json(self):
        """Return the canned JSON payload.

        Raises ValueError when no payload was supplied, matching what
        requests does for a non-JSON body.
        """
        payload = self._json
        if payload is None:
            raise ValueError
        return payload
class TestPlatformClient(object):
    """Tests for LayerClient.PlatformClient (fixtures only in this view)."""
    @pytest.yield_fixture
    def layerclient(self):
        # Client built with dummy credentials; no network calls happen at
        # construction time.
        client = LayerClient.PlatformClient(
            'TEST_APP_UUID',
            'TEST_BEARER_TOKEN',
        )
        yield client
|
"""PyTorch Dataset class for visual search stimuli"""
from pathlib import Path
import imageio
import numpy as np
import pandas as pd
import torch
from torch.utils.data import Dataset
class Searchstims(Dataset):
    """Dataset of visual search stimuli.

    Rows are read from a .csv produced by searchnets.data.split and
    filtered down to one split; each item is a dict with the stimulus
    image, a binary target-present label, and the stimulus set size.
    """

    def __init__(self,
                 csv_file,
                 split,
                 transform=None,
                 target_transform=None):
        """
        Parameters
        ----------
        csv_file : str
            name of .csv file generated by searchnets.data.split
        split : str
            Split of entire dataset to use. One of {'train', 'val', 'test'}.
        transform : callable
            transform to be applied to a single image from the dataset
        target_transform : callable
            transform to be applied to target
        """
        if split not in {'train', 'val', 'test'}:
            raise ValueError("split must be one of: {'train', 'val', 'test'}")
        self.csv_file = csv_file
        self.split = split
        self.transform = transform
        self.target_transform = target_transform

        frame = pd.read_csv(csv_file)
        frame = frame[frame['split'] == split]
        self.df = frame

        # Absolute path to each stimulus image: root_output_dir / img_file.
        self.img_paths = np.asarray(
            [str(Path(root) / img)
             for root, img in zip(frame['root_output_dir'].values,
                                  frame['img_file'].values)]
        )
        # Binary label: 1 when the search target is present, else 0.
        self.target_condition = np.asarray(
            [int(cond == 'present')
             for cond in frame['target_condition'].values]
        )
        self.set_size = frame['set_size'].values

    def __len__(self):
        return len(self.img_paths)

    def __getitem__(self, idx):
        if torch.is_tensor(idx):
            idx = idx.tolist()
        img = imageio.imread(self.img_paths[idx])
        target = self.target_condition[idx]
        if self.transform:
            img = self.transform(img)
        if self.target_transform:
            target = self.target_transform(target)
        return {
            'img': img,
            'target': target,
            'set_size': self.set_size[idx],
        }
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2017-01-11 17:41
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Turn stored GSS codes into CURIEs ('gss:<code>') and rename the
    field from gss to geography_curie; also order division sets by
    start_date."""

    dependencies = [
        ("organisations", "0015_organisationdivisionset_mapit_generation_id")
    ]

    def add_curies(apps, schema_editor):
        # Prefix every non-empty GSS code, e.g. 'E05000001' -> 'gss:E05000001'.
        OrganisationDivision = apps.get_model(
            "organisations", "OrganisationDivision"
        )
        qs = OrganisationDivision.objects.exclude(gss="")
        for div in qs:
            div.gss = "gss:{}".format(div.gss)
            div.save()

    def do_nothing(apps, schema_editor):
        # Reverse operation is a no-op: the 'gss:' prefix is not stripped
        # when migrating backwards.
        pass

    operations = [
        migrations.AlterModelOptions(
            name="organisationdivisionset",
            options={"ordering": ("start_date",)},
        ),
        migrations.RunPython(add_curies, do_nothing),
        migrations.RenameField(
            "organisationdivision", "gss", "geography_curie"
        ),
    ]
|
import pytest
import redislite
from pydantic_aioredis.config import RedisConfig
from pydantic_aioredis.model import Model
from pydantic_aioredis.store import Store
@pytest.fixture()
def redis_server(unused_tcp_port):
    """Sets up a fake redis server we can use for tests.

    Starts an embedded redislite instance on a free TCP port and yields the
    port number (not a client) so tests can build their own RedisConfig;
    the instance is closed during fixture teardown.
    """
    instance = redislite.Redis(serverconfig={"port": unused_tcp_port})
    yield unused_tcp_port
    instance.close()
|
from flask import render_template, flash, redirect, g, session, request, url_for
from app import app
from .forms import LoginForm, RegisterForm
from flask.ext.login import login_user , logout_user , current_user , login_required
from app import db, models, lm
# u = models.User(nickname='john', password='12345')
# db.session.add(u)
# db.session.commit()
@lm.user_loader
def load_user(id):
    """Flask-Login callback: reload a User from the session-stored id."""
    return models.User.query.get(int(id))
@app.route('/index', methods=['GET', 'POST'])
@login_required
def index():
    """Home page; reachable only by authenticated users."""
    return render_template('index.html',
                           title='Home')
@app.route('/login' , methods=['GET','POST'])
def login():
    """Render the login form on GET; authenticate and open a session on POST.

    Redirects to the requested 'next' page (or the index) on success, and
    re-renders the form with a flashed error message on failure.
    """
    form = LoginForm()
    if request.method == 'GET':
        return render_template('login.html', title='Login', form=form)
    # POST: validate the submitted credentials. (The original re-created
    # the LoginForm here; a single instance suffices.)
    nickname = request.form['nickname']
    password = request.form['password']
    registered_user = models.User.query.filter_by(nickname=nickname).first()
    if registered_user is None:
        flash('Mauvais identifiants' , 'error')
        return render_template('login.html', title='Authentification',form=form)
    if not registered_user.check_password(password):
        flash('Mauvais mot de passe','error')
        return render_template('login.html', title='Authentification',form=form)
    login_user(registered_user)
    flash('Vous ètes connecté')
    return redirect(request.args.get('next') or url_for('index'))
@app.route('/logout')
def logout():
    """End the current session and send the user back to the login page."""
    logout_user()
    return redirect(url_for('login'))
@lm.unauthorized_handler
def unauthorized():
    """Flask-Login hook: redirect anonymous users to the login page."""
    flash("Vous n'ètes pas connecté")
    return redirect(url_for('login'))
@app.route('/register' , methods=['GET','POST'])
def register():
    """Render the signup form on GET; create the user and redirect on POST.

    NOTE(review): the POST path stores the form values without calling
    form.validate(), and passes the raw password straight to the User
    constructor — confirm the model hashes it.
    """
    form = RegisterForm()
    if request.method == 'GET':
        return render_template('register.html', title='Inscription', form=form)
    user = models.User(request.form['nickname'] , request.form['password'])
    db.session.add(user)
    db.session.commit()
    flash('Utilisateur enregistré')
    return redirect(url_for('login'))
|
from django.db import models
from django_mysql.models import JSONField
class Movies(models.Model):
    """A movie record with IMDB-style metadata, stored in table 'movies'."""
    popularity = models.FloatField(null=True, blank=True)
    director = models.CharField(max_length=256)
    # Genre list stored as JSON (django_mysql JSONField).
    genre = JSONField(null=True, blank=True)
    imdb_score = models.FloatField(null=True, blank=True)
    name = models.TextField()
    # 'deleted' suggests soft-deletion rather than row removal — confirm
    # against the views that filter on status.
    STATUS_CHOICES = (
        ('active', 'active'),
        ('deleted', 'deleted')
    )
    status = models.CharField(choices=STATUS_CHOICES, max_length=20, default='active')
    class Meta:
        db_table = 'movies'
|
#!/usr/bin/python2.7
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import rc
import math
# Plot horizontal tip displacement for the dam-break/FSI benchmark:
# experiment + reference numerics (Liao et al.) vs. the present work.
#plt.rcParams["legend.fontsize"]=13
plt.rcParams["legend.fontsize"]=47
plt.rcParams["font.size"]=45
p = plt.figure(figsize=(24,12),dpi=200)
rc('font',**{'family':'serif','serif':['Times']})
rc('text', usetex=True)
#data = np.loadtxt('dbrubbery.exp.txt',skiprows=2)
#x1 = data[:,0]
##y1 = data[:,1]
# y2 = data[:,2]
# plot1=plt.plot(x1,y2,'kx',markersize=17,markeredgewidth=3,markeredgecolor='black',markerfacecolor='white',label='Exp. Antoci et al.[20]')
# Experimental reference: every second point, plotted as open circles.
data2 = np.loadtxt('dbfsi-Liao2014-exp.txt',skiprows=1)
x2 = data2[::2,0]
y2 = data2[::2,1]
plot1=plt.plot(x2,y2,'ok',linewidth=1.5,markersize=17,markeredgewidth=3,markeredgecolor='black',markerfacecolor='white',label="Exp. (Liao et al. [26])")
##plot2=plt.plot(x2,y2,'b--',label="Vertical displacement (Hwang)")
# Reference numerical solution: dashed black line.
data4 = np.loadtxt('dbfsi-Liao2014-num.txt')
x4 = data4[:,0]
y4 = data4[:,1]
plot1=plt.plot(x4,y4,'--k',linewidth=3,label='Numerical results (Liao et al. [26])')
#data6 = np.loadtxt('./1e-5.dat')
# Present simulation results: solid red line.
data6 = np.loadtxt('./displacementTip154.dat')
x6 = data6[:,0]
y6 = data6[:,1]
plot1=plt.plot(x6,y6,'-r',linewidth=3,label='The present work')
#Add arrow annotating the second impact event:
text = "The second \n impact"
startx = 0.55
starty = 0.015
endx = 0.69
endy = 0.058
plt.annotate(text,xy=(endx,endy),xytext=(startx,starty),arrowprops=dict(arrowstyle="->",connectionstyle="arc3",linewidth=4))
plt.xlim(0.0,1.00)
plt.ylim(-0.08,0.08)
plt.xlabel ('Time (s)',fontsize=47)
plt.ylabel ('Horizontal displacement (m)',fontsize=47)
l=plt.legend(loc='best',numpoints=1)
l.get_frame().set_edgecolor('k')
#plt.legend(loc='upper left',numpoints=1)
#plt.show()
#plt.grid(True)
plt.savefig("dbfsi-deflection.eps")
#plt.savefig("rubber-displacementH-result.tif")
|
#encoding:utf-8
import requests,json
class Controller:
    """Thin client for the UniFi controller REST API at 192.168.103.201:8443.

    NOTE(review): credentials and the controller URL are hard-coded in
    source; consider moving them to configuration.
    """
    username = 'admin'
    password = '123654789'

    def __init__(self):
        # One session so the login cookie is reused across API calls.
        self.session = requests.Session()
        self.session.verify = False  # controller presents a self-signed cert

    def login(self):
        """POST credentials; return the HTTP status code, or 400 when the
        request itself fails (timeout, connection error)."""
        LOGIN_PARAM = {'username': self.username,
                       'password': self.password
                       }
        try:
            response = self.session.post('https://192.168.103.201:8443/api/login', json=LOGIN_PARAM, timeout=3)
            return response.status_code
        except requests.RequestException:
            # Only network-level failures map to 400; the original bare
            # except also swallowed KeyboardInterrupt and programming errors.
            return 400

    def auth(self, usermac, seconds, apmac):
        """Authorize a guest client for a period of time; return the API
        status code (or the failed login status code).

        NOTE(review): the parameter is named ``seconds`` but is sent as the
        'minutes' field — confirm the intended unit with callers.
        """
        Response_Login = self.login()
        if Response_Login == 200:
            AUTH_PARAM = {"cmd": "authorize-guest", "mac": usermac, "minutes": seconds, "ap_mac": apmac}
            Response_Auth = self.session.post('https://192.168.103.201:8443/api/s/default/cmd/stamgr', json=AUTH_PARAM)
            self.session.post('https://192.168.103.201:8443/api/logout')
            return Response_Auth.status_code
        else:
            return Response_Login.status_code

    def restart_AP(self, apmac):
        """Ask the controller to restart the access point with this MAC."""
        Response_Login = self.login()
        if Response_Login == 200:
            RESTART_PARAM = {'mac': apmac,
                             'cmd': 'restart'}
            self.session.post('https://192.168.103.201:8443/api/s/default/cmd/devmgr', json=RESTART_PARAM)
        else:
            print ('login error')

    def get_AP_MAC(self):
        """Return the list of AP MAC addresses known to the controller
        (empty when login fails or no device reports a VAP)."""
        Response_Login = self.login()
        APMAC = []
        params = ({'_depth': 2, 'test': 0})
        if Response_Login == 200:
            # TODO(review): this sends `data=` (request body) with a GET;
            # query parameters would normally go in `params=` — confirm
            # what the controller expects before changing.
            texts = self.session.get('https://192.168.103.201:8443/api/s/default/stat/device', data=params)
            res = json.loads(texts.text.encode('utf8'))  # unicode -> str -> dict
            for ap in res['data']:
                try:
                    APMAC.append(ap['vap_table'][0]['ap_mac'])
                except (KeyError, IndexError):
                    # Device without an active VAP table — skip it.
                    pass
            return APMAC
        #print res['data'][0]['vap_table'][0]['ap_mac']
        # print res['data'][1]['vap_table'][0]['ap_mac']
        # for AP in res['data']:
        #     print AP
|
import copy
import json
import time
from django.urls import reverse
from rest_framework import status
# Baseline import payload: two mutually-related citizens. Tests deep-copy
# this dict and perturb one field at a time.
valid_data = {
    "citizens": [
        {
            "citizen_id": 2,
            "town": "Москва",
            "street": "Льва Толстого",
            "building": "16к7стр5",
            "apartment": 7,
            "name": "Иванов Сергей Иванович",
            "birth_date": "01.04.1997",
            "gender": "male",
            "relatives": [3]
        },
        {
            "citizen_id": 3,
            "town": "Керчь",
            "street": "Иосифа Бродского",
            "building": "2",
            "apartment": 11,
            "name": "Романова Мария Леонидовна",
            "birth_date": "23.11.1986",
            "gender": "female",
            "relatives": [2]
        }
    ]
}
def test_valid_import(client):
    """A well-formed import payload is accepted with 201."""
    response = client.post(reverse('create_import'), json.dumps(valid_data), content_type='application/json')
    assert response.status_code == status.HTTP_201_CREATED, response.data
def test_invalid_date_format(client):
    """Malformed birth_date values are rejected with 400, naming the field."""
    invalid_cases = [
        "1.12.1997",
        "12.1.1997",
        "31.02.1997",
        "12.20.1997",
        "12-1-1997",
        "12.01.1997г",
        "1997",
        "00.05.206",
        "12/12/2012",
        "hello world",
        2012,
        "",
    ]
    for case in invalid_cases:
        data = copy.deepcopy(valid_data)
        data['citizens'][0]['birth_date'] = case
        response = client.post(reverse('create_import'), json.dumps(data), content_type='application/json')
        assert response.status_code == status.HTTP_400_BAD_REQUEST
        assert 'birth_date' in response.data['citizens'][0]
def test_invalid_string_format(client):
    """town/street/building must contain at least one alphanumeric char."""
    fields = ["town", "street", "building"]
    invalid_cases = [
        "!@#",
        "",
        " ",
        "\n",
        "))(*&^%%$#@#$%^&",
        None,
    ]
    for field in fields:
        for case in invalid_cases:
            data = copy.deepcopy(valid_data)
            data['citizens'][0][field] = case
            response = client.post(reverse('create_import'), json.dumps(data), content_type='application/json')
            assert response.status_code == status.HTTP_400_BAD_REQUEST
            assert field in response.data['citizens'][0]
def test_valid_string_format(client):
    """town/street/building with at least one alphanumeric char are accepted."""
    fields = ["town", "street", "building"]
    valid_cases = [
        "1",
        "hello ",
        " hello 123",
        "h))(*&^%%$#@#$%^&",
    ]
    for field in fields:
        for case in valid_cases:
            data = copy.deepcopy(valid_data)
            data['citizens'][0][field] = case
            response = client.post(reverse('create_import'), json.dumps(data), content_type='application/json')
            assert response.status_code == status.HTTP_201_CREATED
def test_not_empty_name(client):
    """Any non-empty name string is accepted (no alphanumeric requirement)."""
    fields = ["name"]
    valid_cases = [
        "1",
        "hello",
        " (()",
        "!@#",
    ]
    for field in fields:
        for case in valid_cases:
            data = copy.deepcopy(valid_data)
            data['citizens'][0][field] = case
            response = client.post(reverse('create_import'), json.dumps(data), content_type='application/json')
            assert response.status_code == status.HTTP_201_CREATED
def test_empty_name_invalid(client):
    """Empty or non-string name values are rejected with 400."""
    fields = ["name"]
    invalid_cases = [
        "",
        None,
        []
    ]
    for field in fields:
        for case in invalid_cases:
            data = copy.deepcopy(valid_data)
            data['citizens'][0][field] = case
            response = client.post(reverse('create_import'), json.dumps(data), content_type='application/json')
            assert response.status_code == status.HTTP_400_BAD_REQUEST
            assert field in response.data['citizens'][0]
def test_non_unique_citizen_ids(client):
    """Duplicate citizen_id values within one import are rejected."""
    data = copy.deepcopy(valid_data)
    data['citizens'][0]['citizen_id'] = data['citizens'][1]['citizen_id']
    # Clear relatives so the duplicate-id check is the only failure cause.
    data['citizens'][0]['relatives'] = data['citizens'][1]['relatives'] = []
    response = client.post(reverse('create_import'), json.dumps(data), content_type='application/json')
    assert response.status_code == status.HTTP_400_BAD_REQUEST
def test_incorrect_number_fields(client):
    """apartment/citizen_id must be non-negative integers."""
    fields = ["apartment", "citizen_id"]
    invalid_cases = [
        "",
        None,
        [],
        -1,
        -20,
        "test",
        {"name": 1}
    ]
    for field in fields:
        for case in invalid_cases:
            data = copy.deepcopy(valid_data)
            data['citizens'][0][field] = case
            response = client.post(reverse('create_import'), json.dumps(data), content_type='application/json')
            assert response.status_code == status.HTTP_400_BAD_REQUEST
            assert field in response.data['citizens'][0]
def test_gender_field(client):
    """gender accepts exactly 'male' or 'female' (case-sensitive)."""
    fields = ["gender"]
    invalid_cases = [
        None,
        "",
        "gender",
        "Male",
        "Female",
        "another random string"
    ]
    valid_cases = [
        "male",
        "female"
    ]
    for field in fields:
        for case in invalid_cases:
            data = copy.deepcopy(valid_data)
            data['citizens'][0][field] = case
            response = client.post(reverse('create_import'), json.dumps(data), content_type='application/json')
            assert response.status_code == status.HTTP_400_BAD_REQUEST
            assert field in response.data['citizens'][0]
        for case in valid_cases:
            data = copy.deepcopy(valid_data)
            data['citizens'][0][field] = case
            response = client.post(reverse('create_import'), json.dumps(data), content_type='application/json')
            assert response.status_code == status.HTTP_201_CREATED
def test_citizens_relatives_not_exist(client):
    """Referencing a citizen_id absent from the payload is rejected."""
    data = copy.deepcopy(valid_data)
    data['citizens'][0]['relatives'] = [5]
    response = client.post(reverse('create_import'), json.dumps(data), content_type='application/json')
    assert response.status_code == status.HTTP_400_BAD_REQUEST
def test_citizens_relatives_not_valid(client):
    """A one-sided relatives link (citizen 3 still lists 2) is rejected."""
    data = copy.deepcopy(valid_data)
    data['citizens'][0]['relatives'] = []
    response = client.post(reverse('create_import'), json.dumps(data), content_type='application/json')
    assert response.status_code == status.HTTP_400_BAD_REQUEST
def test_disallow_excess_field(client):
    """Unknown keys on a citizen record are rejected with 400."""
    data = copy.deepcopy(valid_data)
    data['citizens'][0]["excess_field"] = "value"
    response = client.post(reverse('create_import'), json.dumps(data), content_type='application/json')
    assert response.status_code == status.HTTP_400_BAD_REQUEST
def test_retrieve_citizens(client):
    """A created import can be fetched back and round-trips unchanged."""
    data = copy.deepcopy(valid_data)
    create_response = client.post(reverse('create_import'), json.dumps(data), content_type='application/json')
    assert create_response.status_code == status.HTTP_201_CREATED
    import_id = create_response.data['data']['import_id']
    retrieve_response = client.get(reverse('retrieve_import', args=[import_id]))
    assert retrieve_response.status_code == status.HTTP_200_OK
    assert retrieve_response.data['data'] == data['citizens']
|
import pandas as pd
import os
# Exploratory look at the periodic-table data set.
data = pd.read_csv("elements.csv")
#print(data.iloc[5:6,5:6])
#print(data.index[1])
#a = data.iloc[5:6,5:6]
#float(a)
#print("Melting point", a)
#print(data[["Element", "Symbol"]])
# First column of the frame. iloc's end index is exclusive, so 0:1 selects
# column 0 — the original 0:0 selected an empty frame and printed nothing
# useful.
col = data.iloc[:, 0:1]
print(col)
s1 = data['Number']
# Value of the 'Number' column in the fourth row (label 3).
print((data['Number'])[3])
|
'''
169. Majority Element
Given an array nums of size n, return the majority element.
The majority element is the element that appears more than ⌊n / 2⌋ times. You may assume that the majority element always exists in the array.
Example 1:
Input: nums = [3,2,3]
Output: 3
Example 2:
Input: nums = [2,2,1,1,1,2,2]
Output: 2
Constraints:
n == nums.length
1 <= n <= 5 * 104
-231 <= nums[i] <= 231 - 1
Follow-up: Could you solve the problem in linear time and in O(1) space?
'''
from typing import DefaultDict, List
class Solution:
    def majorityElement(self, nums: List[int]) -> int:
        """Return the element appearing more than ⌊n/2⌋ times in nums.

        Uses Boyer-Moore voting — O(n) time, O(1) space, answering the
        stated follow-up. A majority element is guaranteed to exist, so
        the surviving candidate is the answer.

        The original body instantiated typing.DefaultDict at runtime
        (works only via a deprecated alias; collections.defaultdict is the
        real type) and carried a second, unreachable sort-based approach
        after the counting loop's guaranteed return.
        """
        candidate = None
        votes = 0
        for n in nums:
            if votes == 0:
                candidate = n
            # The candidate gains a vote from a match, loses one otherwise.
            votes += 1 if n == candidate else -1
        return candidate
|
from models.exceptions import ACCESS_DENIED, BIRTHDAY_NOT_FOUND
from utils.database import SQLite3Instance
class Birthdays:
    """Data-access helper for one user's saved birthday records."""

    def __init__(self, user_id):
        # user_id scopes every query; other users' records are never touched.
        self.user_id = user_id
        self.db = SQLite3Instance()

    def get_birthdays(self) -> list:
        """Return all birthday records belonging to this user.

        :return: list(dict)
        """
        # NOTE(review): the WHERE clause is built by f-string interpolation.
        # Safe only while user_id is a trusted integer; prefer bound
        # parameters if SQLite3Instance supports them.
        where_condition = f'WHERE user_id={self.user_id}'
        query = self.db.select('birthdays', [], where=where_condition)
        return query

    def add_birthday(self, in_data):
        """Insert a new birthday record for this user.

        :param in_data: dict of input data (name, gender, birthday, comment);
            mutated in place to carry the owner's user_id
        :return: None
        """
        in_data['user_id'] = self.user_id
        self.db.insert('birthdays', in_data)

    def del_birthday(self, birthday_id):
        """Delete a birthday record owned by this user.

        :param birthday_id: id of the record to delete
        :raises BIRTHDAY_NOT_FOUND: when no record has this id
        :raises ACCESS_DENIED: when the record belongs to another user
        :return: None
        """
        # Check that a record with this id exists.
        where_condition = f'WHERE id={birthday_id}'
        query = self.db.select('birthdays', ['user_id'], where_condition)
        if not query:
            raise BIRTHDAY_NOT_FOUND
        # Check that the record belongs to the requesting user.
        if query[0]['user_id'] != self.user_id:
            raise ACCESS_DENIED
        # Delete it.
        self.db.delete('birthdays', where_condition)
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Create the initial competition tables: Address, BrewerProfile,
        Category, Style, Submission, Judge, and the two judge/category
        preference M2M join tables."""
        # Adding model 'Address'
        db.create_table('competition_address', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('street_1', self.gf('django.db.models.fields.CharField')(max_length=100)),
            ('street_2', self.gf('django.db.models.fields.CharField')(max_length=100, blank=True)),
            ('city', self.gf('django.db.models.fields.CharField')(max_length=100)),
            ('state', self.gf('django.contrib.localflavor.us.models.USStateField')(max_length=2)),
            ('zip', self.gf('django.db.models.fields.CharField')(max_length=5)),
        ))
        db.send_create_signal('competition', ['Address'])
        # Adding model 'BrewerProfile'
        db.create_table('competition_brewerprofile', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('user', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['auth.User'], unique=True)),
            ('address', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['competition.Address'], unique=True)),
            ('phone_number', self.gf('django.contrib.localflavor.us.models.PhoneNumberField')(max_length=20, blank=True)),
            ('entries_paid', self.gf('django.db.models.fields.IntegerField')(blank=True)),
            ('insert_date', self.gf('django.db.models.fields.DateField')(auto_now_add=True, blank=True)),
        ))
        db.send_create_signal('competition', ['BrewerProfile'])
        # Adding model 'Category'
        db.create_table('competition_category', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('category_id', self.gf('django.db.models.fields.IntegerField')()),
            ('category_name', self.gf('django.db.models.fields.CharField')(max_length=200)),
            ('category_type', self.gf('django.db.models.fields.CharField')(default='Lager', max_length=200)),
        ))
        db.send_create_signal('competition', ['Category'])
        # Adding model 'Style'
        db.create_table('competition_style', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('category', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['competition.Category'])),
            ('style_id', self.gf('django.db.models.fields.CharField')(max_length=1)),
            ('style_name', self.gf('django.db.models.fields.CharField')(max_length=200)),
        ))
        db.send_create_signal('competition', ['Style'])
        # Adding model 'Submission'
        db.create_table('competition_submission', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('brewer', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['competition.BrewerProfile'])),
            ('style', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['competition.Style'])),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=200)),
            ('comments', self.gf('django.db.models.fields.TextField')(blank=True)),
        ))
        db.send_create_signal('competition', ['Submission'])
        # Adding model 'Judge'
        db.create_table('competition_judge', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('first_name', self.gf('django.db.models.fields.CharField')(max_length=100)),
            ('last_name', self.gf('django.db.models.fields.CharField')(max_length=100)),
            ('phone_number', self.gf('django.contrib.localflavor.us.models.PhoneNumberField')(max_length=20, blank=True)),
            ('email', self.gf('django.db.models.fields.EmailField')(max_length=254)),
            ('club_affiliation', self.gf('django.db.models.fields.CharField')(max_length=200, blank=True)),
            ('judge_pref', self.gf('django.db.models.fields.CharField')(default='Judge', max_length=100)),
            ('qualification', self.gf('django.db.models.fields.CharField')(default='AP', max_length=3)),
            ('bjcp_registration', self.gf('django.db.models.fields.CharField')(max_length=100, blank=True)),
            ('notes', self.gf('django.db.models.fields.TextField')(blank=True)),
        ))
        db.send_create_signal('competition', ['Judge'])
        # Adding M2M table for field cat_pref_yes on 'Judge'
        db.create_table('competition_judge_cat_pref_yes', (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('judge', models.ForeignKey(orm['competition.judge'], null=False)),
            ('category', models.ForeignKey(orm['competition.category'], null=False))
        ))
        db.create_unique('competition_judge_cat_pref_yes', ['judge_id', 'category_id'])
        # Adding M2M table for field cat_pref_no on 'Judge'
        db.create_table('competition_judge_cat_pref_no', (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('judge', models.ForeignKey(orm['competition.judge'], null=False)),
            ('category', models.ForeignKey(orm['competition.category'], null=False))
        ))
        db.create_unique('competition_judge_cat_pref_no', ['judge_id', 'category_id'])
def backwards(self, orm):
    """Reverse this migration: drop every table forwards() created.

    Model tables are dropped first (in creation order), followed by the
    two M2M join tables backing Judge.cat_pref_yes / cat_pref_no.
    """
    for table_name in ('competition_address',
                       'competition_brewerprofile',
                       'competition_category',
                       'competition_style',
                       'competition_submission',
                       'competition_judge',
                       'competition_judge_cat_pref_yes',
                       'competition_judge_cat_pref_no'):
        db.delete_table(table_name)
# Frozen ORM snapshot used by South to rebuild model state for this
# migration; auto-generated -- do not edit by hand.
models = {
    'auth.group': {
        'Meta': {'object_name': 'Group'},
        'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
        'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
    },
    'auth.permission': {
        'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
        'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
        'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
        'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
    },
    'auth.user': {
        'Meta': {'object_name': 'User'},
        'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
        'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
        'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
        'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
        'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
        'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
        'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
        'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
        'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
        'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
        'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
        'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
    },
    'competition.address': {
        'Meta': {'object_name': 'Address'},
        'city': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
        'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'state': ('django.contrib.localflavor.us.models.USStateField', [], {'max_length': '2'}),
        'street_1': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
        'street_2': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
        'zip': ('django.db.models.fields.CharField', [], {'max_length': '5'})
    },
    'competition.brewerprofile': {
        'Meta': {'object_name': 'BrewerProfile'},
        'address': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['competition.Address']", 'unique': 'True'}),
        'entries_paid': ('django.db.models.fields.IntegerField', [], {'blank': 'True'}),
        'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'insert_date': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
        'phone_number': ('django.contrib.localflavor.us.models.PhoneNumberField', [], {'max_length': '20', 'blank': 'True'}),
        'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
    },
    'competition.category': {
        'Meta': {'ordering': "['category_id']", 'object_name': 'Category'},
        'category_id': ('django.db.models.fields.IntegerField', [], {}),
        'category_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
        'category_type': ('django.db.models.fields.CharField', [], {'default': "'Lager'", 'max_length': '200'}),
        'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
    },
    'competition.judge': {
        'Meta': {'object_name': 'Judge'},
        'bjcp_registration': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
        'cat_pref_no': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'no+'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['competition.Category']"}),
        'cat_pref_yes': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'yes+'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['competition.Category']"}),
        'club_affiliation': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
        'email': ('django.db.models.fields.EmailField', [], {'max_length': '254'}),
        'first_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
        'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'judge_pref': ('django.db.models.fields.CharField', [], {'default': "'Judge'", 'max_length': '100'}),
        'last_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
        'notes': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
        'phone_number': ('django.contrib.localflavor.us.models.PhoneNumberField', [], {'max_length': '20', 'blank': 'True'}),
        'qualification': ('django.db.models.fields.CharField', [], {'default': "'AP'", 'max_length': '3'})
    },
    'competition.style': {
        'Meta': {'object_name': 'Style'},
        'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['competition.Category']"}),
        'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'style_id': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
        'style_name': ('django.db.models.fields.CharField', [], {'max_length': '200'})
    },
    'competition.submission': {
        'Meta': {'object_name': 'Submission'},
        'brewer': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['competition.BrewerProfile']"}),
        'comments': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
        'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
        'style': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['competition.Style']"})
    },
    'contenttypes.contenttype': {
        'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
        'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
        'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
        'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
    }
}
# The app whose models this migration freezes.
complete_apps = ['competition']
|
#!/bin/python3
"""Project selected CSV columns.

Reads CSV rows from stdin; the header row is recognized by its first cell
being 'id'.  For every data row, prints the list of values whose column
name was given on the command line.
"""
import re
import sys
from sys import stdin

colnames = []
reqColumns = sys.argv[1:]
for raw_line in stdin:
    record = raw_line.replace("\n", "").split(',')
    if record[0] == 'id':
        # Header row: remember the column names, emit nothing.
        colnames = record
        continue
    # Pair each value with its column name.  zip() truncates to the shorter
    # sequence, so a row longer than the header no longer raises IndexError
    # (the old index-based loop did), and the inner loop no longer shadows
    # the stdin loop variable.
    selectedColumns = [value for name, value in zip(colnames, record)
                       if name in reqColumns]
    print(selectedColumns)
|
def matrixReshape(nums, r, c):
    """Reshape 2-D list *nums* into r rows of c columns.

    If the element count does not match r*c, the original matrix object is
    returned unchanged.
    """
    if r * c != len(nums) * len(nums[0]):
        return nums
    # Flatten row-major, then cut the flat list into r slices of length c.
    flat = [value for row in nums for value in row]
    return [flat[k * c:(k + 1) * c] for k in range(r)]

print(matrixReshape([[1,2],[3,4]], 1, 4))
|
"""Tests relating to submitting solutions"""
# pylint: disable=invalid-name, no-name-in-module, import-error
import auacm, unittest
from unittest.mock import patch
from mocks import MockResponse, MockFile, PROBLEMS_RESPONSE
class SubmitTests(unittest.TestCase):
    """Tests relating to submits"""

    # mock.patch decorators apply bottom-up, so the injected mocks arrive as
    # (mock_post, mock_get, mock_open) -- requests.post is the innermost.
    @patch('builtins.open')
    @patch('requests.get')
    @patch('requests.post')
    def testGoodSubmit(self, mock_post, mock_get, mock_open):
        """A valid submission"""
        mock_open.return_value = MockFile()
        # Three GET responses in order: the problem listing, then two status
        # polls ('start' -> still running, 'good' -> accepted).
        mock_get.side_effect = [
            MockResponse(json=PROBLEMS_RESPONSE),
            MockResponse(json={'data': {'status': 'start'}}),
            MockResponse(json={'data': {'status': 'good'}})]
        mock_post.return_value = MockResponse(
            json={'data': {'submissionId': '0'}})
        result = auacm.submit.submit(['problem 1', 'fake.c'], False)
        # The returned transcript should mention both the running phase and
        # the final correct verdict.
        self.assertIn('running', result.lower())
        self.assertIn('correct', result.lower())

    @patch('builtins.open')
    def testBadFileSubmit(self, mock_open):
        """Attempt to submit a bad file"""
        # An unreadable source file must surface as InvalidSubmission.
        mock_open.side_effect = IOError
        self.assertRaises(
            auacm.exceptions.InvalidSubmission,
            auacm.submit.submit, ['problem 1', 'notafile.cpp'])
# Allow running this module directly as a test script.
if __name__ == '__main__':
    unittest.main()
|
import matplotlib.pyplot as plt
import numpy as np
# path in computer and clusters
path_comp_moumita = "/media/moumita/Research/Files/University_Colorado/Work/work4/Spells_data_results/results/CNN/imp_results/graph/"
path_comp_brandon = ""
path_cluster = "/projects/mosa2108/spells/"
path = path_comp_moumita
# x positions and tick labels for the three rainfall classes.
x = np.array([1, 2, 3])
x_labels = ['class 1', 'class 2', 'class 3']
# all are kept in order as class1, class 2, class 3
# Per-variable precision/recall/f-score for the 3-pressure-layer runs;
# 'comb' is the combined uwnd+vwnd+hgt model.
precision_3layer_uwnd = [.87,.72,.83]
precision_3layer_vwnd = [.79,.73,.84]
precision_3layer_hgt = [.89,.74,.77]
precision_3layer_comb = [.91,.78,.85]
recall_3layer_uwnd = [.77,.9,.72]
recall_3layer_vwnd = [.82,.78,.74]
recall_3layer_hgt = [.79,.77,.79]
recall_3layer_comb = [.86,.85,.81]
fscore_3layer_uwnd = [.82,.8,.77]
fscore_3layer_vwnd = [.8,.75,.79]
fscore_3layer_hgt = [.84,.75,.77]
fscore_3layer_comb = [.88,.81,.83]
accuracy_3layer = [.79, .78, .78, .85] # in order uwnd, vwnd, hgt, comb
# Correctly classified sample counts per class (middle class unreported -> 0).
correct_uwnd = [612,0,569]
correct_vwnd = [648,0,585]
correct_hgt = [624, 0, 629]
correct_comb = [679,0,644]
x_labels_accuracy =['uwnd', 'vwnd','hgt','uwnd+vwnd+hgt']
# Zeros act as visual gaps between the precision/recall/f-score/accuracy groups.
comp_1layers_3hist = [.87,.75,.85,0,.85,.85,.75,0,.86,.8,.8,0,.81] # all results for comb-- precision, recall, fscore, accuracy of all classes respectively
comp_3layers_3hist = [.91,.78,.85,0,.86,.85,.81,0,.88,.81,.83,0,.85]
#### Plot graph.................................................................
# plt.figure()
# plt.plot(x,precision_3layer_uwnd, marker = 'x', linestyle = '--', linewidth = '0.5', color = 'g', label= 'uwnd')
# plt.plot(x,precision_3layer_vwnd, marker = 'x', linestyle = '--', linewidth = '0.5',color = 'c', label= 'vwnd')
# plt.plot(x,precision_3layer_hgt, marker = 'x', linestyle = '--', linewidth = '0.5',color = 'brown', label= 'hgt')
# plt.plot(x,precision_3layer_comb, marker = 'X', linestyle = '--', color = 'b', label= 'uwnd+vwnd+hgt')
# plt.legend()
# plt.grid(axis='y', linestyle = ':', linewidth = '0.5')
#
# plt.xlabel('Daily rainfall classes', fontweight='bold')
# plt.ylabel('Precision', fontweight='bold')
# plt.xticks(x, x_labels, rotation=0)
# plt.title('Classification at lead 3 (pressure layers: 3, history of features: 3)')
# plt.tight_layout()
# plt.savefig(path+'precision_comp.png')
# plt.show()
#### Bar graph.................................................................
# plt.figure()
# plt.bar([1,2,3,4], accuracy_3layer, color=('g','c','brown','b'), width=0.6)
# plt.grid(axis='y', linestyle = ':', linewidth = '0.5')
# plt.xlabel('Variables', fontweight='bold')
# plt.ylabel('Overall Accuracy', fontweight='bold')
# plt.xticks([1,2,3,4], x_labels_accuracy, rotation=0)
# plt.title('Classification at lead 3 (pressure layers: 3, history of features: 3)')
# plt.tight_layout()
# plt.savefig(path+'accuracy_comp.png')
# plt.show()
# #### Group Bar graph.................................................................
# plt.figure()
#
# # set width of bar
# barWidth = 0.25
# # Set position of bar on X axis
# r1 = np.arange(len(correct_uwnd))
# r2 = [x + barWidth for x in r1]
# r3 = [x + barWidth for x in r2]
# r4 = [x + barWidth for x in r3]
#
#
# plt.bar(r1, correct_uwnd, color='g', width=barWidth, edgecolor='white', label='uwnd')
# plt.bar(r2, correct_vwnd, color='c', width=barWidth, edgecolor='white', label='vwnd')
# plt.bar(r3, correct_hgt, color='brown', width=barWidth, edgecolor='white', label='hgt')
# plt.bar(r4, correct_comb, color='b', width=barWidth, edgecolor='white', label='uwnd+vwnd+hgt')
#
# plt.grid(axis='y', linestyle = ':', linewidth = '0.5')
# plt.ylabel('Correctly classified samples', fontweight='bold')
# # Add xticks on the middle of the group bars
# plt.xlabel('Rainfall classes', fontweight='bold')
# plt.xticks([r + barWidth for r in range(len(correct_uwnd))], ['class 1', '', 'class 3'])
#
# # Create legend & Show graphic
# plt.legend()
# plt.title('Classification at lead 3 (pressure layers: 3, history of features: 3)')
# plt.tight_layout()
# plt.savefig(path+'correctly classified_comp.png')
# plt.show()
# #### Group Bar graph 2.................................................................
plt.figure()

# Two bar series side by side: 1 pressure layer vs 3 pressure layers,
# shifted horizontally by one bar width.
bar_width = 0.25
base_pos = np.arange(len(comp_1layers_3hist))
shifted_pos = [pos + bar_width for pos in base_pos]

plt.bar(base_pos, comp_1layers_3hist, color='gray', width=bar_width,
        edgecolor='white', label='pressure layer: 1')
plt.bar(shifted_pos, comp_3layers_3hist, color='b', width=bar_width,
        edgecolor='white', label='pressure layer: 3')

plt.grid(axis='y', linestyle = ':', linewidth = '0.5')
plt.ylabel('Classification measures', fontweight='bold')
# Group ticks sit between the paired bars; empty labels separate the groups.
plt.xlabel('precision recall f-score accuracy', fontweight='bold')
plt.xticks([pos + bar_width for pos in range(len(comp_1layers_3hist))],
           ['class 1', 'class 2', 'class 3', '',
            'class 1', 'class 2', 'class 3', '',
            'class 1', 'class 2', 'class 3', '',
            'overall'], rotation = 45)

plt.legend(loc = 4)
plt.title('Classification at lead 3 (uwnd+vwnd+hgt)')
plt.tight_layout()
plt.savefig(path+'pressurelayers_comp.png')
plt.show()
|
class Solution(object):
    def searchInsert(self, nums, target):
        """Return the index where *target* is (or would be inserted) in the
        sorted list *nums* -- a hand-rolled bisect-left.

        https://leetcode.com/problems/search-insert-position/
        """
        # Guard clauses: target belongs before the first or after the last element.
        if target <= nums[0]:
            return 0
        if target > nums[-1]:
            return len(nums)
        lo, hi = 0, len(nums) - 1
        while lo <= hi:
            mid = (lo + hi) // 2
            # mid is the insert point when target falls in (nums[mid-1], nums[mid]].
            if nums[mid - 1] < target <= nums[mid]:
                return mid
            if nums[mid] > target:
                hi = mid - 1
            else:
                lo = mid + 1
        return mid
|
# Generated by Django 2.2.7 on 2019-12-26 13:11
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Add the Notification model and make Reply.replies optional."""

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('statics', '0007_reply_replies'),
    ]

    operations = [
        # Allow the self-referential replies M2M to be left empty.
        migrations.AlterField(
            model_name='reply',
            name='replies',
            field=models.ManyToManyField(blank=True, related_name='_reply_replies_+', to='statics.Reply'),
        ),
        # Notification links a sender (from_user), a recipient (to_user), and
        # the review/reply that triggered it; 'type' records the event kind.
        migrations.CreateModel(
            name='Notification',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('type', models.CharField(choices=[('comment_review', 'Comment Review'), ('comment_reply', 'Comment Reply'), ('like_review', 'Like Review')], max_length=255, verbose_name='알림 유형')),
                ('date', models.DateTimeField(auto_now_add=True)),
                ('from_user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='from_notifications', to=settings.AUTH_USER_MODEL)),
                ('reply', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='notifications', to='statics.Reply')),
                ('review', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='notifications', to='statics.Review')),
                ('to_user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='to_notifications', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': '알림',
                'verbose_name_plural': '알림',
                'ordering': ['-id'],
            },
        ),
    ]
|
from django.shortcuts import render
from tickets.models import Ticket
from tickets.forms import TicketForm
# Create your views here.
def ticket_search(request):
    """Render search_results.html with tickets whose name contains ?query=.

    Uses GET.get() with a default so a request without the 'query' parameter
    lists all tickets instead of raising MultiValueDictKeyError (HTTP 500).
    """
    query = request.GET.get('query', '')
    tickets = Ticket.objects.filter(ticketName__icontains=query)
    form = TicketForm(instance=None)
    return render(request, "search_results.html", {"tickets": tickets, 'form': form})
|
from django.contrib import admin
from .models import Token
class TokenAdmin(admin.ModelAdmin):
    """Admin configuration for Token: the token value is shown read-only."""
    readonly_fields = ("token",)


# Register Token under the customized admin.
admin.site.register(Token, TokenAdmin)
|
import mnist
import numpy as np
import matplotlib.pyplot as plt
from numpy.lib.stride_tricks import as_strided
import classifiers
from scipy.linalg import svd
import math
# Solver selector tags, compared against classifier_type below.
PEGASOS = 0
SGDQN = 1
ASGD = 2
# Load MNIST; labels are one-hot, and the 0 entries are recoded to -1 so the
# targets are in {-1, +1} for hinge-loss style training.
data = mnist.read_data_sets("MNIST_data/", one_hot=True)
print data.train.images.shape
print data.train.labels.shape
train_image = data.train.images.copy()
train_label = data.train.labels.copy()
train_label[train_label == 0 ] = -1
test_image = data.test.images.copy()
test_label = data.test.labels.copy()
test_label[test_label == 0] = -1
# 1 == SGDQN (see the tags above).
classifier_type = 1
if classifier_type == SGDQN:
    reg = 1e-4
    model = classifiers.SGDQN(reg, 1e5, 10, X=train_image, Y=train_label, maxiter = 1e8, check = True)
    # One online update per training sample; double brackets keep a 2-D row.
    for x in xrange(0,50000):
        model.update(train_image[[x]], train_label[[x]])
    print "accuracy", model.score(test_image, test_label)
elif classifier_type == ASGD:
    reg = 1e-4
    model = classifiers.ASGD(reg, X=train_image, Y=train_label, maxiter = 1e8, check = True)
    for x in xrange(0,50000):
        model.update(train_image[[x]], train_label[[x]])
    print "accuracy", model.score(test_image, test_label)
elif classifier_type == PEGASOS:
    reg = 1e-4
    # NOTE(review): 'reg' is assigned but the literal 1e-4 is passed instead --
    # presumably they are meant to be the same value; confirm before changing.
    model = classifiers.Pegasos(1e-4, 1, X=train_image, Y=train_label, maxiter = 1e8, check = True)
    for x in xrange(0,50000):
        model.update(train_image[[x]], train_label[[x]])
    print "accuracy", model.score(test_image, test_label)
|
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from pants.util.osutil import OS_ALIASES, _values, normalize_os_name
def test_alias_normalization() -> None:
    """Every alias of an OS must normalize to its canonical name."""
    for canonical, aliases in OS_ALIASES.items():
        for candidate in aliases:
            assert normalize_os_name(candidate) == canonical
def test_keys_in_aliases() -> None:
    """Each canonical name must itself appear among the known alias values."""
    known_names = _values(OS_ALIASES)
    for canonical in OS_ALIASES:
        assert canonical in known_names
def test_no_warnings_on_known_names(caplog) -> None:
    """Normalizing every known name should emit no log records."""
    for known in _values(OS_ALIASES):
        normalize_os_name(known)
    assert not caplog.records
def test_warnings_on_unknown_names(caplog) -> None:
    """An unrecognized OS name logs exactly one 'unknown name' warning."""
    bogus = "I really hope no one ever names an operating system with this string."
    normalize_os_name(bogus)
    assert len(caplog.records) == 1
    assert "Unknown operating system name" in caplog.text
|
import os
import io
from flask import json, jsonify
from app import app
AUTH_TOKEN = ""  # module-level default; get_token() stores the real token in os.environ


def get_token():
    """Read the bearer token from auth_token.txt into os.environ['AUTH_TOKEN'].

    The trailing newline that readline() keeps is stripped -- without this,
    the 'Authorization: Bearer <token>' headers built by the other tests
    contain an embedded newline and are invalid.
    """
    print('get_token')
    token_file = 'auth_token.txt'
    with open(token_file, 'r') as f:
        os.environ["AUTH_TOKEN"] = f.readline().strip()
    print(os.environ["AUTH_TOKEN"])
    print('')
def upload_file():
    """Exercise POST /files/upload/users/avatar with two in-memory files.

    Each upload is sent as multipart/form-data; the response body and
    status code are printed for manual inspection.
    """
    print('testing /files/upload/users/avatar POST')
    uploads = [
        ('Uploading file 1',
         {'filename': '79c2b036-2efb-4f82-bd8b-6158fe0f36de.jpeg',
          'file': (io.BytesIO(b"5yt5fhg"), 'tynvnmm.jpeg')}),
        ('Uploading file 2',
         {'filename': '27e264dd-aa46-4c98-8c04-aecace218d9e.png',
          'file': (io.BytesIO(b"8yt5JHdrrdg445"), 'heyhere.png')}),
    ]
    with app.test_client() as c:
        for label, payload in uploads:
            print(label)
            rv = c.post('/files/upload/users/avatar',
                        content_type='multipart/form-data',
                        data=payload)
            print(rv.data)
            print('status: ', rv.status_code)
def test_create():
    """POST /users with a bearer token; prints response body and status."""
    print('testing /users POST')
    print(os.environ["AUTH_TOKEN"])
    new_user = {
        "id": None,
        "email": "vovaprivalov@gmail.com",
        "firstName": None,
        "lastName": None,
        "phoneNumber": "34645346",
        "role": "user",
        "authenticationUid": None,
        "disabled": False,
        "avatar": []
    }
    with app.test_client() as c:
        token = os.environ["AUTH_TOKEN"]
        print(token)
        headers = {
            'Authorization': 'Bearer ' + str(token)
        }
        rv = c.post('/users', json=new_user, headers=headers)
        print(rv.data)
        print('status: ', rv.status_code)
def test_get_all():
    """GET /users (no auth header) and print the raw response."""
    print('testing /users GET')
    with app.test_client() as c:
        response = c.get('/users', json={})
        print(response.data)
        print('status: ', response.status_code)
def test_get_user():
    """GET a single user by id and print the raw response."""
    print('testing /users/<user_id> GET')
    user_id = "c99aa62f-a553-4d09-8fba-2a0a7d834ddd"
    with app.test_client() as c:
        response = c.get('/users/%s' % user_id, json={})
        print(response.data)
        print('status: ', response.status_code)
def test_put_user():
    """PUT an updated user record with a bearer token; prints the response."""
    print('testing /users/<user_id> PUT')
    token = os.environ["AUTH_TOKEN"]
    print(token)
    headers = {
        'Authorization': 'Bearer ' + str(token)
    }
    user_id = "4a652f44-0b07-49a2-a0d2-20dca2b93223"
    updated_user = {
        "id": user_id,
        "email": "vovaprivalov90@gmail.com",
        "firstName": "Nick",
        "lastName": None,
        "phoneNumber": "233235894",
        "role": "admin",
        "disabled": False,
        "avatar": []
    }
    with app.test_client() as c:
        response = c.put('/users/%s' % user_id, json=updated_user, headers=headers)
        print(response.data)
        print('status: ', response.status_code)
def test_delete_user():
    """DELETE a user by id with a bearer token; prints the response."""
    print('testing /users/<user_id> DELETE')
    headers = {
        'Authorization': 'Bearer ' + str(os.environ["AUTH_TOKEN"])
    }
    user_id = "d17014dd-7ce3-4520-9faf-3d66612d2d8c"
    with app.test_client() as c:
        response = c.delete('/users/%s' % user_id, json={}, headers=headers)
        print(response.data)
        print('status: ', response.status_code)
if __name__ == '__main__':
    # Manual smoke-test driver: refresh the auth token, then exercise the
    # upload endpoint.  The remaining calls are left disabled; uncomment to run.
    get_token()
    upload_file()
    #test_create()
    #test_get_all()
    # test_get_user()
    #test_put_user()
    # test_delete_user()
|
from django import forms
from . models import Technologies
class TechnologiesForm(forms.ModelForm):
    """ModelForm exposing only the 'techno' field of Technologies."""
    class Meta:
        model = Technologies
        fields = ["techno"]
|
import math, sys
import os.path
import cmath
from math import sqrt
#################################################################################################################
#################################################################################################################
# data structure to store information about each residue with the docked ligand.
class Mol:
    """A parsed Mol2 molecule: its name plus atom, bond and residue records."""
    def __init__(self, name, atom_list, bond_list, residue_list):
        # The name is always stored as a string, whatever type was parsed.
        self.name = str(name)
        self.atom_list, self.bond_list = atom_list, bond_list
        # Mapping of residue number -> list of atom objects.
        self.residue_list = residue_list
class atom:
    """One ATOM record from a Mol2 file: coordinates, charge, typing, residue."""
    def __init__(self, X, Y, Z, Q, type, name, num, resnum, resname):
        # Cartesian coordinates and partial charge, coerced to float.
        self.X, self.Y, self.Z = float(X), float(Y), float(Z)
        self.Q = float(Q)
        # Set to True later by ID_heavy_atoms() for non-hydrogen atoms.
        self.heavy_atom = False
        self.type = type
        self.name = name
        # Atom serial and residue number, coerced to int.
        self.num = int(num)
        self.resnum = int(resnum)
        self.resname = resname
class bond:
    """One BOND record: the two atom serials it joins, its own serial,
    and the Tripos bond type string."""
    def __init__(self, a1_num, a2_num, num, type):
        # Serial numbers arrive as strings from the parser; store as ints.
        self.a1_num, self.a2_num = int(a1_num), int(a2_num)
        self.num = int(num)
        self.type = type
class residue:
    """A residue: the atoms it owns plus its number and name."""
    def __init__(self, atom_list, resnum, resname):
        self.atom_list = atom_list
        # Residue number coerced to int; name kept as given.
        self.resnum = int(resnum)
        self.resname = resname
#################################################################################################################
#################################################################################################################
def read_Mol2_file(file):
    """Parse a (multi-)Mol2 file and return a list of Mol objects.

    Section flags track which @<TRIPOS> block the current line belongs to.
    A molecule is finalized when its SUBSTRUCTURE section is reached, so a
    trailing molecule without a SUBSTRUCTURE record is never emitted
    (behavior kept from the original parser).
    """
    file1 = open(file,'r')
    lines = file1.readlines()
    file1.close()
    atom_list = []
    bond_list = []
    residue_list = {}
    mol_list = []
    flag_atom = False
    flag_bond = False
    flag_substr = False
    flag_mol = False
    flag_getName = False
    i = 0 # i is the num of molecules read so far
    for line in lines:
        linesplit = line.split() #split on white space
        # Section headers are single tokens; switch the active-section flags.
        if (len(linesplit) == 1):
            if(linesplit[0] == "@<TRIPOS>MOLECULE"):
                i = i + 1
                line_num = 0
                flag_mol = True
                flag_atom = False
                flag_bond = False
                flag_substr = False
            if(linesplit[0] == "@<TRIPOS>ATOM"):
                flag_atom = True
                flag_bond = False
                flag_substr = False
                flag_mol = False
            if(linesplit[0] == "@<TRIPOS>BOND"):
                flag_bond = True
                flag_substr = False
                flag_mol = False
                flag_atom = False
            if(linesplit[0] == "@<TRIPOS>SUBSTRUCTURE"):
                flag_substr = True
                flag_mol = False
                flag_atom = False
                flag_bond = False
        # The molecule name is the first line after @<TRIPOS>MOLECULE.
        if (flag_mol and (not flag_getName) and len(linesplit)==1 ):
            if (line_num == 1):
                line_num = 0
                Name = linesplit[0]
                flag_getName = True
            line_num = line_num + 1
        if ((len(linesplit) >= 9 )and (flag_atom)):
            atom_num = linesplit[0]
            atom_name = linesplit[1]
            X = linesplit[2]
            Y = linesplit[3]
            Z = linesplit[4]
            atom_type = linesplit[5]
            res_num = int(linesplit[6])
            res_name = linesplit[7]
            Q = linesplit[8]
            temp_atom = atom(X,Y,Z,Q,atom_type,atom_name,atom_num,res_num,res_name)
            atom_list.append(temp_atom)
            # BUGFIX: dict.has_key() is Python-2-only; 'in' works everywhere.
            if res_num in residue_list:
                residue_list[res_num].append(temp_atom)
            else:
                residue_list[res_num] = [temp_atom]
        elif (len(linesplit) == 4 and flag_bond):
            bond_num = linesplit[0]
            a1_num = linesplit[1]
            a2_num = linesplit[2]
            bond_type = linesplit[3]
            temp_bond = bond(a1_num,a2_num,bond_num,bond_type)
            bond_list.append(temp_bond)
        elif (flag_substr):
            # First SUBSTRUCTURE line: flag heavy atoms and emit the molecule.
            ID_heavy_atoms(atom_list)
            data = Mol(Name,atom_list,bond_list,residue_list)
            mol_list.append(data)
            flag_getName = False
            flag_substr = False
            # BUGFIX: also start a fresh residue dict.  Previously every Mol in
            # a multi-Mol2 file shared (and kept appending into) the very same
            # residue_list object, polluting earlier molecules' residues.
            atom_list = []; bond_list = []; residue_list = {}
    return mol_list
#################################################################################################################
# Does not work with grid
#def write_mol2(molecule,filename):
#
# outmol2 = open(filename,'w')
# outmol2.write("@<TRIPOS>MOLECULE\n") #start the MOLECULE RTI (Record Type Indicator)
# outmol2.write(molecule.name+'\n') #print MOL2FILE name of the molecule
# outmol2.write(" %d %d %d 0 0\n" % (len(molecule.atom_list),
# len(molecule.bond_list), len(molecule.residue_list.keys())))
# # For now, the number of residues is hard-coded to 1. To be fixed.
# outmol2.write("SMALL\n") #mol_type
# outmol2.write("USER_CHARGES\n") #charge_type
#
# outmol2.write("\n@<TRIPOS>ATOM\n") #start the ATOM RTI (Record Type Indicator)
# for j in range(0,len(molecule.atom_list)):
# outmol2.write("%6d %-4s %9.4f %9.4f %9.4f %-5s %4s %6s %9.4f\n" %
# (j+1, molecule.atom_list[j].name, molecule.atom_list[j].X, molecule.atom_list[j].Y,
# molecule.atom_list[j].Z, molecule.atom_list[j].type, molecule.atom_list[j].resnum,
# molecule.atom_list[j].resname, molecule.atom_list[j].Q))
#
# outmol2.write("@<TRIPOS>BOND\n")
# for m in range(0,len(molecule.bond_list)):
# outmol2.write("%7d %5d %-5d %s\n" % (molecule.bond_list[m].num,
# molecule.bond_list[m].a1_num, molecule.bond_list[m].a2_num, molecule.bond_list[m].type))
#
# outmol2.write("@<TRIPOS>SUBSTRUCTURE\n")
# for resnum in molecule.residue_list.keys():
# outmol2.write("%7d %8s %5d RESIDUE 1 A\n" % (resnum,
# molecule.residue_list[resnum][0].resname, # residue name
# molecule.residue_list[resnum][0].num )) # atom num of first atom in this residue
# outmol2.close()
# return
#################################################################################################################
#################################################################################################################
def write_mol2(molecule,filename):
    """Write *molecule* (a Mol) to *filename* in Tripos Mol2 format.

    Emits the MOLECULE, ATOM, BOND and SUBSTRUCTURE sections.  mol_type is
    hard-coded to SMALL and the charge type to USER_CHARGES.
    """
    outmol2 = open(filename,'w')
    outmol2.write("@<TRIPOS>MOLECULE\n") #start the MOLECULE RTI (Record Type Indicator)
    outmol2.write(molecule.name+'\n') #print MOL2FILE name of the molecule
    outmol2.write(" %d %d %d 0 0\n" % (len(molecule.atom_list),
          len(molecule.bond_list), len(molecule.residue_list.keys())))
    # For now, the number of residues is hard-coded to 1. To be fixed.
    outmol2.write("SMALL\n") #mol_type
    outmol2.write("USER_CHARGES\n") #charge_type
    #outmol2.write("\n@<TRIPOS>ATOM\n") #start the ATOM RTI (Record Type Indicator)
    outmol2.write("@<TRIPOS>ATOM\n") #start the ATOM RTI (Record Type Indicator)
    # One fixed-width line per atom: serial, name, x, y, z, type, resnum, resname, charge.
    for j in range(0,len(molecule.atom_list)):
        outmol2.write("%-5d %-5s %9.4f %9.4f %9.4f %-5s %4s %-6s %8.4f\n" %
              (j+1, molecule.atom_list[j].name, molecule.atom_list[j].X, molecule.atom_list[j].Y,
               molecule.atom_list[j].Z, molecule.atom_list[j].type, molecule.atom_list[j].resnum,
               molecule.atom_list[j].resname, molecule.atom_list[j].Q))
    outmol2.write("@<TRIPOS>BOND\n")
    for m in range(0,len(molecule.bond_list)):
        outmol2.write("%-7d %5d %-5d %s\n" % (molecule.bond_list[m].num,
              molecule.bond_list[m].a1_num, molecule.bond_list[m].a2_num, molecule.bond_list[m].type))
    outmol2.write("@<TRIPOS>SUBSTRUCTURE\n")
    # One SUBSTRUCTURE line per residue; chain and subtype fields are fixed.
    for resnum in molecule.residue_list.keys():
        outmol2.write("%-7d %8s %5d RESIDUE 1 A %3s 1\n" % (resnum,
              molecule.residue_list[resnum][0].resname, # residue name
              molecule.residue_list[resnum][0].num, # atom num of first atom in this residue
              molecule.residue_list[resnum][0].resname[0:3] )) # residue
    outmol2.close()
    return
#################################################################################################################
def get_pdbcode_list(filename):
    """Return the lines of *filename* (one pdb code per line, newlines kept).

    BUGFIX: the original opened the builtin `file` instead of the *filename*
    parameter (a NameError on Python 3, a TypeError on Python 2) and never
    closed the handle; now opens the argument inside a with-block.
    """
    with open(filename, 'r') as systems_list:
        return systems_list.readlines()
#################################################################################################################
def ID_heavy_atoms(atom_list):
    """Flag every atom whose Tripos type does not start with 'H' as heavy.

    Mutates the atoms in place (sets .heavy_atom = True) and returns the
    same list for convenience.
    """
    for entry in atom_list:
        if entry.type[0] != 'H':
            entry.heavy_atom = True
    return atom_list
#################################################################################################################
#################################################################################################################
def distance2(vector1, vector2):
    """Squared Euclidean distance between two objects with X/Y/Z attributes."""
    dx = vector1.X - vector2.X
    dy = vector1.Y - vector2.Y
    dz = vector1.Z - vector2.Z
    return dx ** 2 + dy ** 2 + dz ** 2
#################################################################################################################
#################################################################################################################
def norm(vector1):
    """Euclidean length of an iterable numeric vector."""
    total = 0
    for component in vector1:
        total += component * component
    return sqrt(total)
#################################################################################################################
#################################################################################################################
# Make sure the heavy atoms are being declared as heavy
# i.e call ID_heavy atoms function
def heavy_atom_RMSD(ref, pose):
    """RMSD over atom pairs where both atoms are flagged heavy.

    Atoms must already be marked via ID_heavy_atoms().  Returns -1 when the
    two molecules have different atom counts (kept for existing callers).
    """
    if (len(ref.atom_list) != len(pose.atom_list)):
        return -1 # when atom numbers do not agree
    total = 0.0
    heavy_pairs = 0
    for ref_atom, pose_atom in zip(ref.atom_list, pose.atom_list):
        if ref_atom.heavy_atom and pose_atom.heavy_atom:
            total += distance2(ref_atom, pose_atom)
            heavy_pairs += 1
    return sqrt(total / heavy_pairs)
#################################################################################################################
def formal_charge(molecule):
    """Sum of the partial charges (Q) of all atoms in *molecule*."""
    return sum(atom.Q for atom in molecule.atom_list)
#################################################################################################################
def centre_of_mass(molecule):
    """Return the mass-weighted centre [x, y, z] of *molecule*.

    The element symbol is taken from the part of ``atom.type`` before the
    first '.' (mol2/SYBYL style, e.g. 'C.3' -> 'C').  Raises KeyError for
    elements not in the mass table.

    Cleanup: the original also computed an unweighted centroid that was
    never returned or used; that dead code is removed.
    """
    # Atomic weights (g/mol); 'LP' (lone pair) carries zero mass.
    atom_mass = {'O':15.9994 ,'N':14.00674 ,'C':12.011 ,'F':18.9984032 ,'Cl':35.4527 ,'Br':79.904
        ,'I':126.90447 ,'H':1.00794 ,'B':10.811 ,'S':32.066 ,'P':30.973762 ,'Li':6.941 ,'Na':22.98968
        ,'Mg':24.3050 ,'Al':26.981539 ,'Si':28.0855 ,'K':39.0983 ,'Ca':40.078 ,'Cr':51.9961 ,'Mn':54.93805
        ,'Fe':55.847 ,'Co':58.93320 ,'Cu':63.546 ,'Zn':65.39 ,'Se':78.96 ,'Mo':95.94 ,'Sn':118.710 ,'LP':0.0 }
    cmass = [0.0, 0.0, 0.0]
    molecular_weight = 0.0
    for atom in molecule.atom_list:
        element = atom.type.split('.')[0]
        mass = atom_mass[element]
        cmass[0] += atom.X * mass
        cmass[1] += atom.Y * mass
        cmass[2] += atom.Z * mass
        molecular_weight += mass
    # Normalise the mass-weighted sums by the total molecular weight.
    return [coordinate / molecular_weight for coordinate in cmass]
#################################################################################################################
def molecular_weight(molecule):
    """Total molecular weight of *molecule*, summed over its atoms.

    Element symbols come from ``atom.type`` before the first '.'
    (e.g. 'C.3' -> 'C'); 'LP' lone pairs weigh nothing.
    """
    # Dictionary of atomic weights of elements
    atom_mass = {'O':15.9994 ,'N':14.00674 ,'C':12.011 ,'F':18.9984032 ,'Cl':35.4527 ,'Br':79.904
        ,'I':126.90447 ,'H':1.00794 ,'B':10.811 ,'S':32.066 ,'P':30.973762 ,'Li':6.941 ,'Na':22.98968
        ,'Mg':24.3050 ,'Al':26.981539 ,'Si':28.0855 ,'K':39.0983 ,'Ca':40.078 ,'Cr':51.9961 ,'Mn':54.93805
        ,'Fe':55.847 ,'Co':58.93320 ,'Cu':63.546 ,'Zn':65.39 ,'Se':78.96 ,'Mo':95.94 ,'Sn':118.710 ,'LP':0.0 }
    return sum(atom_mass[atom.type.split('.')[0]] for atom in molecule.atom_list)
#################################################################################################################
def calc_dipole_moment(molecule):
    """Dipole vector about the centre of mass, and its magnitude in Debye.

    Returns ``(uvector, dipolemoment)`` where ``uvector`` is
    [ux, uy, uz] in partial-charge*Angstrom units and ``dipolemoment``
    is its magnitude scaled by 4.796 (e*Angstrom -> Debye).
    """
    DEBYE_PER_E_ANGSTROM = 4.796  # Convert partialcharge*angstroms --> Debye
    cmass = centre_of_mass(molecule)
    ux = 0
    uy = 0
    uz = 0
    for atom in molecule.atom_list:
        ux += atom.Q * (atom.X - cmass[0])
        uy += atom.Q * (atom.Y - cmass[1])
        uz += atom.Q * (atom.Z - cmass[2])
    magnitude = sqrt(ux * ux + uy * uy + uz * uz)
    return [ux, uy, uz], magnitude * DEBYE_PER_E_ANGSTROM
#################################################################################################################
# Takes a single Mol object and returns a Mol object without the hydrogens
# Have to remove H from atom_list, bond_list and residue_list
def remove_hydrogens(m):
    """Return a new Mol identical to *m* but with all hydrogens stripped.

    Keeps only heavy atoms in the atom list and only bonds whose two
    endpoints are both heavy; the residue list is passed through
    unchanged.  Atoms must already be flagged via ID_heavy_atoms.

    Performance fix: the original rescanned the whole atom list for every
    bond (O(bonds * atoms)); collecting hydrogen atom numbers into a set
    once makes the bond filter O(bonds + atoms).
    """
    # Retain only heavy atoms in atom_list.
    atom_list = [atom for atom in m.atom_list if atom.heavy_atom]
    # Atom numbers of all hydrogens, for O(1) bond-endpoint lookups.
    hydrogen_nums = {atom.num for atom in m.atom_list if not atom.heavy_atom}
    # Retain only bonds whose endpoints are both heavy atoms.
    bond_list = [bond for bond in m.bond_list
                 if bond.a1_num not in hydrogen_nums
                 and bond.a2_num not in hydrogen_nums]
    # Assuming that residue list does not change.
    data = Mol(m.name, atom_list, bond_list, m.residue_list)
    ID_heavy_atoms(data.atom_list)
    return data
#################################################################################################################
|
from template.db import Database
from template.query import Query
from template.transaction import Transaction
from template.transaction_worker import TransactionWorker
from template.config import init
from random import choice, randint, sample, seed

# Open the database and create a 5-column Grades table (key in column 0).
init()
db = Database()
db.open('./ECS165')
grades_table = db.create_table('Grades', 5, 0)
num_threads = 8

# Secondary indexes on the four grade columns; tolerate a missing index API.
try:
    grades_table.index.create_index(1)
    grades_table.index.create_index(2)
    grades_table.index.create_index(3)
    grades_table.index.create_index(4)
except Exception as e:
    print('Index API not implemented properly, tests may fail.')

keys = []
records = {}
seed(3562901)

# One transaction per worker; each worker runs exactly one transaction.
insert_transactions = []
transaction_workers = []
for thread_id in range(num_threads):
    insert_transactions.append(Transaction())
    transaction_workers.append(TransactionWorker())
    transaction_workers[thread_id].add_transaction(insert_transactions[thread_id])

# Distribute 1000 insert queries round-robin across the transactions.
for offset in range(0, 1000):
    key = 92106429 + offset
    keys.append(key)
    records[key] = [key, randint(0, 20), randint(0, 20), randint(0, 20), randint(0, 20)]
    q = Query(grades_table)
    insert_transactions[offset % num_threads].add_query(q.insert, *records[key])

# Commit to disk
for worker in transaction_workers:
    worker.run()
db.close()
|
# Simple billing script: reads a quantity and a unit price, applies a 10%
# discount when the order total reaches 100000, and prints the breakdown.
print("Enter quantity")
q = int(input())
print("Enter price")
p = int(input())
total = q * p
discount = 0.0
if total >= 100000:
    # Bug fix: the original computed the 10% discount into 'd' but never
    # applied it or printed it.
    discount = total * 0.1
total = total - discount
# Also fixes the 'guantity' typo in the output message.
print("Your quantity is = " + str(q) + " your price per item is = " + str(p)
      + " your discount is = " + str(discount) + " your total is = " + str(total))
|
import json


def create_case(mesh_file="",
                lx=4,
                source_term="noforce",
                initial_condition="uniform",
                nsamples=0,
                dt=0.001,
                T_end=0.0,
                uinf=None):
    """Build a Neko-style solver case description as a plain dict.

    Args:
        mesh_file: path to the mesh file.
        lx: polynomial order.
        source_term: forcing term name.
        initial_condition: initial condition name.
        nsamples: number of output samples.
        dt: time-step size.
        T_end: final simulation time.
        uinf: free-stream velocity [ux, uy, uz]; defaults to [1.0, 0.0, 0.0].

    Bug fix: ``uinf`` was a mutable default argument ([1.0, 0.0, 0.0]),
    shared across calls -- a caller mutating the returned list would
    corrupt every later default.  A ``None`` sentinel restores the same
    default value without the sharing.
    """
    if uinf is None:
        uinf = [1.0, 0.0, 0.0]
    default = {
        "case" : {
            "mesh_file" : mesh_file,
            "fluid_scheme" : "pnpn",
            "lx" : lx,
            "source_term" : source_term,
            "initial_condition" : initial_condition
        },
        "parameters" : {
            "nsamples" : nsamples,
            "dt" : dt,
            "T_end" : T_end,
            "uinf" : uinf,
            "ksp_vel" : {
                "type" : "cg",
                "pc" : "jacobi",
                "abstol" : 1e-09
            },
            "ksp_prs": {
                "type" : "gmres",
                "pc" : "hsmg",
                "abstol" : 1e-09
            }
        }
    }
    # JSON round-trip: guarantees the result is JSON-serialisable and is a
    # deep copy decoupled from the arguments (including uinf).
    return json.loads(json.dumps(default))
|
# linreg.py
#
# Standalone Python/Spark program to perform linear regression.
# Performs linear regression by computing the summation form of the
# closed form expression for the ordinary least squares estimate of beta.
#
# TODO: Write this.
#
# Takes the yx file as input, where on each line y is the first element
# and the remaining elements constitute the x.
#
# Usage: spark-submit linreg.py <inputdatafile>
# Example usage: spark-submit linreg.py yxlin.csv
#
#
## Submitted by Sai Nikhil Gundu, Id: 800962726
#groupId = org.apache.spark
#artifactId = spark-core_2.11
#version = 2.1.0
#groupId = org.apache.hadoop
#artifactId = hadoop-client
#version = Hadoop 2.6.0-cdh5.8.0
# Code developed using the skeleton code provided as base
import sys
import numpy as np
from pyspark import SparkContext

#Set the precision value for the output.
np.set_printoptions(precision=13)

# defining the keys for computing beta values. (using the class lecture notes)
if __name__ == "__main__":
    # Python 2 Spark driver.  Usage:
    #   spark-submit <script> <ratings.csv> <movies.csv> <userId>
    sc = SparkContext(appName="Content Based")
    # Input yx file has y_i as the first element of each line
    # and the remaining elements constitute x_i
    ratings_raw_data = sc.textFile(sys.argv[1])
    ratings_raw_data_header = ratings_raw_data.take(1)[0]
    movies_raw_data = sc.textFile(sys.argv[2])
    movies_raw_data_header = movies_raw_data.take(1)[0]
    #yxinputFile = sc.textFile(sys.argv[1])
    #yxlines = yxinputFile.map(lambda line: line.split(','))
    # (userId, movieId, rating) triples with the CSV header dropped.
    ratings_data = ratings_raw_data.filter(lambda line: line!=ratings_raw_data_header)\
        .map(lambda line: line.split(",")).map(lambda tokens: (tokens[0],tokens[1],tokens[2])).cache()
    # Same ratings keyed the other way round: (movieId, userId, rating).
    ratings_1_data_forM_U = ratings_raw_data.filter(lambda line: line!=ratings_raw_data_header)\
        .map(lambda line: line.split(",")).map(lambda tokens: (tokens[1],tokens[0],tokens[2])).cache()
    # (movieId, title) pairs.
    movies_data = movies_raw_data.filter(lambda line: line!=movies_raw_data_header)\
        .map(lambda line: line.split(",")).map(lambda tokens: (tokens[0],tokens[1])).cache()
    #Calculating training and test splits-Using entire data as training
    training_RDD, test_RDD = ratings_data.randomSplit([10,0], seed=9)
    training_RDD1,test_RDD1 = ratings_1_data_forM_U.randomSplit([10,0], seed=9)
    # Creating sparse representation of A matrix with users as rows and items as columns
    user_item_ratings = (training_RDD
        .map(lambda p: (p[0],p[-2:])).groupByKey()).cache()
    item_user_ratings=(training_RDD1
        .map(lambda p: (p[0],p[-2:])).groupByKey()).cache()
    # (userId, rating) and (rating, movieId) projections.
    user_ratings=ratings_data.map(lambda X: (X[0], X[2]))
    movie=ratings_data.map(lambda x: (x[2],x[1]))
    userId = (sys.argv[3])
    # NOTE(review): collecting into a dict keeps only ONE value per key --
    # duplicate keys are silently overwritten by later entries.
    ur_broadcast = sc.broadcast({
        k: v for (k, v) in user_ratings.collect()
    })
    # All ratings made by the requested user.
    u_r= np.array(user_ratings.lookup(userId))
    #Movie broadcast
    m_broadcast = sc.broadcast({
        k: v for (k, v) in movie.collect()
    })
    movies_broadcast = sc.broadcast({
        k: v for (k, v) in movies_data.collect()
    })
    # Top-5 of the user's ratings (string sort); pick a movie with the
    # first of those rating values.
    array1 = u_r[u_r.argsort()[-5:]]
    top_movie=m_broadcast.value[array1[0]]
    #top_movie=m_broadcast.value['3.0']
    print top_movie
    # (movieId, genres) and (genres, movieId) projections of the movies file.
    g_m=movies_raw_data.filter(lambda line: line!=movies_raw_data_header)\
        .map(lambda line: line.split(",")).map(lambda tokens: (tokens[0],tokens[2])).cache()
    m_g=movies_raw_data.filter(lambda line: line!=movies_raw_data_header)\
        .map(lambda line: line.split(",")).map(lambda tokens: (tokens[2],tokens[0])).cache()
    # NOTE(review): m_broadcast is rebound here, discarding the earlier
    # rating->movie broadcast.
    m_broadcast = sc.broadcast({
        k: v for (k,v) in g_m.collect()
    })
    # Genre string of the user's top movie.
    top_genre=m_broadcast.value[top_movie]
    g_broadcast = sc.broadcast({
        v: k for (k,v) in g_m.collect()
    })
    m_r_broadcast = sc.broadcast({
        v: k for (k,v) in movie.collect()
    })
    # All movies sharing that genre string; fetch a rating for the first 5.
    lookup_content=np.array(m_g.lookup(top_genre))
    genre_ratings=[0.0000]*len(lookup_content)
    for i, j in zip(range(5),lookup_content):
        genre_ratings[i]=m_r_broadcast.value[j]
    g_r=np.array(genre_ratings)
    # Movies of this genre ordered by the looked-up ratings, top 5 last.
    top_movies_genre=lookup_content[g_r.argsort()[-5:]]
    print "----------------------------------------------------------------------"
    print "Based on the genre, top rated movies calculation"
    print "----------------------------------------------------------------------"
    print "Recommended movies for userId", userId
    for i, j in zip(range(5),top_movies_genre):
        print("Movie Id=", j, " with title=", movies_broadcast.value[top_movies_genre[i]])
    sc.stop()
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
import rospy
from std_msgs.msg import String, Empty, UInt8
from silbot3_msgs.srv import SpeechRecognitionStart, SpeechRecognitionStartResponse
import os
import signal
import traceback
import speech_recognition as sr
from speech_recognition import Microphone
class RecognizerNode:
    """ROS node wrapping the speech_recognition library.

    Exposes the /silbot3_stt/start service, which records one utterance
    from a microphone and returns Google STT text (Korean, ko-KR).
    """

    def __init__(self):
        # Recognizer tuning is taken from ROS private parameters.
        self.recognizer = sr.Recognizer()
        # Abort a hung recognition/network operation after this many seconds.
        self.recognizer.operation_timeout = rospy.get_param('~operation_timeout', 10)
        self.recognizer.dynamic_energy_threshold = rospy.get_param('~dynamic_threshold', False)
        # Seconds of silence that terminate an utterance.
        self.recognizer.pause_threshold = rospy.get_param('~pause_threshold', 0.5)
        self.recognizer.energy_threshold = rospy.get_param('~energy_threshold', 500)
        # Substring used to select the capture device from the system list.
        self.mic_name = rospy.get_param('~mic_name', 'default')
        self.ambient_noise = rospy.get_param('~ambient_noise', True)
        rospy.loginfo('mic_name: %s, ambient_noise: %s', self.mic_name, str(self.ambient_noise))
        # Guards against overlapping recognition requests (single microphone).
        self.is_working = False
        rospy.Service('/silbot3_stt/start', SpeechRecognitionStart, self.start)

    def start(self, req):
        """Service callback: listen once and return the recognized text.

        Returns BUSY if a recognition is already in progress, FAILED on
        recognition/API errors, TIMEOUT if no speech starts in time.
        """
        timeout = req.timeout
        response = SpeechRecognitionStartResponse()
        response.recognized_text = ''
        if self.is_working:
            response.recognition_result = SpeechRecognitionStartResponse.RECOGNITION_RESULT_BUSY
            return response
        self.is_working = True
        # Pick the first device whose name contains mic_name; fall back to 0.
        mic_idx = 0
        for i, microphone_name in enumerate(Microphone.list_microphone_names()):
            if self.mic_name in microphone_name:
                mic_idx = i
                rospy.loginfo('Found mic: %d=%s', mic_idx, microphone_name)
                break
        try:
            with sr.Microphone(device_index=mic_idx) as source:
                if self.ambient_noise:
                    # Calibrate the energy threshold against background noise.
                    self.recognizer.adjust_for_ambient_noise(source)
                rospy.loginfo('Recognition start')
                audio = self.recognizer.listen(source, timeout=timeout)
                result = self.recognizer.recognize_google(audio, language='ko-KR')
                # Encode to UTF-8 bytes for the service response field
                # (NOTE(review): assumes Python 2 str semantics -- confirm).
                result = result.encode('utf-8')
                rospy.loginfo('Recognition result: %s', result)
                response.recognized_text = result
        except (sr.UnknownValueError, sr.RequestError) as e:
            # No speech understood, or the Google API was unreachable.
            response.recognition_result = SpeechRecognitionStartResponse.RECOGNITION_RESULT_FAILED
        except sr.WaitTimeoutError:
            # No speech started within the requested timeout.
            response.recognition_result = SpeechRecognitionStartResponse.RECOGNITION_RESULT_TIMEOUT
        finally:
            # Always release the busy flag, even on unexpected errors.
            self.is_working = False
        return response
def exit(signum, frame):
    # No-op SIGINT handler so Ctrl-C does not interrupt mid-recognition;
    # NOTE(review): this shadows the builtin exit().
    pass

def init():
    """Start the silbot3_stt node and block until ROS shutdown."""
    rospy.init_node('silbot3_stt', anonymous=True)
    r = RecognizerNode()
    rospy.spin()

if __name__ == '__main__':
    # Install the no-op SIGINT handler, then run the node.
    signal.signal(signal.SIGINT, exit)
    try:
        init()
    except rospy.ROSInterruptException:
        pass
|
"""
Copyright MIT and Harvey Mudd College
MIT License
Summer 2020
Defines the interface of the Controller module of the racecar_core library.
"""
import abc
from enum import IntEnum
from typing import Tuple
class Controller(abc.ABC):
    """
    Handles input from the controller and exposes constant input state per frame.
    """

    class Button(IntEnum):
        """
        The buttons on the controller.
        """
        A = 0  # A button
        B = 1  # B button
        X = 2  # X button
        Y = 3  # Y button
        LB = 4  # Left bumper
        RB = 5  # Right bumper
        LJOY = 6  # Left joystick button
        RJOY = 7  # Right joystick button

    class Trigger(IntEnum):
        """
        The triggers on the controller.
        """
        LEFT = 0
        RIGHT = 1

    class Joystick(IntEnum):
        """
        The joysticks on the controller.
        """
        LEFT = 0
        RIGHT = 1

    @abc.abstractmethod
    def is_down(self, button: Button) -> bool:
        """
        Returns whether a certain button is currently pressed.

        Args:
            button: Which button to check.

        Returns:
            True if button is currently pressed.

        Note:
            The button argument must be an associated value of the Button enum,
            which is defined in the Controller module.

        Example::

            # This update function will print a message for every frame in which
            # the A button is held down. Thus, multiple messages will be printed
            # if we press and hold the A button
            def update():
                if rc.controller.is_down(rc.controller.Button.A):
                    print("The A button is currently pressed.")
        """
        pass

    @abc.abstractmethod
    def was_pressed(self, button: Button) -> bool:
        """
        Returns whether a certain button was pressed this frame.

        Args:
            button: Which button to check.

        Returns:
            True if button is currently pressed and was not pressed last frame.

        Note:
            The button argument must be an associated value of the Button enum,
            which is defined in the Controller module.

        Example::

            # This update function will print a single message each time the A
            # button is pressed on the controller
            def update():
                if rc.controller.was_pressed(rc.controller.Button.A):
                    print("The A button was pressed")
        """
        pass

    @abc.abstractmethod
    def was_released(self, button: Button) -> bool:
        """
        Returns whether a certain button was released this frame.

        Args:
            button: Which button to check.

        Returns:
            True if button is currently released and was pressed last frame.

        Note:
            The button argument must be an associated value of the Button enum,
            which is defined in the Controller module.

        Example::

            # This update function will print a single message each time the A
            # button is released on the controller
            # (docstring fix: the example previously called was_pressed)
            def update():
                if rc.controller.was_released(rc.controller.Button.A):
                    print("The A button was released")
        """
        pass

    @abc.abstractmethod
    def get_trigger(self, trigger: Trigger) -> float:
        """
        Returns the position of a certain trigger as a value from 0.0 to 1.0.

        Args:
            trigger: Which trigger to check.

        Returns:
            A value ranging from 0.0 (not pressed) to 1.0 (fully pressed) inclusive.

        Note:
            The trigger argument must be an associated value of the Trigger enum,
            which is defined in the Controller module.

        Example::

            # Speed will receive a value from 0.0 to 1.0 based on how much the left
            # trigger is pressed
            speed = rc.controller.get_trigger(rc.controller.Trigger.LEFT)
        """
        pass

    @abc.abstractmethod
    def get_joystick(self, joystick: Joystick) -> Tuple[float, float]:
        """
        Returns the position of a certain joystick as an (x, y) tuple.

        Args:
            joystick: Which joystick to check.

        Returns:
            The x and y coordinate of the joystick, with each axis ranging from
            -1.0 (left or down) to 1.0 (right or up).

        Note:
            The joystick argument must be an associated value of the Joystick enum,
            which is defined in the Controller module.

        Example::

            # x and y will be given values from -1.0 to 1.0 based on the position of
            # the left joystick
            (x, y) = rc.controller.get_joystick(rc.controller.Joystick.LEFT)
        """
        pass
|
"""
This module lets you practice using Create MOVEMENT and SENSORS,
in particular the DISTANCE and ANGLE sensors.
Authors: David Mutchler, Valerie Galluzzi, Mark Hays, Amanda Stouder,
their colleagues and PUT_YOUR_NAME_HERE. September 2015.
""" # TODO: 1. PUT YOUR NAME IN THE ABOVE LINE.
from new_create_ie_old import new_create
import time
def main():
    """ Tests the go_by_time function. """
    # Test 1:
    go_by_time(100, 50)  # Fast robot that goes 1 meter.
    time.sleep(2)  # A robot needs to PAUSE before RE-connecting.
    # ------------------------------------------------------------------
    # TODO: 2. Add a second test, with any reasonable arguments you like.
    # ------------------------------------------------------------------
def go_by_time(distance_in_CM, speed_in_CM_per_second):
    """
    1. Constructs (and hence connects to) a Create robot.
    2. Makes the robot go FORWARD (in a straight line)
       ** FOR THE GIVEN DISTANCE ** at the given speed,
       using what we will call the GO_BY_TIME algorithm:
         1. Compute the number of SECONDS the robot must move to
            achieve the given DISTANCE at the given SPEED.
         2. Start the robot moving at the given speed.
         3. Sleep the COMPUTED number of seconds.
         4. Stop the robot.
    3. Prints the DISTANCE traveled DURING THE FORWARD MOTION
       (as measured by the robot's distance sensor).
    4. Shuts down the robot.
    Preconditions: Both arguments are positive.
    """
    # NOTE: this is a student exercise skeleton -- the body is deliberately
    # left unimplemented (the function currently does nothing).
    # ------------------------------------------------------------------
    # TODO: 3. Implement and test this function.
    #
    # CAUTION: Do NOT use 'time' as a VARIABLE since it is
    # the name of a MODULE that you need. Instead, consider
    # using something like 'seconds' for the seconds to move.
    #
    # HINT: *** First solve this problem BY HAND on an example! ***
    # ------------------------------------------------------------------
# ----------------------------------------------------------------------
# TODO: 4. ** OPTIONAL **
#    Get out a yardstick and MEASURE how far the robot ACTUALLY went.
#    Compare that to:
#      -- How far you TOLD the robot to move.
#      -- How far the robot REPORTED that it moved.
#    Nothing to turn in for this TODO, but do it if you have time.
# ----------------------------------------------------------------------

# ----------------------------------------------------------------------
# Calls main to start the ball rolling.
# ----------------------------------------------------------------------
main()
|
from src import king_bot, settings
import sys

# these could be read in via arguments, file or login manually - read documentation
# NOTE(review): plaintext account credentials are committed here -- move them
# to environment variables or a git-ignored config file.
gameworld = "com3" # choose uppercase (exact world name) - optional
email = "vlrizkidz93@tuta.io" # optional
password = "melodies" # optional
proxy = "" # optional

# increase the number if your internet connection is slow
settings.browser_speed = 1.0

# Log in and start the bot; start_args forwards CLI flags to the bot.
kingbot = king_bot(
    email=email,
    password=password,
    gameworld=gameworld,
    proxy=proxy,
    start_args=sys.argv,
    debug=True,
)

# place your actions below
# kingbot.start_adventures(1000)
# Raid the robber hideout/camp from village 0 every 600 s.
# units maps unit id -> count; -1 presumably means "send all" -- confirm
# against the bot's documentation.
kingbot.robber_hideout(village=0, interval=600, units={4: 100, 10: -1})
kingbot.robber_camp(village=0, interval=600, units={4: 100, 10: -1})
|
from django import forms
from apps.forms import FormMixin
from apps.news.models import News,Banner
from apps.course.models import Course
class EditNewsCategoryForm(forms.Form, FormMixin):
    """Validates the primary key and new name for renaming a news category."""

    pk = forms.IntegerField(error_messages={'required': '必须传入分类的id!'})
    name = forms.CharField(max_length=100)
class WriteNewsForm(forms.ModelForm, FormMixin):
    """News creation form; category comes in as an id and is resolved by the view."""

    category = forms.IntegerField()

    class Meta:
        model = News
        # category/author/pub_time are filled in by the view, not the user.
        exclude = ['category', 'author', 'pub_time']
class EditNewsForm(forms.ModelForm, FormMixin):
    """News edit form; identifies the target row by pk, category by id."""

    category = forms.IntegerField()
    pk = forms.IntegerField()

    class Meta:
        model = News
        # category/author/pub_time are filled in by the view, not the user.
        exclude = ['category', 'author', 'pub_time']
class AddBannerForm(forms.ModelForm, FormMixin):
    """Banner creation form."""

    class Meta:
        model = Banner
        fields = ['priority', 'image_url', 'link_to']
class EditBannerForm(forms.ModelForm, FormMixin):
    """Banner edit form; identifies the target row by pk."""

    pk = forms.IntegerField()

    class Meta:
        model = Banner
        fields = ['priority', 'image_url', 'link_to']
class PubCourseForm(forms.ModelForm, FormMixin):
    """Course publishing form; category and teacher come in as ids."""

    category_id = forms.IntegerField()
    teacher_id = forms.IntegerField()

    class Meta:
        model = Course
        # category/teacher relations are resolved from the ids by the view.
        exclude = ['category', 'teacher']
|
import re

# Compare vowel intervals between a forced-alignment TextGrid (file 1) and a
# second TextGrid whose intervals are labelled "Vowel" (file 2), by reading
# interval times at FIXED character offsets around each regex match.
# NOTE(review): the offset arithmetic throughout this script is extremely
# brittle -- it assumes an exact xmin/xmax text layout in the TextGrid files.
text_grid_1 = open('F:\Projects\Active Projects\Project Intern_IITB\Rishabh_FA_Audio\Test\hehe\\08024satishpilena_56580b937e63f5035c0025f5_57fe36059ee20a04985ba1a0_9_00020200000000022002200020002020.TextGrid', 'r')
text_grid_2 = open('F:\Projects\Active Projects\Project Intern_IITB\Rishabh_FA_Audio\Test\hehe\\08024satishpilena_56580b937e63f5035c0025f5_57fe36059ee20a04985ba1a0_9_00020200000000022002200020002020PE_NEW.TextGrid', 'r')
data_1 = text_grid_1.read()
data_2 = text_grid_2.read()
# time_1 holds flat triples (start, end, label) for file 1;
# time_2 holds flat pairs (start, end) for file 2's "Vowel" intervals.
time_1 = []
time_2 = []
for m in re.finditer('text = "', data_1):
    # Interval start/end times: 7-or-8 and 9 characters read at fixed
    # offsets before the match; the branch handles one-digit-shorter xmin.
    if data_1[m.start() - 33] == '=':
        time_1.append(float(
            data_1[m.start() - 32] + data_1[m.start() - 31] + data_1[m.start() - 30] + data_1[m.start() - 29] + data_1[m.start() - 28] + data_1[
                m.start() - 27] + data_1[m.start() - 26]))
        time_1.append(float(
            data_1[m.start() - 13] + data_1[m.start()-12] + data_1[m.start() - 11] + data_1[m.start() - 10] + data_1[m.start() - 9] + data_1[m.start() - 8] + data_1[
                m.start() - 7] + data_1[m.start() - 6] + data_1[m.start() - 5]))
    else:
        time_1.append(float(
            data_1[m.start() - 33] + data_1[m.start() - 32] + data_1[m.start() - 31] + data_1[m.start() - 30] + data_1[m.start() - 29] + data_1[
                m.start() - 28] + data_1[m.start() - 27] + data_1[m.start() - 26]))
        time_1.append(float(
            data_1[m.start() - 13] + data_1[m.start() - 12] + data_1[m.start() - 11] + data_1[m.start() - 10] + data_1[
                m.start() - 9] + data_1[m.start() - 8] + data_1[
                m.start() - 7] + data_1[m.start() - 6] + data_1[m.start() - 5]))
    # Interval label: the 1-3 characters between the quotes after 'text = "'.
    if data_1[m.start() + 9] == '"':
        time_1.append(data_1[m.start() + 8])
    elif data_1[m.start() + 10] == '"':
        time_1.append(data_1[m.start() + 8] + data_1[m.start() + 9])
    else:
        time_1.append(data_1[m.start() + 8] + data_1[m.start() + 9] + data_1[m.start() + 10])
# Start/end times of every interval labelled "Vowel" in file 2,
# again read at fixed offsets before the match.
for m in re.finditer('"Vowel"', data_2):
    time_2.append(float(
        data_2[m.start() - 34] + data_2[m.start() - 33] + data_2[m.start() - 32] + data_2[m.start() - 31] + data_2[
            m.start() - 30] + data_2[m.start() - 29]))
    time_2.append(float(
        data_2[m.start()-17] + data_2[m.start() - 16] + data_2[m.start() - 15] + data_2[m.start() - 14] + data_2[m.start() - 13] + data_2[
            m.start() - 12]))
def count(vowel):
    """Append fixed-offset start/end times of each *vowel* match in data_1
    to time_1 (side effect!), then return how many times it occurs."""
    for vw in re.finditer(vowel, data_1):
        time_1.append(float(
            data_1[vw.start() - 38] + data_1[vw.start() - 37] + data_1[vw.start() - 36] + data_1[vw.start() - 35] + data_1[
                vw.start() - 34]))
        time_1.append(float(
            data_1[vw.start() - 19] + data_1[vw.start() - 18] + data_1[vw.start() - 17] + data_1[vw.start() - 16] + data_1[
                vw.start() - 15]))
    return data_1.count(vowel)
listing = []
print time_1
print time_2
# Pass 1: file-1 intervals that fully CONTAIN a file-2 vowel interval.
for j in range(0, len(time_2), 2):
    for i in range(0, len(time_1), 3):
        # print 'Out', time_1[i], time_1[i + 1]
        if time_1[i] <= time_2[j] < time_1[i+1] and time_1[i] < time_2[j+1] <= time_1[i+1]:
            listing.append(time_1[i+2])
        # if time_1[i] <= time_2[j+2] < time_1[i + 1] and time_1[i] < time_2[j + 3] <= time_1[i + 1]:
        #     listing.append(time_1[i + 2])
# Pass 2: file-2 vowel intervals that STRADDLE two adjacent file-1
# intervals; both overlapped labels are recorded.
for j in range(0, len(time_2), 2):
    for i in range(0, len(time_1) - 3, 3):
        if time_1[i] < time_2[j] < time_1[i+1] and time_1[i+3] < time_2[j+1] < time_1[i+4]:
            # if time_1[i+1] - time_2[j] > time_2[j+1] - time_1[i+3]:
            listing.append(time_1[i+2])
            # else:
            listing.append(time_1[i+5])
            # first = time_1[i + 1] - time_2[j]
            # second = time_2[j + 1] - time_1[i + 3]
            # overall = time_2[j + 1] - time_2[j]
# print listing
count_2 = data_2.count('"Vowel"')
print "Count of Vowel according to one of algo's is: ", count_2
# Tally every vowel label present in the forced-alignment TextGrid.
c1 = count('"aa"')
c2 = count('"AA"')
c3 = count('"ae"')
c4 = count('"aw"')
c5 = count('"ay"')
c6 = count('"ee"')
c7 = count('"ex"')
c8 = count('"ii"')
c9 = count('"II"')
c10 = count('"oo"')
c11 = count('"OO"')
c12 = count('"oy"')
c13 = count('"uu"')
c14 = count('"UU"')
c = c1 + c2 + c3 + c4 + c5 + c6 + c7 + c8 + c9 + c10 + c11 + c12 + c13 + c14
print "Count of vowel according to FA TextGrid is : ", c
# NOTE(review): this rebinding shadows the count() function defined above.
count = 0
vowel_data = ['aa', 'AA', 'ae', 'aw', 'ay', 'ee', 'ex', 'ii', 'II', 'oo', 'OO', 'oy', 'uu', 'UU']
print listing
for vowel_sound in listing:
    # print vowel_sound
    if vowel_sound in vowel_data:
        # print vowel_sound
        count += 1
print "No of vowel coincident are", count
|
def getSubstrings(s):
    """ get_substrings == PEP8 (forced mixedCase by CodeWars)

    Count the distinct (case-insensitive) non-empty substrings of s.
    """
    s = s.lower()
    length = len(s)
    seen = set()
    # range() replaces Python-2-only xrange() (range also works on Python 2);
    # iterating end positions directly replaces the offset-plus-break pattern.
    for start in range(length):
        for end in range(start + 1, length + 1):
            seen.add(s[start:end])
    return len(seen)
|
import json
from django.conf import settings
from modeltranslation import admin
from modeltranslation.utils import build_localized_fieldname
class TranslationAdmin(admin.TranslationAdmin):
    """Model admin that exposes per-language field-name mappings to the
    change-form template as JSON."""

    change_form_template = 'trans/admin/change_form.html'

    def _get_translation_options(self, origin_lang):
        """Map each field's origin-language name to its names in every
        other configured language."""
        other_langs = list(dict(settings.LANGUAGES).keys())
        other_langs.remove(origin_lang)
        options = {}
        for field in self.trans_opts.fields.keys():
            origin_name = build_localized_fieldname(field, origin_lang)
            options[origin_name] = {
                lang: build_localized_fieldname(field, lang)
                for lang in other_langs
            }
        return options

    def changeform_view(
            self, request, object_id=None, form_url='', extra_context=None):
        """Inject the JSON translation map when the feature flag is on."""
        if getattr(settings, 'IS_ADMIN_FIELDS_TRANSLATION_ENABLED', False):
            # Note: replaces (does not merge) any extra_context passed in.
            extra_context = {
                'trans_options': json.dumps(
                    self._get_translation_options(request.LANGUAGE_CODE)
                )
            }
        return super(TranslationAdmin, self).changeform_view(
            request, object_id, form_url, extra_context)
|
import sys
import random
def main(num):
    """Prints x random words where x is user supplied

    Params: num - sys argv[1]
    int -> ()
    """
    # 'fh' avoids shadowing the builtin 'file' (Python 2 name).
    with open('/usr/share/dict/words') as fh:
        words = fh.readlines()
    # random.choice replaces the manual randint indexing; rstrip drops the
    # trailing newline without building an intermediate split list.
    for _ in range(int(num)):
        print(random.choice(words).rstrip('\n'), end=" ")
    print()
if __name__ == "__main__":
    # First CLI argument is the number of words to print.
    arg = sys.argv[1]
    main(arg)
|
from pytest_contextgen import create_context_in_tuple, \
parametrize_context_tuple, pair_context_with_doubles, get_contexts, get_apis
pytest_plugins = ['pytest_returnvalues']
def pytest_addoption(parser):
    """Register the GPU test-suite command-line options.

    Options select the compute API, float precision, fast-math mode, and
    device/platform include/exclude filters used when contexts are built.
    """
    parser.addoption("--api", action="store",
        help="API: cuda/ocl/supported",
        # can't get API list from CLUDA, because if we import it here,
        # it messes up with coverage results
        # (modules get imported before coverage collector starts)
        default="supported", choices=["cuda", "ocl", "supported"])
    parser.addoption("--double", action="store",
        help="Use doubles: no/yes/supported",
        default="supported", choices=["no", "yes", "supported"])
    parser.addoption("--fast-math", dest="fast_math", action="store",
        help="Use fast math: no/yes/both",
        default="yes", choices=["no", "yes", "both"])
    parser.addoption("--device-include-mask", action="append",
        help="Run tests on matching devices only",
        default=[])
    parser.addoption("--device-exclude-mask", action="append",
        # Bug fix: help text was copy-pasted from the include option.
        help="Exclude matching devices from the test run",
        default=[])
    parser.addoption("--platform-include-mask", action="append",
        help="Run tests on matching platforms only",
        default=[])
    parser.addoption("--platform-exclude-mask", action="append",
        # Bug fix: help text was copy-pasted from the include option.
        help="Exclude matching platforms from the test run",
        default=[])
    parser.addoption("--include-duplicate-devices", action="store_true",
        help="Run tests on all available devices and not only on uniquely named ones",
        default=False)
# Legacy py.test funcarg factories (pre-fixture API): test arguments with
# these names are each created via create_context_in_tuple.
pytest_funcarg__ctx_and_double = create_context_in_tuple
pytest_funcarg__ctx = create_context_in_tuple
pytest_funcarg__some_ctx = create_context_in_tuple
def pytest_report_header(config):
    """List the devices the suite will run on; fail fast if none match."""
    ccs, cc_ids = get_contexts(config)
    # device_id -> "platform, device" label (duplicates collapse by id).
    devices = {cc.device_id:(cc.platform_name + ", " + cc.device_name) for cc in ccs}
    if len(devices) == 0:
        raise ValueError("No devices match the criteria")
    print("Running tests on:")
    for device_id in sorted(devices):
        print(" " + device_id + ": " + devices[device_id])
def pytest_generate_tests(metafunc):
    """Parametrize tests over the contexts/APIs selected by the CLI options."""
    if 'ctx_and_double' in metafunc.funcargnames:
        # (context, use_double) pairs.
        parametrize_context_tuple(metafunc, 'ctx_and_double', pair_context_with_doubles)
    if 'ctx' in metafunc.funcargnames:
        # One parameter set per available context.
        ccs, cc_ids = get_contexts(metafunc.config)
        metafunc.parametrize('ctx', ccs, ids=cc_ids, indirect=True)
    if 'some_ctx' in metafunc.funcargnames:
        # Just some context for tests that only check context-independent stuff.
        ccs, cc_ids = get_contexts(metafunc.config)
        metafunc.parametrize('some_ctx', [ccs[0]], ids=[cc_ids[0]], indirect=True)
    if 'cluda_api' in metafunc.funcargnames:
        apis, api_ids = get_apis(metafunc.config)
        metafunc.parametrize('cluda_api', apis, ids=api_ids)
|
# Generated by Django 2.2.6 on 2019-10-26 13:04
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial schema for the wedding app: Collage and
    Picture, where Picture rows cascade-delete with their Collage.
    (Django-generated; normally not edited by hand.)"""

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Collage',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('collage_name', models.CharField(max_length=50)),
            ],
        ),
        migrations.CreateModel(
            name='Picture',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('picture_name', models.CharField(max_length=50)),
                ('collage', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='wedding.Collage')),
            ],
        ),
    ]
|
import asyncio
async def publisher(q):
    """Producer: enqueue a fixed payload every 0.5 s, forever."""
    while True:
        print('enqueued thing')
        # put_nowait never blocks; the queue here is unbounded.
        q.put_nowait({'a': 'thing'})
        await asyncio.sleep(0.5)
async def worker(queue):
    """Consumer: pull items off the queue and process them, forever."""
    while True:
        # Get a "work item" out of the queue.
        my_dict = await queue.get()
        print(f'processed {my_dict}')
        # Notify the queue that the "work item" has been processed.
        queue.task_done()
if __name__ == "__main__":
    # NOTE(review): both coroutines loop forever, so run_until_complete
    # never returns and loop.close() is unreachable; stop with Ctrl-C.
    # get_event_loop() is the legacy (pre-asyncio.run) driver idiom.
    loop = asyncio.get_event_loop()
    q = asyncio.Queue()
    loop.run_until_complete(asyncio.gather(
        publisher(q),
        worker(q),
    ))
    loop.close()
|
import logging

from .omaha import Omaha
# from .client import Client
# from .indicator import Indicator
# from .company import Company
from .version import __version__

# Package-level logger writing to stderr at INFO.
# NOTE(review): configuring handlers/levels at import time is unusual for a
# library; applications normally control logging configuration themselves.
logger = logging.getLogger(__name__)
logger.addHandler(logging.StreamHandler())
logger.setLevel(logging.INFO)

# NOTE(review): __all__ exports "Client" and "DataFrame", but Client is
# commented out above and DataFrame is never defined here, while Omaha is
# imported yet not exported -- confirm the intended public API.
__all__ = ["__version__", "Client", "DataFrame"]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.