blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
281
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
57
| license_type
stringclasses 2
values | repo_name
stringlengths 6
116
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 313
values | visit_date
timestamp[us] | revision_date
timestamp[us] | committer_date
timestamp[us] | github_id
int64 18.2k
668M
⌀ | star_events_count
int64 0
102k
| fork_events_count
int64 0
38.2k
| gha_license_id
stringclasses 17
values | gha_event_created_at
timestamp[us] | gha_created_at
timestamp[us] | gha_language
stringclasses 107
values | src_encoding
stringclasses 20
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 4
6.02M
| extension
stringclasses 78
values | content
stringlengths 2
6.02M
| authors
listlengths 1
1
| author
stringlengths 0
175
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
c630daa51e38e135e196be1383a6a6557486aa06
|
a68159363fb88fcd105f3a305d1ee5df445a832c
|
/letters_1000_numbers_problem-17.py
|
3304126eed5e9f6fab90ce65c2e180a52f130c1f
|
[] |
no_license
|
lokendra7512/Project-Euler-Solutions
|
8a6c8647cfdbfd71adddee9b86ee127fe1e69709
|
23a467af7807e154c9985bfebbe08f60b9418c4d
|
refs/heads/master
| 2022-12-24T07:18:18.025811
| 2020-09-30T19:05:11
| 2020-09-30T19:05:11
| 300,028,993
| 0
| 0
| null | 2020-09-30T19:03:47
| 2020-09-30T19:03:47
| null |
UTF-8
|
Python
| false
| false
| 815
|
py
|
'''
Project Euler 17: count the letters used when the numbers 1 through 1000
are written out in English words ("and" included, per British usage;
spaces and hyphens are not counted).
'''
# Letter counts of the atomic words: 1-19, the tens, "hundred", "thousand".
letters = {1: 3, 2: 3, 3: 5, 4: 4, 5: 4, 6: 3, 7: 5, 8: 5, 9: 4, 10: 3,
           11: 6, 12: 6, 13: 8, 14: 8, 15: 7, 16: 7, 17: 9, 18: 8, 19: 8,
           20: 6, 30: 6, 40: 5, 50: 5, 60: 5, 70: 7, 80: 6, 90: 6,
           100: 7, 1000: 8}


def count_letters(n):
    """Return the number of letters in the English spelling of n (1..1000)."""
    if n == 1000:
        return 11  # "one" (3) + "thousand" (8); do NOT fall through below
    total = 0
    hundreds, rem = divmod(n, 100)
    if hundreds:
        total += letters[hundreds] + letters[100]  # e.g. "three hundred"
        if rem:
            total += 3  # "and" only when something follows the hundreds
    if rem:
        if rem < 20:
            # teens are irregular words: look them up directly instead of
            # decomposing into tens + units (14 is "fourteen", 8 letters)
            total += letters[rem]
        else:
            tens, units = divmod(rem, 10)
            total += letters[tens * 10]
            if units:
                total += letters[units]
    return total


sums = sum(count_letters(i) for i in range(1, 1001))
print(sums)
|
[
"abdulapopoola@gmail.com"
] |
abdulapopoola@gmail.com
|
29e3ba05a591d264cfda6485ad4b17677323be16
|
4fff448b20b92a929e105448f0ff01ffda5d3073
|
/GrabadoraDeVoz/Python/expandContractions.py
|
6fa43d58201b09bb8440d890b9bcbcb177092256
|
[] |
no_license
|
wsebastiangroves/SampleWork
|
9869b6417f3a2ac5e2d4114fd12d96191e6da9b9
|
b324e88effc35dbbf339c5356505d355d6ae368c
|
refs/heads/master
| 2020-04-28T07:09:42.669086
| 2020-03-10T16:40:59
| 2020-03-10T16:40:59
| 175,082,142
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,451
|
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Source: http://stackoverflow.com/questions/19790188/expanding-english-language-contractions-in-python
"""
import re
# Mapping of English contractions to their expanded forms.
cList = {
    "ain't": "am not",
    "aren't": "are not",
    "can't": "cannot",
    "can't've": "cannot have",
    "'cause": "because",
    "could've": "could have",
    "couldn't": "could not",
    "couldn't've": "could not have",
    "didn't": "did not",
    "doesn't": "does not",
    "don't": "do not",
    "hadn't": "had not",
    "hadn't've": "had not have",
    "hasn't": "has not",
    "haven't": "have not",
    "he'd": "he would",
    "he'd've": "he would have",
    "he'll": "he will",
    "he'll've": "he will have",
    "he's": "he is",
    "how'd": "how did",
    "how'd'y": "how do you",
    "how'll": "how will",
    "how's": "how is",
    "I'd": "I would",
    "I'd've": "I would have",
    "I'll": "I will",
    "I'll've": "I will have",
    "I'm": "I am",
    "I've": "I have",
    "isn't": "is not",
    "it'd": "it had",
    "it'd've": "it would have",
    "it'll": "it will",
    "it'll've": "it will have",
    "it's": "it is",
    "let's": "let us",
    "ma'am": "madam",
    "mayn't": "may not",
    "might've": "might have",
    "mightn't": "might not",
    "mightn't've": "might not have",
    "must've": "must have",
    "mustn't": "must not",
    "mustn't've": "must not have",
    "needn't": "need not",
    "needn't've": "need not have",
    "o'clock": "of the clock",
    "oughtn't": "ought not",
    "oughtn't've": "ought not have",
    "shan't": "shall not",
    "sha'n't": "shall not",
    "shan't've": "shall not have",
    "she'd": "she would",
    "she'd've": "she would have",
    "she'll": "she will",
    "she'll've": "she will have",
    "she's": "she is",
    "should've": "should have",
    "shouldn't": "should not",
    "shouldn't've": "should not have",
    "so've": "so have",
    "so's": "so is",
    "that'd": "that would",
    "that'd've": "that would have",
    "that's": "that is",
    "there'd": "there had",
    "there'd've": "there would have",
    "there's": "there is",
    "they'd": "they would",
    "they'd've": "they would have",
    "they'll": "they will",
    "they'll've": "they will have",
    "they're": "they are",
    "they've": "they have",
    "to've": "to have",
    "wasn't": "was not",
    "we'd": "we had",
    "we'd've": "we would have",
    "we'll": "we will",
    "we'll've": "we will have",
    "we're": "we are",
    "we've": "we have",
    "weren't": "were not",
    "what'll": "what will",
    "what'll've": "what will have",
    "what're": "what are",
    "what's": "what is",
    "what've": "what have",
    "when's": "when is",
    "when've": "when have",
    "where'd": "where did",
    "where's": "where is",
    "where've": "where have",
    "who'll": "who will",
    "who'll've": "who will have",
    "who's": "who is",
    "who've": "who have",
    "why's": "why is",
    "why've": "why have",
    "will've": "will have",
    "won't": "will not",
    "won't've": "will not have",
    "would've": "would have",
    "wouldn't": "would not",
    "wouldn't've": "would not have",
    "y'all": "you all",
    "y'alls": "you alls",
    "y'all'd": "you all would",
    "y'all'd've": "you all would have",
    "y'all're": "you all are",
    "y'all've": "you all have",
    "you'd": "you had",
    "you'd've": "you would have",
    "you'll": "you will",          # fixed typo: was "you you will"
    "you'll've": "you will have",  # fixed typo: was "you you will have"
    "you're": "you are",
    "you've": "you have"
}
# Build a single alternation of every contraction.  Keys are sorted longest
# first: regex alternation takes the first matching branch, so with plain
# insertion order the prefix key "can't" used to win over "can't've" and
# leave a dangling "'ve" in the output.
c_re = re.compile('(%s)' % '|'.join(sorted(cList.keys(), key=len, reverse=True)))
def expandContractions(text, c_re=c_re):
    """Return *text* with every contraction replaced by its cList expansion.

    The compiled pattern is bound as a default argument so callers may
    supply an alternative pattern without touching the module global.
    """
    return c_re.sub(lambda match: cList[match.group(0)], text)
|
[
"wsebastiangroves@gmail.com"
] |
wsebastiangroves@gmail.com
|
4453fb58e33a80b6a1510a8e4e5c633e06b4cdc2
|
e36985669a2b068dfb3e43b7f5870dc114bb158b
|
/python_code/dataExtraction.py
|
7722d25b7d06ff6e71446c9ef08cf4b970e527d8
|
[] |
no_license
|
assassint2017/Data-extraction-UI
|
b3f0f43dc48e12c0da158bdb4a7c2c9dd5d92ab5
|
d7e1b97100ad97b334f03b0fbf09c2a506339b1c
|
refs/heads/master
| 2020-04-11T06:18:50.417214
| 2018-12-21T12:38:47
| 2018-12-21T12:38:47
| 161,577,841
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,732
|
py
|
"""
数据提取代码
"""
import datetime
import pandas as pd
from numpy import nan
#-------------------------需要设置的部分-------------------------------
# 设定csv文件路径 路径中不要出现中文
# csvDir = 'C:\\Users\\14595\\Desktop\\2018HB example .csv.gz'
# 设定提取csv文件路径 路径中不要出现中文
# extDir = 'C:\\Users\\14595\\Desktop\\ext.csv'
# 各哨点数量汇总文件路径 路径中不要出现中文
# summaryDir = 'C:\\Users\\14595\\Desktop\\summary.csv'
# 设定时间区间
# start = pd.Timestamp(datetime.date(year=2018, month=1, day=1))
# end = pd.Timestamp(datetime.date(year=2018, month=5, day=30))
# 设定选定的地区
# locs = [42010200, 42050300, 42050600]
#---------------------------------------------------------------------
def dataExtraction(csvDir, extDir, summaryDir, start, end, locs):
    """Filter a gzipped, GBK-encoded case-report CSV and write two outputs.

    Parameters
    ----------
    csvDir : str      path of the gzipped source csv (GBK encoded)
    extDir : str      path where the filtered rows are written
    summaryDir : str  path where the per-hospital case-count summary is written
    start, end :      pd.Timestamp bounds on the diagnosis-date column
                      (both may be None to skip date filtering)
    locs : list       region codes to keep (may be None to skip region filtering)
    """
    # Read the source csv (paths must not contain non-ASCII characters).
    csv = pd.read_csv(csvDir, compression='gzip', encoding='gbk')
    # Normalize the diagnosis-date column ("诊断时间") to datetimes.
    csv['诊断时间'] = pd.to_datetime(csv['诊断时间'], format='%Y/%m/%d')
    # Filter by whichever criteria were supplied; .copy() detaches the
    # result so the strip-assignment below does not hit a view.
    if start is None and end is None:  # region codes only
        csv = csv[csv['报告单位地区编码'].isin(locs)].copy()
    elif locs is None:  # date range only
        csv = csv[(csv['诊断时间'] >= start) & (csv['诊断时间'] <= end)].copy()
    else:  # both date range and region codes
        csv = csv[(csv['诊断时间'] >= start) & (csv['诊断时间'] <= end) & (csv['报告单位地区编码'].isin(locs))].copy()
    # Save the extracted rows.
    csv.to_csv(extDir, index=False, encoding='gbk')

    def removeSpace(item):
        """Strip spaces accidentally typed into the hospital-name field."""
        return item.strip()

    # BUG FIX: the stripped values were previously discarded (bare
    # .apply(...) with no assignment), so the same hospital with and
    # without a stray space was counted as two different hospitals.
    csv['录卡用户所属单位'] = csv['录卡用户所属单位'].apply(removeSpace)
    # Cases per reporting hospital (Series.value_counts replaces the
    # deprecated module-level pd.value_counts).
    temp = csv['录卡用户所属单位'].value_counts()
    codes = []
    for hospital in list(temp.index):
        # Region code taken from the first row reported by this hospital.
        index = csv[csv['录卡用户所属单位'] == hospital].index.tolist()[0]
        codes.append(csv['报告单位地区编码'][index])
    summary = pd.DataFrame()
    summary['报告单位地区编码'] = codes
    summary['报告单位'] = list(temp.index)
    summary['病例数'] = temp.values
    summary.sort_values(by=['报告单位地区编码'], inplace=True)
    summary.reset_index(drop=True, inplace=True)
    # Blank out consecutive duplicate region codes so each code is printed
    # only once in the report.
    nanlist = []
    for i in range(1, len(summary['报告单位地区编码'])):
        if summary.loc[i, '报告单位地区编码'] == summary.loc[i - 1, '报告单位地区编码']:
            nanlist.append(i)
    for i in nanlist:
        summary.loc[i, '报告单位地区编码'] = nan
    summary.to_csv(summaryDir, index=False, encoding='gbk')
|
[
"noreply@github.com"
] |
noreply@github.com
|
3c3353fd4690ad220cbbd644ee0bf66566894884
|
dbd65739cd4303679c4d81726982ba7b557c812c
|
/backend/objs/ReminderEntry.py
|
298ca6241a0a27a9ad5ebb744cf24719118b2728
|
[] |
no_license
|
hnjitlh/SITE_2
|
680f49c9288aff3e52485b6f7a9487370e82ba8e
|
02a654813bd118d78df29b2b78e37fe72c145772
|
refs/heads/master
| 2022-03-19T12:26:17.669016
| 2019-11-10T03:35:36
| 2019-11-10T03:35:36
| 220,728,996
| 0
| 0
| null | 2022-02-13T11:50:35
| 2019-11-10T02:02:54
|
HTML
|
UTF-8
|
Python
| false
| false
| 542
|
py
|
from app import db
from datetime import datetime
from .Messages import Message
class ReminderEntry(db.Model):
    """A scheduled reminder row tied to a user and, optionally, a message."""

    # BUG FIX: the column constructor is `db.Column` (capital C).  The
    # lowercase `db.column` builds a lightweight column *expression* for ad
    # hoc SQL and does not define mapped table columns, so the model's
    # attributes were never created as real columns.
    entry_id = db.Column(db.Integer, primary_key=True)
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False)
    # msg_id of the associated Message row; optional.
    message = db.Column(db.Integer, db.ForeignKey('message.msg_id'), nullable=True)
    time = db.Column(db.DateTime)

    def get_message(self):
        """Return the Message this entry points at, or None if unset/missing."""
        return Message.query.filter_by(msg_id=self.message).first()

    def find_date_diff(self):
        """Whole days elapsed between `time` and now (naive UTC)."""
        return (datetime.utcnow() - self.time).days
|
[
"berry64@outlook.com"
] |
berry64@outlook.com
|
99bb440e3d91a657af83b6b5699a5675b2c46f7c
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03136/s297842517.py
|
a2a6230496234027046d6691748a5f445af9dd64
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 163
|
py
|
def can_form_polygon(sides):
    """Polygon inequality: a (possibly degenerate) N-gon exists iff the
    longest side is strictly shorter than the sum of all the others.
    Returns "Yes" or "No"."""
    ordered = sorted(sides)
    return "Yes" if sum(ordered[:-1]) > ordered[-1] else "No"


if __name__ == '__main__':
    # Input format: first line N, second line N side lengths.
    # (The original also allocated b = [0]*n and immediately overwrote it
    # with sorted(a) -- the dead store is removed here.)
    n = int(input())
    a = list(map(int, input().split()))
    print(can_form_polygon(a))
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
62fa5b544c8111890d1b4fd3779bb0e5afef0918
|
5e83d62064ea4fd954820960306fb06cc8f0f391
|
/ecommerce2/settings/__init__.py
|
53cfe2179c4f250cc57fedca627640353c668e53
|
[] |
no_license
|
bharatkumarrathod/cfe_ecommerce2_RESTapi
|
eff2fad0cbff7cb3def2c13de282b085aba7291d
|
a081cdbf10c1fbde58e128b9c9b287443c726071
|
refs/heads/master
| 2020-12-25T21:43:44.166109
| 2015-10-27T21:04:19
| 2015-10-27T21:04:19
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 123
|
py
|
from .base import *

# Layered settings: optional environment modules override the base defaults
# when present.  The original used bare `except:`, which silently swallowed
# *every* error -- including SyntaxError or runtime errors inside local.py /
# production.py -- hiding real configuration bugs.  Only a missing module is
# a legitimate reason to skip the override.
try:
    from .local import *
except ImportError:
    pass
try:
    from .production import *
except ImportError:
    pass
|
[
"carlofusiello@gmail.com"
] |
carlofusiello@gmail.com
|
371ee8cb4b4f7e37636a6fbfe01b1f1ba8180744
|
f8b5aafac15f408a48fabf853a918015c927e6fe
|
/bk_tomo/venv/venv27/bin/openstack
|
ef4239b2369d1cd6ac9e4daa1bf696a84ace7ec5
|
[] |
no_license
|
to30/tmp
|
bda1ac0ca3fc61e96c2a1c491367b698d7e97937
|
ec809683970af6787728c2c41f161f416155982a
|
refs/heads/master
| 2021-01-01T04:25:52.040770
| 2016-05-13T16:34:59
| 2016-05-13T16:34:59
| 58,756,087
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 243
|
#!/home/tomo/venv/venv27/bin/python2.7
# -*- coding: utf-8 -*-
# Auto-generated setuptools console-script shim for the `openstack` CLI
# entry point (openstackclient.shell:main). Do not edit by hand.
import re
import sys
from openstackclient.shell import main
if __name__ == '__main__':
    # Normalize argv[0]: strip a trailing "-script.pyw" or ".exe" (artifacts
    # of Windows entry-point wrappers) so the CLI reports a clean program name.
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    # Exit with whatever status the openstack client returns.
    sys.exit(main())
|
[
"tomonaga@mx2.mesh.ne.jp"
] |
tomonaga@mx2.mesh.ne.jp
|
|
d141035695650deff0fc75c74efaa0d6e66cd50c
|
b1c3529a652041dfb4a5b9d7684695f1e3cb8148
|
/settings/settings_50cm_open_4K.py
|
0f4dc7c1b6d4092502901684794f75535adce7d6
|
[] |
no_license
|
ksyoung/load_and_sensitivity
|
bc51ef7c146f652fa4b567414ed215d30af2d3cc
|
aa291c8d25a7c23ba79327718c53a34d15b350ab
|
refs/heads/master
| 2021-09-22T08:31:30.527291
| 2021-09-15T20:51:15
| 2021-09-15T20:51:15
| 99,721,664
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 15,934
|
py
|
# -*- coding: utf-8 -*-
import os
import sys
from pylab import sqrt
from pywtl.core.wtl_ConvertUtils import convert_squid
import pywtl.common.analysis.noise.analysis.NoisePred as NP
import pywtl.common.analysis.noise.analysis.ParameterLib as PL
import numpy as np
class Class(object):
    """Generic attribute bag: every keyword argument becomes an attribute."""

    def __init__(self, **kwargs):
        for name, value in kwargs.items():
            setattr(self, name, value)

    def __repr__(self):
        # Show the class name followed by the current attribute dict.
        return "%s(%s)" % (self.__class__.__name__, str(self.__dict__))
settings = Class()
# Run time paramters
settings.freq = 'All_GHz'
settings.version = '50cm_open'
settings.name = '50cm_open'
settings.verbose = True #
# Telescope/ Receiver Optical Parameters
settings.mult_bands = True
#settings.band = [133.,167.] # GHz, lower, upper.
#settings.band = np.array(settings.band)*1e9 # Hz
settings.aperture_radius = 0.25 # aperture radius in meters (2.5 meter primary = 1.25m radius)
settings.f_number = 1.5 #
settings.edge_db = 10 # edge taper on primary mirror in dB. May later be calculated from pixel sizes.
settings.dB_scan = False#True # to scan or not to scan on dB.
# NOTE: the linspace value below is immediately replaced by the explicit
# list that follows; it is kept only to show the intended generator.
settings.dB_array = np.linspace(.1,30,20)#[10.,15.,20] # array to scan over.
settings.dB_array = [ 0.1000, 0.13501524, 0.18228761, 0.24611127,
                      0.33228126, 0.44862162, 0.60569578, 0.81776571,
                      1.10408687, 1.49065655, 2.01257438, 2.7172293 ,
                      3.66860233, 4.95307593, 6.68727732, 9.02866797,
                      12.18984071, 16.45782268, 22.22013673, 30.0]
settings.mission_length = 4 # years
settings.sky_area = 41253 # deg (full sky)
# pixel size calculation parameters
settings.MCP = True # assumes MCPs and finds pixel diameters, edge tapers, counts, etc.
# if this is False then all bands have same edge taper, which is defined above.
settings.diameter_to_waist_ratio = 2.95 #from Toki's thesis.
settings.use_edge_dB = True ## this means calculate pixel sizes from edge taper given earlier.
settings.use_D_px = False ## use D_px from bands.csv import
if settings.use_edge_dB and settings.use_D_px:
    # print() calls (not Python-2 print statements) so this file also
    # parses under Python 3; identical output under Python 2.
    print('\n use_edge_dB and use_D_px can\'t both be True!!\n\n')
    sys.exit()
settings.calc_N_px_by_area_csv = False # if true uses FP sizes by band from csv.
settings.calc_N_px_rough = False # if true, estimates how many detectors would fit per band.
# if False, uses N_px from bands.csv input file
settings.x_fp_diameter = 1.20 #
settings.y_fp_diameter = 1.0 #
if settings.calc_N_px_by_area_csv and settings.calc_N_px_rough:
    print('\n calc_N_px_by_area and calc_N_px_rough can\'t both be True!!\n\n')
    sys.exit()
settings.calc_correlated_noise = False#True # if true then full focal plane calculations
# include correlated bunching photon white noise.
# Bolo parameters
settings.t_bath = 0.100 # Kelvin
settings.safety_factor = 2. # Unitless, ratio of p_sat to p_opt
settings.n = 2.0 # thermal power law exponent (EBEX was ~2)
settings.bolo_Rn = 1.33 # Ohms. TES resistance warm.
settings.bias_point = 0.75 # depth in transition assumed
settings.bolo_resistance = settings.bolo_Rn*settings.bias_point # Ohms
## old readout noise method
settings.readout_noise_amps = 7e-12 # Amps*rt(sec), number from Franky for scaling readout noise.
# More bolo parameters for franky's noise code.
settings.conv = convert_squid('Normalized_16ch')
settings.prediction_type = 'theoretical'
# Containers filled piecemeal below; attached to `settings` at the end of
# this section ("copying those into settings").
dfmux_settings = {}
squid_settings = {}
bolo_char = {}
settings.noise_type = "transition"
# boost factors for noise.
settings.johnson_and_readout_factor = None # use un-modified noise theory.
# Readout-chain resistor/temperature network used by the noise prediction.
settings.system = {"T_warm_electronics": 300, # temperature of the warm electronics for Johnson noise
                   "T_squid_board": 4.2, # temperature of the SQUID board
                   "R_sq_x28": 20., # gain select from 1st stage SQUID ctrl
                   "R_sq_x25": 69800., # feedback resistor for 1st stage SQUID ctrl
                   "R_sq_x23": 10., # voltage divider after 1st stage SQUID ctrl (10 for EBEX, 20 for ETC[nominal])
                   "R_sq_x24": 121., # voltage divider after 1st stage SQUID ctrl
                   "R_sq_x21": 500., # feedback resistor for 2nd stage SQUID ctrl
                   "R_sq_x14": 100., # gain select #1 from 2nd stage SQUID ctrl
                   "R_sq_x15": 82.5, # gain select #2 from 2nd stage SQUID ctrl
                   "R_sq_x17": 50., # impedance matching to mezz
                   "R_sq_x000": 200., # voltage divider for bias resistor
                   "R_sq_x001": 100., # voltage divider for nuller
                   "R_sq_x44": 820., # current converters for nullers (4 of them)
                   "R_sq_11": 50., # current converting resistors for bias
                   "RA_me_13": 50., # impedance matching to SQUID ctrl
                   "RA_me_18": 200., # feedback resistor for demod 1st stage amplifier
                   "RA_me_Y": 10000., # Variable resistor for demod gain (2nd stage)
                   "RA_me_33": 50., # 50 ohm resistors in series with cariable resistor for demod gain (2nd stage)
                   "RA_me_32": 10000., # feedback resistor for demod 2nd stage amplifier
                   "RA_me_40": 50., # series R for votage divider before ADC (one per line)
                   "RA_me_41": 100., # middle R for votage divider before ADC
                   "RD_me_10": 50., # voltage divider by the carrier/nuller DAC (2 of these)
                   "RD_me_14": 100., # voltage divider by the carrier/nuller DAC (1 of these)
                   "RD_me_4": 200., # feedback resistor for 1st stage carrier/nuller
                   "RD_me_12": 200., # "shorting" resistor for 1st stage carrier
                   "RD_me_2": 50., # between 1st and 2nd stage of carrier/nuller (2 of these)
                   "RD_me_CX": 200., # gain selecting resistor for 2nd stage carrier ##############################
                   "RD_me_NX": 200., # gain selecting resistor for 2nd stage nuller ###############################
                   "RD_me_8": 1000., # feedback resistor for 2nd stage carrier/nuller
                   "RD_me_13": 50., # impedance matching to SQUID ctrl carrier/nuller
                   "R_bolo_termination": 50., # termination resistor in parallel to the bolos
                   "R_fl_bias": 50000., # resistor converting the flux bias
                   "R_bias": 0.03, # bias resistor
                   "R_sq": 100., # SQUID impedance
                   "C_ll": 1e-9, # C of the lead-lag filter
                   "Rw": 10., # R from thre wires, contributing to the lead-lag filter
                   "Zt": 500., # SQUID transimpedance
                   "G_digital": 2., # digital gain
                   "N_channel": 16, # Mux factor
                   }
### changes by Franky to lower readout noise, 9/22/17
# (the assignments below deliberately override the defaults set above)
#settings.optimize_gains = False
settings.optimize_gains = True
#settings.system["R_sq_x44"] = 820. # current converters for nullers (4 of them)
settings.system["R_sq_x44"] = 1640. # current converters for nullers (4 of them)
#settings.system["R_bias"] = 0.03 # bias resistor
settings.system["R_bias"] = 0.015 # bias resistor
settings.system["Zt"] = 750. # SQUID transimpedance
# DfMUX general setup
dfmux_settings['DAN_firmware'] = True
dfmux_settings['DAN_parser'] = False
dfmux_settings['bitshift'] = 8 # can be 8 for 24 bit.
dfmux_settings['fir'] = 6
dfmux_settings['fsamp'] = 25e6/2**(11+dfmux_settings['fir'])
# DAC/ADC settings
dfmux_settings['fc'] = 500000.
dfmux_settings['Gc'] = 1
dfmux_settings['fn'] = dfmux_settings['fc']
dfmux_settings['Gn'] = 0
dfmux_settings['fd'] = dfmux_settings['fc']
dfmux_settings['Gd'] = 0
# SQUID/cryostat settings
squid_settings['R_FB'] = 5000.
##calced in code # bolo_char['nu'] = 150e9
##calced in code # bolo_char['dnu'] = 34e9
bolo_char['Zt'] = settings.system["Zt"]
bolo_char['L_fll'] = PL.LoopGain(bolo_char['Zt'])
settings.R_wire = 10. # is warm wire, squid board to squid controller.
bolo_char['Tbath'] = settings.t_bath
# bolometer characteristics
##calced in code # bolo_char['Tc'] = 0.42625
##calced in code # bolo_char['Tbolo'] = bolo_char['Tc']
bolo_char['Rn'] = settings.bolo_Rn
bolo_char['R'] = settings.bolo_resistance
bolo_char['tau_etf'] = 0.010
##calced in code # bolo_char['Popt'] = .124127e-12
bolo_char['L'] = 25. ##### something reasonable, but could be any number.
bolo_char['xi'] = 1 ## assume all correlation noise.
# other bolometer characteristics
##calced in code # bolo_char['Psat'] = 2.5 * bolo_char['Popt']
bolo_char['n'] = settings.n
settings.cryo = 'EBEX'
# derived values from Psat, Tc, Tbath and n
# (the "##calced in code" lines below document quantities the analysis code
# computes at run time; they are intentionally left commented out here)
##calced in code bolo_char['Gbar'] = bolo_char['Psat']/(bolo_char['Tc']-bolo_char['Tbath'])
##calced in code bolo_char['G'] = PL.G_dyn(bolo_char['Gbar'], bolo_char['Tbath'], bolo_char['Tc'], bolo_char['n'])
##calced in code bolo_char['gamma'] = round(PL.CalcGamma(bolo_char['Tc'], bolo_char['Tbath'], bolo_char['n']), 3)
# derived DfMUX settings
##calced in code dfmux_settings['Vb'] = bolo_char['R'] * (bolo_char['Gbar']*(bolo_char['Tbolo']-bolo_char['Tbath']) - bolo_char['Popt'])
##calced in code dfmux_settings['Vb'] = sqrt(dfmux_settings['Vb'])
##calced in code dfmux_settings['Ac'] = dfmux_settings['Vb'] / conv.DDStoVbias(Carrier_amplitude=1,
#                                                                              Carrier_gain=dfmux_settings['Gc'],
#                                                                              firmware_version='16ch')
##calced in code R_gain = [2000., 820., 200., 0.]
##calced in code dfmux_settings['An'] = dfmux_settings['Ac']/3. * \
#                (R_gain[dfmux_settings['Gn']] + 100.) / (R_gain[dfmux_settings['Gc']] + 100.)
##calced in code bolo_char['Si'] = NP.Si(dfmux_settings['Vb']) # assumes deep in transition
# set a value to have noise in counts or Kcmb (from franky's code)
settings.A_per_count = None
settings.Kcmb_per_cnt = None
# copying those into settings
settings.dfmux_settings = dfmux_settings
settings.squid_settings = squid_settings
settings.bolo_char = bolo_char
# Paths
settings.base_path = '/home/astro/kyoung/Documents/load_and_sensitivity/'
settings.elements_path = os.path.join(settings.base_path,
                                      'inputs/50cm_open_dragone.csv') # all telescope surfaces, lenses, etc.
# now being defined in code.
#settings.elements_out_path = os.path.join(settings.base_path,
#                                          'outputs/%s_%s_elements_out.csv ' %(settings.freq, settings.version)) # data that gets saved.
settings.bands_path = os.path.join(settings.base_path,
                                   'inputs/CMBP_bands.csv') # csv of bands.
settings.FP_areas_path = os.path.join(settings.base_path,
                                      'inputs/FP_areas_%s.csv' %settings.version) # csv of FP areas.
# unneeded stuff below this line.
# NOTE: the bare triple-quoted string below is an inert expression kept only
# as a reference copy of an older EBEX10K settings block -- none of it runs.
'''
# Run time paramters
settings.name = '150'
settings.version = 'greg_f3'
settings.design = '1m_EBEX10K_f3_2016'
settings.do_point_source_analysis = False
settings.footer = True
# Telescope/ Receiver Optical Parameters
settings.frequency = 150.0 # Ghz
settings.frequency_range = (133, 167.0, 150.)
settings.bandwidth = settings.frequency_range[1] - settings.frequency_range[0] # Ghz
# Optics Parameters
settings.diameter_to_waist_ratio = 2.95 #
# Pixel Parameters
settings.pixel_diameter_step = 0.1
settings.pixel_diameter_range = [settings.pixel_diameter_step, 10 + settings.pixel_diameter_step] # in mm 0 to 20 mm
settings.default_pixel_diameter_m = 0.0042 # In meters
settings.default_pixel_diameter_mm = settings.default_pixel_diameter_m * 1000 # In millimeters
settings.lens_loss_tangent = 9e-5
settings.lens_index = 3.2
settings.lens_thickness = 0.050 # In meters
#Fmux parameters, Readout Noise Contribution
settings.readout_contribution = 0.10 # readout noise should increasee noise by 10%
settings.tes_accuracy = 1.15 # TES resistance value across wafer +/- 15%
settings.do_fmux = True # 1 to calculate fMUx params such as L value, cross talk etc
settings.fmux_max_freq = 1.0e6 # maximum frequency for fMux readout
settings.fmux_max_mux = 36 # mux factor
settings.capacitor_tan_d = 0.0002 # expected tand for interdigitated capacitor
settings.cross_talk_level = 0.01 # allowed cross-talk level
settings.capacitor_accuracy = 0.005 # fractional accuracy of capacitor
settings.f_delta_factor = 1.25 # factor to increase frequency spacing look at crosstalk_off to decide
settings.esr_contribution = 0.10 # allowed fractional ESR contribution to total R (10% = 0.01)
settings.readout_noise = 7e-12 # readout noise in A*sqrt(s)
settings.v_bias = 1e-6 #4 microVolt voltage bias
settings.nep_readout_fixed = 9.3e-18 # in Watts/root(Hz) this number from Franky July 7th, noise email to Shaul.
# Computatonal Parameters
settings.spatial_integration_accuracy = 10000 # numerical integration accuracy. ex: 100 splits integration space in 100 rectangles.
settings.frequency_bin_size = 0.384 # GHz
settings.integration_time = 0.5 # seconds
#Bolometer Parameters
settings.num_bolo_legs = 4
settings.bath_temp = 0.275 # Kelvin
settings.bolo_R_normal = 1.333333333 # Ohms. TES resistance warm.
settings.bias_fraction_rnormal = 0.75
settings.bolo_resistance = settings.bolo_R_normal*settings.bias_fraction_rnormal # Ohms
settings.thermal_carrier_exponent = 3.0
settings.a_over_l_bolo_leg = 149.2*1e-3*1e-12 # From Toki's script
settings.alpha = 250 # Measure for AlTi R v T curve d(logR)/dT
settings.tau_safety_factor = 5.8 # How much slower than readout bolo should be
#Design parameters/Goals
settings.psat_to_optical_power = 2.5 # Unitless
settings.bias_fraction_rnormal = 0.6
settings.target_num_pixels = 924
settings.num_sub_arrays = 6
settings.max_focal_plane_diameter_x = 0.25 # in m
settings.max_focal_plane_diameter_y = 0.25 # in m
settings.max_num_pixels = 10000
#settings.max_focal_plane_diameter = FOV_deg*(np.pi/180)*2.0*settings.aperture_radius*settings.f_number
settings.hex_outer_radius = 0.096 # in m
#Observing Parameters
settings.fract_sky = 0.5
settings.obs_time = 10 #flight time in days.
settings.obs_efficiency =.8 #percent of flight time when the telescope is acutally observing.
# Input Paths
settings.base_path = '/home/astro/kyoung/Documents/35cm-xdragone/Mapping_speed_code_py'
settings.atmospheric_windows_data_file = os.path.join(settings.base_path, 'input_data_products', 'LDB_34km_30deg_el.out') # Atmospheric Windows
settings.receiver_throughput_data_path = os.path.join(settings.base_path, 'input_data_products',
                                                      '1m_EBEX10K_2016_Throughput_%sGHz.csv' % str(int(settings.frequency))) # Aperture Illumination
#Aperture Output paths
settings.effective_aperture_data_path = os.path.join(settings.base_path,
                                                     'output/effective_aperture',
                                                     '1m_EBEX10K_f3_2016_Effective_Aperture_Output_%s_GHz_%s.dat' %(str(int(settings.frequency)),
                                                     settings.version)) # Aperture Illumination
settings.effective_aperture_png_path = os.path.join(settings.base_path,
                                                    'output/effective_aperture',
                                                    '1m_EBEX10K_f3_2016_Effective_Aperture_Output_%s_GHz_%s.png' %(str(int(settings.frequency)),
                                                    settings.version)) # Aperture Illumination
#Output Paths
settings.base_path = '/home/astro/kyoung/Documents/35cm-xdragone/Mapping_speed_code_py/output/mapping_speed/EBEX10K_f3_2016_gregorian'
settings.mapping_speed_output_png_path = os.path.join(settings.base_path, '1m_EBEX10K_f3_2016_mapping_speed_%sGHz_%s' % (str(int(settings.frequency)), settings.version))
settings.mapping_speed_output_data_path = os.path.join(settings.base_path, 'mapping_speed_%sGHz_%s.csv' % (str(int(settings.frequency)), settings.version))
settings.num_pixels_output_png_path = os.path.join(settings.base_path, 'num_pixels_%sGHz_%s.png' % (str(int(settings.frequency)), settings.version))
settings.throughput_path_out = os.path.join(settings.base_path, 'throughput_%sGHz_%s.csv' % (str(int(settings.frequency)), settings.version))
'''
|
[
"kyoung@astro.umn.edu"
] |
kyoung@astro.umn.edu
|
d10e49aa112d5e0cc239ab1fe98b7aa0083f8b76
|
40d5799cc031d234a1b35b7f57cd9784d01ab090
|
/manage.py
|
f5f8b98b330815659267578a77ecc90c6e757a1d
|
[] |
no_license
|
ramsaicharan/movielisting
|
bc7641c3b3df4a79cefa7647f798466ce0a302e1
|
76c84acb1f84a4c0e09db74a2643ea31104c4cfb
|
refs/heads/master
| 2020-06-25T13:49:06.216843
| 2020-06-03T06:31:43
| 2020-06-03T06:31:43
| 199,326,884
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 551
|
py
|
#!/usr/bin/env python
import os
import sys
if __name__ == '__main__':
    # Standard Django management entry point for the "imdb" project.
    # Point Django at the settings module unless the caller already set it.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'imdb.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        # Re-raise with a friendlier hint; `from exc` keeps the original
        # traceback chained for debugging.
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    # Dispatch to the requested management command (runserver, migrate, ...).
    execute_from_command_line(sys.argv)
|
[
"noreply@github.com"
] |
noreply@github.com
|
a182c56ebe7dd48855684d7cf5c5f4884bd9b717
|
9e4290b8e96c260852fd9b0e624835e7e24e422d
|
/0007.py
|
ed7dc336e0ca9463dabb9bc3d6f2a9af8c511509
|
[] |
no_license
|
emojipeach/euler_problems_python
|
3d7b2bbb70b3f8b3e155751ca1f343417a2172fe
|
39d0e1969ea9ac9f466845cab4e255efa2f41bd9
|
refs/heads/master
| 2020-03-21T23:19:46.185326
| 2018-07-05T21:45:39
| 2018-07-05T21:45:39
| 139,180,546
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 520
|
py
|
def is_prime(test_number):
    """Trial-division primality test; 1 is not prime, 2 and 3 are."""
    if test_number == 1:
        return False
    if test_number < 4:
        return True  # covers 2 and 3
    if test_number % 2 == 0:
        return False
    # Only odd divisors up to sqrt(n) need checking.
    limit = int(test_number ** (1 / 2)) + 1
    return all(test_number % divisor != 0 for divisor in range(3, limit, 2))
def next_prime(primes):
    """Append the first prime above max(primes) to the list, in place.

    At most 100000 candidates are examined; if none is prime (cannot
    happen at the scales used here) the list is left unchanged.
    """
    start = max(primes)
    for candidate in range(start + 1, start + 100000):
        if is_prime(candidate):
            primes.append(candidate)
            return
def nth_prime(n):
    """Grow the global `primes` list until it holds n primes, then print
    the largest (i.e. the n-th prime)."""
    while len(primes) < n:
        next_prime(primes)
    print(max(primes))
# Seed list of known primes; nth_prime()/next_prime() grow it in place.
primes = [2, 3, 5]
# Project Euler problem 7: print the 10001st prime.
nth_prime(10001)
|
[
"37852121+emojipeach@users.noreply.github.com"
] |
37852121+emojipeach@users.noreply.github.com
|
fc04e12dcdbbafa967a840973bd3e33969b3becb
|
54b7bae79af992c149c644c21a8fa09313841449
|
/Demo/SVM/svmMLiA.py
|
1f86b194b77772aa9bc3a153a8814b0e320d8405
|
[
"Apache-2.0"
] |
permissive
|
ViatorSun/GitChat_CNN
|
75f9542364c9a51a467d69625cc219e8f2a6795d
|
d2f16eb2d108afa58ab31e09956424af22d96c47
|
refs/heads/master
| 2021-07-11T15:09:03.579803
| 2019-01-22T16:00:55
| 2019-01-22T16:00:55
| 143,283,383
| 5
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 16,182
|
py
|
# !/usr/bin/env python
# -*- coding:utf-8 -*-
# @Time : 2018.
# @Author : 绿色羽毛
# @Email : lvseyumao@foxmail.com
# @Blog : https://blog.csdn.net/ViatorSun
# @Note : 功能实现
from numpy import *
# from time import sleep
def loadDataSet(fileName):
    """Load a tab-separated two-feature data file.

    Each line is "x1<TAB>x2<TAB>label".  Returns (dataMat, labelMat) where
    dataMat is a list of [x1, x2] float pairs and labelMat the float labels.
    """
    dataMat = []; labelMat = []
    # `with` closes the handle even on a parse error -- the original opened
    # the file and never closed it (resource leak).
    with open(fileName) as fr:
        for line in fr.readlines():
            lineArr = line.strip().split('\t')
            dataMat.append([float(lineArr[0]), float(lineArr[1])])
            labelMat.append(float(lineArr[2]))
    return dataMat, labelMat
def selectJrand(i, m):
    """Return a random alpha index in [0, m) that differs from i."""
    while True:
        j = int(random.uniform(0, m))
        if j != i:
            return j
def clipAlpha(aj, H, L):
    """Clamp aj into [L, H]; the H cap is applied first, then the L floor,
    so L wins if the bounds ever cross (matching the original ordering)."""
    return max(min(aj, H), L)
def smoSimple(dataMatIn, classLabels, C, toler, maxIter):
    """Simplified SMO: optimize the SVM dual by picking random alpha pairs.

    dataMatIn: training samples (list/array, one row per sample)
    classLabels: +1/-1 labels
    C: box constraint, toler: KKT tolerance
    maxIter: stop after this many consecutive sweeps with no alpha change
    Returns (b, alphas).
    """
    dataMatrix = mat(dataMatIn); labelMat = mat(classLabels).transpose()
    b = 0; m,n = shape(dataMatrix)
    alphas = mat(zeros((m,1)))
    iter = 0  # counts consecutive full passes with no pair updated
    while (iter < maxIter):
        alphaPairsChanged = 0
        for i in range(m):
            # decision value f(x_i) with the current alphas and b
            fXi = float(multiply(alphas,labelMat).T*(dataMatrix*dataMatrix[i,:].T)) + b
            Ei = fXi - float(labelMat[i])#if checks if an example violates KKT conditions
            if ((labelMat[i]*Ei < -toler) and (alphas[i] < C)) or ((labelMat[i]*Ei > toler) and (alphas[i] > 0)):
                # pick a random partner index j and its error
                j = selectJrand(i,m)
                fXj = float(multiply(alphas,labelMat).T*(dataMatrix*dataMatrix[j,:].T)) + b
                Ej = fXj - float(labelMat[j])
                alphaIold = alphas[i].copy(); alphaJold = alphas[j].copy()
                # box bounds L, H for alpha_j depend on label agreement
                if (labelMat[i] != labelMat[j]):
                    L = max(0, alphas[j] - alphas[i])
                    H = min(C, C + alphas[j] - alphas[i])
                else:
                    L = max(0, alphas[j] + alphas[i] - C)
                    H = min(C, alphas[j] + alphas[i])
                if L==H: print("L==H"); continue
                # eta = 2*K_ij - K_ii - K_jj (second derivative along the pair)
                eta = 2.0 * dataMatrix[i,:]*dataMatrix[j,:].T - dataMatrix[i,:]*dataMatrix[i,:].T \
                      - dataMatrix[j,:]*dataMatrix[j,:].T
                if eta >= 0: print("eta>=0"); continue
                alphas[j] -= labelMat[j]*(Ei - Ej)/eta
                alphas[j] = clipAlpha(alphas[j],H,L)
                if (abs(alphas[j] - alphaJold) < 0.00001): print("j not moving enough"); continue
                alphas[i] += labelMat[j]*labelMat[i]*(alphaJold - alphas[j])#update i by the same amount as j
                                                                           #the update is in the opposite direction
                # recompute b from whichever alpha remains strictly inside (0, C)
                b1 = b - Ei- labelMat[i]*(alphas[i]-alphaIold)*dataMatrix[i,:]*dataMatrix[i,:].T \
                     - labelMat[j]*(alphas[j]-alphaJold)*dataMatrix[i,:]*dataMatrix[j,:].T
                b2 = b - Ej- labelMat[i]*(alphas[i]-alphaIold)*dataMatrix[i,:]*dataMatrix[j,:].T \
                     - labelMat[j]*(alphas[j]-alphaJold)*dataMatrix[j,:]*dataMatrix[j,:].T
                if (0 < alphas[i]) and (C > alphas[i]): b = b1
                elif (0 < alphas[j]) and (C > alphas[j]): b = b2
                else: b = (b1 + b2)/2.0
                alphaPairsChanged += 1
                print("iter: %d i:%d, pairs changed %d" % (iter,i,alphaPairsChanged))
        # only count a pass toward convergence if nothing moved
        if (alphaPairsChanged == 0): iter += 1
        else: iter = 0
        print("iteration number: %d" % iter)
    return b,alphas
def kernelTrans(X, A, kTup): #calc the kernel or transform data to a higher dimensional space
    """Evaluate the kernel between every row of X and the single row A.

    kTup is ('lin', 0) for a linear kernel or ('rbf', sigma) for a Gaussian
    RBF; any other kernel name raises NameError.
    Returns an m x 1 column matrix of kernel values.
    """
    m,n = shape(X)
    K = mat(zeros((m,1)))
    if kTup[0]=='lin': K = X * A.T #linear kernel
    elif kTup[0]=='rbf':
        # squared Euclidean distance of each row to A, then Gaussian
        for j in range(m):
            deltaRow = X[j,:] - A
            K[j] = deltaRow*deltaRow.T
        K = exp(K/(-1*kTup[1]**2)) #divide in NumPy is element-wise not matrix like Matlab
    else: raise NameError('Houston We Have a Problem -- \
    That Kernel is not recognized')
    return K
class optStruct:
    """Bundles the data, parameters and working state used by kernelised Platt SMO."""
    def __init__(self, dataMatIn, classLabels, C, toler, kTup):
        """Store inputs, allocate alphas / error cache, and precompute the kernel matrix."""
        self.X = dataMatIn
        self.labelMat = classLabels
        self.C = C
        self.tol = toler
        self.m = shape(dataMatIn)[0]
        self.alphas = mat(zeros((self.m, 1)))
        self.b = 0
        # eCache column 0 is a validity flag, column 1 holds the cached error
        self.eCache = mat(zeros((self.m, 2)))
        # kernel matrix: column `col` holds k(X, x_col) for every training row
        self.K = mat(zeros((self.m, self.m)))
        for col in range(self.m):
            self.K[:, col] = kernelTrans(self.X, self.X[col, :], kTup)
def calcEk(oS, k):
    """Prediction error for example k: f(x_k) - y_k (kernel version)."""
    fXk = float(multiply(oS.alphas, oS.labelMat).T * oS.K[:, k] + oS.b)
    return fXk - float(oS.labelMat[k])
def selectJ(i, oS, Ei):
    """Second-alpha heuristic: pick k maximising |Ei - Ek| over valid cache entries.

    Falls back to a random index on the first pass, when the error cache
    holds at most one valid entry.  Returns (j, Ej).
    """
    oS.eCache[i] = [1, Ei]  # mark Ei valid before scanning the cache
    validEcacheList = nonzero(oS.eCache[:, 0].A)[0]
    if len(validEcacheList) <= 1:
        # no usable cache yet -> random second choice
        j = selectJrand(i, oS.m)
        return j, calcEk(oS, j)
    maxK, maxDeltaE, Ej = -1, 0, 0
    for k in validEcacheList:
        if k == i:
            continue  # skip i itself: delta would be zero
        Ek = calcEk(oS, k)
        deltaE = abs(Ei - Ek)
        if deltaE > maxDeltaE:
            maxK, maxDeltaE, Ej = k, deltaE, Ek
    return maxK, Ej
def updateEk(oS, k):
    """After any alpha has changed, store its fresh error in the cache as valid."""
    oS.eCache[k] = [1, calcEk(oS, k)]
def innerL(i, oS):
    """Platt SMO inner loop (kernel version): try to optimise alpha i.

    Returns 1 if a pair of alphas changed, 0 otherwise.
    """
    Ei = calcEk(oS, i)
    # only proceed if example i violates its KKT condition by more than tol
    if ((oS.labelMat[i]*Ei < -oS.tol) and (oS.alphas[i] < oS.C)) or ((oS.labelMat[i]*Ei > oS.tol) and (oS.alphas[i] > 0)):
        j,Ej = selectJ(i, oS, Ei) #this has been changed from selectJrand
        alphaIold = oS.alphas[i].copy(); alphaJold = oS.alphas[j].copy()
        # L and H clip alpha_j so the pair stays in the feasible box
        if (oS.labelMat[i] != oS.labelMat[j]):
            L = max(0, oS.alphas[j] - oS.alphas[i])
            H = min(oS.C, oS.C + oS.alphas[j] - oS.alphas[i])
        else:
            L = max(0, oS.alphas[j] + oS.alphas[i] - oS.C)
            H = min(oS.C, oS.alphas[j] + oS.alphas[i])
        if L==H: print("L==H"); return 0
        eta = 2.0 * oS.K[i,j] - oS.K[i,i] - oS.K[j,j] #changed for kernel
        if eta >= 0: print("eta>=0"); return 0
        oS.alphas[j] -= oS.labelMat[j]*(Ei - Ej)/eta
        oS.alphas[j] = clipAlpha(oS.alphas[j],H,L)
        updateEk(oS, j) #added this for the Ecache
        if (abs(oS.alphas[j] - alphaJold) < 0.00001): print("j not moving enough"); return 0
        oS.alphas[i] += oS.labelMat[j]*oS.labelMat[i]*(alphaJold - oS.alphas[j]) #update i by the same amount as j
        updateEk(oS, i) #added this for the Ecache; the update is in the opposite direction
        # candidate thresholds from each updated alpha
        b1 = oS.b - Ei- oS.labelMat[i]*(oS.alphas[i]-alphaIold)*oS.K[i,i] - oS.labelMat[j]*(oS.alphas[j]-alphaJold)*oS.K[i,j]
        b2 = oS.b - Ej- oS.labelMat[i]*(oS.alphas[i]-alphaIold)*oS.K[i,j]- oS.labelMat[j]*(oS.alphas[j]-alphaJold)*oS.K[j,j]
        if (0 < oS.alphas[i]) and (oS.C > oS.alphas[i]): oS.b = b1
        elif (0 < oS.alphas[j]) and (oS.C > oS.alphas[j]): oS.b = b2
        else: oS.b = (b1 + b2)/2.0
        return 1
    else: return 0
def smoP(dataMatIn, classLabels, C, toler, maxIter,kTup=('lin', 0)): #full Platt SMO
    """Full Platt SMO with kernels and an error cache.

    Alternates full passes over all alphas with passes over the non-bound
    alphas (0 < alpha < C) until no pair changes or maxIter is reached.
    Returns (b, alphas).
    """
    oS = optStruct(mat(dataMatIn),mat(classLabels).transpose(),C,toler, kTup)
    iter = 0
    entireSet = True; alphaPairsChanged = 0
    while (iter < maxIter) and ((alphaPairsChanged > 0) or (entireSet)):
        alphaPairsChanged = 0
        if entireSet: #go over all
            for i in range(oS.m):
                alphaPairsChanged += innerL(i,oS)
                print("fullSet, iter: %d i:%d, pairs changed %d" % (iter,i,alphaPairsChanged))
            iter += 1
        else:#go over non-bound (railed) alphas
            nonBoundIs = nonzero((oS.alphas.A > 0) * (oS.alphas.A < C))[0]
            for i in nonBoundIs:
                alphaPairsChanged += innerL(i,oS)
                print("non-bound, iter: %d i:%d, pairs changed %d" % (iter,i,alphaPairsChanged))
            iter += 1
        if entireSet: entireSet = False #toggle entire set loop
        elif (alphaPairsChanged == 0): entireSet = True
        print("iteration number: %d" % iter)
    return oS.b,oS.alphas
def calcWs(alphas, dataArr, classLabels):
    """Recover the primal weight vector: w = sum_i alpha_i * y_i * x_i."""
    X = mat(dataArr)
    labelMat = mat(classLabels).transpose()
    m, n = shape(X)
    w = zeros((n, 1))
    for row in range(m):
        w += multiply(alphas[row] * labelMat[row], X[row, :].T)
    return w
def testRbf(k1=1.3):
    """Train and evaluate an RBF-kernel SVM on the testSetRBF data files.

    k1 is the RBF width (sigma).  Reads 'testSetRBF.txt' and
    'testSetRBF2.txt' from the working directory; prints the support
    vector count and the training/test error rates.
    """
    dataArr,labelArr = loadDataSet('testSetRBF.txt')
    b,alphas = smoP(dataArr, labelArr, 200, 0.0001, 10000, ('rbf', k1)) #C=200 important
    datMat=mat(dataArr); labelMat = mat(labelArr).transpose()
    svInd=nonzero(alphas.A>0)[0]
    sVs=datMat[svInd] #get matrix of only support vectors
    labelSV = labelMat[svInd]
    print("there are %d Support Vectors" % shape(sVs)[0])
    m,n = shape(datMat)
    errorCount = 0
    for i in range(m):
        # classify with support vectors only: f(x) = sum a_i y_i k(sv_i, x) + b
        kernelEval = kernelTrans(sVs,datMat[i,:],('rbf', k1))
        predict=kernelEval.T * multiply(labelSV,alphas[svInd]) + b
        if sign(predict)!=sign(labelArr[i]): errorCount += 1
    print("the training error rate is: %f" % (float(errorCount)/m))
    # repeat classification on the held-out test file
    dataArr,labelArr = loadDataSet('testSetRBF2.txt')
    errorCount = 0
    datMat=mat(dataArr)
    labelMat = mat(labelArr).transpose()
    m,n = shape(datMat)
    for i in range(m):
        kernelEval = kernelTrans(sVs,datMat[i,:],('rbf', k1))
        predict=kernelEval.T * multiply(labelSV,alphas[svInd]) + b
        if sign(predict)!=sign(labelArr[i]): errorCount += 1
    print("the test error rate is: %f" % (float(errorCount)/m))
def img2vector(filename):
    """Read a 32x32 text image of digit characters into a 1x1024 row vector.

    filename -- path to a text file whose first 32 lines each start with
    at least 32 digit characters ('0'/'1' in the digits dataset).
    Returns a numpy array of shape (1, 1024).
    Raises IOError/OSError if the file cannot be opened, ValueError if a
    character is not a digit.
    """
    returnVect = zeros((1, 1024))
    # 'with' guarantees the handle is closed; the original leaked it
    with open(filename) as fr:
        for i in range(32):
            lineStr = fr.readline()
            for j in range(32):
                returnVect[0, 32 * i + j] = int(lineStr[j])
    return returnVect
def loadImages(dirName):
    """Load every 32x32 digit file in dirName into a feature matrix.

    File names look like '<digit>_<sample>.txt'; digit 9 is labelled -1
    and every other digit +1 (binary problem).
    Returns (trainingMat, hwLabels).
    """
    from os import listdir
    fileList = listdir(dirName) #load the training set
    hwLabels = []
    trainingMat = zeros((len(fileList), 1024))
    for idx, fileNameStr in enumerate(fileList):
        classNumStr = int(fileNameStr.split('.')[0].split('_')[0])
        hwLabels.append(-1 if classNumStr == 9 else 1)
        trainingMat[idx, :] = img2vector('%s/%s' % (dirName, fileNameStr))
    return trainingMat, hwLabels
def testDigits(kTup=('rbf', 10)):
    """Train and evaluate a kernel SVM on the handwritten-digit image dirs.

    kTup selects the kernel, e.g. ('rbf', sigma) or ('lin', 0).  Reads
    the 'trainingDigits' and 'testDigits' directories from the working
    directory; prints support-vector count and error rates.
    """
    dataArr,labelArr = loadImages('trainingDigits')
    b,alphas = smoP(dataArr, labelArr, 200, 0.0001, 10000, kTup)
    datMat=mat(dataArr); labelMat = mat(labelArr).transpose()
    svInd=nonzero(alphas.A>0)[0]
    sVs=datMat[svInd]
    labelSV = labelMat[svInd]
    print("there are %d Support Vectors" % shape(sVs)[0])
    m,n = shape(datMat)
    errorCount = 0
    for i in range(m):
        # classify with support vectors only: f(x) = sum a_i y_i k(sv_i, x) + b
        kernelEval = kernelTrans(sVs,datMat[i,:],kTup)
        predict=kernelEval.T * multiply(labelSV,alphas[svInd]) + b
        if sign(predict)!=sign(labelArr[i]): errorCount += 1
    print("the training error rate is: %f" % (float(errorCount)/m))
    # repeat on the held-out test directory
    dataArr,labelArr = loadImages('testDigits')
    errorCount = 0
    datMat=mat(dataArr);
    labelMat = mat(labelArr).transpose()
    m,n = shape(datMat)
    for i in range(m):
        kernelEval = kernelTrans(sVs,datMat[i,:],kTup)
        predict=kernelEval.T * multiply(labelSV,alphas[svInd]) + b
        if sign(predict)!=sign(labelArr[i]): errorCount += 1
    print("the test error rate is: %f" % (float(errorCount)/m))
""" Non-Kernel VErsions below """
class optStructK:
    """Working state for the non-kernel (linear) SMO implementation."""
    def __init__(self, dataMatIn, classLabels, C, toler):
        """Store the inputs and allocate the alpha vector and error cache."""
        self.X = dataMatIn
        self.labelMat = classLabels
        self.C = C
        self.tol = toler
        self.m = shape(dataMatIn)[0]
        self.alphas = mat(zeros((self.m, 1)))
        self.b = 0
        # eCache column 0 is a validity flag, column 1 the cached error
        self.eCache = mat(zeros((self.m, 2)))
def calcEkK(oS, k):
    """Prediction error for example k: f(x_k) - y_k (linear, no kernel)."""
    fXk = float(multiply(oS.alphas, oS.labelMat).T * (oS.X * oS.X[k, :].T)) + oS.b
    return fXk - float(oS.labelMat[k])
def selectJK(i, oS, Ei): #this is the second choice -heurstic, and calcs Ej
    """Second-alpha heuristic for the non-kernel SMO: maximise |Ei - Ej|.

    Bug fix: the original called calcEk, the kernelised version that reads
    oS.K; optStructK carries no kernel matrix, so calcEkK must be used.
    Returns (j, Ej); falls back to a random j when the error cache holds
    at most one valid entry.
    """
    maxK = -1; maxDeltaE = 0; Ej = 0
    oS.eCache[i] = [1,Ei] #set valid before scanning the cache
    validEcacheList = nonzero(oS.eCache[:,0].A)[0]
    if (len(validEcacheList)) > 1:
        for k in validEcacheList: #find the entry that maximizes delta E
            if k == i: continue #don't calc for i, waste of time
            Ek = calcEkK(oS, k)
            deltaE = abs(Ei - Ek)
            if (deltaE > maxDeltaE):
                maxK = k; maxDeltaE = deltaE; Ej = Ek
        return maxK, Ej
    else: #first time around: no valid eCache values, pick at random
        j = selectJrand(i, oS.m)
        Ej = calcEkK(oS, j)
        return j, Ej
def updateEkK(oS, k):#after any alpha has changed update the new value in the cache
    """Refresh the cached error for alpha k and mark it valid.

    Bug fix: the original called the kernelised calcEk (reads oS.K, which
    optStructK does not have); this non-kernel version must use calcEkK.
    """
    Ek = calcEkK(oS, k)
    oS.eCache[k] = [1,Ek]
def innerLK(i, oS):
    """Non-kernel Platt SMO inner loop: try to optimise alpha i.

    Bug fix: the original dispatched to the kernelised helpers
    (calcEk / selectJ / updateEk), which read oS.K; optStructK has no
    kernel matrix, so the K-suffixed variants are used instead.
    Returns 1 if a pair of alphas changed, 0 otherwise.
    """
    Ei = calcEkK(oS, i)
    # only proceed if example i violates its KKT condition by more than tol
    if ((oS.labelMat[i]*Ei < -oS.tol) and (oS.alphas[i] < oS.C)) or ((oS.labelMat[i]*Ei > oS.tol) and (oS.alphas[i] > 0)):
        j,Ej = selectJK(i, oS, Ei)
        alphaIold = oS.alphas[i].copy(); alphaJold = oS.alphas[j].copy()
        # L and H clip alpha_j so the pair stays in the feasible box
        if (oS.labelMat[i] != oS.labelMat[j]):
            L = max(0, oS.alphas[j] - oS.alphas[i])
            H = min(oS.C, oS.C + oS.alphas[j] - oS.alphas[i])
        else:
            L = max(0, oS.alphas[j] + oS.alphas[i] - oS.C)
            H = min(oS.C, oS.alphas[j] + oS.alphas[i])
        if L==H: print("L==H"); return 0
        # eta = -(x_i - x_j)^2 must be negative for a valid step
        eta = 2.0 * oS.X[i,:]*oS.X[j,:].T - oS.X[i,:]*oS.X[i,:].T - oS.X[j,:]*oS.X[j,:].T
        if eta >= 0: print("eta>=0"); return 0
        oS.alphas[j] -= oS.labelMat[j]*(Ei - Ej)/eta
        oS.alphas[j] = clipAlpha(oS.alphas[j],H,L)
        updateEkK(oS, j) #keep the error cache current
        if (abs(oS.alphas[j] - alphaJold) < 0.00001): print("j not moving enough"); return 0
        oS.alphas[i] += oS.labelMat[j]*oS.labelMat[i]*(alphaJold - oS.alphas[j])#update i by the same amount as j
        updateEkK(oS, i) #the update is in the opposite direction
        b1 = oS.b - Ei- oS.labelMat[i]*(oS.alphas[i]-alphaIold)*oS.X[i,:]*oS.X[i,:].T \
             - oS.labelMat[j]*(oS.alphas[j]-alphaJold)*oS.X[i,:]*oS.X[j,:].T
        b2 = oS.b - Ej- oS.labelMat[i]*(oS.alphas[i]-alphaIold)*oS.X[i,:]*oS.X[j,:].T \
             - oS.labelMat[j]*(oS.alphas[j]-alphaJold)*oS.X[j,:]*oS.X[j,:].T
        # keep b consistent with whichever alpha is strictly inside (0, C)
        if (0 < oS.alphas[i]) and (oS.C > oS.alphas[i]): oS.b = b1
        elif (0 < oS.alphas[j]) and (oS.C > oS.alphas[j]): oS.b = b2
        else: oS.b = (b1 + b2)/2.0
        return 1
    else: return 0
# full Platt SMO
def smoPK(dataMatIn, classLabels, C, toler, maxIter):
    """Full Platt SMO, linear (non-kernel) version.  Returns (b, alphas).

    Bug fixes: the original constructed optStruct with four arguments
    (optStruct.__init__ requires a fifth kTup argument, so the call raised
    TypeError) and dispatched to the kernelised innerL; it now builds
    optStructK and uses the non-kernel innerLK.
    """
    oS = optStructK(mat(dataMatIn),mat(classLabels).transpose(),C,toler)
    iter = 0
    entireSet = True; alphaPairsChanged = 0
    # alternate full passes with passes over non-bound alphas until stable
    while (iter < maxIter) and ((alphaPairsChanged > 0) or (entireSet)):
        alphaPairsChanged = 0
        # go over all
        if entireSet:
            for i in range(oS.m):
                alphaPairsChanged += innerLK(i,oS)
                print("fullSet, iter: %d i:%d, pairs changed %d" % (iter,i,alphaPairsChanged))
            iter += 1
        # go over non-bound (railed) alphas
        else:
            nonBoundIs = nonzero((oS.alphas.A > 0) * (oS.alphas.A < C))[0]
            for i in nonBoundIs:
                alphaPairsChanged += innerLK(i,oS)
                print("non-bound, iter: %d i:%d, pairs changed %d" % (iter,i,alphaPairsChanged))
            iter += 1
        # toggle entire set loop
        if entireSet:
            entireSet = False
        elif (alphaPairsChanged == 0):
            entireSet = True
        print("iteration number: %d" % iter)
    return oS.b,oS.alphas
|
[
"noreply@github.com"
] |
noreply@github.com
|
917f801689c9521ae3cd9db9f38c779b8466c316
|
d695adfe54f6019ecaf3e0ad741391654c348666
|
/introduction to requests.py
|
d6ba2c9e05da2890600a46d9baecdb146a18adf1
|
[] |
no_license
|
yangmiaohong/Python-02_codes
|
98a048334479b89c877a5d53a515bd03759cdcf4
|
80be2800acdf0ea9493cd91f9a177bb22daf7d15
|
refs/heads/master
| 2020-06-17T02:11:36.572815
| 2016-11-28T14:58:45
| 2016-11-28T14:58:45
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,675
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Sat Nov 12 21:21:16 2016
@author: chen
"""
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import urllib
import requests
#---------------------------------------------------------------
#Make a request
#get method -- fetches a live listings page (network I/O); `r` is reused
#by the "Response Content" section further down
r = requests.get('http://esf.xm.fang.com/house/i32/')
#post method
'''
headers_2 = {
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
'Accept-Encoding': 'gzip, deflate',
'Accept-Language': 'zh-CN,zh;q=0.8',
'Cache-Control': 'max-age=0',
'Connection': 'keep-alive',
'Content-Type': 'application/x-www-form-urlencoded',
# 'cookie':cookie,
'Host': 'www.landchina.com',
'Origin': 'http://www.landchina.com',
'Referer': 'http://www.landchina.com/default.aspx?tabid=263&wmguid=75c72564-ffd9-426a-954b-8ac2df0903b7&p=9f2c3acd-0256-4da2-a659-6949c4671a2a%3A'+str(self.start_time)+'~'+str(self.end_time),
'Upgrade-Insecure-Requests': '1',
'User-Agent':'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.21 Safari/537.36',
#'http':proxy,
}
data = {
'VIEWSTATE': '/wEPDwUJNjkzNzgyNTU4D2QWAmYPZBYIZg9kFgICAQ9kFgJmDxYCHgdWaXNpYmxlaGQCAQ9kFgICAQ8WAh4Fc3R5bGUFIEJBQ0tHUk9VTkQtQ09MT1I6I2YzZjVmNztDT0xPUjo7ZAICD2QWAgIBD2QWAmYPZBYCZg9kFgJmD2QWBGYPZBYCZg9kFgJmD2QWAmYPZBYCZg9kFgJmDxYEHwEFIENPTE9SOiNEM0QzRDM7QkFDS0dST1VORC1DT0xPUjo7HwBoFgJmD2QWAgIBD2QWAmYPDxYCHgRUZXh0ZWRkAgEPZBYCZg9kFgJmD2QWAmYPZBYEZg9kFgJmDxYEHwEFhwFDT0xPUjojRDNEM0QzO0JBQ0tHUk9VTkQtQ09MT1I6O0JBQ0tHUk9VTkQtSU1BR0U6dXJsKGh0dHA6Ly93d3cubGFuZGNoaW5hLmNvbS9Vc2VyL2RlZmF1bHQvVXBsb2FkL3N5c0ZyYW1lSW1nL3hfdGRzY3dfc3lfamhnZ18wMDAuZ2lmKTseBmhlaWdodAUBMxYCZg9kFgICAQ9kFgJmDw8WAh8CZWRkAgIPZBYCZg9kFgJmD2QWAmYPZBYCZg9kFgJmD2QWAmYPZBYEZg9kFgJmDxYEHwEFIENPTE9SOiNEM0QzRDM7QkFDS0dST1VORC1DT0xPUjo7HwBoFgJmD2QWAgIBD2QWAmYPDxYCHwJlZGQCAg9kFgJmD2QWBGYPZBYCZg9kFgJmD2QWAmYPZBYCZg9kFgJmD2QWAmYPFgQfAQUgQ09MT1I6I0QzRDNEMztCQUNLR1JPVU5ELUNPTE9SOjsfAGgWAmYPZBYCAgEPZBYCZg8PFgIfAmVkZAICD2QWBGYPZBYCZg9kFgJmD2QWAmYPZBYCAgEPZBYCZg8WBB8BBYYBQ09MT1I6I0QzRDNEMztCQUNLR1JPVU5ELUNPTE9SOjtCQUNLR1JPVU5ELUlNQUdFOnVybChodHRwOi8vd3d3LmxhbmRjaGluYS5jb20vVXNlci9kZWZhdWx0L1VwbG9hZC9zeXNGcmFtZUltZy94X3Rkc2N3X3p5X2pnZ2dfMDEuZ2lmKTsfAwUCNDYWAmYPZBYCAgEPZBYCZg8PFgIfAmVkZAIBD2QWAmYPZBYCZg9kFgJmD2QWAgIBD2QWAmYPFgQfAQUgQ09MT1I6I0QzRDNEMztCQUNLR1JPVU5ELUNPTE9SOjsfAGgWAmYPZBYCAgEPZBYCZg8PFgIfAmVkZAIDD2QWAgIDDxYEHglpbm5lcmh0bWwFtwY8cCBhbGlnbj0iY2VudGVyIj48c3BhbiBzdHlsZT0iZm9udC1zaXplOiB4LXNtYWxsIj4mbmJzcDs8YnIgLz4NCiZuYnNwOzxhIHRhcmdldD0iX3NlbGYiIGhyZWY9Imh0dHA6Ly93d3cubGFuZGNoaW5hLmNvbS8iPjxpbWcgYm9yZGVyPSIwIiBhbHQ9IiIgd2lkdGg9IjI2MCIgaGVpZ2h0PSI2MSIgc3JjPSIvVXNlci9kZWZhdWx0L1VwbG9hZC9mY2svaW1hZ2UvdGRzY3dfbG9nZS5wbmciIC8+PC9hPiZuYnNwOzxiciAvPg0KJm5ic3A7PHNwYW4gc3R5bGU9ImNvbG9yOiAjZmZmZmZmIj5Db3B5cmlnaHQgMjAwOC0yMDE0IERSQ25ldC4gQWxsIFJpZ2h0cyBSZXNlcnZlZCZuYnNwOyZuYnNwOyZuYnNwOyA8c2NyaXB0IHR5cGU9InRleHQvamF2YXNjcmlwdCI+DQp2YXIgX2JkaG1Qcm90b2NvbCA9ICgoImh0dHBzOiIgPT0gZG9jdW1lbnQubG9jYXRpb24ucHJvdG9jb2wpID8gIiBodHRwczovLyIgOiAiIGh0dHA6Ly8iKTsNCmRvY3VtZW50LndyaXRlKHVuZXNjYXBlKCIlM0NzY3JpcHQgc3JjPSciICsgX2JkaG1Qcm90b2NvbC
ArICJobS5iYWlkdS5jb20vaC5qcyUzRjgzODUzODU5YzcyNDdjNWIwM2I1Mjc4OTQ2MjJkM2ZhJyB0eXBlPSd0ZXh0L2phdmFzY3JpcHQnJTNFJTNDL3NjcmlwdCUzRSIpKTsNCjwvc2NyaXB0PiZuYnNwOzxiciAvPg0K54mI5p2D5omA5pyJJm5ic3A7IOS4reWbveWcn+WcsOW4guWcuue9kTxiciAvPg0K5aSH5qGI5Y+3OiDkuqxJQ1DlpIcwOTA3NDk5MuWPtyDkuqzlhaznvZHlronlpIcxMTAxMDIwMDA2NjYoMikmbmJzcDs8YnIgLz4NCjwvc3Bhbj4mbmJzcDsmbmJzcDsmbmJzcDs8YnIgLz4NCiZuYnNwOzwvc3Bhbj48L3A+HwEFZEJBQ0tHUk9VTkQtSU1BR0U6dXJsKGh0dHA6Ly93d3cubGFuZGNoaW5hLmNvbS9Vc2VyL2RlZmF1bHQvVXBsb2FkL3N5c0ZyYW1lSW1nL3hfdGRzY3cyMDEzX3l3XzEuanBnKTtkZFgrT4ZXzyk2fvKb+ZQdNgDE7amPUgf1dsAbA0tQEzbS',
'__EVENTVALIDATION': '/wEWAgKNgPHpAgLN3cj/BMeqdKR8EqyZqeFW25/wiD3Dqo+sG7dks/liloBmr6j/',
'hidComName': 'default',
'TAB_QueryConditionItem': '9f2c3acd-0256-4da2-a659-6949c4671a2a',
'TAB_QuerySortItemList': '282:False',
'TAB_QuerySubmitConditionData': '9f2c3acd-0256-4da2-a659-6949c4671a2a:'+str(self.start_time)+'~'+str(self.end_time),
'TAB_RowButtonActionControl': '',
'TAB_QuerySubmitPagerData': str(self.page),
'TAB_QuerySubmitSortData': ''
}
requests.post(self.url, data, headers=headers_2,cookies=self.cookie)
'''
#---------------------------------------------------------------
#Response Content: raw byte length vs decoded text length, plus the
#encoding requests detected for the earlier GET
print len(r.content)
print len(r.text)
print r.encoding
#-----------------------------------------------------------------
#Passing Parameters In URLs: requests encodes the dict into the query string
parameters= {'keyword': 'T-shirt', 'enc': 'utf-8',"cid3":"1349"}
p = requests.get("http://search.jd.com/search", params=parameters)
print (p.url)
#-----------------------------------------------------------------
#parsing JSON: geocode a place name with the QQ Maps web API
#(Python 2: urllib.unquote; the key parameter is left empty here)
location = '厦门大学经济学院'
url_1 = 'http://apis.map.qq.com/ws/geocoder/v1/?address='
url_2 = urllib.unquote(location)
url_3 = '&key='
url_4 = ''
url = ''.join([url_1,url_2,url_3,url_4])
print requests.get(url).content
print requests.get(url).json()
print type(requests.get(url).json())
print requests.get(url).json()['result']['title']
# NOTE(review): eval() on a network response executes arbitrary code;
# the .json() call above is the safe way to parse this payload.
l = eval(requests.get(url).content)
print type(l)
print l['result']['title']
#-----------------------------------------------------------------
#Headers,Proxies,timeout: a browser-like header set that can be passed
#to requests.get; Host/Origin/Referer/cookie entries are left commented
#out because they are site-specific
headers = {
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
'Accept-Encoding': 'gzip, deflate',
'Accept-Language': 'zh-CN,zh;q=0.8',
'Cache-Control': 'max-age=0',
'Connection': 'keep-alive',
'Content-Type': 'application/x-www-form-urlencoded',
# 'cookie':cookie,
#'Host': '',
#'Origin': '',
#'Referer': ',
'Upgrade-Insecure-Requests': '1',
'User-Agent':'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.21 Safari/537.36',
}
#example calls showing headers / timeout / proxies (left disabled)
#q = requests.get('http://esf.xm.fang.com/house/i32/',headers=headers,timeout=10)
#q = requests.get('http://esf.xm.fang.com/house/i32/',headers=headers,timeout=10,proxies={})
#-----------------------------------------------------------------
#Session
#s = requests.session()
#login_data={"account":"","password":""}
#res=s.post("http://mail.163.com/",login_data)
#print res.status_code
#print res.content
#print res.headers
#print res.cookies
#print res.json()
|
[
"noreply@github.com"
] |
noreply@github.com
|
dd4e74a77ce8690082c0f3839317d11c0ef60a3b
|
b73071a23f22e025f2c723654160e1cc5722cb4b
|
/cuadruplos.py
|
d68c9f384365369ae4c68b53e8ee8b094fdb4a41
|
[] |
no_license
|
omarcarreon/drawmycode
|
d6c381910906381d5a56209b23b4a22965fd6945
|
aa62bf0f3caec411c876ff4a041afd328d025f2b
|
refs/heads/master
| 2021-05-03T06:01:05.084952
| 2016-05-01T23:38:39
| 2016-05-01T23:38:39
| 53,284,924
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 20,538
|
py
|
from structs import *
from cubosemantico import *
from tablas import *
from dmcparser import *
import sys
# Working stacks for the quadruple generator (Stack comes from structs)
pilaO = Stack()  # operand stack (virtual addresses)
pOper = Stack()  # operator stack
pTipos = Stack()  # type stack, parallel to pilaO
pSaltos = Stack()  # pending-jump stack used for backpatching
pEjecucion = Stack()  # call bookkeeping (quadruple index at each GOSUB)
pDimensionada = Stack()  # [array id, current dimension] while indexing arrays
# Starts at index 0
cuadruplos = []
# Starts at index 0
contSaltos = 0  # index of the next quadruple to be generated
actualAccessDIM = 1  # dimension currently being indexed (1 or 2)
actualAccessMatrix = {}  # dimension-table entry of the array being indexed
actualIDDim = None  # address of the array currently being indexed
actualDirBaseMatrix = None  # base address of the array currently being indexed
class Cuadruplo:
    """A quadruple: operator, left/right operands and a result slot."""
    def __init__(self, operador, operandoIzq, operandoDer, temp):
        """Record the four components of one intermediate-code instruction."""
        self.res = temp
        self.op = operador
        self.opdoIzq = operandoIzq
        self.opdoDer = operandoDer
'''
===================================================
Inserta estructura Cuadruplo en lista de cuadruplos
===================================================
'''
def push_cuadruplo(cuadruplo):
    """Append a quadruple to the program and advance the jump counter."""
    global cuadruplos, contSaltos
    cuadruplos.append(cuadruplo)
    contSaltos += 1
def goto_main_quad():
    """Emit the initial GOTO (patched later to main's first quadruple)."""
    global pSaltos
    push_cuadruplo(Cuadruplo("GOTO", "", "", ""))
    pSaltos.push(0)  # remember quad 0 for backpatching
def iniciaMain():
    """Backpatch the initial GOTO so it jumps to main's first quadruple."""
    global pSaltos
    cuadruplos[pSaltos.pop()].res = contSaltos
'''
============================================
1. Meter direccion y tipo del ID en pilaO
============================================
'''
def exp_1(dirvar, tipo):
    """Neuralgic point 1: push the operand's address and its type."""
    pilaO.push(dirvar)
    pTipos.push(tipo)
'''
============================================
2. Meter * o / en pOper
============================================
'''
def exp_2(product_division):
    """Neuralgic point 2: push '*' or '/' onto the operator stack."""
    pOper.push(product_division)
'''
============================================
3. Meter + o - en pOper
============================================
'''
def exp_3(plus_minus):
    """Neuralgic point 3: push '+' or '-' onto the operator stack."""
    pOper.push(plus_minus)
'''
============================================
4. Si top(pOper) == '*' o '/'
============================================
'''
def exp_4():
    """Neuralgic point 4: if '*' or '/' is pending, emit its quadruple.

    Pops both operands and their types, validates against the semantic
    cube, stores the result in a fresh temp and pushes it back.
    """
    if pOper.isEmpty() or pOper.peek() not in ('*', '/'):
        return
    op = pOper.pop()
    opdoDer = pilaO.pop()
    tipoDer = pTipos.pop()
    opdoIzq = pilaO.pop()
    tipoIzq = pTipos.pop()
    tipoRes = cuboSemantico[tipoIzq][tipoDer][op]
    if tipoRes == "Error":
        sys.exit("Error. Tipos Incompatibles.")
    temp = set_dir_temp(tipoRes)
    push_cuadruplo(Cuadruplo(op, opdoIzq, opdoDer, temp))
    pilaO.push(temp)
    pTipos.push(tipoRes)
'''
============================================
5. Si top(pOper) == '+' o '-'
============================================
'''
def exp_5():
    """Neuralgic point 5: if '+' or '-' is pending, emit its quadruple.

    Mirrors exp_4 for the additive operators: pop operands/types, check
    the semantic cube, emit into a fresh temp and push the result.
    """
    if pOper.isEmpty() or pOper.peek() not in ('+', '-'):
        return
    op = pOper.pop()
    opdoDer = pilaO.pop()
    tipoDer = pTipos.pop()
    opdoIzq = pilaO.pop()
    tipoIzq = pTipos.pop()
    tipoRes = cuboSemantico[tipoIzq][tipoDer][op]
    if tipoRes == "Error":
        sys.exit("Error. Tipos Incompatibles.")
    temp = set_dir_temp(tipoRes)
    push_cuadruplo(Cuadruplo(op, opdoIzq, opdoDer, temp))
    pilaO.push(temp)
    pTipos.push(tipoRes)
'''
============================================
6. Meter Fondo Falso en pOper
============================================
'''
def exp_6():
    """Neuralgic point 6: push a false bottom '[' onto the operator stack."""
    pOper.push('[')
'''
============================================
7. Sacar Fondo Falso
============================================
'''
def exp_7():
    """Neuralgic point 7: discard the false bottom from the operator stack."""
    pOper.pop()
'''
============================================
8. Meter AND/OR en pOper
============================================
'''
def exp_8(and_or):
    """Neuralgic point 8: push 'and'/'or' onto the operator stack."""
    pOper.push(and_or)
'''
=====================================================
9. Si top(pOper) es and o or , sacar and/or de pOper
=====================================================
'''
def exp_9():
    """Neuralgic point 9: resolve a pending 'and'/'or' operator.

    Pops operands/types, checks the semantic cube, emits the quadruple
    into a fresh temp and pushes the boolean result back.
    """
    if pOper.isEmpty() or pOper.peek() not in ('and', 'or'):
        return
    op = pOper.pop()
    opdoDer = pilaO.pop()
    tipoDer = pTipos.pop()
    opdoIzq = pilaO.pop()
    tipoIzq = pTipos.pop()
    tipoRes = cuboSemantico[tipoIzq][tipoDer][op]
    if tipoRes == "Error":
        sys.exit("Error. Tipos Incompatibles.")
    temp = set_dir_temp(tipoRes)
    push_cuadruplo(Cuadruplo(op, opdoIzq, opdoDer, temp))
    pTipos.push(tipoRes)
    pilaO.push(temp)
'''
============================================
10. Meter < <= > >= <> == en pOper
============================================
'''
def exp_10(oper_logic):
    """Neuralgic point 10: push a relational operator (< <= > >= <> ==)."""
    pOper.push(oper_logic)
'''
=====================================================
11. Si top(pOper) es < <= > >= <> == , sacar de pOper
====================================================
'''
def exp_11():
    """Neuralgic point 11: resolve a pending relational operator.

    Pops operands/types, checks the semantic cube, emits the comparison
    into a fresh temp and pushes the boolean result back.
    """
    if pOper.isEmpty() or pOper.peek() not in ('<', '<=', '>', '>=', '<>', '=='):
        return
    op = pOper.pop()
    opdoDer = pilaO.pop()
    tipoDer = pTipos.pop()
    opdoIzq = pilaO.pop()
    tipoIzq = pTipos.pop()
    tipoRes = cuboSemantico[tipoIzq][tipoDer][op]
    if tipoRes == "Error":
        sys.exit("Error. Tipos Incompatibles.")
    temp = set_dir_temp(tipoRes)
    push_cuadruplo(Cuadruplo(op, opdoIzq, opdoDer, temp))
    pTipos.push(tipoRes)
    pilaO.push(temp)
'''
============================================
12. Meter = en pOper
============================================
'''
def exp_12(asignOper):
    """Neuralgic point 12: push the assignment operator '='."""
    pOper.push(asignOper)
'''
============================================
13. Si top(pOper) es = , sacar = de pOper
============================================
'''
def exp_13():
    """Neuralgic point 13: if '=' is pending, emit the assignment quadruple.

    Pops the value and the target, validates assignment compatibility in
    the semantic cube, then emits '=' with the target in the result slot.
    """
    if pOper.isEmpty() or pOper.peek() != '=':
        return
    op = pOper.pop()
    opdoDer = pilaO.pop()
    tipoDer = pTipos.pop()
    res = pilaO.pop()
    tipoRes = pTipos.pop()
    if cuboSemantico[tipoRes][tipoDer][op] == "Error":
        sys.exit("Error. Tipos Incompatibles.")
    push_cuadruplo(Cuadruplo(op, opdoDer, None, res))
'''
============================================
Estatuto PRINT
============================================
'''
def estatuto_print():
    """Emit a PRINT quadruple for the expression on top of the operand stack."""
    res = pilaO.pop()
    pTipos.pop()  # type is discarded: PRINT accepts any type
    push_cuadruplo(Cuadruplo("PRINT", "", "", res))
'''
============================================
Estatuto IF 1
============================================
'''
def estatuto_if_1():
    """Start of an if: the condition must be bool; emit a pending GOTOF."""
    if pTipos.pop() != "bool":
        sys.exit("Error Semantico.")
    push_cuadruplo(Cuadruplo("GOTOF", pilaO.pop(), "", ""))
    pSaltos.push(contSaltos - 1)  # remember the GOTOF for backpatching
'''
============================================
Estatuto ELSE
============================================
'''
def estatuto_else():
    """Between if and else: emit a pending GOTO and backpatch the GOTOF here."""
    push_cuadruplo(Cuadruplo("GOTO", "", "", ""))
    quadFalso = pSaltos.pop()
    cuadruplos[quadFalso].res = contSaltos  # GOTOF lands on the else body
    pSaltos.push(contSaltos - 1)  # remember the GOTO for endif
'''
============================================
Estatuto ENDIF
============================================
'''
def estatuto_endif():
    """End of if/else: backpatch the pending jump to the next quadruple."""
    cuadruplos[pSaltos.pop()].res = contSaltos
'''
============================================
Estatuto WHILE 1
============================================
'''
def estatuto_while_1():
    """Start of a while: remember where the condition is evaluated."""
    pSaltos.push(contSaltos)
'''
============================================
Estatuto WHILE 2
============================================
'''
def estatuto_while_2():
    """While condition evaluated: must be bool; emit a pending GOTOF."""
    if pTipos.pop() != "bool":
        sys.exit("Error Semantico.")
    push_cuadruplo(Cuadruplo("GOTOF", pilaO.pop(), "", ""))
    pSaltos.push(contSaltos - 1)  # GOTOF to be patched at the loop end
'''
============================================
Estatuto WHILE 3
============================================
'''
def estatuto_while_3():
    """End of a while body: jump back to the condition, backpatch the GOTOF."""
    quadFalso = pSaltos.pop()
    retorno = pSaltos.pop()
    push_cuadruplo(Cuadruplo("GOTO", "", "", retorno))
    cuadruplos[quadFalso].res = contSaltos  # GOTOF exits past the GOTO
'''
=========================================================
Regresa numero del cuadruplo en el que inicia la funcion
=========================================================
'''
def altaInicioFunc():
    """Return the index of the quadruple where the current function starts."""
    return contSaltos
'''
=========================================================
Genera Accion Retorno cuando termina una funcion
=========================================================
'''
def generaAccionRetorno(funcActual):
    """End of a function: emit RET (except for main) and report temp usage.

    Returns a dict with the number of int/float/bool/string temporaries
    the function consumed.
    """
    totales = {
        'totalTempInts': get_Total_Temp_Int(),
        'totalTempFloats': get_Total_Temp_Float(),
        'totalTempBools': get_Total_Temp_Bool(),
        'totalTempStrings': get_Total_Temp_String(),
    }
    if funcActual != 'main':
        push_cuadruplo(Cuadruplo("RET", "", "", ""))
    return totales
'''
=========================================================
Genera Accion End al final de MAIN
=========================================================
'''
def generaAccionEndMain():
    """Emit the END quadruple that terminates main."""
    push_cuadruplo(Cuadruplo("END", "", "", ""))
'''
=========================================================
Estatuto Llamada Funcion 2
=========================================================
'''
def estatuto_llamadafunc_2(funcLlamada, tamMemoriaLocalLlamadaFunc):
    """Emit ERA to reserve the callee's activation record."""
    push_cuadruplo(Cuadruplo("ERA", tamMemoriaLocalLlamadaFunc, funcLlamada, ""))
'''
=========================================================
Estatuto Llamada Funcion 3
=========================================================
'''
def estatuto_llamadafunc_3(dirParamActual, tipoParamActual):
    """Pass one argument: its type must match the declared parameter."""
    argumento = pilaO.pop()
    tipoArgumento = pTipos.pop()
    if tipoArgumento != tipoParamActual:
        sys.exit('Error. Tipo de argumento y parametro no coinciden.')
    push_cuadruplo(Cuadruplo("PARAM", argumento, "", dirParamActual))
'''
=========================================================
Estatuto Llamada Funcion 6
=========================================================
'''
def estatuto_llamadafunc_6(funcLlamada,quadInicioFuncLlamada,tipoFuncLlamada,dirFuncLlamada):
    """Emit GOSUB; for non-void callees copy the return value into a temp."""
    pEjecucion.push(contSaltos)  # record where the call was made
    push_cuadruplo(Cuadruplo("GOSUB", funcLlamada, "", quadInicioFuncLlamada))
    if tipoFuncLlamada != 'void':
        # move the function's return slot into a fresh temp and push it
        res = set_dir_temp(tipoFuncLlamada)
        push_cuadruplo(Cuadruplo("=", dirFuncLlamada, "", res))
        pilaO.push(res)
        pTipos.push(tipoFuncLlamada)
'''
============================================
Estatuto RETURN
============================================
'''
def estatuto_return(funcActual, tipoFuncActual):
    """Emit RETURN: the stacked value's type must match the function's type.

    Exits with an error for void functions or on a type mismatch.
    NOTE(review): in the error path the operand stays on pilaO (only the
    type is popped) -- harmless because sys.exit aborts, but worth confirming.
    """
    global pilaO
    global pTipos
    tipoVarRetorno = pTipos.pop()
    tipoFunc = tipoFuncActual
    if (tipoFuncActual != 'void') and (tipoVarRetorno==tipoFunc):
        varRetorno = pilaO.pop()
        genera_cuadruplo = Cuadruplo("RETURN",funcActual,"",varRetorno)
        push_cuadruplo(genera_cuadruplo)
    elif (tipoFuncActual=='void') or (tipoVarRetorno!=tipoFunc):
        sys.exit("Error. Tipo de variable retorno no coincide con tipo de la funcion.")
'''
============================================
Estatuto Variable Dimensionada 2
============================================
'''
def acceso_dimvar_2(accessingMatrix):
    """Begin indexing a dimensioned variable.

    accessingMatrix is the array's dimension-table entry (presumably with
    'Dim' and 'Dir' keys -- see acceso_dimvar_3).  Records the array id and
    resets the current dimension to 1, then pushes a false bottom so the
    index expression is evaluated in isolation.
    """
    global pilaO
    global pDimensionada
    global pOper
    global actualAccessDIM
    global actualIDDim
    global actualAccessMatrix
    actualAccessMatrix = accessingMatrix;
    idDim = pilaO.pop()
    actualIDDim = idDim
    actualAccessDIM = 1
    pDimensionada.push([idDim,actualAccessDIM])
    pOper.push('[')
'''
============================================
Estatuto Variable Dimensionada 3
============================================
'''
def acceso_dimvar_3():
    """One index expression finished: emit VERIFICA and the offset arithmetic.

    For the first dimension multiplies the index by the dimension factor m;
    for the second dimension adds the two partial offsets together.
    """
    global pilaO
    global pTipos
    global actualDirBaseMatrix
    # bounds and row factor for the dimension currently being indexed
    Li_DIM = actualAccessMatrix['Dim'][actualAccessDIM]['Li']
    Ls_DIM = actualAccessMatrix['Dim'][actualAccessDIM]['Ls']
    m_DIM = actualAccessMatrix['Dim'][actualAccessDIM]['m']
    actualDirBaseMatrix = actualAccessMatrix['Dir']
    # runtime bounds check on the index left on top of pilaO
    genera_cuadruplo = Cuadruplo("VERIFICA",pilaO.peek(),Li_DIM,Ls_DIM)
    push_cuadruplo(genera_cuadruplo)
    if actualAccessDIM == 1: # first dimension: scale the index by m
        aux = pilaO.pop()
        pTipos.pop()
        temp = set_dir_temp('int')
        genera_cuadruplo = Cuadruplo("*",aux,m_DIM,temp)
        push_cuadruplo(genera_cuadruplo)
        pilaO.push(temp)
    if actualAccessDIM == 2: # second dimension: add both partial offsets
        aux2 = pilaO.pop()
        aux1 = pilaO.pop()
        pTipos.pop()
        temp = set_dir_temp('int')
        genera_cuadruplo = Cuadruplo("+",aux1,aux2,temp)
        push_cuadruplo(genera_cuadruplo)
        pilaO.push(temp)
'''
============================================
Estatuto Variable Dimensionada 4
============================================
'''
def acceso_dimvar_4():
    """Advance to the next dimension of the array being indexed."""
    global actualAccessDIM
    global pDimensionada
    actualAccessDIM = actualAccessDIM + 1
    pDimensionada.push([actualIDDim,actualAccessDIM])
'''
============================================
Estatuto Variable Dimensionada 5
============================================
'''
def acceso_dimvar_5():
    """Finish indexing: add the base address and clean up the stacks.

    The resulting temp holds base + offset; the false bottom pushed in
    acceso_dimvar_2 and the pDimensionada entry are discarded.
    """
    global pilaO
    global pTipos
    global pOper
    global pDimensionada
    aux1 = pilaO.pop()
    temp = set_dir_temp('int')
    genera_cuadruplo = Cuadruplo("+",aux1,actualDirBaseMatrix,temp)
    push_cuadruplo(genera_cuadruplo)
    pilaO.push(temp)
    pOper.pop()  # remove the false bottom
    pDimensionada.pop()
'''
============================================
Funcion integrada RANDOM
============================================
'''
def opfunc_random():
    """Built-in random: emit RANDOM over the two stacked bounds into an int temp."""
    superior = pilaO.pop()
    pTipos.pop()  # bound types are not validated here (as in the original design)
    inferior = pilaO.pop()
    pTipos.pop()
    temp = set_dir_temp('int')
    pilaO.push(temp)
    pTipos.push('int')
    push_cuadruplo(Cuadruplo("RANDOM", inferior, superior, temp))
'''
============================================
Line Width
============================================
'''
def dibujafunc_linewidth():
    """Emit LINEWIDTH with the stacked width operand."""
    width = pilaO.pop()
    pTipos.pop()  # width type is not validated here
    push_cuadruplo(Cuadruplo("LINEWIDTH", width, "", ""))
'''
============================================
Line Color
============================================
'''
def dibujafunc_linecolor():
    """Emit the LINECOLOR quadruple for lineColor(red, green, blue).

    Operands are popped in reverse push order (blue last-in, first-out).
    All three must be ints; otherwise compilation aborts.
    """
    global pilaO
    global pTipos
    blue = pilaO.pop()
    tipoBlue= pTipos.pop()
    green = pilaO.pop()
    tipoGreen = pTipos.pop()
    red = pilaO.pop()
    tipoRed = pTipos.pop()
    if tipoRed == 'int' and tipoGreen == 'int' and tipoBlue == 'int':
        genera_cuadruplo = Cuadruplo("LINECOLOR",[red,green,blue],"","")
        push_cuadruplo(genera_cuadruplo)
    else:
        sys.exit("Argumentos de funcion lineColor deben ser de tipo entero.")
'''
============================================
Dibuja una linea
============================================
'''
def dibujafunc_line():
    """Emit the LINE quadruple for line(x1, y1, x2, y2).

    The four int coordinates are popped in reverse push order; a non-int
    argument aborts compilation.
    """
    global pilaO
    global pTipos
    cordY2 = pilaO.pop()
    tipoCordY2= pTipos.pop()
    cordX2 = pilaO.pop()
    tipoCordX2 = pTipos.pop()
    cordY1 = pilaO.pop()
    tipoCordY1 = pTipos.pop()
    cordX1 = pilaO.pop()
    tipoCordX1 = pTipos.pop()
    if tipoCordX1 == 'int' and tipoCordY1 == 'int' and tipoCordX2 == 'int' and tipoCordY2 == 'int':
        genera_cuadruplo = Cuadruplo("LINE",[cordX1,cordY1,cordX2,cordY2],"","")
        push_cuadruplo(genera_cuadruplo)
    else:
        sys.exit("Argumentos de funcion line deben ser de tipo entero.")
'''
============================================
Dibuja un cuadrado
============================================
'''
def dibujafunc_square():
    """Emit the SQUARE quadruple for square(x, y, size); all args must be int."""
    global pilaO
    global pTipos
    tamano = pilaO.pop()
    tipoTamano= pTipos.pop()
    cordY= pilaO.pop()
    tipoCordY = pTipos.pop()
    cordX = pilaO.pop()
    tipoCordX = pTipos.pop()
    if tipoTamano == 'int' and tipoCordY == 'int' and tipoCordX == 'int':
        genera_cuadruplo = Cuadruplo("SQUARE",[cordX,cordY,tamano],"","")
        push_cuadruplo(genera_cuadruplo)
    else:
        sys.exit("Argumentos de funcion square deben ser de tipo entero.")
'''
============================================
Dibuja un circulo
============================================
'''
def dibujafunc_circle():
    """Emit the CIRCLE quadruple for circle(x, y, radius); all args must be int."""
    global pilaO
    global pTipos
    radio = pilaO.pop()
    tipoRadio = pTipos.pop()
    cordY= pilaO.pop()
    tipoCordY = pTipos.pop()
    cordX = pilaO.pop()
    tipoCordX = pTipos.pop()
    if tipoRadio == 'int' and tipoCordY == 'int' and tipoCordX == 'int':
        genera_cuadruplo = Cuadruplo("CIRCLE",[cordX,cordY,radio],"","")
        push_cuadruplo(genera_cuadruplo)
    else:
        sys.exit("Argumentos de funcion circle deben ser de tipo entero.")
'''
============================================
Dibuja una estrella
============================================
'''
def dibujafunc_star():
    """Emit the STAR quadruple for star(x, y, size); all args must be int."""
    global pilaO
    global pTipos
    tamano = pilaO.pop()
    tipoTamano= pTipos.pop()
    cordY= pilaO.pop()
    tipoCordY = pTipos.pop()
    cordX = pilaO.pop()
    tipoCordX = pTipos.pop()
    if tipoTamano == 'int' and tipoCordY == 'int' and tipoCordX == 'int':
        genera_cuadruplo = Cuadruplo("STAR",[cordX,cordY,tamano],"","")
        push_cuadruplo(genera_cuadruplo)
    else:
        sys.exit("Argumentos de funcion star deben ser de tipo entero.")
'''
============================================
Dibuja un triangulo
============================================
'''
def dibujafunc_triangle():
    """Emit the TRIANGLE quadruple for triangle(x, y, size); all args must be int."""
    global pilaO
    global pTipos
    tamano = pilaO.pop()
    tipoTamano= pTipos.pop()
    cordY= pilaO.pop()
    tipoCordY = pTipos.pop()
    cordX = pilaO.pop()
    tipoCordX = pTipos.pop()
    if tipoTamano == 'int' and tipoCordY == 'int' and tipoCordX == 'int':
        genera_cuadruplo = Cuadruplo("TRIANGLE",[cordX,cordY,tamano],"","")
        push_cuadruplo(genera_cuadruplo)
    else:
        sys.exit("Argumentos de funcion triangle deben ser de tipo entero.")
'''
============================================
Dibuja un arco
============================================
'''
def dibujafunc_arc():
    """Emit the ARC quadruple for arc(x1, y1, x2, y2, degrees).

    The five int arguments are popped in reverse push order; a non-int
    argument aborts compilation.
    """
    global pilaO
    global pTipos
    grados = pilaO.pop()
    tipoGrados = pTipos.pop()
    cordY2 = pilaO.pop()
    tipoCordY2= pTipos.pop()
    cordX2 = pilaO.pop()
    tipoCordX2 = pTipos.pop()
    cordY1 = pilaO.pop()
    tipoCordY1 = pTipos.pop()
    cordX1 = pilaO.pop()
    tipoCordX1 = pTipos.pop()
    if tipoCordX1 == 'int' and tipoCordY1 == 'int' and tipoCordX2 == 'int' and tipoCordY2 == 'int' and tipoGrados == 'int':
        genera_cuadruplo = Cuadruplo("ARC",[cordX1,cordY1,cordX2,cordY2,grados],"","")
        push_cuadruplo(genera_cuadruplo)
    else:
        sys.exit("Argumentos de funcion arc deben ser de tipo entero.")
'''
============================================
Indice que comienza a rellenar figura
============================================
'''
def dibujafunc_startfill():
    """Emit the STARTFILL quadruple for startFill(red, green, blue).

    Marks the start of a filled figure; the three color components must be ints.
    """
    global pilaO
    global pTipos
    blue = pilaO.pop()
    tipoBlue= pTipos.pop()
    green = pilaO.pop()
    tipoGreen = pTipos.pop()
    red = pilaO.pop()
    tipoRed = pTipos.pop()
    if tipoRed == 'int' and tipoGreen == 'int' and tipoBlue == 'int':
        genera_cuadruplo = Cuadruplo("STARTFILL",[red,green,blue],"","")
        push_cuadruplo(genera_cuadruplo)
    else:
        sys.exit("Argumentos de funcion startFill deben ser de tipo entero.")
'''
============================================
Indica que termina de rellenar figura
============================================
'''
def dibujafunc_stopfill():
    """Emit the STOPFILL quadruple marking the end of a filled figure.

    NOTE(review): unlike the other draw builtins the 255,255,255 values go
    into the quadruple's operand/result slots rather than a list — presumably
    a white fill-reset; confirm against the virtual machine.
    """
    genera_cuadruplo = Cuadruplo("STOPFILL",255,255,255)
    push_cuadruplo(genera_cuadruplo)
'''
============================================
Imprime las pilas
============================================
'''
def printPilas():
    """Debug helper: dump every compiler stack and then the quadruple table."""
    print "pilaO ", pilaO.getElements()
    print "pTipos ", pTipos.getElements()
    print "pOper ", pOper.getElements()
    print "pSaltos ", pSaltos.getElements()
    print "pDimensionada" , pDimensionada.getElements()
    print_cuadruplos(cuadruplos)
'''
============================================
Imprime los cuadruplos
============================================
'''
def print_cuadruplos(currentCuadList):
print "Tabla Cuadruplos"
index = 0
for currentCuad in currentCuadList:
if currentCuad:
print index, " " ,currentCuad.op, " , ", currentCuad.opdoIzq, " , ", currentCuad.opdoDer," , ",currentCuad.res
else:
print "List is empty"
index += 1
pass
|
[
"oomarcarreon@gmail.com"
] |
oomarcarreon@gmail.com
|
d6f17420ce83772b4d31a9811f043fc6f9240f6b
|
8e30082f46128316d78504c5efa13319726a52fd
|
/django/salon/polls/models.py
|
6927af7977f957aa28db99a135854209e9a702bf
|
[] |
no_license
|
vbitjp/Python
|
8d08bcaa8fac6348798bbc199b6cc965b163c81c
|
abfe098ab2152ded87d121c42c1df5ca37c7e837
|
refs/heads/master
| 2021-09-12T17:56:28.100117
| 2018-04-19T14:28:41
| 2018-04-19T14:28:41
| 106,935,158
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 832
|
py
|
import datetime
from django.db import models
from django.utils import timezone
class Question(models.Model):
    """A poll question with its publication timestamp."""
    question_text = models.CharField(max_length=200)
    pub_date = models.DateTimeField('date published')
    def __str__(self):
        """Return the question text (shown in the admin and shell)."""
        return self.question_text
    def was_published_recently(self):
        """Return True when pub_date is within the last day and not in the future."""
        now = timezone.now()
        return now - datetime.timedelta(days=1) <= self.pub_date <= now
    # Admin list-display metadata: sortable by pub_date, shown as an icon.
    was_published_recently.admin_order_field = 'pub_date'
    was_published_recently.boolean = True
    was_published_recently.short_description = 'Published recently?'
class Choice(models.Model):
    """An answer option for a Question, with a running vote tally."""
    question = models.ForeignKey(Question, on_delete=models.CASCADE)
    choice_text = models.CharField(max_length=200)
    votes = models.IntegerField(default=0)  # vote count, starts at zero
    def __str__(self):
        """Return the choice text (shown in the admin and shell)."""
        return self.choice_text
|
[
"7ywrxsxrw7z571g@vbit.jp"
] |
7ywrxsxrw7z571g@vbit.jp
|
3d0de491cc84f31ed3d96fa5ed6bc7a73bae9020
|
a5c14053b5a763dd9bbe28a0a9c34a88bc280584
|
/app/admin/views.py
|
1b1505c53465756ff2c71231a41296616627a067
|
[] |
no_license
|
smile0304/flask_movie_project
|
dbcbda194438d324fa89e682f3f8c5e1abf0ae53
|
c228821bf90468af8d20f9632ad4e2735c0c7239
|
refs/heads/master
| 2021-06-25T09:05:34.090540
| 2017-09-11T13:02:06
| 2017-09-11T13:02:06
| 103,136,236
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 20,397
|
py
|
#coding:utf8
from . import admin
from flask import render_template,redirect,url_for,flash,session,request,abort
from app.admin.forms import LoginForm,TagForm,MovieForm,PrivateForm,PwdForm,AuthForm,RoleForm,AdminForm
from app.models import Admin,Tag,Movie,Preview,User,Comment,Movicecol,Oplog,Adminlog,Userlog,Auth,Role
from functools import wraps
from app import db,app
from werkzeug.utils import secure_filename
import os
import uuid
import datetime
# Context processor: injected into every admin template.
@admin.context_processor
def tpl_extra():
    """Return extra template context: the formatted current server time."""
    data = dict(
        online_time = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    )
    return data
# Login-required decorator for admin views.
def admin_login_req(f):
    """Redirect to the admin login page when no admin is in the session.

    The requested URL is forwarded as ?next= so a successful login can
    return the user to the page they originally wanted.
    """
    @wraps(f)
    def decorated_function(*args,**kwargs):
        if "admin" not in session:
            # 'next' records the requested URL; after login we jump straight back to it
            return redirect(url_for("admin.login",next=request.url))
        return f(*args,**kwargs)
    return decorated_function
# Permission-control decorator.
def admin_auth(f):
    """Abort with 404 when the logged-in admin's role lacks the current URL.

    Admins with is_super == 0 bypass the check entirely.  For everyone else
    the role's comma-separated auth ids are resolved to their URLs and the
    current request's url_rule must appear among them.
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        admin = Admin.query.join(
            Role
        ).filter(
            Role.id == Admin.role_id,
            Admin.id == session["admin_id"]
        ).first()
        if admin.is_super != 0:  # is_super == 0 skips the permission check
            auths = admin.role.auths
            auths = list(map(lambda v: int(v), auths.split(",")))
            auth_list = Auth.query.all()
            # URLs granted to this role: any Auth whose id is in the role's list.
            urls = [v.url for v in auth_list for val in auths if val == v.id]
            rule = request.url_rule
            if str(rule) not in urls:
                abort(404)
        return f(*args, **kwargs)
    return decorated_function
# Rename an uploaded file to a collision-free name.
def change_filename(filename):
    """Build a unique filename: timestamp + random hex, keeping the extension."""
    _, extension = os.path.splitext(filename)
    stamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
    return stamp + uuid.uuid4().hex + extension
@admin.route("/")
@admin_login_req
def index():
    """Render the admin dashboard home page."""
    return render_template("admin/index.html")
# Admin login.
@admin.route("/login/",methods=["GET","POST"])
def login():
    """Log an administrator in.

    On a valid POST: verify the password, store the account and id in the
    session, record an Adminlog row with the client IP, then redirect to
    ?next= (the originally requested page) or the dashboard.
    """
    form = LoginForm()
    if form.validate_on_submit():
        data = form.data
        admin = Admin.query.filter_by(name=data["account"]).first()
        # BUGFIX: .first() returns None for an unknown account; the original
        # crashed with AttributeError on check_pwd instead of flashing an error.
        if admin is None or not admin.check_pwd(data["pwd"]):
            flash("密码错误!","err")
            return redirect(url_for("admin.login"))
        session["admin"] = data["account"]
        session["admin_id"] = admin.id
        # Audit trail: record this login with the client IP.
        adminlogin = Adminlog(
            admin_id = admin.id,
            IP = request.remote_addr
        )
        db.session.add(adminlogin)
        db.session.commit()
        # 'next' holds the URL requested before login; go straight back to it.
        return redirect(request.args.get("next") or url_for("admin.index"))
    return render_template("admin/login.html",form=form)
@admin.route("/logout/")
@admin_login_req
def logout():
    """Clear the admin session keys and send the user back to the login page."""
    for key in ("admin", "admin_id"):
        session.pop(key, None)
    return redirect(url_for('admin.login'))
# Change the logged-in admin's password.
@admin.route("/pwd/",methods=["GET","POST"])
@admin_login_req
def pwd():
    """Change the current admin's password, then force a re-login."""
    form = PwdForm()
    if form.validate_on_submit():
        data = form.data
        admin = Admin.query.filter_by(name=session["admin"]).first()
        from werkzeug.security import generate_password_hash
        admin.pwd = generate_password_hash(data["new_pwd"])
        db.session.add(admin)
        db.session.commit()
        flash("修改密码成功,请重新登录!", "ok")
        return redirect(url_for('admin.logout'))
    return render_template("admin/pwd.html",form=form)
# Add a tag.
@admin.route("/tag/add/",methods=["GET","POST"])
@admin_login_req
@admin_auth
def tag_add():
    """Create a movie tag, rejecting duplicates, and record an operation log."""
    form = TagForm()
    if form.validate_on_submit():
        data = form.data
        tag = Tag.query.filter_by(name=data["name"]).count()
        if tag == 1:  # a tag with this name already exists
            flash("该标签名已存在","err")
            return redirect(url_for('admin.tag_add'))
        tag = Tag(
            name=data["name"]
        )
        db.session.add(tag)
        db.session.commit()
        flash("添加标签成功!","ok")
        # Audit trail: who added which tag, from which IP.
        oplog = Oplog(
            admin_id = session["admin_id"],
            IP = request.remote_addr,
            reason = "添加标签%s" % data["name"]
        )
        db.session.add(oplog)
        db.session.commit()
        return redirect(url_for('admin.tag_add'))
    return render_template("admin/tag_add.html",form=form)
# Tag list.
@admin.route("/tag/list/<int:page>/",methods=["GET"])
@admin_login_req
@admin_auth
def tag_list(page=None):
    """Show tags newest-first, paginated 10 per page."""
    if page is None:
        page = 1
    page_data = Tag.query.order_by(
        Tag.addtime.desc()
    ).paginate(page=page,per_page=10)
    return render_template("admin/tag_list.html",page_data=page_data)
# Edit a tag.
@admin.route("/tag/edit/<int:id>/",methods=["GET","POST"])
@admin_login_req
@admin_auth
def tag_edit(id=None):
    """Rename tag `id`, rejecting a name another tag already uses."""
    form = TagForm()
    tag = Tag.query.get_or_404(id)
    if form.validate_on_submit():
        data = form.data
        tag_count = Tag.query.filter_by(name=data["name"]).count()
        # Only a *different* tag with the same name counts as a duplicate.
        if tag.name !=data["name"] and tag_count == 1:
            flash("该标签名已存在","err")
            return redirect(url_for('admin.tag_edit',id=id))
        tag.name = data["name"]
        db.session.add(tag)
        db.session.commit()
        flash("修改标签成功!","ok")
        return redirect(url_for('admin.tag_edit',id=id))
    return render_template("admin/tag_edit.html",form=form,tag=tag)
# Delete a tag.
@admin.route("/tag/del/<int:id>/",methods=["GET"])
@admin_login_req
@admin_auth
def tag_del(id=None):
    """Delete tag `id` (404 if absent) and return to page 1 of the tag list."""
    tag = Tag.query.filter_by(id=id).first_or_404()
    db.session.delete(tag)
    db.session.commit()
    flash("删除标签成功!", "ok")
    return redirect(url_for('admin.tag_list',page=1))
# Add a movie.
@admin.route("/movie/add/",methods=["GET","POST"])
@admin_login_req
@admin_auth
def movie_add():
    """Create a movie: save the uploaded video and poster, then insert the row."""
    form = MovieForm()
    if form.validate_on_submit():
        data = form.data
        file_url = secure_filename(form.url.data.filename)
        file_logo = secure_filename(form.logo.data.filename)
        if not os.path.exists(app.config["UP_DIR"]):
            os.makedirs(app.config["UP_DIR"])
            # BUGFIX: os.chmod requires a numeric mode; the original passed the
            # string "rw", which raised TypeError the first time the upload
            # directory had to be created.
            os.chmod(app.config["UP_DIR"], 0o755)
        # Store under collision-free names; keep only the renamed paths in the DB.
        url = change_filename(file_url)
        logo = change_filename(file_logo)
        form.url.data.save(app.config["UP_DIR"] + url)
        form.logo.data.save(app.config["UP_DIR"] + logo)
        movie = Movie(
            title=data["title"],
            url=url,
            info=data["info"],
            logo=logo,
            star=int(data["star"]),
            playnum=0,
            commentnum=0,
            tag_id=int(data["tag_id"]),
            area=data["area"],
            release_time=data["release_time"],
            length=data["length"]
        )
        db.session.add(movie)
        db.session.commit()
        flash("添加电影成功!", "ok")
        return redirect(url_for('admin.movie_add'))
    return render_template("admin/movie_add.html", form=form)
# Movie list.
@admin.route("/movie/list/<int:page>/",methods=["GET"])
@admin_login_req
@admin_auth
def movie_list(page=None):
    """Show movies (joined with their tag) newest-first, 10 per page."""
    if page is None:
        page = 1
    page_data = Movie.query.join(Tag).filter(
        Tag.id == Movie.tag_id
    ).order_by(
        Movie.addtime.desc()
    ).paginate(page=page, per_page=10)
    return render_template("admin/movie_list.html",page_data=page_data)
# Delete a movie.
@admin.route("/movie/del/<int:id>/",methods=["GET"])
@admin_login_req
@admin_auth
def movie_del(id=None):
    """Delete movie `id` (404 if absent) and return to page 1 of the list."""
    movie = Movie.query.get_or_404(int(id))
    db.session.delete(movie)
    db.session.commit()
    flash("删除电影成功!", "ok")
    return redirect(url_for('admin.movie_list',page=1))
# Edit a movie.
@admin.route("/movie/edit/<int:id>/",methods=["GET","POST"])
@admin_login_req
@admin_auth
def movie_edit(id=None):
    """Edit movie `id`; files are optional on edit (only replaced if re-uploaded)."""
    form = MovieForm()
    # The upload fields are optional when editing an existing movie.
    form.url.validators = []
    form.logo.validators = []
    movie = Movie.query.get_or_404(int(id))
    if request.method == 'GET':
        # Pre-fill the non-text widgets from the stored row.
        form.info.data = movie.info
        form.tag_id.data = movie.tag_id
        form.star.data = movie.star
    if form.validate_on_submit():
        data = form.data
        movie_count = Movie.query.filter_by(title=data["title"]).count()
        # Only a *different* movie with the same title counts as a duplicate.
        if movie_count == 1 and movie.title != data["title"]:
            flash("片名已经存在!", "err")
            return redirect(url_for('admin.movie_edit', id=movie.id))
        if not os.path.exists(app.config["UP_DIR"]):
            os.makedirs(app.config["UP_DIR"])
            # BUGFIX: os.chmod requires a numeric mode; the original passed the
            # string "rw", which raised TypeError when the directory was created.
            os.chmod(app.config["UP_DIR"], 0o755)
        if form.url.data.filename != "":
            file_url = secure_filename(form.url.data.filename)
            movie.url = change_filename(file_url)
            form.url.data.save(app.config["UP_DIR"] + movie.url)
        if form.logo.data.filename != "":
            file_logo = secure_filename(form.logo.data.filename)
            movie.logo = change_filename(file_logo)
            form.logo.data.save(app.config["UP_DIR"] + movie.logo)
        movie.star = data["star"]
        movie.tag_id = data["tag_id"]
        movie.info = data["info"]
        movie.title = data["title"]
        movie.area = data["area"]
        movie.length = data["length"]
        movie.release_time = data["release_time"]
        db.session.add(movie)
        db.session.commit()
        flash("修改电影成功!", "ok")
        return redirect(url_for('admin.movie_edit',id=movie.id))
    return render_template("admin/movie_edit.html", form=form,movie=movie)
# Add a preview (trailer).
@admin.route("/preview/add/",methods=["GET","POST"])
@admin_login_req
@admin_auth
def preview_add():
    """Create a preview: save the uploaded poster and insert the row."""
    form = PrivateForm()
    if form.validate_on_submit():
        data = form.data
        file_logo = secure_filename(form.logo.data.filename)
        if not os.path.exists(app.config["UP_DIR"]):
            os.makedirs(app.config["UP_DIR"])
            # BUGFIX: os.chmod requires a numeric mode; the original passed the
            # string "rw", which raised TypeError when the directory was created.
            os.chmod(app.config["UP_DIR"], 0o755)
        logo = change_filename(file_logo)
        form.logo.data.save(app.config["UP_DIR"] + logo)
        preview = Preview(
            title = data["title"],
            logo= logo
        )
        db.session.add(preview)
        db.session.commit()
        # BUGFIX: the original flashed "修改预告成功!" (edit) on the add path.
        flash("添加预告成功!", "ok")
        return redirect(url_for('admin.preview_add'))
    return render_template("admin/preview_add.html",form=form)
# Preview list.
@admin.route("/preview/list/<int:page>/",methods=["GET"])
@admin_login_req
@admin_auth
def preview_list(page=None):
    """Show previews newest-first, paginated 10 per page."""
    if page is None:
        page = 1
    page_data = Preview.query.order_by(
        Preview.addtime.desc()
    ).paginate(page=page, per_page=10)
    return render_template("admin/preview_list.html",page_data=page_data)
# Delete a preview.
@admin.route("/preview/del/<int:id>/",methods=["GET"])
@admin_login_req
@admin_auth
def preview_del(id=None):
    """Delete preview `id` (404 if absent) and return to page 1 of the list."""
    preview = Preview.query.get_or_404(int(id))
    db.session.delete(preview)
    db.session.commit()
    flash("删除预告成功!", "ok")
    return redirect(url_for('admin.preview_list', page=1))
# Edit a preview.
@admin.route("/preview/edit/<int:id>/",methods=["GET","POST"])
@admin_login_req
@admin_auth
def preview_edit(id=None):
    """Edit preview `id`; the poster upload is optional on edit."""
    form = PrivateForm()
    form.logo.validators= []
    preview = Preview.query.get_or_404(int(id))
    if request.method == "GET":
        form.title.data = preview.title
    if form.validate_on_submit():
        data = form.data
        if form.logo.data.filename != "":
            file_logo = secure_filename(form.logo.data.filename)
            preview.logo = change_filename(file_logo)
            form.logo.data.save(app.config["UP_DIR"] + preview.logo)
        preview.title = data["title"]
        db.session.add(preview)
        db.session.commit()
        flash("修改预告成功!", "ok")
        # BUGFIX: the original redirected to 'admin.preview_add' (which takes
        # no id) after a successful edit; stay on this preview's edit page.
        return redirect(url_for('admin.preview_edit',id=id))
    return render_template("admin/preview_edit.html",form=form,preview=preview)
# Member list.
@admin.route("/user/list/<int:page>/",methods=["GET"])
@admin_login_req
@admin_auth
def user_list(page=None):
    """Show members newest-first, paginated 10 per page."""
    if page is None:
        page = 1
    page_data = User.query.order_by(
        User.addtime.desc()
    ).paginate(page=page, per_page=10)
    return render_template("admin/user_list.html",page_data=page_data)
# Member detail page.
@admin.route("/user/view/<int:id>/",methods=["GET"])
@admin_login_req
@admin_auth
def user_view(id=None):
    """Render the detail page for member `id` (404 if absent)."""
    user = User.query.get_or_404(int(id))
    return render_template("admin/user_view.html",user=user)
# Delete a member.
@admin.route("/user/del/<int:id>/",methods=["GET"])
@admin_login_req
@admin_auth
def user_del(id=None):
    """Delete member `id` (404 if absent) and return to page 1 of the list."""
    user = User.query.get_or_404(int(id))
    db.session.delete(user)
    db.session.commit()
    flash("删除会员成功!", "ok")
    return redirect(url_for('admin.user_list', page=1))
# Comment list.
@admin.route("/comment/list/<int:page>/",methods=["GET"])
@admin_login_req
@admin_auth
def comment_list(page=None):
    """Show comments (joined with movie and author) newest-first, 10 per page."""
    if page is None:
        page = 1
    page_data = Comment.query.join(
        Movie
    ).join(
        User
    ).filter(
        Movie.id == Comment.movie_id,
        User.id == Comment.user_id
    ).order_by(
        Comment.addtime.desc()
    ).paginate(page=page, per_page=10)
    return render_template("admin/comment_list.html",page_data=page_data)
# Delete a comment.
@admin.route("/comment/del/<int:id>/",methods=["GET"])
@admin_login_req
@admin_auth
def comment_del(id=None):
    """Delete comment `id` (404 if absent) and return to page 1 of the list."""
    comment = Comment.query.get_or_404(int(id))
    db.session.delete(comment)
    db.session.commit()
    flash("删除评论成功!", "ok")
    return redirect(url_for('admin.comment_list', page=1))
# Favorites list.
@admin.route("/moviecol/list/<int:page>/",methods=["GET"])
@admin_login_req
@admin_auth
def moviecol_list(page=None):
    """Show favorites (joined with movie and member) newest-first, 10 per page."""
    if page is None:
        page = 1
    page_data = Movicecol.query.join(
        Movie
    ).join(
        User
    ).filter(
        Movie.id == Movicecol.movie_id,
        User.id == Movicecol.user_id
    ).order_by(
        Movicecol.addtime.desc()
    ).paginate(page=page, per_page=10)
    return render_template("admin/moviecol_list.html",page_data=page_data)
# Delete a favorite.
@admin.route("/moviecol/del/<int:id>/",methods=["GET"])
@admin_login_req
@admin_auth
def moviecol_del(id=None):
    """Delete favorite `id` (404 if absent) and return to page 1 of the list."""
    moviecol = Movicecol.query.get_or_404(int(id))
    db.session.delete(moviecol)
    db.session.commit()
    # BUGFIX: the original flashed "删除评论成功!" (comment deleted) — a
    # copy-paste from comment_del; this view deletes a favorite.
    flash("删除收藏成功!", "ok")
    return redirect(url_for('admin.moviecol_list', page=1))
# Operation log.
@admin.route("/oplog/list/<int:page>/",methods=["GET"])
@admin_login_req
@admin_auth
def oplog_list(page=None):
    """Show operation-log entries (joined with their admin) newest-first, 10 per page."""
    if page is None:
        page = 1
    page_data = Oplog.query.join(
        Admin
    ).filter(
        Admin.id == Oplog.admin_id
    ).order_by(
        Oplog.addtime.desc()
    ).paginate(page=page, per_page=10)
    return render_template("admin/oplog_list.html",page_data=page_data)
# Admin login log.
@admin.route("/adminloginlog/list/<int:page>/",methods=["GET"])
@admin_login_req
@admin_auth
def adminloginlog_list(page=None):
    """Show admin login records newest-first, paginated 10 per page."""
    if page is None:
        page = 1
    page_data = Adminlog.query.join(
        Admin
    ).filter(
        Admin.id == Adminlog.admin_id
    ).order_by(
        Adminlog.addtime.desc()
    ).paginate(page=page, per_page=10)
    return render_template("admin/adminloginlog_list.html",page_data=page_data)
# Member login log.
@admin.route("/userloginlog/list/<int:page>/",methods=["GET"])
@admin_login_req
@admin_auth
def userloginlog_list(page=None):
    """Show member login records newest-first, paginated 10 per page."""
    if page is None:
        page = 1
    page_data = Userlog.query.join(
        User
    ).filter(
        User.id == Userlog.user_id
    ).order_by(
        Userlog.addtime.desc()
    ).paginate(page=page, per_page=10)
    return render_template("admin/userloginlog_list.html",page_data=page_data)
# Add a role.
@admin.route("/role/add/",methods=["GET","POST"])
@admin_login_req
@admin_auth
def role_add():
    """Create a role; selected auth ids are stored as a comma-separated string."""
    form = RoleForm()
    if form.validate_on_submit():
        data = form.data
        role = Role(
            name = data["name"],
            auths =",".join(map(lambda v:str(v),data["auths"]))
        )
        db.session.add(role)
        db.session.commit()
        flash("添加角色成功", "ok")
    return render_template("admin/role_add.html",form=form)
# Role list.
@admin.route("/role/list/<int:page>/",methods=["GET"])
@admin_login_req
@admin_auth
def role_list(page=None):
    """Show roles newest-first, paginated 10 per page."""
    if page is None:
        page = 1
    page_data = Role.query.order_by(
        Role.addtime.desc()
    ).paginate(page=page, per_page=10)
    return render_template("admin/role_list.html",page_data=page_data)
# Delete a role.
@admin.route("/role/del/<int:id>/",methods=["GET"])
@admin_login_req
@admin_auth
def role_del(id=None):
    """Delete role `id` (404 if absent) and return to page 1 of the list."""
    role = Role.query.filter_by(id=id).first_or_404()
    db.session.delete(role)
    db.session.commit()
    flash("删除角色成功!", "ok")
    return redirect(url_for('admin.role_list',page=1))
# Edit a role.
@admin.route("/role/edit/<int:id>/",methods=["GET","POST"])
@admin_login_req
@admin_auth
def role_edit(id=None):
    """Edit role `id`'s name and granted auth ids."""
    form = RoleForm()
    role = Role.query.get_or_404(id)
    if request.method == "GET":
        # Pre-select the role's current auth ids in the multi-select widget.
        auths = role.auths
        form.auths.data = list(map(lambda v:int(v), auths.split(',')))
    if form.validate_on_submit():
        data = form.data
        role.name = data["name"]
        # BUGFIX: the original built the joined auth string but never assigned
        # it to the model, so permission edits were silently dropped.
        role.auths = ",".join(map(lambda v: str(v), data["auths"]))
        db.session.add(role)
        db.session.commit()
        flash("编辑角色成功!", "ok")
    return render_template("admin/role_edit.html",form=form,role=role)
# Add a permission.
@admin.route("/auth/add/",methods=["GET","POST"])
@admin_login_req
@admin_auth
def auth_add():
    """Create a permission entry (display name + protected URL rule)."""
    form = AuthForm()
    if form.validate_on_submit():
        data = form.data
        auth = Auth(
            name=data["name"],
            url=data["url"]
        )
        db.session.add(auth)
        db.session.commit()
        flash("添加权限成功", "ok")
    return render_template("admin/auth_add.html", form=form)
# Permission list.
# BUGFIX: @admin_auth was placed ABOVE @admin.route.  Decorators apply
# bottom-up, so the route registered the function before admin_auth wrapped
# it and the permission check never ran for this endpoint.  The order now
# matches every other view: route -> login check -> permission check.
@admin.route("/auth/list/<int:page>/",methods=["GET"])
@admin_login_req
@admin_auth
def auth_list(page=None):
    """Show permissions newest-first, paginated 10 per page."""
    if page is None:
        page = 1
    page_data = Auth.query.order_by(
        Auth.addtime.desc()
    ).paginate(page=page,per_page=10)
    return render_template("admin/auth_list.html",page_data=page_data)
# Delete a permission.
@admin.route("/auth/del/<int:id>/",methods=["GET"])
@admin_login_req
@admin_auth
def auth_del(id=None):
    """Delete permission `id` (404 if absent) and return to page 1 of the list."""
    auth = Auth.query.filter_by(id=id).first_or_404()
    db.session.delete(auth)
    db.session.commit()
    flash("删除权限成功!", "ok")
    return redirect(url_for('admin.auth_list',page=1))
# Edit a permission.
@admin.route("/auth/edit/<int:id>/",methods=["GET","POST"])
@admin_login_req
@admin_auth
def auth_edit(id=None):
    """Update the name and URL of permission `id`."""
    form = AuthForm()
    auth = Auth.query.get_or_404(id)
    if form.validate_on_submit():
        data = form.data
        # Removed dead code: the original ran an unused Tag duplicate-count
        # query here — a copy-paste leftover from tag_edit.
        auth.url = data["url"]
        auth.name = data["name"]
        db.session.add(auth)
        db.session.commit()
        flash("修改权限成功!","ok")
        return redirect(url_for('admin.auth_edit',id=id))
    return render_template("admin/auth_edit.html",form=form,auth=auth)
# Add an administrator.
@admin.route("/admin/add/",methods=["GET","POST"])
@admin_login_req
@admin_auth
def admin_add():
    """Create an administrator account with a hashed password and a role."""
    form = AdminForm()
    from werkzeug.security import generate_password_hash
    if form.validate_on_submit():
        data = form.data
        admin = Admin(
            name=data["name"],
            pwd=generate_password_hash(data["pwd"]),
            role_id = data['role_id'],
            is_super=1,  # 1 = regular admin; admin_auth skips checks only when is_super == 0
        )
        db.session.add(admin)
        db.session.commit()
        flash("添加管理员成功", "ok")
    return render_template("admin/admin_add.html",form=form)
# Administrator list.
@admin.route("/admin/list/<int:page>/",methods=["GET"])
@admin_login_req
@admin_auth
def admin_list(page=None):
    """Show administrators (joined with their role) newest-first, 10 per page."""
    if page is None:
        page = 1
    page_data = Admin.query.join(
        Role
    ).filter(
        Role.id == Admin.role_id
    ).order_by(
        Admin.addtime.desc()
    ).paginate(page=page,per_page=10)
    return render_template("admin/admin_list.html",page_data=page_data)
|
[
"smile@smilehacker.net"
] |
smile@smilehacker.net
|
e44d807c8b5f173e32fde9820ee02ff6100abd9b
|
259b81f2a6d02947cd95c1fbdac4ea182592d331
|
/API wrappers/Python/examples/mySE python.py
|
b0aff4b8e7f0dd249fd2f9c17246f26a7d8d3f7b
|
[
"MIT"
] |
permissive
|
Arthurdw/mySE
|
58ee59c982266b1d4ff0a53e01b882820125a71f
|
0100c2a4e8fd5006e170ca8a2890080ed96d0153
|
refs/heads/master
| 2020-12-08T20:48:54.380742
| 2020-05-03T11:56:29
| 2020-05-03T11:56:29
| 233,091,191
| 0
| 0
|
MIT
| 2020-01-16T20:06:44
| 2020-01-10T16:52:39
|
Python
|
UTF-8
|
Python
| false
| false
| 966
|
py
|
# Example client for the mySE API wrapper: generate/fetch a token, create
# two log entries against a local server, then print the stored logs.
# NOTE(review): performs live HTTP requests against http://127.0.0.1:5000/.
from mySE import mySE
from time import sleep
local_url, server_secret, mail = "http://127.0.0.1:5000/", "mySecureServerPassword", "mail@mail.mail"
# Generate a token:
# try:
mySE.gen_token(local_url, server_secret, mail)
# except mySE.error.UnauthorizedError:
#     pass
#     token = mySe.gen_token(local_url, mail)
# Fetch our token:
token = mySE.get_token(local_url, mail)
# Create our client object.
client = mySE.Client(local_url, token)
print(f"Client ID: {client.id}")
print(f"This secret token: {token}")
# Create 2 logs:
print("\r", "Creating 2 logs...", end=' ')
client.add_log(False)
# Pause between logs — presumably so the two entries get distinct timestamps.
sleep(1.2)
client.add_log(True)
print("\b\b\b\b: Done.")
# Display our logs:
print(f"Log information: ({len(client.logs)})")
count = 0
for log in client.logs:
    count += 1
    print(f"Log {count} | ID: {log.id};")
    print(f"Log {count} | Time: {log.time.strftime('%d/%m/%Y | %H:%M:%S')}")
    print(f"Log {count} | The light was {'on' if log.light else 'off'}!")
|
[
"arthur.dewitte@gmail.com"
] |
arthur.dewitte@gmail.com
|
5187524c1cb90f930855028a542fe8b09194d3b4
|
a5898ba24399d8a05cb8a730e10400126fb04c3a
|
/ToDoApp/urls.py
|
b023ed47ea1b03001f8a266d49de0a9564a0531a
|
[] |
no_license
|
Adnan232/Django-WebApp
|
ae50507082be98c7ede02cb9fbad2ac93524bb05
|
e0ffcfef21396f96751efb9b39d917895f38ae52
|
refs/heads/master
| 2023-09-04T01:30:07.148630
| 2021-10-10T17:22:15
| 2021-10-10T17:22:15
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 797
|
py
|
"""TodoProject URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path,include
urlpatterns = [
    path('admin/', admin.site.urls),  # Django admin site
    path('App/',include('App.urls'))  # delegate /App/ URLs to the App package
]
|
[
"adnanhabib120@gmail.com"
] |
adnanhabib120@gmail.com
|
3d842f277956e7f09b7d38e83fd8d63cafd6e1cc
|
cc954deeb8178398a99be10c72d0d6b150801a4e
|
/log_bolt/bolt_kafka_registe.py
|
b9e47a8b219759261c816d36ed7f89ae835da4ba
|
[] |
no_license
|
Wstc2013/log_bolt
|
486b3d645b9f37bdf7010178169dcd35e29b51b8
|
f48473c0559072f6e5673631d31c2ba47c9c3450
|
refs/heads/master
| 2021-01-25T13:24:07.933630
| 2018-03-02T10:26:48
| 2018-03-02T10:26:48
| 123,565,326
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,289
|
py
|
#-*-coding:utf8-*-
#!/usr/bin/env python
from __future__ import absolute_import
from pyleus.storm import SimpleBolt
from module.registehandlelogic import RegisteHandleLogic
import logging
import time
import configparser
# Read the log directory from the shared INI config once at import time.
config = configparser.ConfigParser()
config.read("config/config.ini",encoding='utf-8')
logdir = config.get("log", "dir")
log = logging.getLogger('test_kafka')
class RegisteBolt(SimpleBolt):
    """Storm bolt that consumes registration events from Kafka.

    Each non-empty tuple is handed to RegisteHandleLogic, which increments
    the registration counter and inserts the record into MySQL.
    """
    def process_tuple(self,tup):
        value = tup.values
        # Skip empty payloads and the bare ETX (\x03) control value —
        # presumably keep-alive/terminator noise from the feed; confirm upstream.
        if value != [''] and value != ['\x03']:
            log.debug("kafka获取到的数据为:%s" % (value))
            registe_handle_logic_obj = RegisteHandleLogic(value)
            log.debug(u"开始注册次数处理!!!!")
            registe_handle_logic_obj.count('REGISTECOUNT')
            log.debug(u"开始注册插入数据库处理!!!!")
            registe_handle_logic_obj.insertMysqlRegister()
if __name__ == '__main__':
    # One log file per calendar day under the configured log directory.
    log_time = time.strftime('%Y%m%d', time.localtime(time.time()))
    log_filename = '%s/test_registe_%s.log' % (logdir,log_time)
    logging.basicConfig(
        level=logging.DEBUG,
        filename=log_filename,
        format="%(asctime)s[%(levelname)s][%(lineno)d]%(message)s",
        filemode='a',  # append so restarts within a day share one file
    )
    RegisteBolt().run()
|
[
"visen@enjoybcg.com"
] |
visen@enjoybcg.com
|
8174036a334ba82c11c5dbd9fa16642607155e86
|
06ad8f591b7b68d98d6f422e12c447c8453e1476
|
/data/cifar.py
|
6d323fbf99da68c37831849f7b128d2ca2b91676
|
[
"MIT"
] |
permissive
|
stormraiser/disunknown
|
4713a177bfa187ea918cd8dbe2f8d325b9a0db5d
|
194cc01851fe26bc2f0ed87cdcc238c801f4a333
|
refs/heads/main
| 2023-08-29T08:42:45.019039
| 2021-10-13T06:33:23
| 2021-10-13T06:33:23
| 406,071,664
| 20
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 640
|
py
|
import torch, torchvision
class CIFAR10(torchvision.datasets.CIFAR10):
    """CIFAR-10 wrapper exposing this project's label-metadata interface.

    With an empty `labeled_factors` the dataset acts as unlabeled and
    __getitem__ yields images only; otherwise the single 10-way class factor
    is exposed together with its empirical class frequencies.
    """
    def __init__(self, root, part, labeled_factors, transform):
        # part == 'train' selects the training split; anything else the test split.
        super().__init__(root, part == 'train', transform = transform, download = True)
        if len(labeled_factors) == 0:
            self.has_label = False
            self.nclass = []
            self.class_freq = []
        else:
            self.has_label = True
            self.nclass = [10]
            # Empirical per-class frequency over this split.
            class_count = torch.tensor(self.targets).bincount(minlength = 10)
            self.class_freq = [class_count.float() / self.data.shape[0]]
    def __getitem__(self, k):
        img, target = super().__getitem__(k)
        # Wrap the scalar target in a length-1 tensor — presumably to match a
        # multi-factor label convention used elsewhere; TODO confirm.
        return (img, torch.tensor([target])) if self.has_label else img
|
[
"stormraiser2012@gmail.com"
] |
stormraiser2012@gmail.com
|
79c9ac4f57d7c75785c6e238248c49297bcd93e6
|
a40d5c5cd0fcc2410e3200f40f6a79f7201b0193
|
/kubernetes/client/models/v1beta1_custom_resource_column_definition.py
|
f844f995b1a28c3f912663019e6dab34922ac0e7
|
[
"Apache-2.0"
] |
permissive
|
Unacademy/kubernetes-client
|
662cdc2b9fe6df43301e32427e48b1b2715773ca
|
b7f9c740a82b4585478d052c8032495cdeb3b331
|
refs/heads/master
| 2023-06-24T05:57:27.226613
| 2022-12-14T16:29:58
| 2022-12-14T16:29:58
| 181,669,794
| 0
| 0
|
Apache-2.0
| 2023-06-20T13:05:37
| 2019-04-16T10:43:37
|
Python
|
UTF-8
|
Python
| false
| false
| 8,689
|
py
|
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.14.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1beta1CustomResourceColumnDefinition(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'json_path': 'str',
'description': 'str',
'format': 'str',
'name': 'str',
'priority': 'int',
'type': 'str'
}
attribute_map = {
'json_path': 'JSONPath',
'description': 'description',
'format': 'format',
'name': 'name',
'priority': 'priority',
'type': 'type'
}
def __init__(self, json_path=None, description=None, format=None, name=None, priority=None, type=None):
"""
V1beta1CustomResourceColumnDefinition - a model defined in Swagger
"""
self._json_path = None
self._description = None
self._format = None
self._name = None
self._priority = None
self._type = None
self.discriminator = None
self.json_path = json_path
if description is not None:
self.description = description
if format is not None:
self.format = format
self.name = name
if priority is not None:
self.priority = priority
self.type = type
@property
def json_path(self):
"""
Gets the json_path of this V1beta1CustomResourceColumnDefinition.
JSONPath is a simple JSON path, i.e. with array notation.
:return: The json_path of this V1beta1CustomResourceColumnDefinition.
:rtype: str
"""
return self._json_path
@json_path.setter
def json_path(self, json_path):
"""
Sets the json_path of this V1beta1CustomResourceColumnDefinition.
JSONPath is a simple JSON path, i.e. with array notation.
:param json_path: The json_path of this V1beta1CustomResourceColumnDefinition.
:type: str
"""
if json_path is None:
raise ValueError("Invalid value for `json_path`, must not be `None`")
self._json_path = json_path
@property
def description(self):
"""
Gets the description of this V1beta1CustomResourceColumnDefinition.
description is a human readable description of this column.
:return: The description of this V1beta1CustomResourceColumnDefinition.
:rtype: str
"""
return self._description
@description.setter
def description(self, description):
"""
Sets the description of this V1beta1CustomResourceColumnDefinition.
description is a human readable description of this column.
:param description: The description of this V1beta1CustomResourceColumnDefinition.
:type: str
"""
self._description = description
@property
def format(self):
"""
Gets the format of this V1beta1CustomResourceColumnDefinition.
format is an optional OpenAPI type definition for this column. The 'name' format is applied to the primary identifier column to assist in clients identifying column is the resource name. See https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md#data-types for more.
:return: The format of this V1beta1CustomResourceColumnDefinition.
:rtype: str
"""
return self._format
@format.setter
def format(self, format):
"""
Sets the format of this V1beta1CustomResourceColumnDefinition.
format is an optional OpenAPI type definition for this column. The 'name' format is applied to the primary identifier column to assist in clients identifying column is the resource name. See https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md#data-types for more.
:param format: The format of this V1beta1CustomResourceColumnDefinition.
:type: str
"""
self._format = format
@property
def name(self):
"""
Gets the name of this V1beta1CustomResourceColumnDefinition.
name is a human readable name for the column.
:return: The name of this V1beta1CustomResourceColumnDefinition.
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""
Sets the name of this V1beta1CustomResourceColumnDefinition.
name is a human readable name for the column.
:param name: The name of this V1beta1CustomResourceColumnDefinition.
:type: str
"""
if name is None:
raise ValueError("Invalid value for `name`, must not be `None`")
self._name = name
@property
def priority(self):
"""
Gets the priority of this V1beta1CustomResourceColumnDefinition.
priority is an integer defining the relative importance of this column compared to others. Lower numbers are considered higher priority. Columns that may be omitted in limited space scenarios should be given a higher priority.
:return: The priority of this V1beta1CustomResourceColumnDefinition.
:rtype: int
"""
return self._priority
@priority.setter
def priority(self, priority):
"""
Sets the priority of this V1beta1CustomResourceColumnDefinition.
priority is an integer defining the relative importance of this column compared to others. Lower numbers are considered higher priority. Columns that may be omitted in limited space scenarios should be given a higher priority.
:param priority: The priority of this V1beta1CustomResourceColumnDefinition.
:type: int
"""
self._priority = priority
@property
def type(self):
"""
Gets the type of this V1beta1CustomResourceColumnDefinition.
type is an OpenAPI type definition for this column. See https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md#data-types for more.
:return: The type of this V1beta1CustomResourceColumnDefinition.
:rtype: str
"""
return self._type
@type.setter
def type(self, type):
"""
Sets the type of this V1beta1CustomResourceColumnDefinition.
type is an OpenAPI type definition for this column. See https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md#data-types for more.
:param type: The type of this V1beta1CustomResourceColumnDefinition.
:type: str
"""
if type is None:
raise ValueError("Invalid value for `type`, must not be `None`")
self._type = type
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, V1beta1CustomResourceColumnDefinition):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
|
[
"haoweic@google.com"
] |
haoweic@google.com
|
91b20ea47434b099c8ae47c90abfab4af64dad34
|
89cd8b77ad5171c336cc60b2133fe6468a6cb53f
|
/Module01_CZ/day1_basics/04-代码/day1/20_重要演示__________变量的定义.py
|
f4f737892f50ed4f9b166828e1067e18cd29954f
|
[
"MIT"
] |
permissive
|
fenglihanxiao/Python
|
75178f6b6b0c53345e1ed54226ea645216572d6c
|
872baf3a3a5ee42740161152605ca2b1ddf4cd30
|
refs/heads/master
| 2021-05-23T18:49:20.656433
| 2020-04-29T01:06:21
| 2020-04-29T01:06:21
| 253,199,073
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 110
|
py
|
"""
使用变量保存个人信息:
姓名:张传智
性别:男
年龄:21
电话:18800008888
"""
|
[
"fenglihanxiao@qq.com"
] |
fenglihanxiao@qq.com
|
a1f02577c0adfa04d1396283c0f946dca6808285
|
77ee1f677ab2ececb821a11be128b76bcf0e8d6f
|
/electrum_mona/gui/qt/lightning_dialog.py
|
1d709aed9935b2c01bce4e473c6c8bdd4f25e9d9
|
[
"MIT"
] |
permissive
|
zcore-dev/electrum-mona
|
c74e6142a0f34721be70dba68d524ae9ce03179c
|
2beb0c9c7794e8b03d1725bae41ee8b792c57275
|
refs/heads/master
| 2020-08-22T15:32:55.604727
| 2019-10-21T22:56:29
| 2019-10-21T22:56:29
| 216,427,159
| 0
| 0
|
MIT
| 2019-10-20T21:03:48
| 2019-10-20T21:03:48
| null |
UTF-8
|
Python
| false
| false
| 3,658
|
py
|
#!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2012 thomasv@gitorious
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from PyQt5.QtGui import QStandardItemModel, QStandardItem
from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import (QDialog, QWidget, QLabel, QVBoxLayout, QCheckBox,
QGridLayout, QPushButton, QLineEdit, QTabWidget)
from electrum_mona.i18n import _
from .util import HelpLabel, MyTreeView, Buttons
class LightningDialog(QDialog):
def __init__(self, gui_object):
QDialog.__init__(self)
self.gui_object = gui_object
self.config = gui_object.config
self.network = gui_object.daemon.network
self.setWindowTitle(_('Lightning Network'))
self.setMinimumSize(600, 20)
vbox = QVBoxLayout(self)
self.num_peers = QLabel('')
vbox.addWidget(self.num_peers)
self.num_nodes = QLabel('')
vbox.addWidget(self.num_nodes)
self.num_channels = QLabel('')
vbox.addWidget(self.num_channels)
self.status = QLabel('')
vbox.addWidget(self.status)
vbox.addStretch(1)
b = QPushButton(_('Close'))
b.clicked.connect(self.close)
vbox.addLayout(Buttons(b))
self.network.register_callback(self.on_channel_db, ['channel_db'])
self.network.register_callback(self.set_num_peers, ['gossip_peers'])
self.network.register_callback(self.set_unknown_channels, ['unknown_channels'])
self.network.channel_db.update_counts() # trigger callback
self.set_num_peers('', self.network.lngossip.num_peers())
self.set_unknown_channels('', len(self.network.lngossip.unknown_ids))
def on_channel_db(self, event, num_nodes, num_channels, num_policies):
self.num_nodes.setText(_(f'{num_nodes} nodes'))
self.num_channels.setText(_(f'{num_channels} channels'))
def set_num_peers(self, event, num_peers):
self.num_peers.setText(_(f'Connected to {num_peers} peers'))
def set_unknown_channels(self, event, unknown):
self.status.setText(_(f'Requesting {unknown} channels...') if unknown else '')
def is_hidden(self):
return self.isMinimized() or self.isHidden()
def show_or_hide(self):
if self.is_hidden():
self.bring_to_top()
else:
self.hide()
def bring_to_top(self):
self.show()
self.raise_()
def closeEvent(self, event):
self.gui_object.lightning_dialog = None
event.accept()
|
[
"root@DESKTOP-97LL1PI.localdomain"
] |
root@DESKTOP-97LL1PI.localdomain
|
b7bf9bf3508f0c9b91f0a398d44aa26c34edd8f1
|
fdd78a985cde644ac362caaaa8e270f7177a6e16
|
/python/spreadsheet_month.py
|
0a292a31eeeef4cdb15e5044c586c89d8c613a69
|
[] |
no_license
|
andwxu/hackgt
|
d2980a4c67ab9c50e14783e6b8c22d1bbd584668
|
a147fc546c1080b42df26caebbb530268d12c632
|
refs/heads/master
| 2022-12-31T00:29:56.239822
| 2020-10-18T11:41:30
| 2020-10-18T11:41:30
| 304,765,023
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,433
|
py
|
import gspread
from oauth2client.service_account import ServiceAccountCredentials
from matplotlib import pyplot as plt
from matplotlib import ticker as tick
from datetime import datetime, timezone, timedelta, date
import numpy as np
import random
import re
import mpld3
# use creds to create a client to interact with the Google Drive API
scope = ["https://spreadsheets.google.com/feeds",'https://www.googleapis.com/auth/spreadsheets',"https://www.googleapis.com/auth/drive.file","https://www.googleapis.com/auth/drive"]
creds = ServiceAccountCredentials.from_json_keyfile_name('sheets_data.json', scope)
client = gspread.authorize(creds)
sheet = client.open("Orders").sheet1
list_of_orders = sheet.get_all_values()
#initialize dictionary for each hour in a day
hours_month = dict()
i = 0
while i < 24:
hours_month[i] = dict()
i = i + 1
#iterate through rows in sheet, pulling time and item of order
menu = []
open_hour = 12
close_hour = 22
for order in list_of_orders:
time = datetime.fromtimestamp(int(order[1]) - 14400, timezone.utc)
elapsed = datetime.now() - time.replace(tzinfo=None)
elapsed_day = elapsed.days
hour = time.hour
order = order[2]
foods = re.split(",", order)
for food in foods:
if hour in range(open_hour, close_hour):
if food not in menu:
menu.append(food)
if elapsed_day <= 31:
if food in hours_month[hour]:
hours_month[hour][food] = hours_month[hour][food] + 1
else:
hours_month[hour][food] = 1
for hour in hours_month:
for food in menu:
if food in hours_month[hour]:
hours_month[hour][food] = hours_month[hour][food] / 31
else:
hours_month[hour][food] = .05
##generate x axis labels based on opening/closing hours
x_labels = []
for i in range(open_hour, close_hour + 1):
label = ""
time = i
if time % 12 != 0:
time = time % 12
else:
time = 12
label += str(time) + ":00"
if i <= 12:
label += " AM"
else:
label += " PM"
x_labels.append(label)
##format graph
barWidth = .5/len(menu)
position_base = np.arange(0,24,1)
i = -.25
fig, ax = plt.subplots(figsize=(20, 6))
x = range(len(menu))
plt.xlabel('Hour')
ax.xaxis.set_major_locator(tick.MultipleLocator(1))
ax.yaxis.set_major_locator(tick.MultipleLocator(1))
ax.set_xticklabels(x_labels)
ax.margins(2,0)
plt.xlim(12-.3, 21+.3)
plt.ylabel('Number of Orders')
ax.set_title('Average Orders/Hour per Month')
ax.yaxis.set_major_locator(tick.MultipleLocator(1))
max_val = 0
for menu_item in menu: #construct each bar
bar = []
for order_hour in hours_month:
if menu_item in hours_month[order_hour]:
if hours_month[order_hour][menu_item] > max_val:
max_val = hours_month[order_hour][menu_item]
if hours_month[order_hour][menu_item] < .05:
bar.append(.05)
else:
bar.append(hours_month[order_hour][menu_item])
else:
bar.append(.05)
r = random.random()
g = random.random()
b = random.random()
rgb = (r, g, b)
plt.bar((position_base + i), bar, color=rgb, width = barWidth, edgecolor = 'white', label = menu_item, align='edge')
i = i + .5/len(menu)
plt.ylim(0, max(round(max_val) + .5, 1))
plt.legend()
plt.savefig('../public/avg_month.png')
|
[
"46511636+aidandonelan@users.noreply.github.com"
] |
46511636+aidandonelan@users.noreply.github.com
|
4a4d2c2a023f48bedef21da3b002bc22be2cf986
|
42b16c3a421e59949441edc7c2153fef8ac690eb
|
/Admin.py
|
073ca76fa3a8ef4d75a360602cd9692bddb8af52
|
[] |
no_license
|
mastermobin/DatabaseProject
|
9a7179921e2878adeba820eb662a7609084a3667
|
bc80290b7d1c132763554dd5b910c9b9075d99f2
|
refs/heads/master
| 2020-12-24T00:13:10.429887
| 2020-01-30T22:46:06
| 2020-01-30T22:46:06
| 237,320,302
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,426
|
py
|
import mysql.connector
from tabulate import tabulate
def showQuery(mydb, query, h):
mycursor = mydb.cursor()
mycursor.execute(query)
myresult = mycursor.fetchall()
if mycursor.rowcount != 0:
print(tabulate(myresult, headers=h, tablefmt='psql'))
else:
print("No Result")
def run(ID, MyDB):
print('-------------------------------')
print('Wellcome ' + str(ID) + ' To Admin Panel')
while(True):
print('-------------------------------')
print("Choose One Of Below Options To Continue: ")
print("""
1.Show Average Charity's Population Per Region
2.Show Drivers With Average Score More Than 4
3.Show Min And Max Score For Each Driver
4.Show Restaurants Ordered By Donated Food Amount
5.Show Driver With Maximum State Changes
6.Show Most Needful Charities
7.Show Today's Deliveries With Driver Score More Than Avg
""")
ans = input("Your Answer: ")
if ans == '1':
showQuery(MyDB,
"""
SELECT CONCAT(City, ', ', Region) as Region, AVG(Population) as AvgPopulation
FROM charity
GROUP BY CONCAT(City, ', ', Region);
""",
['Region', 'AvgPopulation'])
elif ans == '2':
showQuery(MyDB,
"""
SELECT CONCAT(FirstName, ' ', LastName) as Name, AVG(Rate) as RateAvg
FROM delivery
INNER JOIN driver d on delivery.DriverID = d.ID
GROUP BY DriverID
HAVING (AVG(Rate) >= 4);
""",
['Name', 'RateAvg'])
elif ans == '3':
showQuery(MyDB,
"""
SELECT CONCAT(FirstName, ' ', LastName) as Name, MIN(Rate) as MinRate, MAX(Rate) as MaxRate
FROM delivery
INNER JOIN driver d on delivery.DriverID = d.ID
GROUP BY DriverID;
""",
['Name', 'MinRate', 'MaxRate'])
elif ans == '4':
showQuery(MyDB,
"""
SELECT Name, SUM(Count) as SentFood
FROM delivery
INNER JOIN restaurant r on delivery.RestaurantID = r.ID
GROUP BY RestaurantID
ORDER BY SentFood DESC;
""",
['Name', 'DonatedAmount'])
elif ans == '5':
showQuery(MyDB,
"""
SELECT DriverID, CONCAT(FirstName, ' ', LastName) as Name, COUNT(driver_log.ID) as ChangeCount
FROM driver_log
INNER JOIN driver d on driver_log.DriverID = d.ID
WHERE Date >= SUBDATE(CURRENT_TIMESTAMP, INTERVAL 1 DAY)
GROUP BY DriverID
ORDER BY ChangeCount DESC
LIMIT 1;
""",
['DriverID', 'Name', 'Change Count'])
elif ans == '6':
showQuery(MyDB,
"""
SELECT t1.Name, TakenFood, Population, ContractedRestaurants, (TakenFood * ContractedRestaurants / Population) as Score
FROM ((SELECT c.Name, SUM(Count) as TakenFood
FROM delivery
INNER JOIN charity c on CharityID = c.ID
GROUP BY CharityID)
UNION
(SELECT Name, 0 as TakenFood
FROM charity
WHERE ID NOT IN (SELECT DISTINCT CharityID FROM delivery d))
) as t1
INNER JOIN (SELECT c7.Name, Population, ContractedRestaurants
FROM charity as c7
INNER JOIN ((SELECT Name, COUNT(RestaurantID) as ContractedRestaurants
FROM contract
INNER JOIN charity c on contract.CharityID = c.ID
GROUP BY CharityID)
UNION
(SELECT Name, 0 as ContractedRestaurants
FROM charity
WHERE ID NOT IN (SELECT DISTINCT CharityID FROM contract))) as t3
ON t3.Name = c7.Name
) c2 ON c2.Name = t1.Name
ORDER BY Score DESC;
""",
['Name', 'TakenFood', 'Population', 'ContractedRestaurants', 'Score'])
elif ans == '7':
showQuery(MyDB,
"""
SELECT CONCAT(d.FirstName, ' ', d.LastName) as DriverName, c.Name as CharityName, r.Name as RestaurantName, Rate as DriverRate
FROM delivery
INNER JOIN charity c on delivery.CharityID = c.ID
INNER JOIN restaurant r on delivery.RestaurantID = r.ID
INNER JOIN driver d on delivery.DriverID = d.ID
WHERE Date = CURRENT_DATE() AND Rate > (SELECT AVG(Rate) FROM delivery);
""",
['Driver Name', 'Charity Name', 'Restaurant Name', 'Driver Rate'])
else:
print("Wrong Choice!")
ans = input("Do You Want To Continue? (y/N): ")
if ans == "Y" or ans == "y":
continue
break
|
[
"mvtikm@gmail.com"
] |
mvtikm@gmail.com
|
7b21a9e794befbf4b2268bb9e211d4e4aff762d6
|
53d03e48ca88e05fb134be8a2d3dda9b6079216e
|
/fabfile.py
|
ca82fcc42642e2e6fffd7cd3c79a84a7357a5b66
|
[] |
no_license
|
myungseokang/djangogirls
|
3b9e18acb7b7747ca04c32dbebb070fc15b88966
|
dd7f82131de66e0732553af2b1e3c170a6f44b5f
|
refs/heads/master
| 2021-06-18T09:59:14.143057
| 2017-07-03T00:16:00
| 2017-07-03T00:16:00
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| true
| false
| 6,054
|
py
|
from fabric.contrib.files import append, exists, sed, put
from fabric.api import env, local, run, sudo
import random
import os
import json
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# deploy.json파일을 불러와 envs변수에 저장합니다.
with open(os.path.join(PROJECT_DIR, "deploy.json")) as f:
envs = json.loads(f.read())
REPO_URL = envs['REPO_URL']
PROJECT_NAME = envs['PROJECT_NAME']
REMOTE_HOST = envs['REMOTE_HOST']
REMOTE_USER = envs['REMOTE_USER']
STATIC_ROOT_NAME = envs['STATIC_ROOT']
STATIC_URL_NAME = envs['STATIC_URL']
MEDIA_ROOT = envs['MEDIA_ROOT']
env.user = REMOTE_USER
username = env.user
env.hosts = [
REMOTE_HOST,
]
project_folder = '/home/{}/{}'.format(env.user, PROJECT_NAME)
apt_requirements = [
'ufw',
'curl',
'git',
'python3-dev',
'python3-pip',
'build-essential',
'python3-setuptools',
'apache2',
'libapache2-mod-wsgi-py3',
'libssl-dev',
'libxml2-dev',
'libjpeg8-dev',
'zlib1g-dev',
]
def new_server():
setup()
deploy()
def setup():
_register_ssh_key()
_get_latest_apt()
_install_apt_requirements(apt_requirements)
_make_virtualenv()
def deploy():
_get_latest_source()
_update_settings()
_update_virtualenv()
_update_static_files()
_update_database()
_make_virtualhost()
_grant_apache2()
_grant_sqlite3()
_restart_apache2()
def create_superuser():
virtualenv_folder = project_folder + '/../.virtualenvs/{}'.format(PROJECT_NAME)
run('cd %s && %s/bin/python3 manage.py createsuperuser' % (
project_folder, virtualenv_folder
))
def _register_ssh_key():
local("ssh-keyscan -H {} >> {}".format(REMOTE_HOST, os.path.expanduser('~/.ssh/known_hosts')))
def _get_latest_apt():
update_or_not = input('Would U install Apache2/Python3 ?\n'
'[y/n, default: y]: ')
if update_or_not != 'n':
sudo('sudo apt-get update && sudo apt-get -y upgrade')
def _install_apt_requirements(apt_requirements):
reqs = ''
for req in apt_requirements:
reqs += (' ' + req)
sudo('sudo apt-get -y install {}'.format(reqs))
def _make_virtualenv():
if not exists('~/.virtualenvs'):
script = '''"# python virtualenv settings
export WORKON_HOME=~/.virtualenvs
export VIRTUALENVWRAPPER_PYTHON="$(command \which python3)" # location of python3
source /usr/local/bin/virtualenvwrapper.sh"'''
run('mkdir ~/.virtualenvs')
sudo('sudo pip3 install virtualenv virtualenvwrapper')
run('echo {} >> ~/.bashrc'.format(script))
def _get_latest_source():
if exists(project_folder + '/.git'):
run('cd %s && git fetch' % (project_folder,))
else:
run('git clone %s %s' % (REPO_URL, project_folder))
current_commit = local("git log -n 1 --format=%H", capture=True)
run('cd %s && git reset --hard %s' % (project_folder, current_commit))
def _update_settings():
settings_path = project_folder + '/{}/settings.py'.format(PROJECT_NAME)
sed(settings_path, "DEBUG = True", "DEBUG = False")
sed(settings_path,
'ALLOWED_HOSTS = .+$',
'ALLOWED_HOSTS = ["%s"]' % (REMOTE_HOST,)
)
secret_key_file = project_folder + '/{}/secret_key.py'.format(PROJECT_NAME)
if not exists(secret_key_file):
chars = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
key = ''.join(random.SystemRandom().choice(chars) for _ in range(50))
append(secret_key_file, "SECRET_KEY = '%s'" % (key,))
append(settings_path, '\nfrom .secret_key import SECRET_KEY')
def _update_virtualenv():
virtualenv_folder = project_folder + '/../.virtualenvs/{}'.format(PROJECT_NAME)
if not exists(virtualenv_folder + '/bin/pip'):
run('cd /home/%s/.virtualenvs && virtualenv %s' % (env.user, PROJECT_NAME))
run('%s/bin/pip install "django<2"' % (
virtualenv_folder
))
def _update_static_files():
virtualenv_folder = project_folder + '/../.virtualenvs/{}'.format(PROJECT_NAME)
run('cd %s && %s/bin/python3 manage.py collectstatic --noinput' % (
project_folder, virtualenv_folder
))
def _update_database():
virtualenv_folder = project_folder + '/../.virtualenvs/{}'.format(PROJECT_NAME)
run('cd %s && %s/bin/python3 manage.py migrate --noinput' % (
project_folder, virtualenv_folder
))
def _make_virtualhost():
script = """'<VirtualHost *:80>
ServerName {servername}
Alias /{static_url} /home/{username}/{project_name}/{static_root}
Alias /{media_url} /home/{username}/{project_name}/{media_url}
<Directory /home/{username}/{project_name}/{media_url}>
Require all granted
</Directory>
<Directory /home/{username}/{project_name}/{static_root}>
Require all granted
</Directory>
<Directory /home/{username}/{project_name}/{project_name}>
<Files wsgi.py>
Require all granted
</Files>
</Directory>
WSGIDaemonProcess {project_name} python-home=/home/{username}/.virtualenvs/{project_name} python-path=/home/{username}/{project_name}
WSGIProcessGroup {project_name}
WSGIScriptAlias / /home/{username}/{project_name}/{project_name}/wsgi.py
ErrorLog ${{APACHE_LOG_DIR}}/error.log
CustomLog ${{APACHE_LOG_DIR}}/access.log combined
</VirtualHost>'""".format(
static_root=STATIC_ROOT_NAME,
username=env.user,
project_name=PROJECT_NAME,
static_url=STATIC_URL_NAME,
servername=REMOTE_HOST,
media_url=MEDIA_ROOT
)
sudo('echo {} > /etc/apache2/sites-available/{}.conf'.format(script, PROJECT_NAME))
sudo('a2ensite {}.conf'.format(PROJECT_NAME))
def _grant_apache2():
sudo('sudo chown -R :www-data ~/{}'.format(PROJECT_NAME))
def _grant_sqlite3():
sudo('sudo chmod 775 ~/{}/db.sqlite3'.format(PROJECT_NAME))
def _restart_apache2():
sudo('sudo service apache2 restart')
|
[
"l3opold7@gmail.com"
] |
l3opold7@gmail.com
|
181dff928bcc29942a15804141960b142b348397
|
0183da495028d9d341641791355e62ba36d3f698
|
/PersonDetection/main.py
|
9b885d355bd53b10b03072db8adf438bb1ef2c69
|
[] |
no_license
|
Joshua1225/PersonSearchEngine
|
11bde400a39d555e00a363ca25b4b8721165e3fb
|
fdda6bcd6bfaf6cea736d19bb559ba94e359e41b
|
refs/heads/main
| 2023-03-07T04:58:18.604258
| 2020-12-12T18:03:15
| 2020-12-12T18:03:15
| 317,192,690
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 382
|
py
|
from Detector import PedestrianDetector, PedestrianDetectionResultDTO
import cv2
if __name__ == '__main__':
detector = PedestrianDetector('./weights/epoches_112.pth', cuda=True, cpu=False)
img = cv2.imread('./images/1.jpg')
pdrDTO = detector.detect(img)
img_list = pdrDTO.get_img_list()
for i, img in enumerate(img_list):
cv2.imwrite(f"{i}_.jpg", img)
|
[
"noreply@github.com"
] |
noreply@github.com
|
d5c4cac739d6c9ad1a641938dda9973c912c84c5
|
e944d288093c9234c3a6a2a76ffe4e3c9b236cf1
|
/annotation_utils/coco/structs/__init__.py
|
66e257d06be64002c0bce0580e1d58fd6c768ce7
|
[
"MIT"
] |
permissive
|
darwinharianto/annotation_utils
|
598b043345790580e99f34f159b9612b9b1bcd52
|
1cbdadaa28ff945e705dd7b806dda395e32ab23c
|
refs/heads/master
| 2022-04-27T01:20:10.738778
| 2020-04-27T09:23:57
| 2020-04-27T09:23:57
| 255,525,300
| 0
| 0
|
MIT
| 2020-04-27T09:23:59
| 2020-04-14T06:10:57
|
Python
|
UTF-8
|
Python
| false
| false
| 245
|
py
|
from .objects import COCO_Info, COCO_License, COCO_Image, \
COCO_Annotation, COCO_Category
from .handlers import COCO_License_Handler, COCO_Image_Handler, \
COCO_Annotation_Handler, COCO_Category_Handler
from .dataset import COCO_Dataset
|
[
"mork.clayton3@gmail.com"
] |
mork.clayton3@gmail.com
|
fc9b0c269aecdb44c4736fe6b9da03555f7de8e3
|
31622dd16963b459ac6eec71fcf54e4d243ac773
|
/edu_sharing_client/models/license.py
|
073b8ab7d8a99b38a95e9902e2a8e4a23e2cd02e
|
[] |
no_license
|
torsten-simon/oeh-search-etl
|
95e6e92698a97c98ef9d5b02076edcf993736d6f
|
eacdadcd8af169cb54629db0d2d46a5616f854a6
|
refs/heads/master
| 2023-04-16T05:08:41.194239
| 2020-11-16T09:51:59
| 2020-11-16T09:51:59
| 318,169,232
| 0
| 0
| null | 2023-04-03T23:04:46
| 2020-12-03T11:20:44
| null |
UTF-8
|
Python
| false
| false
| 3,484
|
py
|
# coding: utf-8
"""
edu-sharing Repository REST API
The public restful API of the edu-sharing repository. # noqa: E501
OpenAPI spec version: 1.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class License(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'icon': 'str',
'url': 'str'
}
attribute_map = {
'icon': 'icon',
'url': 'url'
}
def __init__(self, icon=None, url=None): # noqa: E501
"""License - a model defined in Swagger""" # noqa: E501
self._icon = None
self._url = None
self.discriminator = None
if icon is not None:
self.icon = icon
if url is not None:
self.url = url
@property
def icon(self):
"""Gets the icon of this License. # noqa: E501
:return: The icon of this License. # noqa: E501
:rtype: str
"""
return self._icon
@icon.setter
def icon(self, icon):
"""Sets the icon of this License.
:param icon: The icon of this License. # noqa: E501
:type: str
"""
self._icon = icon
@property
def url(self):
"""Gets the url of this License. # noqa: E501
:return: The url of this License. # noqa: E501
:rtype: str
"""
return self._url
@url.setter
def url(self, url):
"""Sets the url of this License.
:param url: The url of this License. # noqa: E501
:type: str
"""
self._url = url
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(License, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, License):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
[
"simon@edu-sharing.net"
] |
simon@edu-sharing.net
|
aad525f6d2bb1919a03e49948c4f34fe56ee5e66
|
8e6e3f7fc065548cb25825632c49d83964bf9f30
|
/Network/icmp_arp_nmap.py
|
d0ae2b9a0e1de85c750dab6d970b376a9474716c
|
[] |
no_license
|
raviwithu/Scripts
|
1d13c9f368ed9ab966fda434d022acd9f71d3f1d
|
cba34cdceee121ce696bc1b30faf19a1fc126eda
|
refs/heads/master
| 2021-01-22T02:13:18.252626
| 2018-05-03T01:53:16
| 2018-05-03T01:53:16
| 92,339,767
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 637
|
py
|
#!/usr/bin/env python
import logging
import subprocess
logging.getLogger("scapy.runtime").setLevel(logging.ERROR)
logging.getLogger("scapy.interactive").setLevel(logging.ERROR)
logging.getLogger("scapy.loading").setLevel(logging.ERROR)
from scapy.all import *
ans, unans = sr(IP(dst = "192.168.0.2-25") / ICMP(), timeout = 2, iface = "eth0", verbose = 0)
reachable = []
for reply in ans:
reachable.append(reply[1][IP].src)
for host in reachable:
send(ARP(hwsrc = get_if_hwaddr("eth0"), psrc = "192.168.0.1", hwdst = "ff:ff:ff:ff:ff:ff", pdst = host), iface = "eth0", verbose = 0)
print "\nDone!\n"
|
[
"miravishankar@yahoo.co.in"
] |
miravishankar@yahoo.co.in
|
d2edaeec8fdcd119849df0305b0cb817b3235ebe
|
8d9318a33afc2c3b5ca8ac99fce0d8544478c94a
|
/Books/Casandra DB/opscenter-5.1.0/lib/py/orbited/proxy.py
|
4c0e80c1f97cce4bb513bffb9be5583f06edd599
|
[] |
no_license
|
tushar239/git-large-repo
|
e30aa7b1894454bf00546312a3fb595f6dad0ed6
|
9ee51112596e5fc3a7ab2ea97a86ec6adc677162
|
refs/heads/master
| 2021-01-12T13:48:43.280111
| 2016-11-01T22:14:51
| 2016-11-01T22:14:51
| 69,609,373
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 129
|
py
|
version https://git-lfs.github.com/spec/v1
oid sha256:ba1b92cda51cc0fbe137994a7b857befa45aa64d45009e4fb34ed1df78d0f3fc
size 5501
|
[
"tushar239@gmail.com"
] |
tushar239@gmail.com
|
10cefb112ffc8a72f2ddcd285ff5b6f871ecf497
|
41523dd4871e8ed1043d2b3ddf73417fcbdde209
|
/day16/map函数.py
|
7a19700236dcf557aafb01afb59951babcaa5d8d
|
[] |
no_license
|
WayneChen1994/Python1805
|
2aa1c611f8902b8373b8c9a4e06354c25f8826d6
|
a168cd3b7749afc326ec4326db413378fd3677d5
|
refs/heads/master
| 2020-03-30T23:19:00.773288
| 2018-11-02T10:47:40
| 2018-11-02T10:47:40
| 151,697,105
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 943
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# author:Wayne
'''
map函数的功能:
将传入的函数依次作用于序列中的每一个对象,
然后将其作用的结果作为一个迭代器返回
'''
'''
需求:将列表中的["1", "2", "3", "4", "5"]
转为[1, 2, 3, 4, 5],写成一个函数。
'''
def func(alist):
return [int(x) for x in alist]
list1 = ["1", "2", "3", "4", "5"]
print(list1)
print(func(list1))
res = map(int, list1)
print(list(res))
'''
map(func,lsd)
参数一:要作用函数,【此函数有且只有一个参数】
参数二:要作用的序列
'''
'''
使用map函数,求n的序列[1, 4, 9, ..., n^2], 一行代码实现上述的要求,n从控制台输入。
'''
def func2(n):
return list(map(lambda x:x**2, range(1, n+1)))
num = int(input("请输入n的值:"))
print(func2(num))
print(list(map(lambda n:n*n, range(1, int(input("请输入一个整数:"))+1))))
|
[
"waynechen1994@163.com"
] |
waynechen1994@163.com
|
8f55ee77bb2e6f0f501c6aae41fe353d5946e7ed
|
48f092fd8191b0218df8605dc7125e526764e59e
|
/NestedLoops/venv/Scripts/pip-script.py
|
1f860a6a2d99a98a14ef6f35a31d2812b31131f3
|
[] |
no_license
|
LalityaSawant/Python-Projects
|
2edb430c094fe3d6b4e706cc61f885aa07e24dff
|
b142708256e26867f09b3063f5f3fffa305ec496
|
refs/heads/master
| 2020-05-01T03:00:26.012301
| 2019-03-23T22:09:33
| 2019-03-23T22:09:33
| 177,235,109
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 443
|
py
|
#!C:\Users\lsawant\Documents\Learning\Python\PycharmProjects\NestedLoops\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==10.0.1','console_scripts','pip'
__requires__ = 'pip==10.0.1'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==10.0.1', 'console_scripts', 'pip')()
)
|
[
"lalitya.sawant@gmail.com"
] |
lalitya.sawant@gmail.com
|
6346c55ed5ae3d16b8b3bcf214eb2f9ab0feed02
|
beb5c6eda82d6ed5d1999f74c115c30f9440feb0
|
/setup.py
|
269b77f96dc7dc8b0749146096b1b6eb644bf458
|
[
"Apache-2.0"
] |
permissive
|
fpgaco/hyperscalar
|
5aadcb4fd5e47439b00169f65cfdfce3d02563ed
|
44edb9005ed0117ecf14bfd161741ecf23c41279
|
refs/heads/master
| 2020-07-30T11:14:28.785400
| 2019-09-22T21:08:43
| 2019-09-22T21:08:43
| 210,209,838
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 909
|
py
|
#!/usr/bin/env python3
from setuptools import setup, find_packages
with open('README.md') as f:
readme = f.read()
with open('LICENSE.txt') as f:
license = f.read()
setup(
name='hyperscalar',
version='0.0.1',
description='Next generation type',
keywords=['hyperscalar'],
url='https://github.com/fpgaco/hyperscalar',
license=license,
long_description=readme,
author='Kenso Trabing',
author_email='ktrabing@acm.org',
maintainer='Kenso Trabing',
maintainer_email='ktrabing@acm.org',
packages=find_packages(exclude=('tests','docs','venv')),
classifiers=[
'Programming Language :: Python :: 3.5',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Development Status :: 3 - Alpha',
'Topic :: Scientific/Engineering :: Electronic Design Automation (EDA)'
]
)
|
[
"ktrabing@acm.org"
] |
ktrabing@acm.org
|
a611fe6a9f40e4b853cdbfceb4e2125ec01b9c3d
|
550cfce0e688ffbba8290ece0baf5751d20016ea
|
/exts.py
|
7b042d99f614f1a09c08a6b6987c0f83177c9d1a
|
[] |
no_license
|
thunderwin/firstweb
|
0c32169ea1b2081cbe6b5019ba9cdb3993c93bd2
|
53f509a7044bd3174980e6f4f74a813e5b2d9f44
|
refs/heads/master
| 2021-07-21T02:22:00.243247
| 2017-10-31T11:28:42
| 2017-10-31T11:28:42
| 108,497,735
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 76
|
py
|
#encoding: utf-8
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
|
[
"nathan@MacBook-Pro.local"
] |
nathan@MacBook-Pro.local
|
27be4e80e15ee4738b6c1266e451404d6934047c
|
0445b05dc0c1a6b92cd4dde04fafd8acddc2365c
|
/libs/nrgreader.py
|
7a2baf3346816a09ee00824d20953e2a6634a4ae
|
[] |
no_license
|
ananchev/energymeter
|
a7842f7c31173aa332460c51b972ecf9321f0bb5
|
133e4ba75a14adb8d45ae5d5500850ee9c8fefb5
|
refs/heads/main
| 2023-08-21T10:59:06.721270
| 2021-10-17T10:31:52
| 2021-10-17T10:31:52
| 305,511,561
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,976
|
py
|
# Generic imports
import json # used when storing nrg readings into local json
from datetime import datetime # used to capture time when meter readings are stored
import os # used for file operations
# Establish logging
from libs.logger import logger, LOG_FILE
logger = logger.getChild('nrgreader')
# Pymodbus object
from pymodbus.client.sync import ModbusTcpClient
from pymodbus.payload import BinaryPayloadDecoder
from pymodbus.constants import Endian
# Client to work with influx database
from influxdb import InfluxDBClient
# IP and port of the Modbus TCP gateway
MODBUS_GW = dict(host='192.168.2.222', port=502)
# Energy meters
METERS = [
dict(meter_id=10,
influx_measure_base_name='line0'),
dict(meter_id=11,
influx_measure_base_name='line1'),
dict(meter_id=2,
influx_measure_base_name='line2'),
dict(meter_id=3,
influx_measure_base_name='line3'),
dict(meter_id=4,
influx_measure_base_name='line4'),
dict(meter_id=5,
influx_measure_base_name='line5')
]
# filename of the json where the readings from previous run are stored
READINGS_CACHE = 'readings_cache.json'
# details of the influx database to store timeseries data into
INLUX_DB = dict(host='192.168.2.8', port=8086, username='ananchev', password='1Race96R', database='openhab')
class Reader():
def __init__(self, interval="Manual"):
logger.info(f"Initialising energy reader with interval '{interval}'...")
self.interval = interval
self.prev_readings = {}
self.readings_cache = self.init_readings_cache()
self.modbus_client = ModbusTcpClient(**MODBUS_GW)
self.publish_to_influx_lst = []
def init_readings_cache(self) -> dict:
readtime = datetime.now()
readtime_epoch = readtime.timestamp()
readtime_str = readtime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')
# does a local cache dict exist?
if not os.path.exists(READINGS_CACHE):
logger.info("Cached results from earlier readings not found. Creating a new file now...")
new_dict = {self.interval:dict(readtime_epoch=readtime_epoch,
readtime_str=readtime_str,
readings=list()
)
}
return new_dict
with open(READINGS_CACHE, 'r', encoding='utf-8') as f:
exisiting_dict = json.load(f)
if self.interval in exisiting_dict: # local cache exists and interval within it exists
logger.info(f"Cached results from earlier readings found. Copying interval '{self.interval}' from it.")
self.prev_readings = {key: value for key, value in exisiting_dict.items() if key in self.interval}
# with open('prev_readings.json', 'w', encoding='utf-8') as f:
# json.dump(self.prev_readings, f, ensure_ascii=False, indent=4)
exisiting_dict.update({self.interval:{'readtime_epoch':readtime_epoch, 'readtime_str':readtime_str, 'readings':list()}})
else: # local cache existis, but this is the first time we add the current read interval in it
logger.info(f"Cached results from earlier readings found, but interval '{self.interval}' does not exist and will be added.")
exisiting_dict[self.interval] = dict(readtime_epoch=readtime_epoch,
readtime_str=readtime_str,
readings=list()
)
return exisiting_dict
def execute(self):
self.read_current()
if self.calculate_consumed_energy():
self.write_to_influx()
def read_current(self):
self.connect_modbus()
for m in METERS:
# store the current readings for total energy value
current_energy_reading = self.total_energy_now(m)
self.readings_cache[self.interval]['readings'].append(current_energy_reading)
self.publish_to_influx_lst.append({"measurement":current_energy_reading["measurement_total"],
"time":self.readings_cache[self.interval]["readtime_str"],
"fields":dict(item=current_energy_reading["measurement_total"],
value=current_energy_reading["value_total"])})
self.modbus_client.close()
with open(READINGS_CACHE, 'w', encoding='utf-8') as f:
json.dump(self.readings_cache, f, ensure_ascii=False, indent=4)
def connect_modbus(self, retries = 0):
connection = self.modbus_client.connect()
if not connection:
if (retries < 3):
time.sleep(1)
self._connect(self, retries+1)
else:
raise Exception('cannot establish connection to gateway')
logger.info('connected to Modbus gateway')
def total_energy_now(self, meter):
meter_id = meter.get('meter_id')
result = self.read_modbus_registers(meter_id)
decoder = BinaryPayloadDecoder.fromRegisters(result.registers, byteorder=Endian.Big)
energy_kwh = decoder.decode_32bit_uint() / 100
influx_measure = meter.get('influx_measure_base_name')
logger.info(f"{influx_measure}total = {energy_kwh} kWh")
return dict(meter_id=meter_id, measurement_total=influx_measure+"Total", value_total = energy_kwh)
def calculate_consumed_energy(self):
# calculate consumed energy only if previous reading cache for the trigger period is found
if not self.prev_readings: #empty dict evaluates to false
logger.info(f"No previous readings exist for trigger interval '{self.interval}'. Consumed energy will be calculated on next trigger.")
return False
for m in METERS:
meter_id = m.get('meter_id')
meter_prev_reading = next(i for i in self.prev_readings[self.interval]['readings'] if i['meter_id'] == meter_id)
meter_current_reading = next(j for j in self.readings_cache[self.interval]['readings'] if j['meter_id'] == meter_id)
# {"meter_id": 10, "measurement_total": "line0-total", "value_total": 0.95}
consumed = round(meter_current_reading['value_total'] - meter_prev_reading['value_total'],2)
logger.info(f"Consumed energy on meter '{meter_id}' for the last '{self.interval}' period is '{consumed}' kWh")
measure_base_name = m.get('influx_measure_base_name')
self.publish_to_influx_lst.append({"measurement":measure_base_name+"Last" + self.interval,
"time":self.readings_cache[self.interval]["readtime_str"],
"fields":dict(item=measure_base_name+"Last" + self.interval,
value=consumed)})
# with open("to_pub_to_influx.json", 'w', encoding='utf-8') as f:
# json.dump(self.publish_to_influx_lst, f, ensure_ascii=False, indent=4)
return True
def write_to_influx(self):
logger.info("Publishing total and interval results into influx db...")
client = InfluxDBClient(**INLUX_DB)
client.write_points(self.publish_to_influx_lst)
logger.info("Done!")
def read_modbus_registers(self, meter_id):
result = self.modbus_client.read_holding_registers(address=0,count=2,unit=meter_id)
if result.isError(): # retry in case of ModbusIOException due to connection issues.
logger.warning("invalid result, retrying the read operation...")
result = self.read_modbus_registers(meter_id)
return result
|
[
"ananchev@gmail.com"
] |
ananchev@gmail.com
|
0289b4bcf761b49c33907f4f98a3ded9f257d4fa
|
429a8441bb9730dcf0e33fedcb5f3672a731b3e7
|
/xero_python/accounting/models/tax_rate.py
|
3105e7e85477aa221bd8d79e66b609249374e58e
|
[
"MIT"
] |
permissive
|
gregsteelxinja/xero-python
|
1a26ec3b05ea156dd6848f2ec313c72e9f39b0e2
|
d0473ba91099de3464b3dffa377df5a11ad95afc
|
refs/heads/master
| 2022-12-16T10:54:11.424971
| 2020-09-01T01:00:23
| 2020-09-01T01:00:23
| 291,526,551
| 0
| 0
| null | 2020-08-30T18:16:48
| 2020-08-30T18:16:48
| null |
UTF-8
|
Python
| false
| false
| 14,802
|
py
|
# coding: utf-8
"""
Accounting API
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
OpenAPI spec version: 2.2.14
Contact: api@xero.com
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
from xero_python.models import BaseModel
class TaxRate(BaseModel):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
"name": "str",
"tax_type": "str",
"tax_components": "list[TaxComponent]",
"status": "str",
"report_tax_type": "str",
"can_apply_to_assets": "bool",
"can_apply_to_equity": "bool",
"can_apply_to_expenses": "bool",
"can_apply_to_liabilities": "bool",
"can_apply_to_revenue": "bool",
"display_tax_rate": "float",
"effective_rate": "float",
}
attribute_map = {
"name": "Name",
"tax_type": "TaxType",
"tax_components": "TaxComponents",
"status": "Status",
"report_tax_type": "ReportTaxType",
"can_apply_to_assets": "CanApplyToAssets",
"can_apply_to_equity": "CanApplyToEquity",
"can_apply_to_expenses": "CanApplyToExpenses",
"can_apply_to_liabilities": "CanApplyToLiabilities",
"can_apply_to_revenue": "CanApplyToRevenue",
"display_tax_rate": "DisplayTaxRate",
"effective_rate": "EffectiveRate",
}
def __init__(
self,
name=None,
tax_type=None,
tax_components=None,
status=None,
report_tax_type=None,
can_apply_to_assets=None,
can_apply_to_equity=None,
can_apply_to_expenses=None,
can_apply_to_liabilities=None,
can_apply_to_revenue=None,
display_tax_rate=None,
effective_rate=None,
): # noqa: E501
"""TaxRate - a model defined in OpenAPI""" # noqa: E501
self._name = None
self._tax_type = None
self._tax_components = None
self._status = None
self._report_tax_type = None
self._can_apply_to_assets = None
self._can_apply_to_equity = None
self._can_apply_to_expenses = None
self._can_apply_to_liabilities = None
self._can_apply_to_revenue = None
self._display_tax_rate = None
self._effective_rate = None
self.discriminator = None
if name is not None:
self.name = name
if tax_type is not None:
self.tax_type = tax_type
if tax_components is not None:
self.tax_components = tax_components
if status is not None:
self.status = status
if report_tax_type is not None:
self.report_tax_type = report_tax_type
if can_apply_to_assets is not None:
self.can_apply_to_assets = can_apply_to_assets
if can_apply_to_equity is not None:
self.can_apply_to_equity = can_apply_to_equity
if can_apply_to_expenses is not None:
self.can_apply_to_expenses = can_apply_to_expenses
if can_apply_to_liabilities is not None:
self.can_apply_to_liabilities = can_apply_to_liabilities
if can_apply_to_revenue is not None:
self.can_apply_to_revenue = can_apply_to_revenue
if display_tax_rate is not None:
self.display_tax_rate = display_tax_rate
if effective_rate is not None:
self.effective_rate = effective_rate
@property
def name(self):
"""Gets the name of this TaxRate. # noqa: E501
Name of tax rate # noqa: E501
:return: The name of this TaxRate. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this TaxRate.
Name of tax rate # noqa: E501
:param name: The name of this TaxRate. # noqa: E501
:type: str
"""
self._name = name
@property
def tax_type(self):
"""Gets the tax_type of this TaxRate. # noqa: E501
The tax type # noqa: E501
:return: The tax_type of this TaxRate. # noqa: E501
:rtype: str
"""
return self._tax_type
@tax_type.setter
def tax_type(self, tax_type):
"""Sets the tax_type of this TaxRate.
The tax type # noqa: E501
:param tax_type: The tax_type of this TaxRate. # noqa: E501
:type: str
"""
self._tax_type = tax_type
@property
def tax_components(self):
"""Gets the tax_components of this TaxRate. # noqa: E501
See TaxComponents # noqa: E501
:return: The tax_components of this TaxRate. # noqa: E501
:rtype: list[TaxComponent]
"""
return self._tax_components
@tax_components.setter
def tax_components(self, tax_components):
"""Sets the tax_components of this TaxRate.
See TaxComponents # noqa: E501
:param tax_components: The tax_components of this TaxRate. # noqa: E501
:type: list[TaxComponent]
"""
self._tax_components = tax_components
@property
def status(self):
"""Gets the status of this TaxRate. # noqa: E501
See Status Codes # noqa: E501
:return: The status of this TaxRate. # noqa: E501
:rtype: str
"""
return self._status
@status.setter
def status(self, status):
"""Sets the status of this TaxRate.
See Status Codes # noqa: E501
:param status: The status of this TaxRate. # noqa: E501
:type: str
"""
allowed_values = [
"ACTIVE",
"DELETED",
"ARCHIVED",
"PENDING",
"None",
] # noqa: E501
if status not in allowed_values:
raise ValueError(
"Invalid value for `status` ({0}), must be one of {1}".format( # noqa: E501
status, allowed_values
)
)
self._status = status
@property
def report_tax_type(self):
"""Gets the report_tax_type of this TaxRate. # noqa: E501
See ReportTaxTypes # noqa: E501
:return: The report_tax_type of this TaxRate. # noqa: E501
:rtype: str
"""
return self._report_tax_type
@report_tax_type.setter
def report_tax_type(self, report_tax_type):
"""Sets the report_tax_type of this TaxRate.
See ReportTaxTypes # noqa: E501
:param report_tax_type: The report_tax_type of this TaxRate. # noqa: E501
:type: str
"""
allowed_values = [
"AVALARA",
"BASEXCLUDED",
"CAPITALSALESOUTPUT",
"CAPITALEXPENSESINPUT",
"ECOUTPUT",
"ECOUTPUTSERVICES",
"ECINPUT",
"ECACQUISITIONS",
"EXEMPTEXPENSES",
"EXEMPTINPUT",
"EXEMPTOUTPUT",
"GSTONIMPORTS",
"INPUT",
"INPUTTAXED",
"MOSSSALES",
"NONE",
"NONEOUTPUT",
"OUTPUT",
"PURCHASESINPUT",
"SALESOUTPUT",
"EXEMPTCAPITAL",
"EXEMPTEXPORT",
"CAPITALEXINPUT",
"GSTONCAPIMPORTS",
"GSTONCAPITALIMPORTS",
"REVERSECHARGES",
"PAYMENTS",
"INVOICE",
"CASH",
"ACCRUAL",
"FLATRATECASH",
"FLATRATEACCRUAL",
"ACCRUALS",
"TXCA",
"SRCAS",
"DSOUTPUT",
"BLINPUT2",
"EPINPUT",
"IMINPUT2",
"MEINPUT",
"IGDSINPUT2",
"ESN33OUTPUT",
"OPINPUT",
"OSOUTPUT",
"TXN33INPUT",
"TXESSINPUT",
"TXREINPUT",
"TXPETINPUT",
"NRINPUT",
"ES33OUTPUT",
"ZERORATEDINPUT",
"ZERORATEDOUTPUT",
"DRCHARGESUPPLY",
"DRCHARGE",
"CAPINPUT",
"CAPIMPORTS",
"IMINPUT",
"INPUT2",
"CIUINPUT",
"SRINPUT",
"OUTPUT2",
"SROUTPUT",
"CAPOUTPUT",
"SROUTPUT2",
"CIUOUTPUT",
"ZROUTPUT",
"ZREXPORT",
"ACC28PLUS",
"ACCUPTO28",
"OTHEROUTPUT",
"SHOUTPUT",
"ZRINPUT",
"BADDEBT",
"OTHERINPUT",
"None",
] # noqa: E501
if report_tax_type not in allowed_values:
raise ValueError(
"Invalid value for `report_tax_type` ({0}), must be one of {1}".format( # noqa: E501
report_tax_type, allowed_values
)
)
self._report_tax_type = report_tax_type
@property
def can_apply_to_assets(self):
"""Gets the can_apply_to_assets of this TaxRate. # noqa: E501
Boolean to describe if tax rate can be used for asset accounts i.e. true,false # noqa: E501
:return: The can_apply_to_assets of this TaxRate. # noqa: E501
:rtype: bool
"""
return self._can_apply_to_assets
@can_apply_to_assets.setter
def can_apply_to_assets(self, can_apply_to_assets):
"""Sets the can_apply_to_assets of this TaxRate.
Boolean to describe if tax rate can be used for asset accounts i.e. true,false # noqa: E501
:param can_apply_to_assets: The can_apply_to_assets of this TaxRate. # noqa: E501
:type: bool
"""
self._can_apply_to_assets = can_apply_to_assets
@property
def can_apply_to_equity(self):
"""Gets the can_apply_to_equity of this TaxRate. # noqa: E501
Boolean to describe if tax rate can be used for equity accounts i.e true,false # noqa: E501
:return: The can_apply_to_equity of this TaxRate. # noqa: E501
:rtype: bool
"""
return self._can_apply_to_equity
@can_apply_to_equity.setter
def can_apply_to_equity(self, can_apply_to_equity):
"""Sets the can_apply_to_equity of this TaxRate.
Boolean to describe if tax rate can be used for equity accounts i.e true,false # noqa: E501
:param can_apply_to_equity: The can_apply_to_equity of this TaxRate. # noqa: E501
:type: bool
"""
self._can_apply_to_equity = can_apply_to_equity
@property
def can_apply_to_expenses(self):
"""Gets the can_apply_to_expenses of this TaxRate. # noqa: E501
Boolean to describe if tax rate can be used for expense accounts i.e. true,false # noqa: E501
:return: The can_apply_to_expenses of this TaxRate. # noqa: E501
:rtype: bool
"""
return self._can_apply_to_expenses
@can_apply_to_expenses.setter
def can_apply_to_expenses(self, can_apply_to_expenses):
"""Sets the can_apply_to_expenses of this TaxRate.
Boolean to describe if tax rate can be used for expense accounts i.e. true,false # noqa: E501
:param can_apply_to_expenses: The can_apply_to_expenses of this TaxRate. # noqa: E501
:type: bool
"""
self._can_apply_to_expenses = can_apply_to_expenses
@property
def can_apply_to_liabilities(self):
"""Gets the can_apply_to_liabilities of this TaxRate. # noqa: E501
Boolean to describe if tax rate can be used for liability accounts i.e. true,false # noqa: E501
:return: The can_apply_to_liabilities of this TaxRate. # noqa: E501
:rtype: bool
"""
return self._can_apply_to_liabilities
@can_apply_to_liabilities.setter
def can_apply_to_liabilities(self, can_apply_to_liabilities):
"""Sets the can_apply_to_liabilities of this TaxRate.
Boolean to describe if tax rate can be used for liability accounts i.e. true,false # noqa: E501
:param can_apply_to_liabilities: The can_apply_to_liabilities of this TaxRate. # noqa: E501
:type: bool
"""
self._can_apply_to_liabilities = can_apply_to_liabilities
@property
def can_apply_to_revenue(self):
"""Gets the can_apply_to_revenue of this TaxRate. # noqa: E501
Boolean to describe if tax rate can be used for revenue accounts i.e. true,false # noqa: E501
:return: The can_apply_to_revenue of this TaxRate. # noqa: E501
:rtype: bool
"""
return self._can_apply_to_revenue
@can_apply_to_revenue.setter
def can_apply_to_revenue(self, can_apply_to_revenue):
"""Sets the can_apply_to_revenue of this TaxRate.
Boolean to describe if tax rate can be used for revenue accounts i.e. true,false # noqa: E501
:param can_apply_to_revenue: The can_apply_to_revenue of this TaxRate. # noqa: E501
:type: bool
"""
self._can_apply_to_revenue = can_apply_to_revenue
@property
def display_tax_rate(self):
"""Gets the display_tax_rate of this TaxRate. # noqa: E501
Tax Rate (decimal to 4dp) e.g 12.5000 # noqa: E501
:return: The display_tax_rate of this TaxRate. # noqa: E501
:rtype: float
"""
return self._display_tax_rate
@display_tax_rate.setter
def display_tax_rate(self, display_tax_rate):
"""Sets the display_tax_rate of this TaxRate.
Tax Rate (decimal to 4dp) e.g 12.5000 # noqa: E501
:param display_tax_rate: The display_tax_rate of this TaxRate. # noqa: E501
:type: float
"""
self._display_tax_rate = display_tax_rate
@property
def effective_rate(self):
"""Gets the effective_rate of this TaxRate. # noqa: E501
Effective Tax Rate (decimal to 4dp) e.g 12.5000 # noqa: E501
:return: The effective_rate of this TaxRate. # noqa: E501
:rtype: float
"""
return self._effective_rate
@effective_rate.setter
def effective_rate(self, effective_rate):
"""Sets the effective_rate of this TaxRate.
Effective Tax Rate (decimal to 4dp) e.g 12.5000 # noqa: E501
:param effective_rate: The effective_rate of this TaxRate. # noqa: E501
:type: float
"""
self._effective_rate = effective_rate
|
[
"sid.maestre@gmail.com"
] |
sid.maestre@gmail.com
|
7c1091c1be6fe52aee062e9522b51fcd54fa5f0e
|
526019f16e76a4afd84d25ea90fc39ffda285e34
|
/hw/HW03/code/HW03_utils.py
|
acdc2809d4a0fad12464afff2a0c335605d22bed
|
[] |
no_license
|
mitchnegus/CS289_mnegus
|
ac6a3cdd633bf5fda0fefbdf75c014eeaddb0f48
|
6111a029818762ca3fac6e6097d2a2c6a610aaf3
|
refs/heads/master
| 2021-09-05T00:31:52.637848
| 2018-01-23T04:55:38
| 2018-01-23T04:55:38
| 80,948,313
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 418
|
py
|
#HW03_utils.py
#-----------------------------------------
# Python module for CS289A HW03
#-----------------------------------------
#-----------------------------------------
import math
import numpy as np
from scipy import io as spio
def loaddata(shortpath,_DATA_DIR,dictkey):
#Load data
data_dict = spio.loadmat(_DATA_DIR+"/"+shortpath)
data = np.array(data_dict[dictkey])
return data
|
[
"mitchell.negus.57@gmail.com"
] |
mitchell.negus.57@gmail.com
|
816e4e22dde6bfeb7a7f9ef1ae675c0be4bd67a6
|
ed291071decb3514b7f9f321e68fd57fb3c11ebc
|
/Python/168_excel-sheet-column-title.py
|
573dae55e7016bdb1c10939d8dbf0ccfb6ce97db
|
[] |
no_license
|
antonylu/leetcode2
|
d7b1681cc9477bb01619be26461634edbb85a4e5
|
a57282895fb213b68e5d81db301903721a92d80f
|
refs/heads/master
| 2021-11-25T01:30:56.358849
| 2021-11-19T08:32:12
| 2021-11-19T08:32:12
| 130,139,831
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,549
|
py
|
"""
https://leetcode.com/problems/excel-sheet-column-title/description/
Given a positive integer, return its corresponding column title as appear in an Excel sheet.
For example:
1 -> A
2 -> B
3 -> C
...
26 -> Z
27 -> AA
28 -> AB
...
Example 1:
Input: 1
Output: "A"
Example 2:
Input: 28
Output: "AB"
Example 3:
Input: 701
Output: "ZY"
"""
class Solution(object):
def convertToTitle(self, n):
"""
:type n: int
:rtype: str
"""
# Approach #1, 26 進位, 3位
#
"""
A 1 AA 26+ 1 BA 2×26+ 1 ... ZA 26×26+ 1 AAA 1×26²+1×26+ 1
B 2 AB 26+ 2 BB 2×26+ 2 ... ZB 26×26+ 2 AAB 1×26²+1×26+ 2
. . .. ..... .. ....... ... .. ........ ... .............
. . .. ..... .. ....... ... .. ........ ... .............
. . .. ..... .. ....... ... .. ........ ... .............
Z 26 AZ 26+26 BZ 2×26+26 ... ZZ 26×26+26 AAZ 1×26²+1×26+26
"""
# 55%,32ms
number2char = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
r = ""
while n > 26:
(n,b) = divmod(n-1,26)
r = number2char[b] + r
r = number2char[n-1] +r
return r
if __name__ == "__main__":
s=Solution()
tc = [1,28,701,702]
#tc = [1]
for t in tc:
print(s.convertToTitle(t))
#s.convertToTitle(t)
|
[
"antony_lu@compal.com"
] |
antony_lu@compal.com
|
9b4fcb23bad9aefa541e7185a3d4e8f29c97b4b5
|
d0e2ef0fb0d9a0c3c9d5656273e8eae6787faee7
|
/app.py
|
0dc0cdd8295a03c72a0c122a7be8a0403b5210f0
|
[] |
no_license
|
HemendraTripathi/demo2
|
fe1ba499c914f5077f05f92b1b2aff2dd4e330a4
|
f5ec3be456ecad37017052795c2fe72b78eab4cf
|
refs/heads/master
| 2022-12-21T13:18:04.327276
| 2020-09-23T04:59:31
| 2020-09-23T04:59:31
| 297,856,545
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,838
|
py
|
#Shree Ganesh#
from bottle import (run, post, response, request as bottle_request)
import os,sqlite3,requests,json,sys,datetime
from xlsxwriter.workbook import Workbook
from ver import verify_input
from time import sleep
from get_user import get_user
from get_message import get_message
from send_message import send_message
from save_to_excel import save_to_excel
flag, TOKEN, API_TOKEN = verify_input(sys.argv)
if not flag:
sys.exit(3)
current_month_text = datetime.datetime.now().strftime('%B')
current_year_text = datetime.datetime.now().strftime('%Y')
sheet_name = current_month_text +"_"+current_year_text+".xlsx"
url = f'https://api.telegram.org/bot{TOKEN}/'
webhook_url = input("Enter Webhook URL : ")
try:
web = requests.get(url+'deletewebhook')
print("\nPrevious WebHook Deleted.")
except requests.exceptions.ConnectionError:
print("Please Check Your Internet Connection! WEBHOOK")
exit(6)
try:
web = requests.get(url+'setWebHook?url='+webhook_url)
print("\nWebhook Setted....\n")
except requests.exceptions.ConnectionError:
print("Please Check Your Internet Connection! WEBHOOK2")
exit(5)
cwd = os.getcwd()
path = os.path.join(cwd,'delivery_data')
print(path)
try:
os.mkdir(path)
print(path)
except FileExistsError:
pass
sheet_name = os.path.join(path,sheet_name)
print(sheet_name)
@post('/')
def main():
data = bottle_request.json
get_user(TOKEN)
get_message(TOKEN,API_TOKEN,data)
send_message(TOKEN)
save_to_excel(sheet_name)
if sys.platform == 'win32':
os.system('cls')
elif sys.platform == 'linux':
os.system('clear')
print("""
Refreshing ............
) (
( ) )
) ( (
_______)_
.-'---------|
( C|/\/\/\/\/|
'-./\/\/\/\/|
'_________'
'-------'
""")
return response
if __name__ == '__main__':
run(host = 'localhost', port = 8080, debug = True)
|
[
"noreply@github.com"
] |
noreply@github.com
|
a06d0667553909c1cab686643fbcb795d207c11e
|
648ed90628cd26d8a3d3941793100a8d5c309be8
|
/my_blog/settings.py
|
09929503a0adf0075496e708e183ea88b73a3ca4
|
[
"MIT"
] |
permissive
|
Chancj/my_blog
|
03200f62cf826558e2a074cb0f0eb578bbe2644c
|
988831904d2bd800afa332a1b4113143e1aaeccd
|
refs/heads/master
| 2022-12-29T11:23:24.973262
| 2020-04-29T16:19:24
| 2020-04-29T16:19:24
| 251,649,792
| 0
| 0
| null | 2022-12-08T03:56:34
| 2020-03-31T15:39:55
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 7,717
|
py
|
"""
Django settings for my_blog project.
Generated by 'django-admin startproject' using Django 2.1.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '!z027!4o-s@jutnc=+iyvl$=u*9gooigul4(t+k373wv&1r169'
# SECURITY WARNING: don't run with debug turned on in production!
# 部署到线上时为 False; 读者在本地调试时请修改为 True
DEBUG = True
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites',
'allauth',
'allauth.account',
'allauth.socialaccount',
# 可添加需要的第三方登录
'allauth.socialaccount.providers.github',
'allauth.socialaccount.providers.weibo',
'password_reset',
'taggit',
'ckeditor',
'mptt',
'notifications',
# 自定义的app
'article',
'userprofile',
'comment',
'notice',
'article.templatetags',
# ml机器学习的app
'titanic',
'iris',
'cat_dog',
'face',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'my_blog.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
# 定义模板位置
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'my_blog.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'djblog',
'USER': 'root',
'PASSWORD': '123456',
'HOST': 'localhost',
'PORT': 3306,
}
}
# django认证系统使用的模型类
# AUTH_USER_MODEL='article.User'
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'zh-hans'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
# 静态文件地址
STATIC_URL = '/static/'
STATICFILES_DIRS = [os.path.join(BASE_DIR, 'static')]
# 静态文件收集目录
STATIC_ROOT = os.path.join(BASE_DIR, 'collected_static')
# 媒体文件地址
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media/')
# SMTP服务器,改为你的邮箱的smtp!
EMAIL_HOST = 'smtp.qq.com'
# 改为你自己的邮箱名!
EMAIL_HOST_USER = 'your_email_account@xxx.com'
# 你的邮箱密码
EMAIL_HOST_PASSWORD = 'your_password'
# 发送邮件的端口
EMAIL_PORT = 25 # 不通,465或587端口
# 是否使用 TLS
EMAIL_USE_TLS = True
# 默认的发件人
DEFAULT_FROM_EMAIL = 'xxx的博客 <your_email_account@xxx.com>'
CKEDITOR_CONFIGS = {
# django-ckeditor默认使用default配置
'default': {
# 编辑器宽度自适应
'width':'auto',
'height':'250px',
# tab键转换空格数
'tabSpaces': 4,
# 工具栏风格
'toolbar': 'Custom',
# 工具栏按钮
'toolbar_Custom': [
# 表情 代码块
['Smiley', 'CodeSnippet'],
# 字体风格
['Bold', 'Italic', 'Underline', 'RemoveFormat', 'Blockquote'],
# 字体颜色
['TextColor', 'BGColor'],
# 链接
['Link', 'Unlink'],
# 列表
['NumberedList', 'BulletedList'],
# 最大化
['Maximize']
],
# 插件
'extraPlugins': ','.join(['codesnippet', 'prism', 'widget', 'lineutils']),
}
}
AUTHENTICATION_BACKENDS = (
# 此项使 Django 后台可独立于 allauth 登录
'django.contrib.auth.backends.ModelBackend',
# 配置 allauth 独有的认证方法,如 email 登录
'allauth.account.auth_backends.AuthenticationBackend',
)
# 设置站点
SITE_ID = 1
# 登录成功后重定向地址
# 重定向 url
LOGIN_REDIRECT_URL = '/'
# LOGGING = {
# 'version': 1,
# 'handlers': {
# 'file': {
# 'level': 'INFO',
# 'class': 'logging.FileHandler',
# 'filename': os.path.join(BASE_DIR, 'logs/debug.log'),
# },
# },
# 'loggers': {
# 'django': {
# 'handlers': ['file'],
# 'level': 'INFO',
# },
# },
# }
# LOGGING = {
# 'version': 1,
# 'disable_existing_loggers': False,
# 'formatters': {
# 'verbose': {
# 'format': '{levelname} {asctime} {module} {process:d} {thread:d} {message}',
# 'style': '{',
# },
# 'simple': {
# 'format': '{levelname} {message}',
# 'style': '{',
# },
# },
# 'filters': {
# 'require_debug_true': {
# '()': 'django.utils.log.RequireDebugTrue',
# },
# },
# 'handlers': {
# 'console': {
# 'level': 'INFO',
# 'filters': ['require_debug_true'],
# 'class': 'logging.StreamHandler',
# 'formatter': 'simple'
# },
# 'mail_admins': {
# 'level': 'ERROR',
# 'class': 'django.utils.log.AdminEmailHandler',
# 'formatter': 'verbose',
# },
# 'file': {
# 'level': 'WARNING',
# # 'class': 'logging.FileHandler',
# 'class': 'logging.handlers.TimedRotatingFileHandler',
# 'when': 'midnight',
# 'backupCount': 30,
# 'filename': os.path.join(BASE_DIR, 'logs/debug.log'),
# 'formatter': 'verbose',
# },
# },
# 'loggers': {
# 'django': {
# 'handlers': ['console'],
# 'propagate': True,
# },
# 'django.request': {
# 'handlers': ['file', 'mail_admins'],
# 'level': 'WARNING',
# 'propagate': False,
# },
# }
# }
|
[
"1049669825@qq.com"
] |
1049669825@qq.com
|
f6d477f59ee4c2505b13d5f5b674da4640d5ea4f
|
eb9f05bee11d64f299b3d7c47a2812958a718e6e
|
/3_neural-network/3_5/2.py
|
368db922d7f6ea1cccc22e5e6c07b102926219e9
|
[] |
no_license
|
araki-ka/DeepLearning
|
c85730f5094bd8c5e9184aeb39074fc4dbbdbfc3
|
8b359e89f5f266f80e82b571cdd9f6491cbaf331
|
refs/heads/main
| 2022-08-21T17:27:42.861542
| 2017-10-11T05:09:59
| 2017-10-11T05:09:59
| 96,615,686
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 264
|
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# 3.5.2 ソフトマックス関数の実装上の注意
import numpy as np
a = np.array([1010, 1000, 990])
print(np.exp(a) / np.sum(np.exp(a)))
c = np.max(a)
print(a - c)
print(np.exp(a - c) / np.sum(np.exp(a - c)))
|
[
"araki-ka@legendapl.com"
] |
araki-ka@legendapl.com
|
dc188b288e92680cb9d0fa42245dc94996074db1
|
d3178cc89ce03eb68e6b5dd7b4d99cb36d68a6a4
|
/nets/vgg/__init__.py
|
24afea7e7bb181f07e703fcbdb3fb4cbda1ef8c7
|
[
"Unlicense"
] |
permissive
|
jumpsnack/SOA_DORN_TF
|
829debc37584d6140396fcfa58ced1612fb3c0d7
|
33814467e9135036abf28f2da19c5984c8744089
|
refs/heads/master
| 2021-11-27T14:10:44.364498
| 2019-01-29T07:15:15
| 2019-01-29T07:15:15
| 161,754,223
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 19
|
py
|
from ._vgg import *
|
[
"kir0302@gmail.com"
] |
kir0302@gmail.com
|
9c1ce6e5363f9d645e9a14dc86c5acf6f135c7dc
|
1dc0f0d007c060c74de33bddd4cdcd26ecdf05e0
|
/stancode_Projects/boggle_game_solver/boggle.py
|
505e63ff243f47684b3699ff04cd8ee5cc7b9451
|
[
"MIT"
] |
permissive
|
hoholarry/sc-projects
|
178e0f93f6dc6b6cda4be17403fa74c84d0d8e36
|
4bd45e52b8c13a35d38d30e6583d242480dabff4
|
refs/heads/main
| 2023-03-06T06:58:29.741353
| 2021-02-22T15:16:03
| 2021-02-22T15:16:03
| 341,227,759
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,724
|
py
|
import copy
class Graph(object):
def __init__(self, board):
self.board = board
self.letters = {}
self.adj_list = {}
directions = [(-1, -1), (-1, 0), (-1, 1), (0, -1), (0, 1), (1, -1), (1, 0), (1, 1)]
for i in range(4):
for j in range(4):
if board[i][j] not in self.letters:
self.letters[board[i][j]] = []
self.letters[board[i][j]].append((i, j))
self.adj_list[(board[i][j], i, j)] = []
for d1, d2 in directions:
k = i+d1
l = j+d2
if k >= 0 and k < 4 and l >= 0 and l < 4:
self.adj_list[(board[i][j], i, j)].append((board[k][l], k, l))
print(self.letters)
print(self.adj_list)
def dfs(self, word):
if len(word) < 4:
return False
stack = []
if word[0] not in self.letters:
return False
for i, j in self.letters[word[0]]:
stack.append((word[0], word, (word[0], i, j), set([(i, j)])))
# print('this is stack', stack)
while len(stack) > 0:
sub, word, let, positions = stack.pop()
if sub == word:
return True
next_letter = word[len(sub)]
for l, i, j in self.adj_list[let]:
if l == next_letter and (i, j) not in positions:
p2 = copy.deepcopy(positions)
p2.add((i, j))
stack.append((sub+next_letter, word, (l, i, j), p2))
return False
d = []
def load_dictionary():
with open('dictionary.txt') as f:
for line in f:
if len(line) >= 4 and not line[0].isupper():
d.append(line.upper()[:-1])
def find_words(board):
g = Graph(board)
words = []
for word in d:
if g.dfs(word):
words.append((word))
return words
def boggle_input():
board = []
for i in range(4):
row = input().upper()
if len(row.split()) == 4:
for a in row.split():
if len(a) != 1:
print('illegal input')
return False
board.append(row.split())
else:
print('illegal input')
return False
print('1 row of letters:', board[0])
print('2 row of letters:', board[1])
print('3 row of letters:', board[2])
print('4 row of letters:', board[3])
print(board)
words = find_words(board)
for word in words:
print('Found:', word)
print('There are', len(words), 'anagrams in total.')
if __name__ == '__main__':
load_dictionary()
boggle_input()
|
[
"noreply@github.com"
] |
noreply@github.com
|
fd98027336d2b8b9faa6def3fed6e1c34db9988d
|
5e3d34e2025831896851d9b11dfac81869252de0
|
/p9/pyth_triplet.py
|
0df4b399e858734011ec8428e144b53927286c6c
|
[] |
no_license
|
sanjkm/ProjectEuler
|
b522fc51907821aeeffbb64c016502aa89f6db22
|
6f1300232846e4303e6c4516e11c506e64f10fd9
|
refs/heads/master
| 2020-04-12T06:30:10.928298
| 2017-01-12T22:43:45
| 2017-01-12T22:43:45
| 65,332,463
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 651
|
py
|
# pyth_triplet.py
from operator import mul
def check_combo (a, b, c):
if c**2 == (a**2 + b**2):
return 1
return 0
def gen_possible_ab_combos (c, total_sum):
low_start_point = (total_sum - c) / 2
high_start_point = total_sum - c - low_start_point
a, b = low_start_point, high_start_point
while a > 0 and b < c:
if check_combo (a, b, c) == 1:
return [a,b,c]
a, b = a-1, b+1
return []
total_sum = 1000
for c in range(334, 500):
final_combo = gen_possible_ab_combos (c, total_sum)
if final_combo != []:
print final_combo, reduce (mul, final_combo)
break
|
[
"sanjay.menon@gmail.com"
] |
sanjay.menon@gmail.com
|
204bf46a6fc6a8243ffaadcf8dc901323591f909
|
ad3e2940d52d6a213d4fc20a7a68a984e6a420d5
|
/diffusion_relaxation.py
|
93a3c27514ff8c16c1cec02b23a8eaea73e93e3b
|
[] |
no_license
|
alex21347/Temperature-Diffusion
|
3639aac6ef52b9ffa1fff2b563bd4b9d2470f2dd
|
e93b8c25e3f4ed2cc52bdb00ce3a3645bfed48ff
|
refs/heads/main
| 2023-01-06T00:25:09.294736
| 2020-10-31T16:16:37
| 2020-10-31T16:16:37
| 304,435,224
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,565
|
py
|
#Simulating and Analysing the Method of Relaxation for 2D Dirichlet Problem
import numpy as np
from tqdm import tqdm
from matplotlib import pyplot as plt
from matplotlib import cm
import time
from matplotlib.ticker import LinearLocator, FormatStrFormatter
from mpl_toolkits.mplot3d import Axes3D
#setting up for-loop to find average time to reach accurate solution
dum = 10
tic = time.time()
for i in tqdm(range(dum)):
a = 17 #length of grid
b = 17 #width of grid
its = 200 #number of iterations
p_x = np.zeros((b,a,its))
x_coords = np.linspace(0,a-1,a)
y_coords = np.linspace(0,b-1,b)
graph = np.zeros((a*b,2))
#generating 2D graph
for j in range(0,a):
for i in range(0,b):
graph[a*i+j,0] = x_coords[j]
graph[a*i+j,1] = y_coords[i]
#Building the set of edges via finding closest neighbours of each vertex
neighbours = []
g = graph
for k in range(a*b):
neighbours.append(np.array(np.where(abs(g[k,1]-g[:,1])+abs(g[k,0]-g[:,0])<1.1))[0])
neighbours[k] = np.delete(neighbours[k], np.where(neighbours[k] == k), axis=0)
vals = np.zeros((a*b,1))
scenario = 16 #for scenario 1 set equal to 2, for scenario 2 set equal to 16
vals[:scenario] = 1
#finding interior of graph i.e. where the walker may walk
interior = []
for i in range(a*b):
if len(neighbours[i]) == 4:
interior.append(i)
#approximating p(x,y) over many iterations
for i in range(its):
for k in range(len(vals)):
if k in interior:
vals[k] = np.mean(vals[neighbours[k]])
p_x[int((k-np.mod(k,a))/(a)),np.mod(k,a),i] = vals[k]
if i > 0:
errorest = (np.abs(p_x[:,:,i]-p_x[:,:,i-1]).sum())/15**2
p_x1 = p_x[1:-1,1:-1]
#neatening solution
p_x = p_x[1:-1,1:-1]
toc = time.time()
print(f'Time Elapsed : {(toc-tic)/dum}')
#plotting solution of 2D Dirichlet Problem via Method of Relaxation
fig = plt.figure(figsize = (8,6))
ax = fig.gca(projection='3d')
Y = np.arange(0, b-2, 1)
Y = -1* Y
X = np.arange(0, a-2, 1)
X,Y = np.meshgrid(X, Y)
Z = np.transpose(p_x[:,:,99])
surf = ax.plot_surface(X,Y,Z, cmap=cm.coolwarm,
linewidth=0, antialiased=False)
fig.colorbar(surf, shrink=0.5, aspect=5)
plt.show()
|
[
"noreply@github.com"
] |
noreply@github.com
|
c484b176ad74bbf3c3d2c6945058b3f6fa039104
|
1978a9455159b7c2f3286e0ad602652bc5277ffa
|
/exercises/05_basic_scripts/task_5_2b.py
|
942e752a8c38f07e0e2a188e036ef30e8781ecff
|
[] |
no_license
|
fortredux/py_net_eng
|
338fd7a80debbeda55b5915dbfba4f5577279ef0
|
61cf0b2a355d519c58bc9f2b59d7e5d224922890
|
refs/heads/master
| 2020-12-03T17:32:53.598813
| 2020-04-08T20:55:45
| 2020-04-08T20:55:45
| 231,409,656
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,047
|
py
|
# -*- coding: utf-8 -*-
'''
Задание 5.2b
Преобразовать скрипт из задания 5.2a таким образом,
чтобы сеть/маска не запрашивались у пользователя,
а передавались как аргумент скрипту.
Ограничение: Все задания надо выполнять используя только пройденные темы.
'''
from sys import argv
ip = str(argv[1])
mask = int(argv[2])
host_net_lst = ip.split('.')
host_net_lst[3] = '0'
mask_32 = int('1' * mask)
mask_32 = '{:<032}'.format(mask_32)
template = '''
Network:
{0:<8} {1:<8} {2:<8} {3:<8}
{0:08b} {1:08b} {2:08b} {3:08b}
Mask:
/{4:}
{5:<8} {6:<8} {7:<8} {8:<8}
{9:8} {10:8} {11:8} {12:8}
'''
print(template.format(int(host_net_lst[0]), int(host_net_lst[1]), int(host_net_lst[2]), int(host_net_lst[3]), mask,
int(mask_32[0:8], 2), int(mask_32[8:16], 2), int(mask_32[16:24], 2), int(mask_32[24:32], 2),
mask_32[0:8], mask_32[8:16], mask_32[16:24], mask_32[24:32]))
|
[
"fortunaredux@protonmail.com"
] |
fortunaredux@protonmail.com
|
1b3085c6b0e6c23be8cc76bd570aa2586dae7368
|
7e7b0a0c32998c13ed9ecb5e4f57ea3b8f6f2644
|
/histo.py
|
eda41b3531ed474468be080ad59e8d11fea32b46
|
[
"MIT"
] |
permissive
|
Pacific01/openPDF
|
f37a8485060ec4f46ca69436befa269f53b62ff4
|
a32f9aeab540fb6cfb7b268a905c74f33211d5ca
|
refs/heads/master
| 2020-12-24T07:11:22.490281
| 2016-12-13T10:48:37
| 2016-12-13T10:48:37
| 73,378,180
| 0
| 0
| null | 2016-12-12T23:51:39
| 2016-11-10T12:02:49
|
Python
|
UTF-8
|
Python
| false
| false
| 1,593
|
py
|
# -*- coding: utf-8 -*-
import json
from os import system, remove
with open('questions.json') as data_file:
questions = json.load(data_file)
with open('answers.json') as data_file:
answers = json.load(data_file)
questions = questions['questions']
answers = answers['answers']
for question in questions:
#Generar el tsv con los datos
for answer in answers:
if answer['questionId'] == question['id']:
respuestas = answer['answers']
yrange = max(answer['answers'])
respuestastsv = ''
cont = 0
for res in question['answers']:
respuestastsv += str(respuestas[res['id']-1]) + ' ' + res['text'].encode('utf-8')
cont+=1
file = open('respuestas.tsv', 'w+')
file.write(respuestastsv) # python will convert \n to os.linesep
file.close()
gnuplot = "\
# ______________________________________________________________________\n\
#Setting output\n\
set term png\n\
set output \"./Plots/plot"+str(question['id'])+".png\"\n\
# For the next graph, we want a histogram.\n\
set style data boxes\n\
# set xrange [0:"+str(question['numAnswers'])+"]\n\
# set yrange [0:"+str(yrange)+"]\n\
# set xtics rotate by -45\n\
\n\
# We want a small gap between solid (filled-in) bars.\n\
set boxwidth 0.8 relative\n\
set style fill solid 1.0\n\
\n\
# Plot the histogram (one curve).\n\
plot 'respuestas.tsv' using 1:xtic(2) with boxes title '"+question['text'].strip('\n').encode('utf-8')+"'\n\
"
file = open('tmp.gp', 'w+')
file.write(gnuplot) # python will convert \n to os.linesep
file.close()
#
#
#
#
system('gnuplot tmp.gp')
remove('respuestas.tsv')
remove('tmp.gp')
|
[
"cod.al.pro@gmail.com"
] |
cod.al.pro@gmail.com
|
38acb8c211006d953999bf2dfc3090c9f9313ea5
|
ee27325f6a3e6a2d1f5e004aa60f5974ad864ae9
|
/contrib/python/plotly/py3/plotly/validators/contourcarpet/__init__.py
|
09c50961c6d1e808ad2e54b12da590314f6b6cc2
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
alvinahmadov/catboost
|
f32d2b16be9db7439e429c88feb5676de842fc89
|
a6e0caa4779b31199f535cf43b09879d7c653abe
|
refs/heads/master
| 2023-06-12T19:29:52.028508
| 2023-05-11T18:33:03
| 2023-05-11T18:33:03
| 202,584,937
| 0
| 0
|
Apache-2.0
| 2019-08-15T17:35:23
| 2019-08-15T17:35:23
| null |
UTF-8
|
Python
| false
| false
| 4,621
|
py
|
import sys
from typing import TYPE_CHECKING
if sys.version_info < (3, 7) or TYPE_CHECKING:
from ._zsrc import ZsrcValidator
from ._zmin import ZminValidator
from ._zmid import ZmidValidator
from ._zmax import ZmaxValidator
from ._zauto import ZautoValidator
from ._z import ZValidator
from ._yaxis import YaxisValidator
from ._xaxis import XaxisValidator
from ._visible import VisibleValidator
from ._uirevision import UirevisionValidator
from ._uid import UidValidator
from ._transpose import TransposeValidator
from ._textsrc import TextsrcValidator
from ._text import TextValidator
from ._stream import StreamValidator
from ._showscale import ShowscaleValidator
from ._showlegend import ShowlegendValidator
from ._reversescale import ReversescaleValidator
from ._opacity import OpacityValidator
from ._ncontours import NcontoursValidator
from ._name import NameValidator
from ._metasrc import MetasrcValidator
from ._meta import MetaValidator
from ._line import LineValidator
from ._legendwidth import LegendwidthValidator
from ._legendrank import LegendrankValidator
from ._legendgrouptitle import LegendgrouptitleValidator
from ._legendgroup import LegendgroupValidator
from ._idssrc import IdssrcValidator
from ._ids import IdsValidator
from ._hovertextsrc import HovertextsrcValidator
from ._hovertext import HovertextValidator
from ._fillcolor import FillcolorValidator
from ._db import DbValidator
from ._da import DaValidator
from ._customdatasrc import CustomdatasrcValidator
from ._customdata import CustomdataValidator
from ._contours import ContoursValidator
from ._colorscale import ColorscaleValidator
from ._colorbar import ColorbarValidator
from ._coloraxis import ColoraxisValidator
from ._carpet import CarpetValidator
from ._btype import BtypeValidator
from ._bsrc import BsrcValidator
from ._b0 import B0Validator
from ._b import BValidator
from ._autocontour import AutocontourValidator
from ._autocolorscale import AutocolorscaleValidator
from ._atype import AtypeValidator
from ._asrc import AsrcValidator
from ._a0 import A0Validator
from ._a import AValidator
else:
from _plotly_utils.importers import relative_import
__all__, __getattr__, __dir__ = relative_import(
__name__,
[],
[
"._zsrc.ZsrcValidator",
"._zmin.ZminValidator",
"._zmid.ZmidValidator",
"._zmax.ZmaxValidator",
"._zauto.ZautoValidator",
"._z.ZValidator",
"._yaxis.YaxisValidator",
"._xaxis.XaxisValidator",
"._visible.VisibleValidator",
"._uirevision.UirevisionValidator",
"._uid.UidValidator",
"._transpose.TransposeValidator",
"._textsrc.TextsrcValidator",
"._text.TextValidator",
"._stream.StreamValidator",
"._showscale.ShowscaleValidator",
"._showlegend.ShowlegendValidator",
"._reversescale.ReversescaleValidator",
"._opacity.OpacityValidator",
"._ncontours.NcontoursValidator",
"._name.NameValidator",
"._metasrc.MetasrcValidator",
"._meta.MetaValidator",
"._line.LineValidator",
"._legendwidth.LegendwidthValidator",
"._legendrank.LegendrankValidator",
"._legendgrouptitle.LegendgrouptitleValidator",
"._legendgroup.LegendgroupValidator",
"._idssrc.IdssrcValidator",
"._ids.IdsValidator",
"._hovertextsrc.HovertextsrcValidator",
"._hovertext.HovertextValidator",
"._fillcolor.FillcolorValidator",
"._db.DbValidator",
"._da.DaValidator",
"._customdatasrc.CustomdatasrcValidator",
"._customdata.CustomdataValidator",
"._contours.ContoursValidator",
"._colorscale.ColorscaleValidator",
"._colorbar.ColorbarValidator",
"._coloraxis.ColoraxisValidator",
"._carpet.CarpetValidator",
"._btype.BtypeValidator",
"._bsrc.BsrcValidator",
"._b0.B0Validator",
"._b.BValidator",
"._autocontour.AutocontourValidator",
"._autocolorscale.AutocolorscaleValidator",
"._atype.AtypeValidator",
"._asrc.AsrcValidator",
"._a0.A0Validator",
"._a.AValidator",
],
)
|
[
"akhropov@yandex-team.com"
] |
akhropov@yandex-team.com
|
e965fc7f54e9b3311e96c8e2934c07c82278a4d5
|
34d075680d450ef1ef74dd0c284f917f7b655c33
|
/lyremd/main.py
|
7bba0a4b39b56c992e5252b622642adff3db4ba5
|
[
"MIT"
] |
permissive
|
nao159/PycharmProjects
|
b252e837b44d79f630a964374c6dec48ec795966
|
c3abf404c9281ce89fdcd5d032220a1d5886d8bc
|
refs/heads/main
| 2023-07-27T14:05:26.711901
| 2021-09-09T07:09:39
| 2021-09-09T07:09:39
| 398,906,786
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,916
|
py
|
from os import path
from threading import Thread
from time import sleep
from mido import MidiFile
import argparse
import keyboard as kbd
from utils import find_best_shift, midi_play_filter
octave_interval = 12
c3_pitch = 48
c4_pitch = 60
c5_pitch = 72
b5_pitch = 83
keytable = "z?x?cv?b?n?m" + "a?s?df?g?h?j" + "q?w?er?t?y?u"
notetable = "C?D?EF?G?A?B"
play_state = 'idle'
def help():
print('Press "+" to start/stop playing, press "backspace" to exit.\n')
def note_name(note):
idx = note % octave_interval
if idx < 0:
return '-'
pre = notetable[idx]
if pre == '?':
pre = notetable[idx - 1] + '#'
return pre + str(note // octave_interval - 1)
def print_note(ch, orig, play, key):
print("ch {:<2} orig: {:<3}{:<5} play: {:<3}{:<5} {}\n"
.format(ch, note_name(orig),
'(' + str(orig) + ')',
note_name(play) if play else '-',
'(' + str(play) + ')' if play else '-',
key if key else '-'))
def play(midi, msg_filter, shift, no_semi, out_range):
global play_state
play_state = 'running'
print('Start playing')
for msg in midi:
if play_state != 'running':
break
sleep(msg.time)
if not msg_filter(msg):
continue
note = msg.note + shift
orig_note = note
if note < c3_pitch:
print(
'note {:<3} lower than C3 : {:+}'.format(note, c3_pitch - note))
if out_range:
note = note % octave_interval + c3_pitch
elif note > b5_pitch:
print(
'note {:<3} higher than B5: {:+}'.format(note, b5_pitch - note))
if out_range:
note = note % octave_interval + c5_pitch
if note < c3_pitch or note > b5_pitch:
print_note(msg.channel, orig_note, None, None)
continue
if keytable[note - c3_pitch] == '?' and not no_semi:
note -= 1
key = keytable[note - c3_pitch]
print_note(msg.channel, orig_note, note, key.upper())
kbd.send(key)
print('Stop playing')
help()
play_state = 'idle'
def control(*args):
global play_state
if play_state == 'running':
play_state = 'stopping'
elif play_state == 'idle':
kbd.call_later(
play,
args=args,
delay=1)
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Play midi file with Windsong Lyre in Genshin Impact')
parser.add_argument('midi', nargs="?", type=str, help='path to midi file')
parser.add_argument('-c', '--channels', nargs="*", type=int,
help="enabled midi channels, available values:0, 1, 2,...,N")
parser.add_argument('-s', '--shift', type=int, default=None,
help="shift note pitch, auto calculated by default")
parser.add_argument('-n', '--no-semi', action='store_true',
help="don't shift black key to white key")
parser.add_argument('-r', '--shift-out-of-range', dest="out_range",
action='store_true', help="shift notes which out of range")
args = parser.parse_args()
midi = args.midi
if not midi:
midi = path.join(path.dirname(
path.realpath(__file__)), 'files/canon.mid')
midi = MidiFile(midi)
msg_filter = lambda msg, ch=args.channels: midi_play_filter(msg, ch)
shift = args.shift
if shift == None:
shift = find_best_shift(midi, msg_filter)
print('Auto calculated pitch shift: {:+} semitone(s)\n'.format(shift))
kbd.add_hotkey('+',
lambda: control(midi, msg_filter, shift, args.no_semi, args.out_range),
suppress=True,
trigger_on_release=True)
help()
kbd.wait('backspace', suppress=True)
|
[
"79338207+nao159@users.noreply.github.com"
] |
79338207+nao159@users.noreply.github.com
|
b7852190aea462adb905c42a666ee25b1de79558
|
ecaffc075b41e36457423b810903e74280d7af75
|
/manage.py
|
abc9e3310be544a334973b156ca82af408f28585
|
[] |
no_license
|
MounishKesavan/Myproject2
|
c4d601869bc98ef78556b36c0de1412525c09d0d
|
b5f6dda8e4f46a2805e0bf73ef157255188cbe5f
|
refs/heads/main
| 2023-05-26T17:39:21.955239
| 2021-06-05T13:55:19
| 2021-06-05T13:55:19
| 374,117,990
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 832
|
py
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "resumeparser.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
|
[
"noreply@github.com"
] |
noreply@github.com
|
7b7f8627897a44c4bb4219ff4d136d59fc8e6391
|
7b218983611d96c653f99c3e2c7b2bb74091ac9e
|
/splitNSP.py
|
b5d2cdb1b990a4bdf5b9473e40cda1c86e5dcbb9
|
[
"Unlicense"
] |
permissive
|
doctorpangloss/splitNSP
|
02de827b20b6c949967f38e090e75808674fc577
|
15941e8204b73a4261034e39d9d5939c97394261
|
refs/heads/master
| 2020-04-25T07:12:43.167583
| 2019-02-26T00:24:15
| 2019-02-26T00:24:15
| 172,607,074
| 0
| 0
|
Unlicense
| 2019-02-26T00:23:41
| 2019-02-26T00:22:27
|
Python
|
UTF-8
|
Python
| false
| false
| 5,634
|
py
|
#!/usr/bin/env python3
# Author: AnalogMan
# Modified Date: 2018-10-08
# Purpose: Splits Nintendo Switch files into parts for installation on FAT32
import os
import argparse
import shutil
import os.path
import subprocess
from datetime import datetime
startTime = datetime.now()
splitSize = 0xFFFF0000 # 4,294,901,760 bytes
chunkSize = 0x8000 # 32,768 bytes
from os.path import splitext
def splitext_(path):
if len(path.split('.')) > 2:
return path.split('.')[0],'.'.join(path.split('.')[-2:])
return splitext(path)
def splitQuick(filepath):
fileSize = os.path.getsize(filepath)
info = shutil.disk_usage(os.path.dirname(os.path.abspath(filepath)))
if info.free < splitSize:
print('Not enough temporary space. Needs 4GiB of free space\n')
return
print('Calculating number of splits...\n')
splitNum = int(fileSize/splitSize)
if splitNum == 0:
print('This file is under 4GiB and does not need to be split.\n')
return
print('Splitting file into {0} parts...\n'.format(splitNum + 1))
# Create directory, delete if already exists
file_name,extension = splitext_(filepath)
dir = filepath[:-4] + '_split' + extension
if os.path.exists(dir):
shutil.rmtree(dir)
os.makedirs(dir)
if os.path.exists(dir):
subprocess.call(['attrib', '+a', dir])
# Move input file to directory and rename it to first part
filename = os.path.basename(filepath)
shutil.move(filepath, os.path.join(dir, '00'))
filepath = os.path.join(dir, '00')
# Calculate size of final part to copy first
finalSplitSize = fileSize - (splitSize * splitNum)
# Copy final part and trim from main file
with open(filepath, 'r+b') as nspFile:
nspFile.seek(finalSplitSize * -1, os.SEEK_END)
outFile = os.path.join(dir, '{:02}'.format(splitNum))
partSize = 0
print('Starting part {:02}'.format(splitNum))
with open(outFile, 'wb') as splitFile:
while partSize < finalSplitSize:
splitFile.write(nspFile.read(chunkSize))
partSize += chunkSize
nspFile.seek(finalSplitSize * -1, os.SEEK_END)
nspFile.truncate()
print('Part {:02} complete'.format(splitNum))
# Loop through additional parts and trim
with open(filepath, 'r+b') as nspFile:
for i in range(splitNum - 1):
nspFile.seek(splitSize * -1, os.SEEK_END)
outFile = os.path.join(dir, '{:02}'.format(splitNum - (i + 1)))
partSize = 0
print('Starting part {:02}'.format(splitNum - (i + 1)))
with open(outFile, 'wb') as splitFile:
while partSize < splitSize:
splitFile.write(nspFile.read(chunkSize))
partSize += chunkSize
nspFile.seek(splitSize * -1, os.SEEK_END)
nspFile.truncate()
print('Part {:02} complete'.format(splitNum - (i + 1)))
# Print assurance statement for user
print('Starting part 00\nPart 00 complete')
print('\nFile successfully split!\n')
def splitCopy(filepath):
fileSize = os.path.getsize(filepath)
info = shutil.disk_usage(os.path.dirname(os.path.abspath(filepath)))
if info.free < fileSize*2:
print('Not enough free space to run. Will require twice the space as the file\n')
return
print('Calculating number of splits...\n')
splitNum = int(fileSize/splitSize)
if splitNum == 0:
print('This file is under 4GiB and does not need to be split.\n')
return
print('Splitting file into {0} parts...\n'.format(splitNum + 1))
# Create directory, delete if already exists
file_name,extension = splitext_(filepath)
dir = filepath[:-4] + '_split' + extension
if os.path.exists(dir):
shutil.rmtree(dir)
os.makedirs(dir)
if os.path.exists(dir):
subprocess.call(['attrib', '+a', dir])
remainingSize = fileSize
# Open source file and begin writing to output files stoping at splitSize
with open(filepath, 'rb') as nspFile:
for i in range(splitNum + 1):
partSize = 0
print('Starting part {:02}'.format(i))
outFile = os.path.join(dir, '{:02}'.format(i))
with open(outFile, 'wb') as splitFile:
if remainingSize > splitSize:
while partSize < splitSize:
splitFile.write(nspFile.read(chunkSize))
partSize += chunkSize
remainingSize -= splitSize
else:
while partSize < remainingSize:
splitFile.write(nspFile.read(chunkSize))
partSize += chunkSize
print('Part {:02} complete'.format(i))
print('\nFile successfully split!\n')
def main():
print('\n========== File Splitter ==========\n')
# Arg parser for program options
parser = argparse.ArgumentParser(description='Split files into FAT32 compatible sizes')
parser.add_argument('filepath', help='Path to file')
parser.add_argument('-q', '--quick', action='store_true', help='Splits file in-place without creating a copy. Only requires 4GiB free space to run')
# Check passed arguments
args = parser.parse_args()
filepath = args.filepath
# Check if required files exist
if os.path.isfile(filepath) == False:
print('File cannot be found\n')
return 1
# Split file
if args.quick:
splitQuick(filepath)
else:
splitCopy(filepath)
if __name__ == "__main__":
main()
|
[
"noreply@github.com"
] |
noreply@github.com
|
079662848033b228ee09c8bb812f1c80e52e4cb0
|
1f68b6f9f55afaa7cb32df262f4fe0864472da05
|
/leetcode(多线程,DP,贪心,SQL)/二刷DP与贪心LeetCode/回溯/51. N皇后/solution.py
|
761c902fdb433e6e6f0765ec8b75578496b26cb9
|
[] |
no_license
|
faker-hong/testOne
|
7c4496362cb5495c25c640076102fe0704f8552f
|
768edc4a5526c8972fec66c6a71a38c0b24a1451
|
refs/heads/master
| 2022-12-04T14:47:53.614685
| 2020-10-30T03:17:50
| 2020-10-30T03:17:50
| 196,514,862
| 1
| 0
| null | 2022-11-22T02:43:32
| 2019-07-12T05:35:09
|
Python
|
UTF-8
|
Python
| false
| false
| 1,100
|
py
|
class Solution(object):
def solveNQueens(self, n):
"""
:type n: int
:rtype: List[List[str]]
"""
# 判断放置的皇后位置与之前的是否冲突
def is_valid(row, col, track):
# 因为每一次的row不同,所以不用判断是否在同一行
if col in track: # 是否在同一列
return False
# 判断是否在两条对角线上
for k in range(row):
if row + col == k + track[k] or row - col == k - track[k]:
return False
return True
def backtrack(row, track):
if row == n:
res.append(track)
return
for col in range(n):
if is_valid(row, col, track): # 位置合法,进入下一行
backtrack(row + 1, track + [col])
res = []
backtrack(0, [])
return [['.'*i + 'Q' + '.'*(n-i-1) for i in l] for l in res]
if __name__ == '__main__':
s = Solution()
res = s.solveNQueens(4)
print(res)
|
[
"42666723+hongcheng97@users.noreply.github.com"
] |
42666723+hongcheng97@users.noreply.github.com
|
fe91480c51ec9d9e11d8cbf4c07c3dbad667f8a4
|
f2f21c643d1f5459253989e7cdba85c064cca8ce
|
/adding_bootstarp/adding_bootstarp/wsgi.py
|
b02fcd063eb36aa3dc1d03dc3104e13e690ebccf
|
[] |
no_license
|
NiteshTyagi/django_tutorial
|
342decea7532f1efb200b9f45e4123c581aad43f
|
3353f0d2907a00f43e1faee2b97abd9af66ca08f
|
refs/heads/master
| 2022-03-05T19:46:50.642154
| 2022-03-01T04:53:14
| 2022-03-01T04:53:14
| 205,629,609
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 409
|
py
|
"""
WSGI config for adding_bootstarp project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'adding_bootstarp.settings')
application = get_wsgi_application()
|
[
"nitesh.tyagi.cs.2015@miet.ac.in"
] |
nitesh.tyagi.cs.2015@miet.ac.in
|
db10d84562d00aeab527245c8df21ab7c305aea4
|
8bd977f6f59799bfc8aad9e1605ba1afe637e275
|
/myComputerVisonLib.py
|
f4a3a4d702252bbf1d35cd7eba47ed9480accf69
|
[] |
no_license
|
wuethral/Automated_masking
|
0453634cf64f83e977a23163e06dfa2c1cf27eb9
|
f2852ca2848b2f60660db674d718f5c6b3ba391d
|
refs/heads/master
| 2023-08-15T00:09:57.479507
| 2021-10-01T11:56:45
| 2021-10-01T11:56:45
| 412,448,340
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,905
|
py
|
import cv2 as cv
import numpy as np
from PIL import Image
from numpy import unique
from numpy import where
from sklearn.datasets import make_classification
from sklearn.cluster import DBSCAN
from matplotlib import pyplot
def canny_edge_det(img_nr, img):
# Convert to graycsale
img_gray = cv.cvtColor(img, cv.COLOR_BGR2GRAY)
# Blur the image for better edge detection
img_blur = cv.GaussianBlur(img_gray, (3, 3), 0)
# Sobel Edge Detection
# sobelx = cv.Sobel(src=img_blur, ddepth=cv.CV_64F, dx=1, dy=0, ksize=5) # Sobel Edge Detection on the X axis
# sobely = cv.Sobel(src=img_blur, ddepth=cv.CV_64F, dx=0, dy=1, ksize=5) # Sobel Edge Detection on the Y axis
# sobelxy = cv.Sobel(src=img_blur, ddepth=cv.CV_64F, dx=1, dy=1, ksize=5) # Combined X and Y Sobel Edge Detection
# Display Sobel Edge Detection Images
# cv.imshow('Sobel X', sobelx)
# cv.waitKey(0)
# cv.imshow('Sobel Y', sobely)
# cv.waitKey(0)
# cv.imshow('Sobel X Y using Sobel() function', sobelxy)
# cv.waitKey(0)
# Canny Edge Detection
edges = cv.Canny(image=img_blur, threshold1=100, threshold2=200) # Canny Edge Detection
path = 'D:/masks/pliers(video34)_3/canny_edge_images/canny_edge_mask_' + str(img_nr) +'.png'
cv.imwrite(path, edges)
# Display Canny Edge Detection Image
# cv.imshow('Canny Edge Detection', edges)
# cv.waitKey(0)
# cv.destroyAllWindows()
# img = cv.imread('C:/Users/wuethral/Desktop/colorfilter_2/14.9.21_try_2/Example_4/pliers.png')
# canny_edge_det(img)
def switch_pixel_row(row_array_hsv_filter, width):
new_row_image_matrix = [0] * width
for pixel_value in range(width):
if row_array_hsv_filter[pixel_value] == 0:
new_row_image_matrix[pixel_value] = 255
new_row_image_matrix = np.array(new_row_image_matrix)
return new_row_image_matrix
class SwitchingBlackWhite():
def __init__(self, img_nr, mask_hsv_filter, height, width):
self.img_nr = img_nr
self.mask_hsv_filter = mask_hsv_filter
self.height = height
self.width = width
self.switch_pixel()
def switch_pixel(self):
array_hsv_filter = np.array(self.mask_hsv_filter)
mask_matrix = np.zeros((self.height, self.width))
for i in range(self.height):
new_row = switch_pixel_row(array_hsv_filter[i], self.width)
mask_matrix[i, :] = new_row
matrix_to_array = np.squeeze(np.asarray(mask_matrix))
matrix_to_array = np.reshape(matrix_to_array, (self.height, self.width)).astype(np.uint8)
switch_pixel_mask = Image.fromarray(matrix_to_array)
path = "D:/masks/pliers(video34)_3/hsv_switch_bw/hsv_switch_bw_" + str(self.img_nr) + '.png'
switch_pixel_mask.save(path)
def hsv_filter(img_nr, img):
# convert the BGR image to HSV colour space
hsv = cv.cvtColor(img, cv.COLOR_BGR2HSV)
# set the lower and upper bounds for the green hue
green_black_lower_hsv = np.array([49, 66, 80])
green_black_higher_hsv = np.array([179, 255, 255])
mask_green_black_hsv = cv.inRange(hsv, green_black_lower_hsv, green_black_higher_hsv)
path = 'D:/masks/pliers(video34)_3/hsv_filter_images/hsv_mask_' + str(img_nr) +'.png'
cv.imwrite(path, mask_green_black_hsv)
def adding_pixel_values(row_canny_edge_det, row_array_hsv_filter, width):
new_row_image_matrix = [0] * width
for pixel_value in range(width):
if row_array_hsv_filter[pixel_value] == 0 or row_canny_edge_det[pixel_value] == 255:
new_row_image_matrix[pixel_value] = 255
new_row_image_matrix = np.array(new_row_image_matrix)
return new_row_image_matrix
class MergingMasks():
def __init__(self, img_nr, mask_canny_edge_detection, mask_hsv_filter, height, width):
self.img_nr = img_nr
self.mask_canny_edge_detection = mask_canny_edge_detection
self.mask_hsv_filter = mask_hsv_filter
self.height = height
self.width = width
self.merging_masks()
def merging_masks(self):
array_canny_edge_det = np.array(self.mask_canny_edge_detection)
array_hsv_filter = np.array(self.mask_hsv_filter)
mask_matrix = np.zeros((self.height, self.width))
for i in range(self.height):
new_row = adding_pixel_values(array_canny_edge_det[i], array_hsv_filter[i], self.width)
mask_matrix[i, :] = new_row
matrix_to_array = np.squeeze(np.asarray(mask_matrix))
matrix_to_array = np.reshape(matrix_to_array, (self.height, self.width)).astype(np.uint8)
final_mask_no_morph = Image.fromarray(matrix_to_array)
path = "D:/masks/pliers(video34)_3/final_mask_no_morph/final_mask_no_morph_" + str(self.img_nr) +'.png'
final_mask_no_morph.save(path)
# cv.waitKey(0)
def morphological_operation(img_nr, source_path, destination_path):
    """Read the mask at *source_path*, dilate it and save it to *destination_path*.

    Two dilation passes with a 5x5 all-ones kernel close small gaps and
    thicken the mask.  *img_nr* is accepted for interface symmetry with the
    other pipeline steps but is not used here.
    """
    mask_img = cv.imread(source_path)
    # 5x5 structuring element; kernel size controls how far the mask grows
    # per iteration when convolved with the image
    struct_elem = np.ones((5, 5), np.uint8)
    dilated_mask = cv.dilate(mask_img, struct_elem, iterations=2)
    cv.imwrite(destination_path, dilated_mask)
class DbScan():
    """Cluster the white pixels of a binary mask with DBSCAN, keep only the
    largest cluster that does not touch the image border, and save the
    cleaned mask (runs as a side effect of construction)."""
    def __init__(self, img_nr, path, input_img_with_canny):
        self.img_nr = img_nr
        self.path_to_image = path
        # flag only selects which output directory/file name is used below
        self.input_img_with_canny = input_img_with_canny
        self.dbscan()
    # define dataset
    # X, _ = make_classification(n_samples=1000, n_features=2, n_informative=2, n_redundant=0, n_clusters_per_class=1, random_state=4)
    def dbscan(self):
        """Run DBSCAN over the (row, col) coordinates of white pixels, plot
        the clusters, zero out every cluster except the chosen one, and
        write the cleaned mask to disk."""
        img = cv.imread(self.path_to_image)
        img = cv.cvtColor(img, cv.COLOR_BGR2GRAY)
        # collect (row, col) coordinates of every foreground (255) pixel
        coordinates_of_white_pixels = []
        rows, cols = img.shape[:2]
        for i in range(rows):
            for j in range(cols):
                if img[i, j] == 255:
                    coordinates_of_white_pixels.append([i, j])
        X = np.asarray(coordinates_of_white_pixels)
        # print(coordinates_of_white_pixels)
        # define the model
        # print(X)
        model = DBSCAN(eps=2, min_samples=9)
        # fit model and predict clusters
        yhat = model.fit_predict(X)
        # retrieve unique clusters
        clusters = unique(yhat)
        # create scatter plot for samples from each cluster
        for cluster in clusters:
            # get row indexes for samples with this cluster
            row_ix = where(yhat == cluster)
            # create scatter of these samples (col on x-axis, row on y-axis)
            pyplot.scatter(X[row_ix, 1], X[row_ix, 0])
        # show the plot
        path_cluster = 'D:/masks/pliers(video34)_3/cluster_plots/clusterplot_' + str(self.img_nr)
        pyplot.savefig(path_cluster)
        pyplot.clf()
        size_of_biggest_cluster = 0
        index_of_biggest_cluster = 0
        # pick the largest cluster that does NOT touch the frame border.
        # NOTE(review): the 1079/1919 constants assume 1920x1080 input
        # frames — confirm against the capture resolution.
        for cluster in clusters:
            row_ix = where(yhat == cluster)
            if row_ix[0].size > size_of_biggest_cluster:
                if max(X[row_ix, 0][0]) == 1079 or max(X[row_ix, 1][0]) == 1919 or min(X[row_ix, 0][0]) == 0 or min(
                        X[row_ix, 1][0]) == 0:
                    continue
                else:
                    size_of_biggest_cluster = row_ix[0].size
                    index_of_biggest_cluster = cluster
        # erase every pixel belonging to a non-selected cluster
        for cluster in clusters:
            if cluster == index_of_biggest_cluster:
                continue
            else:
                row_ix = where(yhat == cluster)
                x_coord_to_delete_mask = X[row_ix, 0]
                y_coord_to_delete_mask = X[row_ix, 1]
                for i in range(len(x_coord_to_delete_mask[0])):
                    img[x_coord_to_delete_mask[0][i], y_coord_to_delete_mask[0][i]] = 0
        # output location depends on whether the input mask included Canny edges
        if self.input_img_with_canny:
            path = "D:/masks/pliers(video34)_3/hsv_canny_dbscan/mask_hsv_canny_dbscan_" + str(self.img_nr) +'.png'
            cv.imwrite(path, img)
        else:
            path = "D:/masks/pliers(video34)_3/hsv_dbscan(no_canny)/mask_hsv_dbscan_" + str(
                self.img_nr) + '.png'
            cv.imwrite(path, img)
def fill_hole(mask):
    """Return an image in which every contour of *mask* is drawn filled,
    closing any interior holes.

    Parameters
    ----------
    mask : numpy.ndarray
        Binary (0/255) single-channel image.

    Returns
    -------
    numpy.ndarray
        uint8 image where all contour interiors are 255.
    """
    contours, hierarchy = cv.findContours(mask, cv.RETR_TREE, cv.CHAIN_APPROX_SIMPLE)
    out = np.zeros_like(mask, np.uint8)
    for i in range(len(contours)):
        drawing = np.zeros_like(mask, np.uint8)  # fresh black canvas per contour
        img_contour = cv.drawContours(drawing, contours, i, (255, 255, 255), -1)
        # Combine with bitwise OR instead of the previous arithmetic sum():
        # adding uint8 images wraps around (255 + 255 -> 254), which
        # corrupted pixels covered by overlapping filled contours
        # (RETR_TREE yields nested outer/inner contours that do overlap).
        out = cv.bitwise_or(out, img_contour)
    return out
|
[
"wuethral@ethz.ch"
] |
wuethral@ethz.ch
|
df2614ce95235a5ab406bf91e83a7e1c6f3a910e
|
0dbeaea56fc3e178894db1ac993bb2f2f7b44829
|
/docs/exercises/fwi.py
|
ad205314073398ebced78c7922ab22a541bf0156
|
[] |
no_license
|
simonlegrand/pysit
|
1bcc4fc4df225bc09f26a77751f447e3635774c8
|
1fb1a80839ceebef12a8d71aa9c295b65b08bac4
|
refs/heads/master
| 2021-06-30T11:43:54.909080
| 2021-03-15T13:06:17
| 2021-03-15T13:06:17
| 218,262,289
| 1
| 0
| null | 2019-10-29T10:34:57
| 2019-10-29T10:34:56
| null |
UTF-8
|
Python
| false
| false
| 5,387
|
py
|
import numpy as np
import matplotlib.pyplot as plt
from models import basic_model
config = dict()
##############################################################################
# Problem 1.1
def ricker(t, config):
    """Evaluate a Ricker wavelet with peak frequency config['nu0'] at time(s) t.

    w(t) = (1 - 2*pi^2*nu0^2*(t-t0)^2) * exp(-pi^2*nu0^2*(t-t0)^2)

    The wavelet is delayed by t0 = 6/(pi*nu0*sqrt(2)) so that it is
    effectively zero for t < 0 (numerically causal source).

    Parameters
    ----------
    t : float or numpy.ndarray
        Time(s), in seconds, at which to evaluate the wavelet.
    config : dict
        Must contain 'nu0', the peak frequency in Hz.

    Returns
    -------
    float or numpy.ndarray
        Wavelet amplitude; peak value 1 at t = t0.

    NOTE(review): the original stub returned an undefined `w` (NameError).
    The 6-sigma delay is the conventional choice; adjust if the exercise
    expects a different shift.
    """
    nu0 = config['nu0']
    t0 = 6.0 / (np.pi * nu0 * np.sqrt(2.0))  # delay so the pulse starts near t = 0
    tau = np.pi * nu0 * (t - t0)
    w = (1.0 - 2.0 * tau * tau) * np.exp(-tau * tau)
    return w
# Configure source wavelet
config['nu0'] = 10 # Hz (peak frequency of the Ricker wavelet)
# Evaluate wavelet and plot it
# (sanity check: sample 1000 points on [0, 0.5] s and show the pulse shape)
ts = np.linspace(0, 0.5, 1000)
ws = ricker(ts, config)
plt.figure()
plt.plot(ts, ws,
         color='green',
         label=r'$\nu_0 =\,{0}$Hz'.format(config['nu0']),
         linewidth=2)
plt.xlabel(r'$t$', fontsize=18)
plt.ylabel(r'$w(t)$', fontsize=18)
plt.title('Ricker Wavelet', fontsize=22)
plt.legend()
##############################################################################
# Problem 1.2
def point_source(value, position, config):
    """Build a spatial point-source vector with amplitude *value* at *position*.

    NOTE(review): exercise stub — `f` is never assigned, so calling this
    raises NameError until the implementation is filled in.
    """
    # implementation goes here
    return f
# Domain parameters
config['x_limits'] = [0.0, 1.0]  # physical extent of the 1D domain
config['nx'] = 201  # number of spatial grid points
config['dx'] = (config['x_limits'][1] - config['x_limits'][0]) / (config['nx']-1)
# Source parameter
config['x_s'] = 0.1  # source position inside the domain
##############################################################################
# Problem 1.3
def construct_matrices(C, config):
    """Assemble the mass (M), attenuation/boundary (A) and stiffness (K)
    matrices for the discretised wave equation with velocity model C.

    NOTE(review): exercise stub — M, A, K are never assigned, so calling
    this raises NameError until implemented.
    """
    # implementation goes here
    return M, A, K
# Load the model
C, C0 = basic_model(config)  # true and background velocity models
# Build an example set of matrices
M, A, K = construct_matrices(C, config)
##############################################################################
# Problem 1.4
def leap_frog(C, sources, config):
    """Time-step the wave equation with the leap-frog scheme.

    NOTE(review): exercise stub — `us` is never assigned, so calling this
    raises NameError until implemented.
    """
    # implementation goes here
    return us # list of wavefields
# Set CFL safety constant
config['alpha'] = 1.0/6.0
# Define time step parameters
config['T'] = 3 # seconds
config['dt'] = config['alpha'] * config['dx'] / C.max()
config['nt'] = int(config['T']/config['dt'])
# Generate the sources: one point-source vector per time step, scaled by
# the Ricker wavelet at that step's time.
sources = list()
for i in range(config['nt']):  # range (not xrange): identical here, and works on Python 3
    t = i*config['dt']
    f = point_source(ricker(t, config), config['x_s'], config)
    sources.append(f)
# Generate wavefields
us = leap_frog(C, sources, config)
##############################################################################
# Problem 1.5
def plot_space_time(us, config, title=None):
    """Plot the list of wavefields `us` as a space-time image (exercise stub)."""
    # implementation goes here
    pass
# Call your function
plot_space_time(us, config, title=r'u(x,t)')
##############################################################################
# Problem 1.6
def record_data(u, config):
    """Sample the wavefield `u` at the receiver position config['x_r'].

    NOTE(review): exercise stub — `d` is never assigned, so calling this
    raises NameError until implemented.
    """
    # implementation goes here
    return d
# Receiver position
config['x_r'] = 0.15  # receiver location inside the domain
##############################################################################
# Problem 1.7
def forward_operator(C, config):
    """Solve the forward wave problem for model C and record the receiver trace.

    NOTE(review): exercise stub — `us`/`trace` are never assigned, so
    calling this raises NameError until implemented.
    """
    # implementation goes here
    return us, trace
us, d = forward_operator(C, config)
# The last argument False excludes the end point from the list
ts = np.linspace(0, config['T'], config['nt'], False)
plt.figure()
plt.plot(ts, d, label=r'$x_r =\,{0}$'.format(config['x_r']), linewidth=2)
plt.xlabel(r'$t$', fontsize=18)
plt.ylabel(r'$d(t)$', fontsize=18)
plt.title('Trace at $x_r={0}$'.format(config['x_r']), fontsize=22)
plt.legend()
##############################################################################
# Problem 2.1
##############################################################################
# Problem 2.2
##############################################################################
# Problem 2.3
def imaging_condition(qs, u0s, config):
    """Correlate adjoint wavefields `qs` with background wavefields `u0s`
    to form an RTM image.

    NOTE(review): exercise stub — `image` is never assigned, so calling
    this raises NameError until implemented.
    """
    # implementation goes here
    return image
# Compute the image
I_rtm = imaging_condition(qs, u0s, config)
# Plot the comparison
# (top: true model perturbation; bottom: RTM image — they should correlate)
xs = np.arange(config['nx'])*config['dx']
dC = C-C0
plt.figure()
plt.subplot(2, 1, 1)
plt.plot(xs, dC, label=r'$\delta C$')
plt.legend()
plt.subplot(2, 1, 2)
plt.plot(xs, I_rtm, label=r'$I_\text{RTM}$')
plt.legend()
##############################################################################
# Problem 2.4
##############################################################################
# Problem 2.5
def adjoint_operator(C0, d, config):
    """Apply the adjoint (migration) operator to data `d` in background
    model C0 (exercise stub — `image` is undefined until implemented)."""
    # implementation goes here
    return image
##############################################################################
# Problem 3.1
def linear_sources(dm, u0s, config):
    """Build the Born (linearised) source terms from model perturbation `dm`
    and background wavefields `u0s` (exercise stub)."""
    # implementation goes here
    return sources
##############################################################################
# Problem 3.2
def linear_forward_operator(C0, dm, config):
    """Apply the linearised (Born) forward operator (exercise stub)."""
    # implementation goes here
    return u1s
##############################################################################
# Problem 3.3
def adjoint_condition(C0, config):
    """Numerically verify the adjoint test <Fm, d> == <m, F*d> (exercise stub)."""
    # implementation goes here
    pass
##############################################################################
# Problem 4.1
def gradient_descent(C0, d, k, config):
    """Run k gradient-descent iterations of FWI starting from C0.

    NOTE(review): this stub returns `sources` (undefined here) — looks like
    a copy/paste slip from linear_sources; the later duplicate of this
    function uses `pass` instead.
    """
    # implementation goes here
    return sources
##############################################################################
# Problem 3.2
# NOTE(review): everything from here to Problem 4.1 duplicates the
# definitions earlier in the file; at import time these redefinitions
# silently replace the earlier ones.
def linear_forward_operator(C0, dm, config):
    """Apply the linearised (Born) forward operator (exercise stub)."""
    # implementation goes here
    return u1s
##############################################################################
# Problem 3.3
def adjoint_condition(C0, config):
    """Numerically verify the adjoint test (exercise stub)."""
    # implementation goes here
    pass
##############################################################################
# Problem 4.1
def gradient_descent(C0, d, k, config):
    """Run k gradient-descent iterations of FWI (exercise stub)."""
    # implementation goes here
    pass
##############################################################################
# Problem 4.2
|
[
"rhewett@vt.edu"
] |
rhewett@vt.edu
|
30515edd35063c0b0ce56bb0b4cca5653ebf3076
|
ee0d795e2874b30ba1edd568f9f2f9483854f0a7
|
/Run/wsgi.py
|
6c10531c680a9fc6ce3a27c9d0eba2d6ba5a2cc0
|
[
"Apache-2.0"
] |
permissive
|
CodeMaxx/Run-Backend
|
91e65e5633263cf9913c814ac0985e8063679b33
|
880610ea473d0a0cde60a89064e987d92842dde3
|
refs/heads/master
| 2021-01-19T16:59:34.281976
| 2017-04-27T19:53:48
| 2017-04-27T19:53:48
| 86,144,226
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 384
|
py
|
"""
WSGI config for Run project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "Run.settings")
application = get_wsgi_application()
|
[
"akash.trehan123@gmail.com"
] |
akash.trehan123@gmail.com
|
60e00547cea5608007f9512ca8f4505dd0388455
|
52ffc8be8e69745864f1a45992d27c94ca51cffe
|
/Admin/views.py
|
b460fb713762b1d8b21cbf2665ae329d00885cd2
|
[] |
no_license
|
Kaviya-M12/College-Enquiry-Chatbot
|
137c6222798c32a6c34e2dcd67e4120d4c57ad9c
|
4c0700ae6bd9b52fe80cd7ac238be239bf0b9de5
|
refs/heads/master
| 2023-04-09T00:32:45.470227
| 2021-04-16T11:11:55
| 2021-04-16T11:11:55
| 350,371,938
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,930
|
py
|
from django.shortcuts import render, redirect
# Create your views here.
# from django.shortcuts import render, redirect
from django.contrib import messages
from django.contrib.auth.models import User, auth
#from .models import Users
# Create your views here.
def Adminlogin(request):
    """Log an admin in: POST authenticates the submitted credentials,
    GET renders the login form."""
    if request.method== 'POST':
        MailId = request.POST['MailId']
        password = request.POST['password']
        # NOTE(review): authenticate() is called with email= — Django's
        # default ModelBackend expects username=; this only works with a
        # custom auth backend keyed on email. Verify project settings.
        user = auth.authenticate(email=MailId,password=password)
        if user is not None:
            auth.login(request, user)
            return redirect("/")
        else:
            messages.info(request,'invalid credentials')
            return redirect('Adminlogin')
    else:
        return render(request,'Adminlogin.html')
def Adminregister(request):
    """Register a new admin account.

    POST validates that the two passwords match and that neither the e-mail
    nor the username is taken, then creates the User and redirects to the
    login page.  GET renders the registration form.
    """
    if request.method == 'POST':
        email = request.POST['MailId']
        password= request.POST['createpassword']
        secpassword = request.POST['confirmpassword']
        username = request.POST['username']
        if password==secpassword:
            # uniqueness checks before creating the account
            if User.objects.filter(email=email).exists():
                messages.info(request,'MailId Taken')
                return redirect('Adminregister')
            elif User.objects.filter(username=username).exists():
                messages.info(request,'username taken')
                return redirect('Adminregister')
            else:
                # create_user hashes the password before storing it
                user = User.objects.create_user(username=username, password=password, email=email)
                #dummyusers=Users()
                # #dummyusers.name=username
                # print('user created')
                return redirect('Adminlogin')
        else:
            messages.info(request,'password not matching..')
            return redirect('Adminregister')
            #return redirect('/')
    else:
        return render(request,'Adminregister.html')
def Adminlogout(request):
    """Log the current user out and redirect to the site root."""
    # Fix: `auth.Adminlogout` does not exist and raised AttributeError at
    # runtime; the django.contrib.auth helper that pairs with the
    # authenticate()/login() calls above is logout().
    auth.logout(request)
    return redirect('/')
|
[
"kaviyamadesh4@gmail.com"
] |
kaviyamadesh4@gmail.com
|
e066f05d3207fd56a8422bdeb6707bf4ebecfb1b
|
76f3b6dd64acdf60ff464f5c0fe9b4f4151358e0
|
/pymnn/pip_package/MNN/tools/mnn_fb/Transpose.py
|
557f49e65400b320cf2ced36f4306e9ee1dad48e
|
[
"Apache-2.0"
] |
permissive
|
qipengwang/Melon
|
33ce5d4b683af70215f73b82a7b9c15ffd7706d9
|
10c9d71cdc609a290bfdd09296db6af3913bb461
|
refs/heads/main
| 2023-08-31T17:44:28.797075
| 2023-08-17T14:24:59
| 2023-08-17T14:24:59
| 486,278,764
| 22
| 2
| null | 2022-04-27T16:54:48
| 2022-04-27T16:54:47
| null |
UTF-8
|
Python
| false
| false
| 899
|
py
|
# automatically generated by the FlatBuffers compiler, do not modify
# namespace: MNN
import flatbuffers
class Transpose(object):
    """FlatBuffers accessor for the MNN ``Transpose`` op table (generated code)."""
    __slots__ = ['_tab']
    @classmethod
    def GetRootAsTranspose(cls, buf, offset):
        # Read the root table offset from the buffer and wrap it in an accessor.
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        x = Transpose()
        x.Init(buf, n + offset)
        return x
    # Transpose
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)
    # Transpose
    def Tperm(self):
        # Field 0 lives at vtable offset 4; return the stored int32, or the
        # schema default 0 when the field is absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
        return 0
# Builder helpers for serialising a Transpose table.
def TransposeStart(builder): builder.StartObject(1)
def TransposeAddTperm(builder, Tperm): builder.PrependInt32Slot(0, Tperm, 0)
def TransposeEnd(builder): return builder.EndObject()
|
[
"861026685@qq.com"
] |
861026685@qq.com
|
acb80ba6822833f25d966e362da91eaa8b931efc
|
1df048bc8092f333c53f4e345c729c40ba2612c5
|
/count_list_4.py
|
1b5da491e36d1c5ef606f85dc241e68e374beef1
|
[] |
no_license
|
saipreeti1999/python_prog
|
58e3e5b5dab356fb3755b3abf36529aa475923be
|
8bdb773c29e0d8b2beb3a16dd4692485b4726b79
|
refs/heads/master
| 2020-06-28T09:15:46.787982
| 2019-08-06T07:03:18
| 2019-08-06T07:03:18
| 200,196,486
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 173
|
py
|
def count_list4(num):
    """Return how many elements of the iterable `num` are equal to 4."""
    return sum(1 for value in num if value == 4)
print(count_list4([1,4,6,7,4]))
print(count_list4([1,4,6,4,7,4]))
|
[
"noreply@github.com"
] |
noreply@github.com
|
dcf9b0a27f7c7e49b3ad9e8d7887b134117d129a
|
76a6d509f366cc31febd05020a42da3024586407
|
/app/recipe/serializers.py
|
74e46eb63137f3678b98be3637ba359129a3648c
|
[
"MIT"
] |
permissive
|
dipodaimary/recipe-app-api
|
52e5a6d378d177bcf50e4db54e9eacc22f57ab84
|
fbf2a452b2fed41570bb5444a202284ab1d1a03f
|
refs/heads/main
| 2023-01-28T21:44:21.319328
| 2020-11-30T15:12:54
| 2020-11-30T15:12:54
| 316,896,884
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,411
|
py
|
from rest_framework import serializers
from core.models import Tag, Ingredient, Recipe
class TagSerializer(serializers.ModelSerializer):
    """Serializer for tag objects"""
    class Meta:
        model = Tag
        fields = ('id', 'name')
        # id is assigned by the database, never by the client
        read_only_fields = ('id',)
class IngredientSerializer(serializers.ModelSerializer):
    """Serializer for Ingredient objects"""
    class Meta:
        model = Ingredient
        fields = ('id', 'name')
        # id is assigned by the database, never by the client
        read_only_fields = ('id', )
class RecipeSerializer(serializers.ModelSerializer):
    """Serialize a recipe (list view): related objects are exposed by
    primary key only; RecipeDetailSerializer nests the full objects."""
    ingredients = serializers.PrimaryKeyRelatedField(
        many=True,
        queryset=Ingredient.objects.all()
    )
    tags = serializers.PrimaryKeyRelatedField(
        many=True,
        queryset=Tag.objects.all()
    )
    class Meta:
        model = Recipe
        fields = ('id', 'title', 'ingredients', 'tags', 'time_minutes',
                  'price', 'link')
        read_only_fields = ('id',)
class RecipeDetailSerializer(RecipeSerializer):
    """Serialize a recipe detail"""
    # Override the primary-key fields with nested, read-only representations.
    ingredients = IngredientSerializer(many=True, read_only=True)
    tags = TagSerializer(many=True, read_only=True)
class RecipeImageSerializer(serializers.ModelSerializer):
    """Serializer for uploading image to recipes"""
    class Meta:
        model = Recipe
        # only the image field is writable through this serializer
        fields = ('id', 'image')
        read_only_fields = ('id',)
|
[
"dipodaimary@gmail.com"
] |
dipodaimary@gmail.com
|
355ba71678f6bb9ec2076002fa247bff0631c87c
|
7fe92cf2077e83e13d6a496ec84788bd71713d21
|
/exp3/test_submodule_package/add.py
|
6b07b462f69e1b50f226a867c8b5a983d644a4c3
|
[] |
no_license
|
loveu3000s/learnPython
|
91546b3712deebdade1b87c0fb7691c70ad77397
|
80f7cd70708e96015e552311d843a2960654ce77
|
refs/heads/main
| 2023-06-12T23:08:44.040645
| 2021-05-30T04:38:18
| 2021-05-30T04:38:18
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 185
|
py
|
'''
Description:
Version: 2.0
Author: xuchaoxin
Date: 2021-04-13 11:36:54
LastEditors: xuchaoxin
LastEditTime: 2021-04-13 11:41:48
'''
def add_func(a, b):
    """Return the sum (a + b) of the two arguments."""
    result = a + b
    return result
|
[
"838808930@qq.com"
] |
838808930@qq.com
|
5b11a4092b62457332b80ede0b087786272aa1cf
|
66580ac2680bd9060f6a4e852da52f45177e9721
|
/test/functional/p2p_leak.py
|
45620addb94fc3d51265c9c9c10c386dade7bddb
|
[
"MIT"
] |
permissive
|
bitcoinpaythrough-project/bitcoinpaythrough
|
459cae4b1805e3f966a870e820a9aab933b6a7a0
|
2aabe233abf16ec4e41be4c0528204b7a479dbd3
|
refs/heads/master
| 2022-11-18T17:04:30.689139
| 2020-07-16T23:42:34
| 2020-07-16T23:42:34
| 279,911,155
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,310
|
py
|
#!/usr/bin/env python3
# Copyright (c) 2017-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test message sending before handshake completion.
A node should never send anything other than VERSION/VERACK/REJECT until it's
received a VERACK.
This test connects to a node and sends it a few messages, trying to entice it
into sending us something it shouldn't."""
import time
from test_framework.messages import msg_getaddr, msg_ping, msg_verack
from test_framework.mininode import mininode_lock, P2PInterface
from test_framework.test_framework import BitcoinpaythroughTestFramework
from test_framework.util import wait_until
banscore = 10
class CLazyNode(P2PInterface):
    """P2P connection that records (in `unexpected_msg`) any message the node
    sends us — before handshake completion nothing beyond
    version/verack/reject may legitimately arrive."""
    def __init__(self):
        super().__init__()
        self.unexpected_msg = False  # set once any disallowed message arrives
        self.ever_connected = False  # set when the TCP connection opens
    def bad_message(self, message):
        """Record and log a message the node should not have sent us."""
        self.unexpected_msg = True
        self.log.info("should not have received message: %s" % message.command)
    def on_open(self):
        self.ever_connected = True
    # Every message type below is disallowed pre-handshake and funnels into
    # bad_message(); subclasses override the few handlers they expect.
    def on_version(self, message): self.bad_message(message)
    def on_verack(self, message): self.bad_message(message)
    def on_reject(self, message): self.bad_message(message)
    def on_inv(self, message): self.bad_message(message)
    def on_addr(self, message): self.bad_message(message)
    def on_getdata(self, message): self.bad_message(message)
    def on_getblocks(self, message): self.bad_message(message)
    def on_tx(self, message): self.bad_message(message)
    def on_block(self, message): self.bad_message(message)
    def on_getaddr(self, message): self.bad_message(message)
    def on_headers(self, message): self.bad_message(message)
    def on_getheaders(self, message): self.bad_message(message)
    def on_ping(self, message): self.bad_message(message)
    def on_mempool(self, message): self.bad_message(message)
    def on_pong(self, message): self.bad_message(message)
    def on_feefilter(self, message): self.bad_message(message)
    def on_sendheaders(self, message): self.bad_message(message)
    def on_sendcmpct(self, message): self.bad_message(message)
    def on_cmpctblock(self, message): self.bad_message(message)
    def on_getblocktxn(self, message): self.bad_message(message)
    def on_blocktxn(self, message): self.bad_message(message)
# Node that never sends a version. We'll use this to send a bunch of messages
# anyway, and eventually get disconnected.
class CNodeNoVersionBan(CLazyNode):
    """Never sends a version; instead spams veracks until the node's
    banscore trips and we are disconnected."""
    # send a bunch of veracks without sending a message. This should get us disconnected.
    # NOTE: implementation-specific check here. Remove if bitcoinpaythroughd ban behavior changes
    def on_open(self):
        super().on_open()
        for i in range(banscore):
            self.send_message(msg_verack())
    def on_reject(self, message): pass
# Node that never sends a version. This one just sits idle and hopes to receive
# any message (it shouldn't!)
class CNodeNoVersionIdle(CLazyNode):
    """Never sends a version and just sits idle; any inbound message trips
    CLazyNode.bad_message (it should receive nothing)."""
    def __init__(self):
        super().__init__()
# Node that sends a version but not a verack.
class CNodeNoVerackIdle(CLazyNode):
    """Sends a version but withholds the verack, then pings/getaddrs the node
    to try to entice a pre-handshake reply it must not give."""
    def __init__(self):
        self.version_received = False  # flipped once the node's version arrives
        super().__init__()
    def on_reject(self, message): pass
    def on_verack(self, message): pass
    # When version is received, don't reply with a verack. Instead, see if the
    # node will give us a message that it shouldn't. This is not an exhaustive
    # list!
    def on_version(self, message):
        self.version_received = True
        self.send_message(msg_ping())
        self.send_message(msg_getaddr())
class P2PLeakTest(BitcoinpaythroughTestFramework):
    """Functional test: the node must not leak any message to peers that have
    not completed the version/verack handshake."""
    def set_test_params(self):
        self.num_nodes = 1
        # low banscore so CNodeNoVersionBan's verack spam triggers a ban quickly
        self.extra_args = [['-banscore=' + str(banscore)]]
    def run_test(self):
        # three misbehaving peers: verack spammer, silent no-version peer,
        # and a version-without-verack peer
        no_version_bannode = self.nodes[0].add_p2p_connection(CNodeNoVersionBan(), send_version=False, wait_for_verack=False)
        no_version_idlenode = self.nodes[0].add_p2p_connection(CNodeNoVersionIdle(), send_version=False, wait_for_verack=False)
        no_verack_idlenode = self.nodes[0].add_p2p_connection(CNodeNoVerackIdle())
        wait_until(lambda: no_version_bannode.ever_connected, timeout=10, lock=mininode_lock)
        wait_until(lambda: no_version_idlenode.ever_connected, timeout=10, lock=mininode_lock)
        wait_until(lambda: no_verack_idlenode.version_received, timeout=10, lock=mininode_lock)
        # Mine a block and make sure that it's not sent to the connected nodes
        self.nodes[0].generatetoaddress(1, self.nodes[0].get_deterministic_priv_key().address)
        #Give the node enough time to possibly leak out a message
        time.sleep(5)
        #This node should have been banned
        assert not no_version_bannode.is_connected
        self.nodes[0].disconnect_p2ps()
        # Wait until all connections are closed
        wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 0)
        # Make sure no unexpected messages came in
        assert no_version_bannode.unexpected_msg == False
        assert no_version_idlenode.unexpected_msg == False
        assert no_verack_idlenode.unexpected_msg == False
if __name__ == '__main__':
P2PLeakTest().main()
|
[
"bitcoinpaythrough@gmail.com"
] |
bitcoinpaythrough@gmail.com
|
42cc5b26ee8c1863e26fc185667574ef48c9737f
|
4208b2260aac638af6169872c87e923195f0dd11
|
/piopencvsandbox/motion_detector.py
|
50de871a827c73202fafbdd551baa1d0e6d188ae
|
[] |
no_license
|
cjore/pibox
|
be85b9e996b78392c20f267016fdd8349fe68583
|
62acdd791b1fb0bbcf209ad825e69f3d3ec1a64b
|
refs/heads/master
| 2020-12-29T02:32:33.165913
| 2017-04-06T20:22:28
| 2017-04-06T20:22:28
| 49,876,246
| 0
| 0
| null | 2016-01-19T20:45:57
| 2016-01-18T12:44:59
| null |
UTF-8
|
Python
| false
| false
| 2,897
|
py
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# Import the necessary packages
import argparse
import datetime
import imutils
from imutils.video import FPS
import time
import cv2
# Construct the argument parser and parse the arguments
ap = argparse.ArgumentParser()
ap.add_argument("-v", "--video", help="path to the video file")
ap.add_argument("-a", "--min-area", type=int, default=500, help="minimum area size")
args = vars(ap.parse_args())
# If the video argument is None, then we are reading from webcam
if args.get("video", None) is None:
    camera = cv2.VideoCapture(0)
    time.sleep(0.25)  # give the camera sensor time to warm up
# Otherwise, we are reading from a video file
else:
    camera = cv2.VideoCapture(args["video"])
# The first frame serves as the static background against which motion
# is detected for the whole run.
firstFrame = None
fps = FPS().start()
# Loop over the frames of the video
while True:
    # grab the current frame and initialize the occupied/unoccupied text
    (grabbed, frame) = camera.read()
    text = "Unoccupied"  # fixed on-screen typo: was "Unocoppied"
    # if the frame could not be grabbed, then we have reached the end of the video
    if not grabbed:
        break
    # resize the frame, convert it to grayscale, and blur it
    frame = imutils.resize(frame, width=500)
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    gray = cv2.GaussianBlur(gray, (21, 21), 0)
    # if the first frame is None, initialize it
    if firstFrame is None:
        firstFrame = gray
        continue
    # compute the absolute difference between the current frame and first frame
    frameDelta = cv2.absdiff(firstFrame, gray)
    thresh = cv2.threshold(frameDelta, 25, 255, cv2.THRESH_BINARY)[1]
    # dilate the thresholded image to fill in holes, then find contours on it.
    # NOTE: the three-value findContours return is the OpenCV 3.x API;
    # OpenCV 4 returns two values.
    thresh = cv2.dilate(thresh, None, iterations=2)
    (_, cnts, _) = cv2.findContours(thresh.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    # loop over the contours
    for c in cnts:
        # if the contour is too small, ignore it
        if cv2.contourArea(c) < args["min_area"]:
            continue
        # compute the bounding box for the contour, draw it on the frame, and update the text
        (x, y, w, h) = cv2.boundingRect(c)
        cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 2)
        text = "Occupied"
    # draw the status text and timestamp on the frame
    cv2.putText(frame, "Room Status: {}".format(text), (10, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2)
    cv2.putText(frame, datetime.datetime.now().strftime("%A %d %B %Y %I:%M:%S%p"), (10, frame.shape[0] - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.35, (0, 0, 255), 1)
    fps.update()
    # show the frame and record if the user presses a key
    cv2.imshow("Security Feed", frame)
    cv2.imshow("Thresh", thresh)
    cv2.imshow("Frame Delta", frameDelta)
    key = cv2.waitKey(1) & 0xFF
    # if the 'q' key is pressed, break from the loop
    if key == ord("q"):
        break
fps.stop()
print("[INFO] approx. FPS: {:.2f}".format(fps.fps()))
# Clean up the camera and close any open windows
camera.release()
cv2.destroyAllWindows()
|
[
"chris.jore@gmail.com"
] |
chris.jore@gmail.com
|
a78cee9826237d0c0568190586b4b45200cc39e1
|
a49bab6a5a7df245c0eea181cd041ae344cc018e
|
/server/settings/dev.py
|
3c192fb5410716bedffcfd9d31060f4382539528
|
[] |
no_license
|
ego/pyblog
|
0a52d2cd2835cd9a3607b55075ad985b960c8b40
|
28b6a9066a60b71bbec9f7b37ef40794e8b5e2aa
|
refs/heads/master
| 2023-04-02T16:37:00.922916
| 2020-11-07T23:58:55
| 2020-11-07T23:58:55
| 123,491,870
| 0
| 0
| null | 2021-04-16T20:36:49
| 2018-03-01T21:01:04
|
Python
|
UTF-8
|
Python
| false
| false
| 456
|
py
|
# Dev settings
from .base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'blog_db',
'USER': 'blog_user',
'PASSWORD': 'blog_passwd',
'HOST': 'localhost',
'PORT': '5432',
}
}
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
}
}
THUMBNAIL_KVSTORE = 'sorl.thumbnail.kvstores.cached_db_kvstore.KVStore'
|
[
"0x7c48@gmail.com"
] |
0x7c48@gmail.com
|
cafb55372742d6d435053197b309733edbd35a30
|
aa257d7c9c99d66ec3723a145dbc672c688ca84a
|
/example
|
a90ef91fa95bd2484fbda6fd43f70b4be7614399
|
[
"MIT"
] |
permissive
|
unix-example-command/example
|
ae286828475193d7e8df57b4d76062fbfc53ccc7
|
def114584c09a3119793ea3efdf5491c80a4e858
|
refs/heads/master
| 2020-06-04T04:00:13.992711
| 2015-07-12T14:09:06
| 2015-07-12T14:09:06
| 33,952,491
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,849
|
#!/usr/bin/env python2.7
# Copyright (c) 2015 Sampo Raudaskoski, Samu Kallio
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os, sys, platform
import fcntl, termios, struct
import textwrap
import difflib
COPYRIGHT = "Copyright (c) 2015 Sampo Raudaskoski, Samu Kallio"
# ANSI escape sequences keyed by message category
COLORS = { 'reset': "\033[0m",
           'error': "\033[31m",
           'matches': "\033[32m",
           'related': "\033[33m",
           'description': "\033[36m", }
MINCMDCOLS = 10 # minimum width for command line
MINDSCCOLS = 10 # minimum width for description
MINPADCOLS = 2 # minimum padding between command and description
# colour output only when stdout is a terminal
g_colors = sys.stdout.isatty()
# search path for the example database, most specific first
g_dbdirs = ["/usr/local/share/example", "/usr/share/example"]
def colorize(type, text):
    """Wrap *text* in the ANSI colour codes for *type* when colour output is on."""
    if not g_colors:
        return text
    return "%s%s%s" % (COLORS[type], text, COLORS['reset'])
def fdttycols(fd):
    """Return the terminal column count for *fd*, or None if it has no TTY."""
    try:
        # TIOCGWINSZ fills a struct winsize; we only need the first two
        # shorts (rows, cols). A bytes buffer works on Python 2 and 3 —
        # the previous str literal '1234' breaks fcntl.ioctl on Python 3.
        data = fcntl.ioctl(fd, termios.TIOCGWINSZ, b'1234')
        return struct.unpack('hh', data)[1]
    except (IOError, OSError, struct.error):
        # Only the expected "not a TTY" / bad-buffer failures mean None;
        # the previous bare except also swallowed programming errors.
        return None
def ttycols():
    """Try to figure out our terminal width.

    Falls back through: any standard fd with a TTY -> the controlling
    terminal -> the COLUMNS environment variable -> 80.
    """
    cols = fdttycols(0) or fdttycols(1) or fdttycols(2)
    if not cols:
        try:
            fd = os.open(os.ctermid(), os.O_RDONLY)
            cols = fdttycols(fd)
            os.close(fd)
        except (OSError, AttributeError):
            # OSError: no controlling terminal; AttributeError: platforms
            # without os.ctermid. The previous bare except hid real bugs too.
            pass
    if not cols:
        cols = os.environ.get('COLUMNS', 80)
    return int(cols)
def dbsuggest(name):
    """Suggest close-match command names from the example database."""
    names = []
    for dbdir in g_dbdirs:
        if not os.path.exists(dbdir):
            continue
        # keep only database entries, with the ".txt" suffix stripped
        names.extend(fname[:-4] for fname in os.listdir(dbdir) if fname.endswith(".txt"))
    return difflib.get_close_matches(name, names)
def dbexists(name):
    """Return True when *name* has a database file in any search directory."""
    return any(os.path.exists("%s/%s.txt" % (dbdir, name)) for dbdir in g_dbdirs)
def dbload(name):
    """Load a command example database file.

    File format: blank-line separated records — a command line followed by
    its description lines — plus optional "# tips" and "# related" sections
    whose entries may be "- " bulleted.

    Returns (examples, tips, related), where examples is a list of
    (command, description) tuples.

    NOTE(review): if *name* exists in no dbdir, `dbfile` is never bound and
    the read loop raises NameError — callers are expected to check
    dbexists() first (main() does).
    """
    examples = []
    tips = []
    related = []
    # open the first matching database file on the search path
    for dbdir in g_dbdirs:
        filename = "%s/%s.txt" % (dbdir, name)
        if os.path.exists(filename):
            dbfile = open(filename, 'r')
            break
    # simple line-oriented state machine:
    # start -> cmddesc / tips / related; blank line flushes a record
    state = 'start'
    cmddesc = []
    for line in dbfile:
        line = line.strip()
        if not line:
            if state == 'cmddesc':
                examples.append((cmd, "\n".join(cmddesc)))
                cmddesc = []
            state = 'start'
            continue
        if state == 'start':
            if line in ("# tips", "#tips"):
                state = 'tips'
            elif line in ("# related", "#related"):
                state = 'related'
            else:
                state = 'cmddesc'
                cmd = line
        elif state == 'cmddesc':
            cmddesc.append(line)
        elif state == 'tips':
            if line.startswith("- "):
                line = line[2:]
            tips.append(line)
        elif state == 'related':
            if line.startswith("- "):
                line = line[2:]
            related.append(line)
    # flush a trailing record when the file does not end with a blank line
    if state == 'cmddesc':
        examples.append((cmd, "\n".join(cmddesc)))
    return examples, tips, related
def error(msg, *args):
    """Write a %-formatted error message, prefixed with the program name, to stderr."""
    prog = os.path.basename(sys.argv[0])
    sys.stderr.write("%s: %s\n" % (prog, msg % args))
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# NOTE(review): this block is Python 2 only (print statements, tuple-unpacking
# lambdas, map(None, ...) zip_longest idiom) — it will not parse under Python 3.
if __name__ == '__main__':
    # only for linux for now
    if platform.system() != "Linux":
        print ("Example is currently only available for Linux. If you "
               "wish to contribute examples for other systems, please see "
               "https://github.com/unix-example-command")
        sys.exit(254)
    #
    # argv[1] = command to look up (defaults to our own docs);
    # remaining args filter the examples by substring
    cmd = sys.argv[1] if len(sys.argv) >= 2 else "example"
    grep = sys.argv[2:]
    # check that the command exists in the database
    if not dbexists(cmd):
        error("unknown command: %s", colorize('error', cmd))
        cmds = dbsuggest(cmd)
        if cmds:
            cmdlist = " ".join(cmds)
            error("close matches: %s", colorize('matches', cmdlist))
        sys.exit(1)
    # load command entry
    examples, tips, related = dbload(cmd)
    if not examples:
        error("dbfile for command %s is empty", colorize('error', cmd))
        sys.exit(2)
    # compute unconstrained column counts
    cmdcols = max(map(lambda (cmd,_): len(cmd), examples))
    padcols = 11
    dsccols = max(map(lambda (_,dsc): len(dsc), examples))
    # tune columns to fit terminal width: shrink padding first, then the
    # description column, then the command column, down to their minimums
    maxcols = ttycols()
    while cmdcols + padcols + dsccols > maxcols:
        if padcols > MINPADCOLS:
            padcols = max(maxcols - cmdcols - dsccols, MINPADCOLS)
        elif dsccols > MINDSCCOLS:
            dsccols = max(maxcols - cmdcols - padcols, MINDSCCOLS)
        elif cmdcols > MINCMDCOLS:
            cmdcols = max(maxcols - dsccols - padcols, MINCMDCOLS)
        else:
            break
    # print examples (skipping those that match none of the grep keywords)
    fmt = "%%-%ds%%s" % (cmdcols + padcols)
    for cmdline, dscline in examples:
        if not all(map(lambda kw: kw in cmdline, grep)) and \
           not all(map(lambda kw: kw in dscline, grep)):
            continue
        # wrap both columns and print them side by side;
        # map(None, a, b) is the py2 zip_longest
        cmdlines = textwrap.wrap(cmdline, cmdcols)
        dsclines = textwrap.wrap(dscline, dsccols)
        for _cmdline, _dscline in map(None, cmdlines, dsclines):
            _cmdline = _cmdline or ""
            _dscline = colorize('description', _dscline or "")
            print fmt % (_cmdline, _dscline)
    # print related
    if related:
        text = " ".join(related)
        print "\nsee also: %s" % colorize('related', text)
    # print tips
    if tips:
        print "\n" + "\n".join(tips)
    if cmd == "example":
        print "\n%s" % COPYRIGHT
|
[
"samu@samukallio.net"
] |
samu@samukallio.net
|
|
056bd5bc3d264e0caaf44ba1301141d9a63a1857
|
4786222ae4c6302c26e63d0281c12bdf6b1c9420
|
/permute.py
|
2e3a66ff4bb1de27aaadbc0c7ed1549a13fa0136
|
[] |
no_license
|
Kaviprakash156/hunterset2
|
423da05270209e089150a9165ac9d89e7a31e46f
|
182790c3edc66123fab398e79778e37fce5ad0c3
|
refs/heads/master
| 2021-01-24T22:21:42.944448
| 2018-02-28T12:25:06
| 2018-02-28T12:25:06
| 123,279,510
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 320
|
py
|
def toString(List):
    """Concatenate the characters in *List* into a single string."""
    joined = ''.join(List)
    return joined
def permute(a, l, r):
    """Print every permutation of a[l..r] by in-place backtracking.

    Mutates *a* during the recursion but restores it before returning.
    """
    if l == r:
        # Whole prefix is fixed: emit one permutation.
        print (toString(a))
        return
    for idx in range(l, r + 1):
        a[l], a[idx] = a[idx], a[l]   # choose a[idx] for slot l
        permute(a, l + 1, r)          # permute the remaining slots
        a[l], a[idx] = a[idx], a[l]   # undo the choice (backtrack)
string = "kavi"  # word to permute (name shadows the stdlib `string` module)
n = len(string)
a = list(string)  # mutable character buffer for the in-place swaps
permute(a, 0, n-1)
|
[
"noreply@github.com"
] |
noreply@github.com
|
54e34c038c98d02ef48734b8a1b6e55a21b24693
|
32128ef01b5ca9d3c634bdffd5a870be12f23a01
|
/main.py
|
446a2fe5b0a39de49c9331cd568b2a0ac7a477e9
|
[] |
no_license
|
geminiwayne/cloud_assignment
|
4efa2ed28800853ac28c92ed546aad6d94fcd361
|
f187e86d3bf4227b270db155ea192b02a3e31737
|
refs/heads/master
| 2021-01-20T01:52:35.861799
| 2017-05-14T16:04:27
| 2017-05-14T16:04:27
| 89,339,596
| 2
| 3
| null | 2018-02-03T04:01:27
| 2017-04-25T09:00:07
|
Python
|
UTF-8
|
Python
| false
| false
| 1,474
|
py
|
#################################
#Team 4 #
#Melbourne,Sydney,Brisbane,Perth#
#Dong Wang 773504 #
#Danping Zeng 777691 #
#Jia Zhen 732355 #
#Jinghan Liang 732329 #
#Sixue Yang 722804 #
#################################
max_volum=0
import tweepy
import config
import history_twitter
import new_twitter
import threading
import time
import sys
if __name__=="__main__":
    # Config file path is the first CLI argument (API keys, bounding box, quota).
    file = sys.argv[1]
    config.get_config(file)
    max_volum=(int)(config.max_size)
    # Reserve two thirds of the quota for the historical (search API) crawl.
    # NOTE(review): under Python 3 this is a float; confirm downstream code accepts it.
    max_history_tweet= max_volum*2/3
    # twitter_stream.filter(track=config.streaming_topic,locations=config.bound,async=True)
    # # to use thread to control two crawling fucntion
    # Alternate forever between the historical crawl and the streaming crawl.
    while(1):
        myStreamListener =new_twitter.MyStreamListener()
        new_twitter.get_max(max_volum)
        twitter_stream = tweepy.Stream(auth = new_twitter.get_connection(), listener=myStreamListener)
        try:
            # NOTE(review): tweeet_crawl(...) is CALLED here and its return
            # value is passed as `target`, so the crawl runs synchronously in
            # the main thread and the Thread itself does nothing. Net effect
            # is still sequential (start/join follow immediately), and
            # exceptions are caught here -- confirm before "fixing".
            t1 = threading.Thread(target=history_twitter.tweeet_crawl(max_history_tweet))
            t1.start()
            t1.join()
        except Exception as e:
            time.sleep(5)  # back off briefly before retrying
            print ("Error: t1 thread stop",e)
        try:
            # NOTE(review): same call-instead-of-reference pattern as t1, and
            # `async=True` is a SyntaxError on Python >= 3.7 (reserved keyword);
            # this file only parses on <= 3.6 -- confirm target runtime.
            t2 = threading.Thread(target=twitter_stream.filter(locations=config.bound,async=True))
            t2.start()
            t2.join()
        except Exception as e:
            time.sleep(5)
            print ("Error: t2 thread stop",e)
    print ("congratulation! Data harvest finished!")
|
[
"wayne@127.0.0.1 my_computer.local"
] |
wayne@127.0.0.1 my_computer.local
|
5c9012668e6fd64b0cc6875fd32e3144b136c72f
|
1c538a3c3a0c218bab4137bcefe650fdfc8be252
|
/api_server/assign_resources.py
|
ae6b08bbdbf8fe0adec08e7da5694a0c4e525383
|
[] |
no_license
|
Team-LZZZ/CarPooling-Server
|
b0070a41c6eea51232179d25ba7552d57b41d7bc
|
658b9f82a81d2f6a0cae9d563c93a57f75b2809e
|
refs/heads/master
| 2021-08-29T11:48:39.094390
| 2017-12-13T21:42:47
| 2017-12-13T21:42:47
| 108,786,782
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 667
|
py
|
from . import api
from .api_resources.UserLogin import UserLogin
from .api_resources.UserSettings import UserSettings
from .api_resources.UserRegister import UserRegister
from .api_resources.CarPools import CarPools
from .api_resources.GetToken import GetToken
from .api_resources.Reservations import Reservations
from .api_resources.Offers import Offers
# Register each Flask-RESTful resource class with its URL route on the
# shared Api instance created in the package __init__.
api.add_resource(UserLogin, "/api/login")
api.add_resource(UserRegister, "/api/reg")
api.add_resource(UserSettings, "/api/settings")
api.add_resource(GetToken, "/api/token")
api.add_resource(CarPools, "/api/carPools")
api.add_resource(Reservations, "/api/reservations")
api.add_resource(Offers, "/api/offers")
|
[
"zhouyou66666@gmail.com"
] |
zhouyou66666@gmail.com
|
22667e36536935585748b0c24fcc4a732a2b8384
|
d5e0347bc2f492afb969149113b494c20a030244
|
/basic/list.py
|
a380fe9b6d965c5b32418c285739e60e76c79a10
|
[] |
no_license
|
qingmingsang/python-demo
|
2eff3cd7452f690cf4e058a28e83099b2dc3a0c7
|
3c96659cd5d5de537e3eefdc42086cf36851f14a
|
refs/heads/master
| 2021-04-06T14:11:16.248901
| 2019-10-15T16:31:43
| 2019-10-15T16:31:43
| 125,257,307
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 357
|
py
|
# Basic list / range demos.

classmates = ["Michael", "Bob", "Tracy"]
print(classmates)
print(len(classmates))
print(classmates[-1])  # negative index counts from the end
# print(classmates[6])
# IndexError: list index out of range

print(range(5))  # a lazy range object, not a list
# range(0, 5)
print(list(range(5)))
# [0, 1, 2, 3, 4]

# Sum 0..100 with an explicit loop.  Named `total` instead of `sum`
# so the builtin sum() is not shadowed.
total = 0
for x in range(101):
    total = total + x
print(total)
# 5050

L = ["Bart", "Lisa", "Adam"]
for x in L:
    print(x)
|
[
"358242939@qq.com"
] |
358242939@qq.com
|
bc3e0aae1bddeb212273308b0e470cd458c735c0
|
ccabc13a33099b751cbf02459237ff806fdf3037
|
/week3/BiDAF_tf2/layers/attention.py
|
606212184a2c37720d71ba002efb8827df0b2717
|
[] |
no_license
|
SoloPro-Git/MRC_learning
|
9007f4f1cac814925be063a2d7857114a856a212
|
2a22a96789e21c6bb5e9d6fd301a57a69a01ce93
|
refs/heads/master
| 2023-02-07T18:48:56.898289
| 2020-12-17T10:01:04
| 2020-12-17T10:01:04
| 313,486,346
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,915
|
py
|
import tensorflow as tf
class C2QAttention(tf.keras.layers.Layer):
    """Context-to-query attention layer (BiDAF)."""

    def call(self, similarity, qencode):
        # 1. Add a context-time axis to the question encoding (tf.expand_dims).
        qencode_exp = tf.expand_dims(qencode, axis=1)
        # 2. Normalise the similarity matrix with softmax.
        #    NOTE(review): softmax runs over axis=1; BiDAF's C2Q attention
        #    normally normalises over the question axis -- confirm.
        similarity_softmax = tf.keras.activations.softmax(similarity, axis=1)
        # 3. Add a feature axis so the weights broadcast over the hidden dim.
        similarity_softmax_exp = tf.expand_dims(similarity_softmax, axis=-1)
        # 4. Combine weights with the question encoding.
        #    NOTE(review): the original comment calls for a weighted sum
        #    (tf.math.reduce_sum) but the code takes reduce_max over axis=2
        #    -- confirm which is intended.
        c2q_att = tf.reduce_max(tf.multiply(qencode_exp, similarity_softmax_exp), axis=2)
        return c2q_att
class Q2CAttention(tf.keras.layers.Layer):
    """Query-to-context attention layer (BiDAF)."""

    def call(self, similarity, cencode):
        # 1. Max similarity each context position attains (tf.math.reduce_max).
        simi_max = tf.reduce_max(similarity, axis=2)
        # 2. Softmax over the context axis.
        simi_sfmax = tf.keras.activations.softmax(simi_max, axis=1)
        # 3. Feature axis for broadcasting against the context encoding.
        simi_sfmax_exp = tf.expand_dims(simi_sfmax, axis=-1)
        # 4. Weighted combination with the context encoding.
        #    NOTE(review): reduce_sum over axis=2 collapses the feature
        #    dimension; a weighted sum over context positions would use
        #    axis=1 -- confirm.
        simi_sum = tf.math.reduce_sum(tf.multiply(simi_sfmax_exp, cencode), axis=2)
        # 5. NOTE(review): exact duplicate of step 4 -- recomputes the same
        #    tensor; harmless but redundant.
        simi_sum = tf.math.reduce_sum(tf.multiply(simi_sfmax_exp, cencode), axis=2)
        # 6/7. Repeat the pooled vector once per context step (tf.tile).
        simi_sum_exp = tf.expand_dims(simi_sum, axis=-2)
        q2c_att = tf.tile(simi_sum_exp, (1, cencode.shape[1], 1))
        return q2c_att
if __name__ == '__main__':
    # Smoke test: batch=2, T=5 context steps, J=8 question steps, 2d=10 features.
    g1 = tf.random_uniform_initializer(minval=0)
    simi = g1(shape=[2, 5, 8])      # fake similarity matrix (batch, T, J)
    q = tf.ones(shape=(2, 8, 10))   # fake question encoding (batch, J, 2d)
    att_layer = C2QAttention()
    att_layer.call(simi, q)
|
[
"137033760@qq.com"
] |
137033760@qq.com
|
b5f9ff772b6e5342461769fd2d62a492d7654f0d
|
d1b9fc59b69ffbfe988ea308f9005df12b50d4cd
|
/downloader.py
|
93d08ab725bf40a751a2c400bc9e0325988b1be2
|
[] |
no_license
|
jakemuncada/xkcd-crawler
|
98b9287fe213dbcbf17c64d9433bcf641369db8e
|
7f0788585d41dfb13321290c654856de16123113
|
refs/heads/master
| 2023-03-28T02:55:03.221505
| 2021-04-01T00:32:17
| 2021-04-01T00:32:17
| 337,334,063
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,376
|
py
|
"""
Provides high-level functions for fetching stuff from the internet.
"""
import shutil
import requests
class DownloaderError(Exception):
    """Raised for failures specific to the Downloader helpers."""
class Downloader:
    """
    Provides high-level functions for fetching stuff from the internet.
    """

    ##################################################
    # GET REQUEST
    ##################################################
    @staticmethod
    def get(url):
        """
        Send a GET request to the given URL.

        Parameters:
            url: The URL.

        Returns:
            response: The requests library response (None if the request
                failed before a response was produced).
            error: The error that was generated. None if the request was successful.
        """
        response = None
        error = None
        try:
            response = requests.get(url)
            # Treat HTTP error statuses (4xx/5xx) as failures too.
            response.raise_for_status()
        except Exception as err:  # pylint: disable=broad-except
            error = err
        return response, error

    ##################################################
    # DOWNLOAD IMAGE
    ##################################################
    @staticmethod
    def downloadImage(url, outputPath):
        """
        Download an image to the given path.

        Parameters:
            url (str): The image URL.
            outputPath (str): The full path (including filename) of the image.

        Returns:
            An exception if the download failed. None if the download was a success.
        """
        error = None
        response = None
        try:
            response = requests.get(url, stream=True)
            if response.status_code != 200:
                raise DownloaderError(f'Error: Status code {response.status_code}')
            with open(outputPath, 'wb') as outputFile:
                shutil.copyfileobj(response.raw, outputFile)
        except Exception as err:  # pylint: disable=broad-except
            error = err
        finally:
            # A streamed response keeps the pooled connection open until it
            # is closed; the original `del response` did not guarantee this
            # on error paths.
            if response is not None:
                response.close()
        return error

    ##################################################
    # GET ERROR STRING
    ##################################################
    @staticmethod
    def getErrorString(err):
        """
        Get the description of the error.

        Parameters:
            err: The error.

        Returns:
            The description of the error.
        """
        # Ordered most-specific first: several entries are subclasses of
        # later ones (ProxyError/SSLError -> ConnectionError,
        # ConnectTimeout/ReadTimeout -> Timeout), mirroring the original
        # try/except dispatch order.
        descriptions = (
            (requests.exceptions.HTTPError, 'An HTTP error occurred.'),
            (requests.exceptions.ProxyError, 'A proxy error occurred.'),
            (requests.exceptions.SSLError, 'An SSL error occurred.'),
            (requests.exceptions.ConnectTimeout,
             'The request timed out while trying to connect to the remote server.'),
            (requests.exceptions.ReadTimeout,
             'The server did not send any data in the allotted amount of time.'),
            (requests.exceptions.Timeout, 'The request timed out.'),
            (requests.exceptions.ConnectionError, 'A Connection error occurred.'),
            (requests.exceptions.URLRequired, 'A valid URL is required to make a request.'),
            (requests.exceptions.TooManyRedirects, 'Too many redirects.'),
            (requests.exceptions.MissingSchema, 'The URL schema (e.g. http or https) is missing.'),
            (requests.exceptions.InvalidSchema, 'The URL schema is invalid.'),
            (requests.exceptions.InvalidHeader, 'The header value provided was somehow invalid.'),
            (requests.exceptions.InvalidProxyURL, 'The proxy URL provided is invalid.'),
            (requests.exceptions.InvalidURL, 'The URL provided was somehow invalid.'),
        )
        for excType, desc in descriptions:
            if isinstance(err, excType):
                return desc
        return 'An unexpected error occurred.'
|
[
"jake@tkb.mss.co.jp"
] |
jake@tkb.mss.co.jp
|
b60a557139a95f213d84afedb6641a8bf0a4412d
|
ccfd551d677652ba1cf716fd514fac454faefd51
|
/minify.py
|
9b534e07816dee71eaff7a652906e9e8bc30f3f4
|
[] |
no_license
|
miguel-acevedo/CssMinify
|
3b7ce385773eb15ec2167a1300945c5e20dfc5ec
|
10e1ce9f059277544c48fd4955923a270e67fd51
|
refs/heads/master
| 2021-01-15T11:03:27.273605
| 2017-08-07T19:18:05
| 2017-08-07T19:18:05
| 99,608,332
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,127
|
py
|
import os
import requests
css = 'https://cssminifier.com/raw'  # cssminifier.com endpoint returning the raw minified text
js = 'https://javascript-minifier.com/raw'  # matching endpoint for JavaScript
def minifyCode(url, path):
    """POST the file at *path* to the minifier service at *url* and
    overwrite the file with the minified response.

    Fixes the original's unclosed file handles by using `with`.
    """
    print(path)
    # Pack the source file into the form field the service expects.
    with open(path, 'rb') as src:
        data = {'input': src.read()}
    # Send a POST request, then retrieve the minified text.
    response = requests.post(url, data=data)
    # Overwrite the local file with the minified result.
    with open(path, 'w') as wr:
        wr.write(response.text)
def find_files(directory):
    """Recursively minify every .css and .js file under *directory*.

    Directories are only recursed into; the original fell through to the
    filename checks after recursing, so a directory named e.g. "x.css"
    would have been opened as a file and crashed minifyCode.
    """
    for filename in os.listdir(directory):
        path = os.path.join(directory, filename)
        if os.path.isdir(path):
            # Recurse into subdirectories and do nothing else with them.
            find_files(path)
        elif filename.endswith(".css"):
            minifyCode(css, path)
        elif filename.endswith(".js"):
            minifyCode(js, path)
find_files(os.getcwd()) #Calls the function with the current directory
|
[
"acevedomiguel@outlook.com"
] |
acevedomiguel@outlook.com
|
bcaf8c92849c381fc6a341c20a4a37be90d0e991
|
6b948d8110b910aed1989e940f194dcee5e3ba51
|
/538_HW1_113166835/model.py
|
5d4531881ef1d484979c3a06b653b5e6779f15a5
|
[] |
no_license
|
SriramVithala/NLP
|
3589e07b7e5348c5418723cb74f780d607cb8932
|
ca61b9a9923f816312070e9a5d7b4a98ba66ffd5
|
refs/heads/main
| 2023-08-18T10:48:37.557680
| 2021-09-27T03:08:06
| 2021-09-27T03:08:06
| 409,264,778
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,081
|
py
|
"""
author-gh: @adithya8
editor-gh: ykl7
"""
import math
import numpy as np
from numpy.core.fromnumeric import size
from numpy.core.records import array
import torch
import torch.nn as nn
sigmoid = lambda x: 1/(1 + torch.exp(-x))
class WordVec(nn.Module):
    """Skip-gram word2vec model with separate center/context embedding
    tables, trainable with either full-softmax NLL or negative sampling.
    """

    def __init__(self, V, embedding_dim, loss_func, counts):
        """
        Args:
            V: vocabulary size.
            embedding_dim: dimensionality of the embeddings.
            loss_func: "nll" or "neg"; selects the objective in forward().
            counts: per-word corpus frequencies, used to build the
                negative-sampling distribution.
        """
        super(WordVec, self).__init__()
        self.center_embeddings = nn.Embedding(num_embeddings=V, embedding_dim=embedding_dim)
        # Initialise ~ N(0, 1/sqrt(d)) and clamp into [-1, 1].
        self.center_embeddings.weight.data.normal_(mean=0, std=1/math.sqrt(embedding_dim))
        self.center_embeddings.weight.data[self.center_embeddings.weight.data<-1] = -1
        self.center_embeddings.weight.data[self.center_embeddings.weight.data>1] = 1
        self.context_embeddings = nn.Embedding(num_embeddings=V, embedding_dim=embedding_dim)
        self.context_embeddings.weight.data.normal_(mean=0, std=1/math.sqrt(embedding_dim))
        # NOTE(review): context weights are clamped just inside (-1, 1)
        # while center weights use the closed interval -- confirm intended.
        self.context_embeddings.weight.data[self.context_embeddings.weight.data<-1] = -1 + 1e-10
        self.context_embeddings.weight.data[self.context_embeddings.weight.data>1] = 1 - 1e-10
        self.loss_func = loss_func
        self.counts = counts

    def forward(self, center_word, context_word):
        # Dispatch to the objective chosen at construction time.
        if self.loss_func == "nll":
            return self.negative_log_likelihood_loss(center_word, context_word)
        elif self.loss_func == "neg":
            return self.negative_sampling(center_word, context_word)
        else:
            raise Exception("No implementation found for %s"%(self.loss_func))

    def negative_log_likelihood_loss(self, center_word, context_word):
        """Full-softmax skip-gram loss: mean over the batch of
        log(sum_j exp(c . u_j)) - c . u_context.
        """
        ### TODO(students): start
        # import pdb; pdb.set_trace()
        center_embeds = self.center_embeddings(center_word)
        context_embeds = self.context_embeddings(context_word)
        # (batch, batch) score matrix between centers and contexts.
        MatrixMultiplication = torch.matmul(center_embeds , context_embeds.T)
        # log of the softmax partition, per center word.
        totalSum=torch.sum(torch.exp(MatrixMultiplication), dim=1)
        logofsum=torch.log(totalSum)
        # positive-pair scores: elementwise product summed over features.
        MatrixMultiplication1=(torch.multiply(center_embeds, context_embeds))
        totalsum1=torch.sum(MatrixMultiplication1, dim=1)
        # # torch.exp()
        loss=torch.mean(logofsum-totalsum1)
        ### TODO(students): end
        # loss=0
        return loss

    def negative_sampling(self, center_word, context_word):
        """Negative-sampling loss with k=5 negatives per positive pair."""
        ### TODO(students): start
        center_embeds = self.center_embeddings(center_word)
        context_embeds = self.context_embeddings(context_word)
        batch_size=center_word.size()[0]
        # Sampling distribution proportional to raw corpus counts.
        # NOTE(review): presumably self.counts is a NumPy array (the `/`
        # broadcasts); confirm, and note replace=False applies across the
        # whole (batch, k) draw, not per row.
        probability=(self.counts)/sum(self.counts)
        k=5
        neg= np.random.choice(len(self.counts),(batch_size,k), replace=False, p=probability)
        Negative_embeds=self.context_embeddings.weight[neg]
        # Positive term: log sigma(c . u_o).
        sum1 = torch.log(torch.sigmoid(torch.sum(torch.multiply(center_embeds , context_embeds),dim=1)))
        center_embeds = center_embeds.reshape((center_embeds.shape[0], center_embeds.shape[1], 1))
        # Negative term: sum_k log sigma(-c . u_neg_k).
        sum2 = torch.sum(torch.log(torch.sigmoid(torch.sum(-torch.matmul(Negative_embeds, center_embeds),dim=2))),dim=1)
        loss=torch.mean(-sum2-sum1)
        ### TODO(students): end
        return loss

    def print_closest(self, validation_words, reverse_dictionary, top_k=8):
        """Print the top_k nearest words (cosine similarity over the
        center embeddings) for each validation word id.
        """
        print('Printing closest words')
        embeddings = torch.zeros(self.center_embeddings.weight.shape).copy_(self.center_embeddings.weight)
        embeddings = embeddings.data.cpu().numpy()
        validation_ids = validation_words
        # L2-normalise so dot products are cosine similarities.
        norm = np.sqrt(np.sum(np.square(embeddings),axis=1,keepdims=True))
        normalized_embeddings = embeddings/norm
        validation_embeddings = normalized_embeddings[validation_ids]
        similarity = np.matmul(validation_embeddings, normalized_embeddings.T)
        for i in range(len(validation_ids)):
            word = reverse_dictionary[validation_words[i]]
            # Skip index 0 of the sort: that is the word itself.
            nearest = (-similarity[i, :]).argsort()[1:top_k+1]
            print(word, [reverse_dictionary[nearest[k]] for k in range(top_k)])
|
[
"noreply@github.com"
] |
noreply@github.com
|
6de3f25bbada06daec113eef9872b1c48f03e8b2
|
ac46ba236c54af834ef1d37870fd6147b9b606e3
|
/lesson_6/insert.py
|
63defec9dcb5a5177ee284cd5c3690357e20fb56
|
[] |
no_license
|
Loosper/algorithms
|
55c929022501a32a65ef94a3b98e3bbb0c152eaa
|
7c435a5c2ef50f59b292d907854e86c07698b4c3
|
refs/heads/master
| 2021-05-15T09:30:27.632163
| 2018-02-10T15:59:49
| 2018-02-10T15:59:49
| 108,136,059
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 625
|
py
|
"""
Insert Node at a specific position in a linked list
head input could be None as well for empty list
Node is defined as
class Node(object):
def __init__(self, data=None, next_node=None):
self.data = data
self.next = next_node
return back the head of the linked list in the below method.
"""
class Node:
    """Singly-linked list node (matches the contract in the module
    docstring).  The original stub (`pass`) could not be constructed as
    Node(data, next_node) the way InsertNth calls it.
    """

    def __init__(self, data=None, next_node=None):
        self.data = data        # payload stored in this node
        self.next = next_node   # reference to the next node, or None
def InsertNth(head, data, position):
    """Insert *data* at index *position* in the list rooted at *head*.

    Returns the head of the resulting list (a new node when position == 0,
    otherwise the original head).
    """
    if position == 0:
        # New node becomes the head; old list hangs off it.
        return Node(data, head)
    # Walk to the node just before the insertion point.
    node = head
    for _ in range(position - 1):
        node = node.next
    node.next = Node(data, node.next)
    return head
|
[
"boian4o1@gmail.com"
] |
boian4o1@gmail.com
|
1821540a5a6f992cc99d9431e91eda6bcd4b6a05
|
c8ef42b8fa355d62bb6c06c61c54baf00e4c4c90
|
/KDD99/kddMultiClass.py
|
a8c5c7192bf7ad6d92d28446d80cef682697cb86
|
[] |
no_license
|
Gci04/LightweightAutoencoderApproachForAnomalyDetection
|
0d90517bcb34765a686be6e30aea3f5a4a529d73
|
a474218b786e45448bde36ef5cb549cae82a326a
|
refs/heads/master
| 2022-04-27T04:12:20.376461
| 2020-04-07T12:24:46
| 2020-04-07T12:24:46
| 187,182,098
| 3
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,877
|
py
|
import numpy as np
import pandas as pd
np.random.seed(43)
import os, sys, keras, pickle, warnings
from scipy import stats
from time import time
import tensorflow as tf
from keras.layers import Input, Dense, Dropout
from keras.models import Model
from keras.callbacks import TensorBoard
from keras import optimizers, regularizers, backend as K
warnings.filterwarnings('ignore')
from sklearn.preprocessing import StandardScaler
from sklearn.metrics import classification_report
import seaborn as sn
from matplotlib import pyplot as plt
from preprocessing import get_kdd_data
import Utils
# Load KDD99 with multiclass labels; indx maps attack class -> test-row indices.
train ,test ,indx = get_kdd_data("multiclass")
train_label = train.label
train = train.drop(["label"],axis=1)
Scaler = StandardScaler()
# Fit the scaler on ALL training rows, then keep only normal traffic
# (label == 1) for autoencoder training.
train = Scaler.fit_transform(train.values)[np.where(train_label == 1)]
# Test set keeps every class; labels stay as strings for later masking.
xtest , ytest = Scaler.transform(test.drop(["label"],axis=1)), test.label.values
def fit_kdd_AE(X):
    """Build and train a symmetric deep autoencoder on X.

    Architecture: 100-50-25 tanh encoder -> 12-d linear latent code ->
    25-50-100 tanh decoder -> linear reconstruction of input_dim features.

    Args:
        X: 2-D array of scaled feature rows (normal traffic only).

    Returns:
        The trained Keras Model (trained to reconstruct its input, MSE loss).
    """
    input_dim = X.shape[1]
    latent_space_size = 12
    K.clear_session()
    input_ = Input(shape = (input_dim, ))
    layer_1 = Dense(100, activation="tanh")(input_)
    layer_2 = Dense(50, activation="tanh")(layer_1)
    layer_3 = Dense(25, activation="tanh")(layer_2)
    encoding = Dense(latent_space_size,activation=None)(layer_3)
    layer_5 = Dense(25, activation="tanh")(encoding)
    layer_6 = Dense(50, activation="tanh")(layer_5)
    layer_7 = Dense(100, activation='tanh')(layer_6)
    decoded = Dense(input_dim,activation=None)(layer_7)
    autoencoder = Model(inputs=input_ , outputs=decoded)
    # opt = optimizers.Adam(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-08, decay=0.0)
    autoencoder.compile(metrics=['accuracy'],loss='mean_squared_error',optimizer="adam")
    # autoencoder.summary()
    # TensorBoard logs are keyed by wall-clock time so runs don't collide.
    tb = TensorBoard(log_dir="kdd99logs/{}".format(time()),histogram_freq=0,write_graph=True,write_images=False)
    # Fit autoencoder.  NOTE(review): shuffle=False + validation_split=0.1
    # means the LAST 10% of rows is always the validation set -- confirm.
    autoencoder.fit(X, X,epochs=100,validation_split=0.1 ,batch_size=100,shuffle=False,verbose=0,callbacks=[tb])
    return autoencoder
# Train on normal traffic, derive an anomaly threshold from the
# reconstruction-loss confidence interval, then score the test set.
model = fit_kdd_AE(train)
losses = Utils.get_losses(model, train)
thresholds = Utils.confidence_intervals(losses,0.95)
threshold = thresholds[1]  # upper bound of the 95% interval
pred = Utils.predictAnomaly(model,xtest,threshold)
true = np.where(ytest == "normal", 1,0)
Utils.performance(pred,true)
#1 : normal , 0 : Anomal
# Per-attack-class report: build a ground-truth/prediction pair where only
# this class's rows are marked anomalous, and report the anomaly ("0.0") row.
for key in indx.keys():
    if(key != "normal"):
        print('-'*35)
        print(' '*18 + key)
        print('-'*35)
        temp = np.ones(len(pred))
        mask = indx[key]
        np.put(temp,mask,0)           # ground truth: this class anomalous
        temp_pred = np.ones(len(pred))
        np.put(temp_pred,mask,pred[mask])  # predictions restricted to this class
        res = classification_report(temp,temp_pred,output_dict=True)["0.0"]
        print("{:<12s}{:<12s}{:<12s}".format("precision", "recall" ,"f1-score"))
        print("{:<12.2f} {:<12.2f} {:<12.2f}".format(res["precision"],res["recall"],res["f1-score"]))
        print()
|
[
"gcinzoe04@gmail.com"
] |
gcinzoe04@gmail.com
|
f2c1ef7bf41e8e509ea29dba4ec11c8ad4e17a5e
|
0d90a0e3174c72aceb4107ca378c52afc7fa7f55
|
/sslproject/sslproject/settings.py
|
4041abe31b3cfa8da348a39382be9e75de4cdc36
|
[] |
no_license
|
alphaWizard/SSLproject
|
c555e2daffee539038558d483b5f0f44c6775cf3
|
c18543316484cb835b93c35c49bfb872de2d47c8
|
refs/heads/master
| 2021-08-22T03:45:54.702602
| 2017-11-29T05:44:47
| 2017-11-29T05:44:47
| 108,448,143
| 0
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,226
|
py
|
"""
Django settings for sslproject project.
Generated by 'django-admin startproject' using Django 1.11.3.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control; rotate it and load
# it from the environment before any production deployment.
SECRET_KEY = '4jus(01i7s%6nr*b&((tj#i^qn#^of!^9!vb-*@8#bt7=e^i7-'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'mywebsite',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'sslproject.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'sslproject.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
# Authentication redirects (namespaced URL names in the mywebsite app).
LOGIN_URL = 'mywebsite:login'
LOGOUT_URL = 'mywebsite:logout'
LOGIN_REDIRECT_URL = 'mywebsite:home'
|
[
"debangshubanerjee1997@gmail.com"
] |
debangshubanerjee1997@gmail.com
|
e82b311c44c264672396d4f6b68583127bf3dcc8
|
b700c8cfd4033be5a3081f5af94e8a65796dd04a
|
/plots.py
|
0c4542ad4c859ecee52ec5f03927bd5050e75b5e
|
[] |
no_license
|
ahriley/infall-times-gaia
|
149ca9fbb4d5be0b9547d6e2ef702c9ef64b450e
|
96764b33d1da2d40cc8893d7f43ca1f4a2323808
|
refs/heads/master
| 2021-06-01T22:35:03.219172
| 2018-07-12T16:15:10
| 2018-07-12T16:15:10
| 136,056,587
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,159
|
py
|
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
from utils import *
from astropy.cosmology import WMAP7
# Rocha plots for ELVIS
# First pass: collect all subhalo radii to fix a common colorbar scale.
r = np.array([])
for sim in list_of_sims('elvis'):
    halos, subs = load_elvis(sim=sim, processed=True)
    r = np.append(r, subs.r)
max_r = np.max(r)
# Second pass: accumulate binding energy, infall lookback time, radius and
# radial velocity over the isolated (name starts with 'i'), non-HiRes volumes.
bind, z, r, v_r = [np.array([]) for i in range(4)]
for sim in list_of_sims('elvis'):
    if sim[0] != 'i' or 'HiRes' in sim:
        continue
    try:
        halos, subs = load_elvis(sim=sim, processed=True)
        subs = subs[subs.nadler2018 > 0.5]
        pot = subs.pot_mltr
    except AttributeError:
        print(sim+" not included")
        continue
    # Binding energy = -potential - kinetic (per unit mass).
    bind_sim = -pot - 0.5*(subs.v_r.values**2 + subs.v_t.values**2)
    bind = np.append(bind, bind_sim)
    z = np.append(z, WMAP7.lookback_time(1/subs.a_acc.values - 1))
    r = np.append(r, subs.r)
    v_r = np.append(v_r, subs.v_r)
# Dead code deliberately kept as a triple-quoted string (per-volume plots).
"""
plt.scatter(WMAP7.lookback_time(1/subs.a_acc.values - 1)[bind_sim>0], np.log10(bind_sim[bind_sim>0]), s=2.0, c=subs.r[bind_sim>0], cmap='plasma', vmin=0.0, vmax=max_r)
plt.colorbar().set_label(r'Galactocentric Radius [$kpc$]')
plt.xlim(0.0, WMAP7.lookback_time(np.inf).value)
plt.ylim(3.4,5.2)
plt.yticks([3.5,4.0,4.5,5.0])
plt.xlabel(r'Infall time [$Gyr$]')
plt.ylabel(r'log(Binding Energy) [$km^2\ s^{-2}$]');
plt.savefig('figures/eachvolume/rocha_fig1_'+sim+'.png', bbox_inches='tight')
plt.close()
plt.scatter(subs.r[bind_sim>0], subs.v_r[bind_sim>0], s=2.0, c=WMAP7.lookback_time(1/subs.a_acc.values - 1)[bind_sim>0], cmap='plasma')
plt.colorbar().set_label(r'Infall time [$Gyr$]')
plt.xlabel(r'Galactocentric Radius [$kpc$]')
plt.ylabel(r'Radial Velocity [$km/s$]')
plt.savefig('figures/eachvolume/rocha_fig3_'+sim+'.png', bbox_inches='tight')
plt.close()
"""
# Combined plot over all isolated volumes (bound subhaloes only).
plt.scatter(z[(bind>0)], np.log10(bind[bind>0]), c=r[bind>0], s=2., cmap='plasma')
plt.ylim(2.5,5.2)
plt.colorbar().set_label(r'Galactocentric Radius [$kpc$]')
plt.xlabel(r'Infall time [$Gyr$]')
plt.ylabel(r'log(Binding Energy) [$km^2\ s^{-2}$]');
plt.savefig('figures/isolated.png', bbox_inches='tight')
# Dead code kept as a string: single-halo (VL2) versions of the same plots.
"""
# plot for single halo
halos, subs = load_vl2(scale=1.0, processed=True)
# subs = subs[subs.nadler2018 > 0.5]
z = WMAP7.lookback_time(1/subs.a_acc.values - 1)
# z = subs.a_acc.values
r = subs.r.values
v_r = subs.v_r
bind = -subs.pot_mltr_1000.values - 0.5*(subs.v_r.values**2 + subs.v_t.values**2)
plt.scatter(z[bind>0], np.log10(bind[bind>0]), s=10.0, c=r[bind>0], cmap='plasma')
plt.colorbar().set_label(r'Galactocentric Radius [$kpc$]')
plt.title('VL2')
plt.ylim(3.4,5.2)
plt.yticks([3.5,4.0,4.5,5.0])
plt.xlabel(r'Infall time [$Gyr$]')
plt.ylabel(r'log(Binding Energy) [$km^2\ s^{-2}$]');
# plt.savefig('figures/rocha_fig1_iScylla_HiRes.png', bbox_inches='tight')
plt.close()
plt.scatter(r[bind>0], v_r[bind>0], s=2.0, c=z[bind>0], cmap='plasma')
plt.colorbar().set_label(r'Infall time [$Gyr$]')
plt.xlabel(r'Galactocentric Radius [$kpc$]')
plt.ylabel(r'Radial Velocity [$km/s$]');
plt.title('iScylla_HiRes')
# plt.savefig('figures/rocha_fig3_iScylla_HiRes.png', bbox_inches='tight')
plt.close()
# """
|
[
"30327239+ahriley@users.noreply.github.com"
] |
30327239+ahriley@users.noreply.github.com
|
b4cef5c07bfaf8de55ea028da11403d750c273ac
|
90e2412b0216f27285a98f4ee713e8f819910a6f
|
/manage.py
|
b9cc74196e0c0be51010c293aecec323f3a66509
|
[] |
no_license
|
rsikri/LetsCarpool
|
504edbf1125c736a51896865b7e137ac8b9d3d82
|
60c6aa010d2308844fb168612201e6659f04fc37
|
refs/heads/master
| 2021-04-09T11:42:40.011302
| 2018-03-16T21:30:00
| 2018-03-16T21:30:00
| 125,552,630
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 256
|
py
|
#!/usr/bin/env python
"""Standard Django management entry point for the ShareYourRide project."""
import os
import sys
if __name__ == "__main__":
    # Point Django at this project's settings before importing management code.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ShareYourRide.settings")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
|
[
"richasikri@Richas-MacBook-Pro.local"
] |
richasikri@Richas-MacBook-Pro.local
|
5b3dbdd973981f53d7e2243cb5ad29122bae8999
|
6f2ee69b2b69877950335936f23f8d584e7711af
|
/src/implementations/helpers/partition.py
|
a2a6bd9976b416318124b09e58107623ac1a9881
|
[
"MIT"
] |
permissive
|
wobedi/algorithms-and-data-structures
|
b5a8fdb27be53ba9fae2a93c9aaa949852bf5ce4
|
2d43ac66fd87881182aed65ec22e7016541e9315
|
refs/heads/master
| 2020-08-24T08:21:17.406623
| 2020-08-04T13:47:28
| 2020-08-04T13:47:28
| 216,792,563
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 841
|
py
|
def three_way_partition(arr: list, lower: int, upper: int) -> (int, int):
    """In-place Dutch-national-flag partition of arr[lower:upper+1].

    Reorders the slice so elements smaller than the pivot (arr[lower])
    come first, pivot-equal elements next, larger ones last.  Returns
    (lt, gt): the inclusive index range holding the pivot-equal run.
    Implements:
    https://en.wikipedia.org/wiki/Dutch_national_flag_problem#The_array_case
    """
    # Performance could be improved by a smarter pivot, e.g. the median.
    pivot = arr[lower]
    below, above = lower, upper
    cursor = lower
    while cursor <= above:
        current = arr[cursor]
        if current < pivot:
            arr[below], arr[cursor] = current, arr[below]
            below += 1
            cursor += 1
        elif current > pivot:
            arr[above], arr[cursor] = current, arr[above]
            above -= 1
        else:
            cursor += 1
    return below, above
if __name__ == '__main__':
    arr = [4, 5, 4, 4, 1, 8, 3, 2, 9, 6]  # pivot is arr[0] == 4
    lt, gt = three_way_partition(arr, 0, len(arr) - 1)
    print(f'lt: {lt}, gt: {gt}')
    # The pivot-equal run (the three 4s) must occupy indices 3..5.
    assert lt == 3
    assert gt == 5
|
[
"yanick.steinbeck@gmail.com"
] |
yanick.steinbeck@gmail.com
|
5d85ec597bf50561c780343e1b57a17fe70cdec9
|
581c2beee0cf5656971987a19064524e3de7bc68
|
/distributions/lomax.py
|
6afe6b8af24109d2284426642b481f36d5ef196f
|
[
"MIT"
] |
permissive
|
bballamudi/survival
|
ac6925b30ba3ca9ed574ba056d36830f9129114f
|
c86186b08e7896096b9d59a5013335f56283a7c4
|
refs/heads/master
| 2020-03-21T14:16:29.578172
| 2018-06-09T21:09:08
| 2018-06-09T21:09:08
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 13,123
|
py
|
import numpy as np
from scipy.stats import lomax
from distributions.basemodel import *
class Lomax(Base):
    '''
    Lomax (Pareto Type II) distribution with shape k and scale lmb
    (https://en.wikipedia.org/wiki/Lomax_distribution), supporting
    censored-data maximum-likelihood fitting via the Base machinery.
    '''
    def __init__(self, k = None, lmb = None, ti = None, xi = None):
        '''
        Instantiate a Lomax distribution.
        args:
            k: The shape parameter of the Lomax distribution.
            lmb: The scale parameter of the lomax distribution.
            ti: The uncensored samples for fitting the distribution.
            xi: The censored samples for fitting the distribution.
        '''
        if ti is not None:
            # Fit mode: store the data and estimate the parameters.
            self.train_org = ti
            self.train_inorg = xi
            # newtonRh() lives outside this chunk; presumably it sets
            # self.k / self.lmb via Newton-Raphson -- TODO confirm.
            self.newtonRh()
        else:
            self.train = []
            self.test = []
            self.train_org = []
            self.train_inorg = []
            self.k = k
            self.lmb = lmb
        # Cached pair; in the fit branch this relies on newtonRh()
        # having set self.k and self.lmb.
        self.params = [self.k, self.lmb]
    def determine_params(self, k, lmb, params):
        '''
        Resolve the effective (k, lmb) pair: explicit arguments win over
        *params* over the fitted instance values.  The actual resolution
        logic lives in Base.determine_params (basemodel.py).
        '''
        return super(Lomax, self).determine_params(k, lmb, params)
def pdf(self,t,k=None,lmb=None,params=None):
'''
The probability distribution function (PDF) of the Lomax distribution.
args:
t: The value at which the PDF is to be calculated.
k: The shape parameter of the Lomax distribution.
lmb: The scale parameter of the lomax distribution.
'''
[k,lmb] = self.determine_params(k,lmb,params)
return lmb*k/(1+lmb*t)**(k+1)
def cdf(self,t,k=None,lmb=None,params=None):
'''
The cumulative density functino of the Lomax distribution.
Probability that the distribution is lower than a certain value.
args:
t: The value at which CDF is to be calculated.
k: The shape parameter of the Lomax.
lmb: The sclae parameter of the Lomax.
params: A 2d array with the shape and scale parameters.
'''
[k,lmb] = self.determine_params(k,lmb,params)
return 1-(1+lmb*t)**-k
def survival(self,t,k=None,lmb=None, params = None):
'''
The survival function for the Lomax distribution.
'''
[k,lmb] = self.determine_params(k,lmb,params)
return (1+lmb*t)**-k
def logpdf(self,t,k,lmb):
'''
The logarithm of the PDF function. Handy for calculating log likelihood.
args:
t: The value at which function is to be calculated.
l: The shape parameter.
lmb: The scale parameter.
'''
return np.log(k) + np.log(lmb) - (k+1)*np.log(1+lmb*t)
def logsurvival(self,t,k,lmb):
'''
The logarithm of the survival function. Handy for calculating log likelihood.
args:
t: The value at which function is to be calculated.
l: The shape parameter.
lmb: The scale parameter.
'''
return -k*np.log(1+lmb*t)
def loglik(self,t,x,k=0.5,lmb=0.3):
'''
The logarithm of the likelihood function.
args:
t: The un-censored samples.
x: The censored samples.
l: The shape parameter.
lmb: The scale parameter.
'''
return sum(self.logpdf(t,k,lmb)) +sum(self.logsurvival(x,k,lmb))
def grad(self,t,x,k=0.5,lmb=0.3):
'''
The gradient of the log-likelihood function.
args:
t: The un-censored samples.
x: The censored samples.
l: The shape parameter.
lmb: The scale parameter.
'''
n = len(t)
m = len(x)
delk = n/k - sum(np.log(1+lmb*t)) - sum(np.log(1+lmb*x))
dellmb = n/lmb -(k+1) * sum(t/(1+lmb*t)) -k*sum(x/(1+lmb*x))
return np.array([delk,dellmb])
    def numerical_grad(self,t,x,k=None,lmb=None):
        '''
        Calculates the gradient of the log-likelihood function numerically
        via central differences; useful for sanity-checking grad().
        args:
            t: The survival data.
            x: The censored data.
            k: The shape parameter (defaults to the fitted value).
            lmb: The scale parameter (defaults to the fitted value).
        '''
        if k is None or lmb is None:
            k = self.k
            lmb = self.lmb
        eps = 1e-5
        # Central difference in each coordinate: (f(p+eps) - f(p-eps)) / (2*eps).
        delk = (self.loglik(t,x,k+eps,lmb) - self.loglik(t,x,k-eps,lmb))/2/eps
        dellmb = (self.loglik(t,x,k,lmb+eps) - self.loglik(t,x,k,lmb-eps))/2/eps
        return np.array([delk, dellmb])
def hessian(self,t,x,k=0.5,lmb=0.3):
'''
The hessian of the Loglikelihood function for Lomax.
args:
t: The un-censored samples.
x: The censored samples.
l: The shape parameter.
lmb: The scale parameter.
'''
n=len(t)
delksq = -n/k**2
dellmbsq = -n/lmb**2 + (k+1)*sum((t/(1+lmb*t))**2) + k*sum((x/(1+lmb*x))**2)
delklmb = -sum(t/(1+lmb*t)) - sum(x/(1+lmb*x))
hess = np.zeros([2,2])
hess[0,0] = delksq
hess[1,1] = dellmbsq
hess[0,1] = hess[1,0] = delklmb
return hess
    def numerical_hessian(self,t,x,k=0.5,lmb=0.3):
        '''
        Calculates the hessian of the log-likelihood function numerically
        via second-order central differences; useful for checking hessian().
        args:
            t: The survival data.
            x: The censored data.
            k: The shape parameter.
            lmb: The scale parameter.
        '''
        eps = 1e-4
        # Pure second derivatives: (f(p+2e) + f(p-2e) - 2 f(p)) / (2e)^2.
        delksq = (self.loglik(t,x,k+2*eps,lmb) + self.loglik(t,x,k-2*eps,lmb) - 2*self.loglik(t,x,k,lmb))/4/eps/eps
        dellmbsq = (self.loglik(t,x,k,lmb+2*eps) + self.loglik(t,x,k,lmb-2*eps) - 2*self.loglik(t,x,k,lmb))/4/eps/eps
        # Mixed derivative via the four diagonal stencil points.
        dellmbk = (self.loglik(t,x,k+eps,lmb+eps) + self.loglik(t,x,k-eps,lmb-eps)
                    - self.loglik(t,x,k+eps,lmb-eps) - self.loglik(t,x,k-eps,lmb+eps))/4/eps/eps
        hess = np.zeros([2,2])
        hess[0,0] = delksq
        hess[1,1] = dellmbsq
        hess[0,1] = hess[1,0] = dellmbk
        return hess
    def gradient_descent(self, numIter=2001, params = np.array([.5,.3]), verbose=False):
        '''
        Performs gradient descent to get the best fitting parameters for
        this Lomax given the censored and un-censored data.
        args:
            numIter: The maximum number of iterations for the iterative method.
            params: The initial guess for the shape and scale parameters respectively.
            verbose: Set to true for debugging. Shows progress as it fits data.
        returns:
            The best [k, lmb] found. Note: the instance fields self.k/self.lmb
            are NOT updated here.
        '''
        for i in range(numIter):
            lik = self.loglik(self.train_org,self.train_inorg,params[0],params[1])
            directn = self.grad(self.train_org,self.train_inorg,params[0],params[1])
            params2 = params
            # Line search over a fixed ladder of step sizes: keep the best
            # step that stays in the positive quadrant and strictly improves
            # the (finite) log likelihood.
            for alp1 in [1e-8,1e-7,1e-5,1e-3,1e-2,.1]:
                params1 = params + alp1 * directn
                if(min(params1) > 0):
                    lik1 = self.loglik(self.train_org,self.train_inorg,params1[0],params1[1])
                    if(lik1 > lik and np.isfinite(lik1)):
                        lik = lik1
                        params2 = params1
            params = params2
            if i%100 == 0 and verbose:
                print("Iteration " + str(i) + " ,objective function: " + str(lik) + " \nparams = " + str(params) + " \nGradient = " + str(directn))
                print("\n########\n")
        return params
'''
def newtonRh(self, numIter=101, params = np.array([.1,.1]), verbose=False):
"""
Fits the parameters of a Lomax distribution to data (censored and uncensored).
Uses the Newton Raphson method for explanation, see: https://www.youtube.com/watch?v=acsSIyDugP0
args:
numIter: The maximum number of iterations for the iterative method.
params: The initial guess for the shape and scale parameters respectively.
verbose: Set to true for debugging. Shows progress as it fits data.
"""
for i in range(numIter):
directn = self.grad(self.train_org,self.train_inorg,params[0],params[1])
if sum(abs(directn)) < 1e-5:
if verbose:
print("\nIt took: " + str(i) + " Iterations.\n Gradients - " + str(directn))
self.params = params
[self.k, self.lmb] = params
return params
lik = self.loglik(self.train_org,self.train_inorg,params[0],params[1])
step = np.linalg.solve(self.hessian(self.train_org,self.train_inorg,params[0],params[1]),directn)
params = params - step
if min(params) < 0:
print("Drastic measures")
params = params + step # undo the effect of taking the step.
params2 = params
for alp1 in [1e-8,1e-7,1e-5,1e-3,1e-2,.1,.5,1.0]:
params1 = params - alp1 * step
if(max(params1) > 0):
lik1 = self.loglik(self.train_org,self.train_inorg,params1[0],params1[1])
if(lik1 > lik and np.isfinite(lik1)):
lik = lik1
params2 = params1
scale = alp1
params = params2
if i % 10 == 0 and verbose:
print("Iteration " + str(i) + " ,objective function: " + str(lik) + " \nparams = " + str(params) + " \nGradient = " + str(directn) + "\n##\n\n")
[self.k, self.lmb] = params
self.params = params
return params
'''
def optimal_wait_threshold(self, intervention_cost, k=None, lmb=None):
'''
Gets the optimal time one should wait for a Lomax recovery before intervention.
args:
intervention_cost: The cost of intervening.
k: The shape parameter of this Lomax distribution.
lmb: The scale parameter of this Lomax distribution.
'''
if k is None or lmb is None:
k = self.k
lmb = self.lmb
return (intervention_cost*k - 1/lmb)
def expectedDT(self,tau,k,lmb,intervention_cost):
'''
The expected downtime incurred when the waiting threshold is set to an arbitrary value.
args:
tau: The value we should set for the intervention threshold.
k: The shape parameter of the current Lomax.
lmb: The scale parameter of the current Lomax.
intervention_cost: The cost of intervening.
'''
return 1/lmb/(k-1) - (1/lmb/(k-1) + tau*k/(k-1))*1/(1+lmb*tau)**k + (tau + intervention_cost)*1/(1+lmb*tau)**k
@staticmethod
def expectedDT_s(tau,k,lmb,intervention_cost):
'''
The expected downtime incurred when the waiting threshold is set to an arbitrary value (static version).
args:
tau: The value we should set for the intervention threshold.
k: The shape parameter of the current Lomax.
lmb: The scale parameter of the current Lomax.
intervention_cost: The cost of intervening.
'''
return 1/lmb/(k-1) - (1/lmb/(k-1) + tau*k/(k-1))*1/(1+lmb*tau)**k + (tau + intervention_cost)*1/(1+lmb*tau)**k
    def expectedT(self,tau,k=None,lmb=None,params=None):
        '''
        The expected value of the Lomax conditional on it being less than tau.
        NOTE(review): the closed form divides by (k-1), so it assumes k > 1
        (i.e. the unconditional mean exists) — confirm callers guarantee this.
        args:
            tau: Censor the Lomax here.
            k: The shape parameter of the current Lomax.
            lmb: The scale parameter of the current Lomax.
            params: A 2-d array with shape and scale parameters.
        '''
        [k,lmb] = self.determine_params(k,lmb,params)
        return (1/lmb/(k-1) - (1/lmb/(k-1) + tau*k/(k-1))*1/(1+lmb*tau)**k)/(1-1/(1+lmb*tau)**k)
    def samples(self, k=None, lmb=None, size=1000, params=None):
        '''
        Generates random samples from the Lomax distribution via
        scipy.stats.lomax (scipy's scale is 1/lmb).
        args:
            k: Shape of Lomax (defaults to the fitted value).
            lmb: Scale of Lomax (defaults to the fitted value).
            size: The number of samples to be generated.
            params: A 2-d array with shape and scale parameters.
        '''
        [k, lmb] = self.determine_params(k, lmb, params)
        return lomax.rvs(c=k, scale=(1 / lmb), size=size)
    @staticmethod
    def samples_s(k, lmb, size = 1000):
        '''
        Generates random samples from a Lomax distribution (static version).
        args:
            k: Shape of Lomax.
            lmb: Scale of Lomax.
            size: The number of samples to be generated.
        '''
        return lomax.rvs(c=k, scale=(1 / lmb), size=size)
def kappafn_k(self,t,x,lmb=0.1):
n = len(t)
return n/(sum(np.log(1+lmb*t)) + sum(np.log(1+lmb*x)))
def kappafn_lmb(self,t,x,lmb=0.1):
n = len(t)
return (n/lmb - sum(t/(1+lmb*t)))/(sum(t/(1+lmb*t)) + sum(x/(1+lmb*x)))
    def bisection_fn(self,lmb=0.1):
        '''
        Root function for the bisection solver: the two closed-form
        estimates of k (kappafn_k and kappafn_lmb) agree exactly when this
        difference is zero, which pins down the optimal lmb.
        '''
        return self.kappafn_k(self.train_org,self.train_inorg,lmb) - self.kappafn_lmb(self.train_org,self.train_inorg,lmb)
def bisection(self,a=1e-6,b=2000):
n=1
while n < 10000:
c=(a+b)/2
if self.bisection_fn(c) == 0 or (b-a)/2 < 1e-6:
return c
n=n+1
if (self.bisection_fn(c) > 0) == (self.bisection_fn(a) > 0):
a=c
else:
b=c
|
[
"rohitpandey576@gmail.com"
] |
rohitpandey576@gmail.com
|
f1eda5ad518399f11f695d6ba4d08d977131db26
|
c72758161d4da978a0cc0c87d88535fc9ca58ba5
|
/inc/preparation/PrepareBaseRequest.py
|
162f602b36060eddbf619cace00aa35737e10375
|
[] |
no_license
|
Damian89/extended-baserequest-importer
|
5478878aefbfa90d34e8ac880b2e84532c0ef202
|
3f72e51546bb833720f014377284bd966db6275a
|
refs/heads/master
| 2020-04-27T09:03:34.575147
| 2019-08-02T06:32:22
| 2019-08-02T06:32:22
| 174,198,885
| 10
| 7
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,654
|
py
|
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
# Author: Damian Schwyrz
from urllib.parse import urlparse
from inc.Headers import *
class PrepareBaseRequest:
    """Builds one baseline GET request descriptor per configured target URL."""

    def __init__(self, config):
        self.config = config
        self.tests = []
        self.__create_request_data()

    def __create_request_data(self):
        # Normalize each configured site into a full URL and register a test.
        for attacked_site in self.config.urls:
            url = self.__make_url(attacked_site)
            self.__add_test(
                url,
                self.__get_host(url),
                self.__get_port(url),
                self.__get_path_and_query(url),
            )

    def __add_test(self, url, hostname, port, path):
        # Assemble the header set: user-defined headers, optional cookies,
        # a randomized User-Agent, and the target as its own referer.
        headers = Headers(self.config)
        headers.set("Host", hostname)
        headers.add_user_defined_headers()
        if self.config.cookies != "":
            headers.set("Cookie", self.config.cookies)
        headers.set("Referer", "{}".format(url))
        headers.set("User-Agent", headers.get_random_user_agent())
        headers.set("Content-Type", "text/html")
        entry = {
            'url': url,
            'port': port,
            'method': 'GET',
            'host': hostname,
            'path': path,
            'headers': headers.make(),
            'body': '',
        }
        self.tests.append(entry)

    @staticmethod
    def __make_url(attacked_site):
        # Default to a plain http:// URL when no scheme was supplied.
        if attacked_site.startswith("http"):
            return attacked_site
        return "http://{}/".format(attacked_site)

    @staticmethod
    def __get_path_and_query(url):
        parsed = urlparse(url)
        if not parsed.query:
            return parsed.path
        return "{}?{}".format(parsed.path, parsed.query)

    @staticmethod
    def __get_host(url):
        return urlparse(url).hostname

    @staticmethod
    def __get_port(url):
        return urlparse(url).port
|
[
"mail@damianschwyrz.de"
] |
mail@damianschwyrz.de
|
c8bf5f9685eb4c0e1013dff3da0cfca040acec35
|
4c0dd004f54979c87db0bcaff3f74490d82591a7
|
/plotBaseline.py
|
c27724ae1f21ccdfc3efb25eff309f8f8a56610d
|
[] |
no_license
|
albertpuente/newInterpDetect
|
3decf2a5459e82ba69ca4d754c869af3f1b3a94a
|
174b7ecd521f2918f742dafb7e1939357c55aa35
|
refs/heads/master
| 2021-01-17T21:39:25.174906
| 2016-07-21T11:17:31
| 2016-07-21T11:17:31
| 62,659,146
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,229
|
py
|
import matplotlib.pyplot as plt

# Plot debug output of the baseline-detection code.
# Each line of DEBUG_FIND_OUTPUT.txt holds: V theta theta_b (space separated).
V = []
theta = []
theta_b = []
with open('DEBUG_FIND_OUTPUT.txt', 'r') as f:
    for l in f.readlines():
        things = l.split(' ')
        # Convert to float so matplotlib plots numeric values; the previous
        # version appended raw strings, which newer matplotlib treats as
        # categorical labels instead of numbers.
        V.append(float(things[0]))
        theta.append(float(things[1]))
        theta_b.append(float(things[2]))
plt.plot(V, 'r-')
plt.plot(theta, 'b--')
plt.plot(theta_b, 'r--')
plt.title('Interpolated voltage + boundaries')
plt.show()
##
# Each line of DEBUG_OUTPUT.txt holds the seven per-sample diagnostics below.
Qdiff = []
vMovingAvg = []
vGlobalMovingAvg = []
baseline = []
variability = []
vGlobal = []
Qmin = []
with open('DEBUG_OUTPUT.txt', 'r') as f:
    for l in f.readlines():
        things = l.split(' ')
        vMovingAvg.append(float(things[0]))
        vGlobalMovingAvg.append(float(things[1]))
        baseline.append(float(things[2]))
        variability.append(float(things[3]))
        Qdiff.append(float(things[4]))
        vGlobal.append(float(things[5]))
        Qmin.append(float(things[6]))
plt.plot(vGlobalMovingAvg, 'r-')
plt.plot(vGlobal, 'b-')
plt.title('vGlobalMovingAvg and vGlobal')
plt.show()
plt.plot(baseline, 'r-')
plt.plot(vMovingAvg, 'b-')
plt.title('Baseline and vMovingAvg')
plt.show()
plt.plot(variability, 'r-')
plt.title('variability')
plt.show()
plt.plot(Qdiff, 'r-')
plt.title('Qdiff')
plt.show()
plt.plot(Qmin, 'r-')
plt.title('Qmin')
plt.show()
|
[
"albertpuente93@gmail.com"
] |
albertpuente93@gmail.com
|
66ec71bb988eb0d3f33c75d5c26df53404e2780b
|
6d77cf9932bf600ab89deae26b849221fdf88704
|
/Safe Marks/Interface/TeacherCommandLineInterface.py
|
0060d6cad337413098e7b4a29ad70bae1d24b7e9
|
[
"MIT"
] |
permissive
|
mriduldhall/Safe-Marks
|
4cecc89bcc16c83b64602cc686ad3371e0f14580
|
3ea657c842fe30f084e0d70633d4319073bc82f0
|
refs/heads/main
| 2023-07-05T05:06:38.270423
| 2021-08-16T22:53:43
| 2021-08-16T22:53:43
| 307,712,616
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,720
|
py
|
from HelperLibrary.Validator import Validator
from HelperLibrary.Student import Student
from HelperLibrary.StorageFunctions import StorageFunctions
from Interface.SettingsCommandLineInterface import CLI as SettingsCLI
from Interface.AccountCommandLineInterface import CLI as AccountCLI
from datetime import datetime
class LogoutMenuItem:
    """Menu entry that ends the session when selected."""

    def __init__(self):
        self.is_exit_initiated = False

    def execute(self):
        # Selecting this item flags the surrounding menu loop to terminate.
        print("Logging out...")
        self.is_exit_initiated = True

    def exit_initiated(self):
        return self.is_exit_initiated
class SettingsMenuItem:
    """Menu entry that opens the settings sub-menu."""

    def __init__(self, singleton):
        self.singleton = singleton
        self.is_exit_initiated = False

    def execute(self):
        # If the user deleted their own account inside settings, the session
        # must end as well.
        if SettingsCLI(self.singleton).initiate():
            self.is_exit_initiated = True

    def exit_initiated(self):
        return self.is_exit_initiated
class YearEndMenuItem:
    """Menu entry that advances every active student to the next year group."""
    def __init__(self):
        pass
    def execute(self):
        # Ask for confirmation before the irreversible year-end rollover.
        if Validator("year end").should_continue():
            student_list = self.get_student_list()
            self.increase_year(student_list)
    @staticmethod
    def get_student_list():
        # Names of all stored students, from the "students" storage backend.
        student_list = StorageFunctions("students").list("name")
        return student_list
    @staticmethod
    def increase_year(student_list):
        # For each student still enrolled (no leave_date): bump the year
        # group, or archive students finishing year 13 by recording a leave
        # date. NOTE(review): 13 appears to be the final year group — confirm.
        for student_name in student_list:
            student = Student(student_name, None, None, None, None)
            student.recreate_student()
            if not student.leave_date:
                if student.year_group != 13:
                    student.year_group += 1
                    student.student_controller.save_student_data(save_mark_sheet_data=False)
                    student.student_controller.create_mark_sheets()
                else:
                    student.year_group = None
                    student.leave_date = datetime.now()
                    student.student_controller.save_student_data(save_mark_sheet_data=False)
class ManageMenuItem:
    """Menu entry for managing students and their mark sheets."""

    def __init__(self, admin):
        self.admin = admin

    def execute(self):
        if Validator("manage").should_continue():
            keep_going = True
            while keep_going:
                message = Student(None, None, None, None, None).manage(self.admin)
                print(message)
                keep_going = bool(int(input("Enter 1 to enter another name and work on another student or 0 to leave.")))

    @staticmethod
    def exit_initiated():
        return False
class ManageAccountsMenuItem:
    """Menu entry (admin only) that opens the account-management sub-menu."""

    def __init__(self, singleton):
        self.singleton = singleton

    def execute(self):
        # Delegates entirely to the account CLI.
        AccountCLI(self.singleton).initiate()

    @staticmethod
    def exit_initiated():
        return False
class CreateMenuItem:
    """Menu entry (admin only) that creates new students or unarchives old ones."""
    def __init__(self, singleton):
        self.singleton = singleton
    def execute(self):
        # Confirm intent, then loop creating students until the user stops.
        if Validator("create").should_continue():
            continuation = True
            while continuation is True:
                menu_options = {
                    "1": self.new_student,
                    "2": self.old_student,
                }
                menu_choice = input("Enter 1 to create a new student or 2 to add an old student back(unarchive):")
                if menu_choice in menu_options.keys():
                    message = menu_options[menu_choice]()
                else:
                    message = "Invalid choice"
                print(message)
                continuation = bool(int(input("Enter 1 to create another student and 0 to head back to main menu.")))
    def new_student(self):
        # Collect validated details interactively, then persist the student.
        student = self.getstudentdetails()
        return student.create_new_student()
    @staticmethod
    def old_student():
        # Unarchive: Student.create_old_student prompts for the name itself.
        return Student(None, None, None, None, None).create_old_student()
    @staticmethod
    def getstudentdetails():
        # Re-prompt until the project-side validator accepts the details.
        valid = False
        while not valid:
            name = input("Enter student's name:").capitalize()
            birth_year = int(input("Enter student's year of birth:"))
            birth_month = int(input("Enter student's month of birth:"))
            birth_date = int(input("Enter student's date of birth:"))
            date_of_birth = datetime(birth_year, birth_month, birth_date)
            address = input("Enter student's address:")
            father_name = input("Enter student's father's name:")
            mother_name = input("Enter student's mother's name:")
            student = Student(name, date_of_birth, address, father_name, mother_name)
            valid, message = student.student_controller.validate_student_details()
            if message:
                print(message)
        return student
class CLI:
    """Teacher main-menu loop; available options depend on role and status."""
    def __init__(self, singleton):
        # Regular (non-admin) teacher options.
        self.main_menu_dictionary = {
            "m": ManageMenuItem(singleton.admin),
            "s": SettingsMenuItem(singleton),
            "l": LogoutMenuItem()
        }
        # Admin options add create / account management / year-end rollover.
        self.admin_main_menu_dictionary = {
            "c": CreateMenuItem(singleton),
            "m": ManageMenuItem(singleton.admin),
            "a": ManageAccountsMenuItem(singleton),
            "y": YearEndMenuItem(),
            "s": SettingsMenuItem(singleton),
            "l": LogoutMenuItem()
        }
        # Disabled accounts may only open settings or log out.
        self.disabled_main_menu_dictionary = {
            "s": SettingsMenuItem(singleton),
            "l": LogoutMenuItem(),
        }
        self.admin = singleton.admin
        self.enabled = singleton.enabled
    def initiate(self):
        # Dispatch loop: pick the menu for the user's status/role, run the
        # chosen item, and stop once an item reports exit_initiated().
        exit_initiated = False
        while not exit_initiated:
            if not self.enabled:
                print("Your account has been marked disabled. Please contact an administrator to get this changed.")
                choice = input("Enter s for settings and l to logout:").lower()
                menu_item = self.disabled_main_menu_dictionary.get(choice)
            elif not self.admin:
                choice = input("Enter m to manage students and their mark sheets, s for settings and l to logout:").lower()
                menu_item = self.main_menu_dictionary.get(choice)
            else:
                choice = input("Enter c to create new students, m to manage students and their mark sheets, a to manage accounts, y to change academic year, s for settings and l to logout:").lower()
                menu_item = self.admin_main_menu_dictionary.get(choice)
            if menu_item is None:
                print("Please enter valid choice")
                continue
            menu_item.execute()
            exit_initiated = menu_item.exit_initiated()
|
[
"mriduldhall1@gmail.com"
] |
mriduldhall1@gmail.com
|
a1e35aa2875921cf394a2c897190977063e15a94
|
849c3c4946c116e7a799d7555e70c310f5236435
|
/playground.py
|
fe9e014182df7020da2e83af988766b493d61416
|
[] |
no_license
|
gwendahartsoe/Graduation-Project
|
faa7e19540b4d381a6ef2e4fd5ae72b456f6091d
|
0b6db6c6913ac5ae2453d7d038192222a4eed629
|
refs/heads/master
| 2023-04-20T11:38:01.860316
| 2021-04-28T11:37:45
| 2021-04-28T11:37:45
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,488
|
py
|
# 导入模块
import numpy as np
import random
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.data import DataLoader, Dataset, TensorDataset
def get_kfold_data(k, i, X, y):
    """Split (X, y) into train/validation tensors for fold i of k-fold CV.

    Folds 0..k-2 are consecutive slices of size len(X) // k; the last fold
    additionally absorbs the remainder when len(X) is not divisible by k.

    args:
        k: Number of folds.
        i: Fold index in [0, k-1]; fold i becomes the validation set.
        X: Feature tensor; the first dimension is the sample axis.
        y: Label tensor aligned with X.
    returns:
        (X_train, y_train, X_valid, y_valid)
    """
    fold_size = X.shape[0] // k
    val_start = i * fold_size
    if i == k - 1:
        # Last fold: validation takes everything from val_start onward,
        # including any leftover samples.
        X_valid, y_valid = X[val_start:], y[val_start:]
        X_train = X[0:val_start]
        y_train = y[0:val_start]
    else:
        val_end = val_start + fold_size
        X_valid, y_valid = X[val_start:val_end], y[val_start:val_end]
        X_train = torch.cat((X[0:val_start], X[val_end:]), dim=0)
        y_train = torch.cat((y[0:val_start], y[val_end:]), dim=0)
    return X_train, y_train, X_valid, y_valid
# Create a dataset (scratch experiments kept for reference)
# X = torch.rand(500, 100, 10)
# Y = torch.rand(500, 1)
# # X = X.view(X.size(0),X.size(1), X.size(2),1)
# m = nn.Conv1d(15,100,3)
# out = m(X)
# print(out)
# X.view()
x = [1,0,1,0,0]
y = [1,0,1,1,1]
# NOTE: == compares the two lists as wholes, not elementwise, so this
# prints False (the lists differ at indices 3 and 4).
print(x == y)
# x = torch.rand((2,2,3))
# y = torch.rand((2,2,3))
# print("x:",x)
# print("y:",y)
# print("dim=0:", torch.cat((x,y),dim=0).size())
# print("dim=1:", torch.cat((x,y), dim=1).size())
# print("dim=2:", torch.cat((x, y), dim=2).size())
|
[
"819156618@qq.com"
] |
819156618@qq.com
|
3123f9b6c63d2f4d24498b526b5adf3d5f85a175
|
99247d0562f3bdc1952ef3f8b2b7465247bde9eb
|
/users/views.py
|
61f199e43fd6b308f4bff2b029179ada5808d86f
|
[] |
no_license
|
Goryunova/yamdb_final
|
c2977ef6ec359adcac98538b3cf2d010ddbb0630
|
0950dc35ca454dfe44d92b90f40a26f66ccfa452
|
refs/heads/master
| 2023-07-26T19:04:09.091856
| 2021-09-11T13:19:19
| 2021-09-11T13:19:19
| 401,342,047
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,283
|
py
|
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework import status
from rest_framework.decorators import action
from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet
from yamdb.models import User
from yamdb_auth.permissions import IsAdmin, IsAuthenticated
from .serializers import UsersSerializer
class UsersViewSet(ModelViewSet):
    """Admin-only CRUD over users, plus a /me endpoint for the current user."""
    queryset = User.objects.all()
    serializer_class = UsersSerializer
    filter_backends = [DjangoFilterBackend]
    filterset_fields = ['username']
    # Users are addressed by username rather than numeric pk in URLs.
    lookup_field = 'username'
    permission_classes = [IsAdmin]
    @action(detail=False,
            methods=['get', 'patch'],
            permission_classes=[IsAuthenticated])
    def me(self, request):
        """GET returns the caller's profile; PATCH partially updates it,
        always preserving the caller's existing role."""
        user = request.user
        if request.method == 'GET':
            serializer = UsersSerializer(user)
            return Response(serializer.data, status=status.HTTP_200_OK)
        serializer = self.get_serializer(user,
                                         data=request.data,
                                         partial=True)
        serializer.is_valid(raise_exception=True)
        # role=user.role prevents privilege escalation via PATCH.
        # NOTE(review): partial=True here is forwarded to serializer.save()
        # as extra validated data, not a save option — confirm intended.
        serializer.save(role=user.role, partial=True)
        return Response(serializer.data, status=status.HTTP_200_OK)
|
[
"bilka77@mail.ru"
] |
bilka77@mail.ru
|
a2f3badba22fbc90f72923fcc33d4c53fcd479b5
|
10ae0467d267b2abbcb0c622d151823d91841f65
|
/Demos and Tests/Student Work - 2010/Dakota.py
|
1484d643a5c742945839bca3462e6bce09c8238b
|
[] |
no_license
|
Panda3D-public-projects-archive/pandacamp
|
06b90f42796bf0734ba7932c927c43508ec381f3
|
1b8950589db10259e078edbdecade5c1b6beee2d
|
refs/heads/master
| 2022-04-27T01:12:46.132224
| 2015-03-13T23:00:19
| 2015-03-13T23:00:19
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,367
|
py
|
from Panda import *
# Build a tetrahedron textured with four pictures drawn (shuffled) from the
# given list, with random vertex positions in [-1, 1]^3.
def randomTetra(pics):
    pic1 = shuffle(pics)
    return tetra(pic1[0], pic1[1], pic1[2], pic1[3], v1 = P3(random11(), random11(), random11()), v2 = P3(random11(), random11(), random11()), v3 = P3(random11(), random11(), random11()), v4 = P3(random11(), random11(), random11()))
# Texture pools: scenery shots and the "dak" photo sequence.
group = ["pics/duck1.jpg", "pics/eric1.jpg", "pics/mike1.jpg", "pics/mike2.jpg", "pics/mike3.jpg", "pics/raft1.jpg",
         "pics/raft2.jpg", "pics/raft3.jpg", "pics/raft4.jpg", "pics/raft5.jpg", "pics/raft6.jpg", "pics/raft7.jpg",
         "pics/raft8.jpg", "pics/raft9.jpg", "pics/rock1.jpg", "pics/rock2.jpg", "pics/rock3.jpg", "pics/rock4.jpg",
         "pics/sea1.jpg", "pics/sea2.jpg", "pics/sea3.jpg", "pics/sea4.jpg"]
dak = ["pics/d1.jpg", "pics/d2.jpg", "pics/d3.jpg", "pics/d4.jpg", "pics/d.jpg"]
# Place one spinning random tetrahedron at position p.
def randomTet(p):
    r = randomTetra(group)
    r.position = p
    r.size = 1.5
    r.hpr = integral(HPR(random11(), random11(), random11()))
# 5x5 grid of tetrahedra centered on the origin.
for i in range(5):
    for j in range(5):
        randomTet(P3(i-2, random11()*2, j-2))
# Cycle the background color over time.
# NOTE(review): color(.3, 0. ,3) looks like a typo for color(.3, 0, .3) —
# confirm against the Panda color() signature.
world.color = itime(at(color(0,.3, 0)) + to(3, color(0, .3, .3)) + to(3, color(.3, 0, 0)) + to(3, color(.3, 0. ,3)))
# Fire an event every 3 seconds (starting at t=2) tagged with the next photo.
c = tags(dak, alarm(start = 2, step = 3))
# On each event, launch the photo as a quad scrolling across the scene.
def launchPhoto(m, f):
    f = unitSquare(texture = f)
    f.position = P3(-4 + localTime, -3, 0)
react(c, launchPhoto)
# name.hpr = HPR(time*3, 0, 0)
start()
|
[
"ProfessorJohnPeterson@gmail.com"
] |
ProfessorJohnPeterson@gmail.com
|
f3c78a164ef56f6a0ec9276cfd66d8ebe7bc2607
|
8dbc386a5ec3943ac64b35a4da34b70fbb988152
|
/src/handlers/me/articles/comments/reply/handler.py
|
af9a8487352380b5ead2b639f4a498b23f2a10df
|
[] |
no_license
|
AlisProject/serverless-application
|
60cab427d3088f1fa7b653ad7ee78674d64dd70c
|
8a3f9ed146f71281036986ec5baa481718768866
|
refs/heads/master
| 2023-03-30T06:48:47.426067
| 2023-03-28T13:09:24
| 2023-03-28T13:09:24
| 123,153,011
| 62
| 20
| null | 2023-03-28T13:09:18
| 2018-02-27T16:00:10
|
Python
|
UTF-8
|
Python
| false
| false
| 328
|
py
|
# -*- coding: utf-8 -*-
import boto3
from me_articles_comments_reply import MeArticlesCommentsReply
dynamodb = boto3.resource('dynamodb')
def lambda_handler(event, context):
    '''
    AWS Lambda entry point for replying to an article comment.
    Delegates all work to MeArticlesCommentsReply; the module-level
    dynamodb resource is created once at import time so warm invocations
    reuse it.
    '''
    me_articles_comments_reply = MeArticlesCommentsReply(event=event, context=context, dynamodb=dynamodb)
    return me_articles_comments_reply.main()
|
[
"matsumatsu20@gmail.com"
] |
matsumatsu20@gmail.com
|
06615d6a0ed9cca545e0f513c0da0cc11049404f
|
bc3f5e9272b1142a104390871f86efb97202503b
|
/draiver/tests/DataStreamer/server3.py
|
758310cd263bfcaa6ce23410b294a73b0417ddb7
|
[] |
no_license
|
MarcoSignoretto/drAIver
|
dce358de22e1de03a7435d9388bdfabcf9584f2d
|
7a14b3973a2ca8b57f09eb3ee8a1fedaa5e1cadc
|
refs/heads/master
| 2021-04-06T12:25:40.688394
| 2018-06-16T10:38:06
| 2018-06-16T10:38:06
| 125,176,238
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,104
|
py
|
#!/usr/bin/python
import socket
import cv2
import numpy
def recvall(sock, count):
    """Read exactly `count` bytes from `sock`.

    Returns the accumulated bytes, or None if the peer closes the
    connection before `count` bytes have arrived.
    """
    remaining = count
    chunks = []
    while remaining:
        piece = sock.recv(remaining)
        if not piece:
            return None
        chunks.append(piece)
        remaining -= len(piece)
    return b''.join(chunks)
# camera init
camera_left = cv2.VideoCapture()
# NOTE(review): OpenCV property ids 3/4 are frame width/height and 5 is FPS,
# so set(4, 640)/set(5, 480) appears to set height=640 and fps=480 — confirm
# the intended ids (likely set(3, 640) and set(4, 480)).
camera_left.set(4, 640)
camera_left.set(5, 480)
camera_left.open(0)

# socket init: listen for a single client on the hotspot address.
server_address = ('10.42.0.1', 10000)
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.bind(server_address)
sock.listen(True)
conn, addr = sock.accept()

while True:
    # Each frame arrives as a 16-byte ASCII length header followed by a
    # JPEG-encoded payload: left camera first, then right.
    length_left = recvall(conn, 16)
    stringData_left = recvall(conn, int(length_left))
    # numpy.fromstring is deprecated for binary data; frombuffer is the
    # supported zero-copy equivalent.
    data_left = numpy.frombuffer(stringData_left, dtype='uint8')
    decimg_left = cv2.imdecode(data_left, 1)
    length_right = recvall(conn, 16)
    stringData_right = recvall(conn, int(length_right))
    data_right = numpy.frombuffer(stringData_right, dtype='uint8')
    decimg_right = cv2.imdecode(data_right, 1)
    cv2.imshow('SERVER_LEFT', decimg_left)
    cv2.imshow('SERVER_RIGHT', decimg_right)
    cv2.waitKey(1)

# NOTE: unreachable after the infinite loop; kept for parity with the original.
cv2.destroyAllWindows()
sock.close()
|
[
"marco.signoretto.dev@gmail.com"
] |
marco.signoretto.dev@gmail.com
|
b7a247ccaea8874f9bdfa9cd247def7e342ba18d
|
c970c3800ab7a33989d7fffaee3da63f72dac6ea
|
/api_trial2.py
|
c1fcd40999a337e958197b9b473f2f67fcd90672
|
[] |
no_license
|
smustala/DSCI551-Project
|
8053d16f2c7f53e8533277c34a1e1ab0444f353b
|
fe335785e0cb529656f064f059435fcd8f0753c0
|
refs/heads/master
| 2023-01-08T19:24:44.866301
| 2020-11-06T11:30:01
| 2020-11-06T11:30:01
| 310,578,443
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,321
|
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Nov 6 02:10:06 2020
@author: shalinimustala
"""
import flask
from flask import request, jsonify
def tempbymonth1(month, year, countryname):
    """Return (AverageTemperature, city) rows for a country in a given month.

    args:
        month: Month number used to build the dt LIKE pattern.
        year: Year used to build the dt LIKE pattern.
        countryname: Country name to filter on.
    returns:
        A list of (AverageTemperature, city) tuples.
    """
    from mysql import connector
    cnx = connector.connect(user='shalini1', password='hello', host='18.188.12.200', database='551project')
    cursor = cnx.cursor()
    # Parameterized query: the previous version concatenated the raw request
    # parameters into the SQL string, which is vulnerable to SQL injection.
    pattern = '{}-{}-%'.format(year, month)
    query = ('select AverageTemperature, city from tempbycity '
             'where dt like %s and country = %s')
    temp_all = []
    cursor.execute(query, (pattern, countryname))
    for row in cursor:
        temp_all.append(row)
    return temp_all
app = flask.Flask(__name__)
app.config["DEBUG"] = True
@app.route('/api/temp', methods=['GET'])
def api_filter():
    """Query endpoint: /api/temp?month=M&year=Y&countryname=C returns the
    matching (AverageTemperature, city) rows as JSON."""
    query_parameters = request.args
    countryname = query_parameters.get('countryname')
    month = query_parameters.get('month')
    year = query_parameters.get('year')
    temp_all = tempbymonth1(month, year, countryname)
    return jsonify(temp_all)
# Blocking call: starts the development server on 127.0.0.1:5000.
app.run()
#USE API : http://127.0.0.1:5000/api/temp?month=12&year=2012&countryname=India
|
[
"shalinimustala@Shalinis-MBP.attlocal.net"
] |
shalinimustala@Shalinis-MBP.attlocal.net
|
e79ce4977b6a49f90c9aaada2b5644b561275441
|
a2170a9fc6355dfd9a2a54e7b7f9de531dc74c9c
|
/pywhoisxml/exceptions.py
|
de667387dc2898d0ed0a1507dd5f50e4c2b7e9f6
|
[] |
no_license
|
VarthanV/pywhoisxml
|
82a826d0c23b9319e1fd75b58bab4bbb7602786e
|
47fb58ace086df5222fbef936fc71bbb4383d0d1
|
refs/heads/master
| 2022-05-27T03:30:46.806937
| 2020-05-01T03:32:07
| 2020-05-01T03:32:07
| 259,951,022
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 44
|
py
|
class PyWhoisException(Exception):
    """Base exception raised for pywhoisxml errors."""
    pass
|
[
"vichu@Vishnus-MacBook-Air.local"
] |
vichu@Vishnus-MacBook-Air.local
|
a86b793d90e6718bbb40e66343ea07b986f34a35
|
9e99a543ac84503729604c3fc967742f741f7d77
|
/todo-list.py
|
9a834c7f1182783a0a02aa028fd6d25d9a09ae5e
|
[] |
no_license
|
emreyeprem/python-class-object
|
17b512a9dfd0ae7a96f9903b6085f33c0e2af599
|
9da06cdc269265a0359dadaed3cc3056b821f6b4
|
refs/heads/master
| 2020-03-29T18:21:12.922481
| 2018-09-25T04:23:31
| 2018-09-25T04:23:31
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,710
|
py
|
#---------------------- First Try------------------------
# priority_list = []
# task_list = []
# task_and_priority_list = []
#
# while 1==1:
# task_title = input("Enter the task: ").lower()
# task_priority = int(input("Type a number from 0 to 5(0: least important -> 5: highly important): "))
# quit_program = input('Press q to quit or enter to add another task: ').lower()
# task_list.append(task_title)
# priority_list.append(task_priority)
# task_and_priority_list.append("{0} : {1}".format(task_priority,task_title))
# #print(task_and_priority_list)
# #print(task_list)
# final_list = sorted(task_and_priority_list, reverse = True)
# print(final_list)
# if quit_program == 'q':
# break
# -----------------Second try-------------------------------
# Shared registries: task name -> priority. priority_list is kept for
# backward compatibility with the first-draft code above.
task_list = {}
priority_list = []


class Task:
    """A named task with a 0-5 priority, registered in the global task_list."""

    def __init__(self, name, priority):
        self.name = name
        self.priority = priority
        # Register (or overwrite) this task in the shared dictionary.
        task_list[self.name] = self.priority
        print(task_list)

    def remove(self):
        """Prompt once for a task name; pop and return it if present.

        The previous version iterated over task_list while popping from it,
        which re-prompted once per key and could raise RuntimeError
        ("dictionary changed size during iteration").
        """
        if task_list:
            i = input('Enter the task to remove: ')
            if i in task_list:
                return task_list.pop(i)

    def sort(self):
        """Print all tasks ordered by priority, highest first."""
        sorted_list = sorted(task_list.items(), key=lambda kv: kv[1], reverse=True)
        print(sorted_list)
# Interactive driver: keep adding tasks until the user enters 'q'.
while 1==1:
    task = Task(input('Enter task name:'), int(input('Type a number from 0 to 5(0: least important -> 5: highly important): ')))
    if input('Press q to quit or enter to add more task: ') == 'q':
        break
# task.remove()
# Remove one task (prompts for its name), then show the remaining tasks
# sorted by priority.
task.remove()
print(task_list)
task.sort()
#-------------------------------------------------
|
[
"emreakurek@MacBook-Air-2.local"
] |
emreakurek@MacBook-Air-2.local
|
806045494341c3fa1fb96aa5bd0843029bd4e3fc
|
14dcb10e4b0e85b7c95c2f186fe2d4093c853ea0
|
/pygmt/tests/test_grd2cpt.py
|
a1afa97312a022480419107814f227db552fb897
|
[
"BSD-3-Clause"
] |
permissive
|
xumi1993/pygmt
|
1f3f9a065544693bfbbdb688019f2988d12cdce2
|
a555ba705857aeb5e09046308b25574721fccf5f
|
refs/heads/master
| 2023-03-04T01:56:20.742803
| 2022-09-09T14:40:37
| 2022-09-09T14:40:37
| 213,913,126
| 1
| 0
|
BSD-3-Clause
| 2023-02-21T15:57:33
| 2019-10-09T12:30:33
|
Python
|
UTF-8
|
Python
| false
| false
| 1,902
|
py
|
"""
Tests for grd2cpt.
"""
import os
import pytest
from pygmt import Figure, grd2cpt
from pygmt.exceptions import GMTInvalidInput
from pygmt.helpers import GMTTempFile
from pygmt.helpers.testing import load_static_earth_relief
@pytest.fixture(scope="module", name="grid")
def fixture_grid():
"""
Load the grid data from the sample earth_relief file.
"""
return load_static_earth_relief()
@pytest.mark.mpl_image_compare
def test_grd2cpt(grid):
"""
Test creating a CPT with grd2cpt to create a CPT based off a grid input and
plot it with a color bar.
"""
fig = Figure()
fig.basemap(frame="a", projection="W0/15c", region="d")
grd2cpt(grid=grid)
fig.colorbar(frame="a")
return fig
def test_grd2cpt_blank_output(grid):
"""
Use incorrect setting by passing in blank file name to output parameter.
"""
with pytest.raises(GMTInvalidInput):
grd2cpt(grid=grid, output="")
def test_grd2cpt_invalid_output(grid):
"""
Use incorrect setting by passing in invalid type to output parameter.
"""
with pytest.raises(GMTInvalidInput):
grd2cpt(grid=grid, output=["some.cpt"])
def test_grd2cpt_output_to_cpt_file(grid):
"""
Save the generated static color palette table to a .cpt file.
"""
with GMTTempFile(suffix=".cpt") as cptfile:
grd2cpt(grid=grid, output=cptfile.name)
assert os.path.getsize(cptfile.name) > 0
def test_grd2cpt_unrecognized_data_type():
"""
Test that an error will be raised if an invalid data type is passed to
grid.
"""
with pytest.raises(GMTInvalidInput):
grd2cpt(grid=0)
def test_grd2cpt_categorical_and_cyclic(grid):
    """
    Enabling both ``categorical`` and ``cyclic`` is invalid and must raise
    GMTInvalidInput.
    """
    with pytest.raises(GMTInvalidInput):
        grd2cpt(grid=grid, cmap="batlow", cyclic=True, categorical=True)
|
[
"noreply@github.com"
] |
noreply@github.com
|
952fd72ad5a8100025aa2e461084375532616b8e
|
677562bf6835be104204f32a6c9998d9a901f9fc
|
/from_scratch/detect_metadata/times.py
|
fefebd85201b58cd0821fa91c8c528a5f775d688
|
[] |
no_license
|
santokalayil/neural_network
|
3cb2f843430e9f35e017edcde83ba13212d0f5cf
|
f453856214d027f55afc5c861784dc693a9bf2c6
|
refs/heads/main
| 2023-06-12T01:53:43.588403
| 2021-07-02T08:30:20
| 2021-07-02T08:30:20
| 382,281,787
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 343
|
py
|
# import platform # to detect the operating system
import os
import time
def get_last_modified_time(path_to_file):
    """Return the last-modification time of *path_to_file* in seconds since
    the epoch (same value as ``os.path.getmtime``)."""
    return os.stat(path_to_file).st_mtime
def get_created_time(path_to_file):
    """Return the ctime of *path_to_file* in seconds since the epoch (same
    value as ``os.path.getctime``; metadata-change time on Unix, creation
    time on Windows)."""
    return os.stat(path_to_file).st_ctime
|
[
"49450970+santokalayil@users.noreply.github.com"
] |
49450970+santokalayil@users.noreply.github.com
|
3b937e27177d4b2213f47baa81c00973e7037be0
|
bc4910ecec94749697dbce5e7cf5093275411125
|
/src/generator/Cheetah/ErrorCatchers.py
|
500f2490d613628fe69f683fafa883f5d586e55d
|
[
"MIT"
] |
permissive
|
se210/tracy
|
7e73a6f0d64f355842b9a11035c3720b4d40fde5
|
232a42ce1aefcffa1f8544c89d60a16ebd897033
|
refs/heads/master
| 2021-01-09T20:55:03.241111
| 2013-12-15T23:34:36
| 2013-12-15T23:34:36
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,838
|
py
|
#!/usr/bin/env python
# $Id: ErrorCatchers.py,v 1.1 2006-09-06 09:50:08 skyostil Exp $
"""ErrorCatcher class for Cheetah Templates
Meta-Data
================================================================================
Author: Tavis Rudd <tavis@damnsimple.com>
Version: $Revision: 1.1 $
Start Date: 2001/08/01
Last Revision Date: $Date: 2006-09-06 09:50:08 $
"""
__author__ = "Tavis Rudd <tavis@damnsimple.com>"
__revision__ = "$Revision: 1.1 $"[11:-2]
import time
from Cheetah.NameMapper import NotFound
class Error(Exception):
    """Base exception for ErrorCatcher-related failures."""
    pass
class ErrorCatcher:
    """Default error catcher: swallow NameMapper.NotFound and echo the raw
    placeholder source unchanged."""
    # Exception types the template engine should hand to warn().
    _exceptionsToCatch = (NotFound,)
    def __init__(self, templateObj):
        # The template object is accepted for interface compatibility only;
        # this base class does not use it.
        pass
    def exceptions(self):
        """Return the tuple of exception types this catcher handles."""
        return self._exceptionsToCatch
    def warn(self, exc_val, code, rawCode, lineCol):
        """Handle a caught exception; the default behavior echoes rawCode."""
        return rawCode
## make an alias: 'Echo' is the historical public name for the default catcher
Echo = ErrorCatcher
class BigEcho(ErrorCatcher):
    """Echo the failing placeholder flanked by '=' bars so it stands out."""
    def warn(self, exc_val, code, rawCode, lineCol):
        bar = "=" * 15
        return bar + "<" + rawCode + " could not be found>" + bar
class KeyError(ErrorCatcher):
    """Error catcher that raises a real ``KeyError`` for a missing placeholder.

    Note: this class deliberately shadows the builtin ``KeyError`` at module
    level, so the method must reach the builtin explicitly.  The original
    ``raise KeyError(...)`` resolved to this class, which is not an
    Exception subclass, and therefore failed with a TypeError instead of
    raising the intended error.
    """
    def warn(self, exc_val, code, rawCode, lineCol):
        import builtins  # local import: the module-level name is shadowed
        raise builtins.KeyError(
            "no '%s' in this Template Object's Search List" % rawCode)
class ListErrors(ErrorCatcher):
    """Accumulate a list of errors instead of failing or echoing loudly."""
    # strftime() format used to timestamp each recorded error.
    _timeFormat = "%c"
    def __init__(self, templateObj):
        ErrorCatcher.__init__(self, templateObj)
        self._errors = []
    def warn(self, exc_val, code, rawCode, lineCol):
        """Record the error details plus a timestamp, then echo rawCode."""
        # Build the record explicitly instead of copying locals() into a
        # variable named 'dict' (which shadowed the builtin).  The keys are
        # the same as the original locals()-derived record.
        record = {
            'exc_val': exc_val,
            'code': code,
            'rawCode': rawCode,
            'lineCol': lineCol,
            'time': time.strftime(self._timeFormat,
                                  time.localtime(time.time())),
        }
        self._errors.append(record)
        return rawCode
    def listErrors(self):
        """Return the list of accumulated error records."""
        return self._errors
|
[
"sami.kyostila@unrealvoodoo.org"
] |
sami.kyostila@unrealvoodoo.org
|
81e69967751cb96ce288091f2fc4ea2ff7d9ae79
|
837d7fad3a4317fbc45345652cb0b0cee8aa310d
|
/Autonomous/pwmtester.py
|
f62fbebb490b9489e870a606e11f66962d5ae6ec
|
[] |
no_license
|
IliasChekkori/BAUROV-Autonomous
|
0f0e17ece2a22e4929334fc2d08c177476c004f9
|
6bfe1e41c7328aad6ab9dffadfdbe193829e8213
|
refs/heads/master
| 2022-12-09T00:57:01.610265
| 2020-08-31T21:55:23
| 2020-08-31T21:55:23
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,149
|
py
|
# Import mavutil
from pymavlink import mavutil
# Create the MAVLink connection (original comment was Turkish: "create connection").
master = mavutil.mavlink_connection(
    '/dev/ttyACM0',
    baud=115200)  # serial link so the Raspberry Pi can talk to the Pixhawk
# The helper below builds and sends the RC PWM override values.
def set_rc_channel_pwm(id, pwm=1500):
    """Override one RC channel with a PWM value.

    The original source had a mangled docstring (a stray
    ``t, optional): Channel pwm value 1100-1900`` line), which was a syntax
    error; this restores it.

    Args:
        id (int): RC channel number, 1-8.  Values < 1 are rejected; values
            > 8 are silently ignored.
        pwm (int, optional): Channel pwm value 1100-1900. Defaults to 1500
            (neutral).
    """
    if id < 1:
        print("Channel does not exist.")
        return
    #http://mavlink.org/messages/common#RC_CHANNELS_OVERRIDE
    if id < 9:
        # 65535 (UINT16_MAX) means "leave this channel unchanged".
        rc_channel_values = [65535 for _ in range(8)]
        rc_channel_values[id - 1] = pwm
        # RC_CHANNELS_OVERRIDE requires both target_system and
        # target_component; the original call omitted target_system, which
        # made rc_channels_override_send fail with a missing-argument error.
        master.mav.rc_channels_override_send(
            master.target_system,  # target_system
            master.target_component,  # target_component
            *rc_channel_values)  # RC channel list
# Read the PWM value to send (prompts are in Turkish: "enter a value").
deger= int(input("Deger Giriniz: ")) # PWM value to apply
#1100 = full speed in one direction (reverse)
#1900 = full speed forward
#1500 = 0 (stopped / neutral)
pin= int(input("Channel giriniz: ")) # enter the channel as an integer; see https://www.ardusub.com/operators-manual/rc-input-and-output.html for the channel map
count = 0
# Re-send the override repeatedly so the autopilot keeps applying it.
while (count < 10000):
    set_rc_channel_pwm(pin, deger)
    count = count + 1
|
[
"noreply@github.com"
] |
noreply@github.com
|
2294a459f1fdf255465e1c2a3b0a875e3b2dd9a2
|
0ea9136591fbd928716cd6c1159ffc8985de0242
|
/Mongo_Search/old scripts/SearchChannel.py
|
aaffb1ce5e6d28c4e55e6ffe9dc99ebc35c3cdbe
|
[] |
no_license
|
kaayem/Kym
|
0d8b1042fbd98772b58f294aecb122750d00b99b
|
8f99c20757c5505809cacbe5459ba6605ac93e98
|
refs/heads/master
| 2022-11-17T13:29:46.111333
| 2020-07-22T11:55:01
| 2020-07-22T11:55:01
| 277,321,585
| 0
| 0
| null | 2020-07-22T11:55:03
| 2020-07-05T14:45:47
|
Python
|
UTF-8
|
Python
| false
| false
| 1,478
|
py
|
#generate set up checks for python
# check all brands in a market
import pymongo
from pymongo import MongoClient
import pandas as pd
import numpy as np
import os
def db_connect(client_obj=None, db_name='mm_dev'):
    """Check that a MongoDB connection is usable.

    The original body referenced the undefined names ``client`` and
    ``DB_NAME``; the resulting NameError was swallowed by a bare ``except``,
    so the check always reported failure.

    Args:
        client_obj: a pymongo client-like object.  Defaults to the
            module-level ``con`` connection.
        db_name (str): database name to probe. Defaults to 'mm_dev'.

    Returns:
        bool: True if the database could be reached, False otherwise.
    """
    if client_obj is None:
        client_obj = con  # fall back to the module-level connection
    try:
        if client_obj.get_database(db_name):
            print("Connection Successful!")
            return True
    except Exception:
        # Narrowed from a bare except; any driver error means "not reachable".
        pass
    print("No, Please check your connection!!!")
    return False
def db_close(client_obj=None):
    """Close a MongoDB connection.

    The original implementation called ``client.close()`` but no ``client``
    name exists in this module (the connection is named ``con``), so it
    raised NameError at runtime.

    Args:
        client_obj: a pymongo client-like object with a ``close()`` method.
            Defaults to the module-level ``con`` connection.
    """
    print("Connection Getting Closed")
    if client_obj is None:
        client_obj = con
    client_obj.close()
# Build the MongoDB connection (localhost variant kept for reference).
#con = pymongo.MongoClient("mongodb://127.0.0.1:27017/")
con = pymongo.MongoClient("mongodb://192.168.1.181:27017/")
#db = con['mm_pharma']
db = con['mm_dev']
coll = db['attributes']
print("Have we successfully connected to Mongo?")
db_connect()
print(" Please note this will need to be run in python 2")
CH = input("Please enter the channel you are looking at ")
# Unwind each document's 'channel' array and keep entries whose
# standardized value ('mstd') matches the requested channel.
mydoc4 = db.attributes.aggregate([{
'$project' :{'_id':0,'code':1,'source':1,'report':1,'channel':'$channel'}},
{'$unwind':'$channel'},
{'$project':{'_id':0,'code':1,'source':1,'report':1,'mstd' : '$channel.value', 'mraw' : '$channel.name'}},
{'$match':{ 'mstd': {'$in': [CH,]}}}])
doc5 = []
for x in mydoc4:
    doc5.append(x)
# Fix the column order, then dump the matches to a CSV in the working dir.
df = pd.DataFrame(data =doc5)
index = ['code', 'source', 'report', 'mraw', 'mstd']
df = df.reindex(columns = index)
#print(df)
name= 'Channel finder for '+CH+'.csv'
df.to_csv(name)
print(" CSV of all metrics you are that are mapped has been outputted to", os.getcwd())
|
[
"kaayempatel@wessexinsights.com"
] |
kaayempatel@wessexinsights.com
|
7e0ddf1ed82eb7a872aeed25135c9b4572b437ac
|
a7b722424273b0b9ad00ca0088d129b464c9bb1b
|
/Back-end/store/admin.py
|
7d34d3b73fb34832d5bba84103412c4d01fae0ad
|
[] |
no_license
|
HackRx2-0/ps2_legit_geeks
|
df1f71dcf0b43ef750ceeb9ff6ccdae1b2ce4c9e
|
c19de4403c45fdd5ed132e5c17a8f22c6aea0f46
|
refs/heads/main
| 2023-06-23T02:12:44.494118
| 2021-07-24T09:50:30
| 2021-07-24T09:50:30
| 388,513,416
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 747
|
py
|
from django.contrib import admin
import nested_admin
from .models import Store, ShippingZone, ShippingMethod, BankAccount
class StoreAdmin(nested_admin.NestedModelAdmin):
    # Admin for the Store model; nested so related inlines can be attached.
    model = Store
    # inlines = [WholesaleProductVariantInline]
class ShippingMethodInlineAdmin(nested_admin.NestedTabularInline):
    # Tabular inline for editing ShippingMethod rows within a parent admin.
    model = ShippingMethod
    extra = 0      # no blank extra forms by default
    min_num = 1    # require at least one shipping method
class ShippingZoneAdmin(nested_admin.NestedModelAdmin):
    """Admin for ShippingZone with its shipping methods editable inline."""
    # The ModelAdmin option is spelled 'inlines' (plural); the original
    # 'inline' attribute was silently ignored by Django, so the inline
    # editor never appeared on the change page.
    inlines = [ShippingMethodInlineAdmin]
class BankAccountAdmin(admin.ModelAdmin):
    # Explicit field order for the BankAccount change form.
    fields = ['store', 'holder_name', 'account_number', 'bank_name', 'ifsc', 'account_type']
# Register the model admins with the default admin site.
admin.site.register(Store, StoreAdmin)
admin.site.register(ShippingZone, ShippingZoneAdmin)
admin.site.register(BankAccount, BankAccountAdmin)
|
[
"181210045@nitdelhi.ac.in"
] |
181210045@nitdelhi.ac.in
|
686c9615805e1a5fc4ed701965c783ef17b68d2e
|
3de6a297bdacd268c9aafadc37d0f53d1a77da76
|
/example/strategy_two_graphs.py
|
877f0c12157d9964cba360e559135a7547036ae7
|
[] |
no_license
|
HipGraph/GNNfam
|
bfe7ae9b3f120f7fd06174dc318c086bb4d8e0e0
|
7b6e2bc7f07402696b574625222979b8f57a1a24
|
refs/heads/master
| 2023-04-26T05:12:34.990943
| 2021-05-25T23:38:27
| 2021-05-25T23:38:27
| 357,778,089
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,150
|
py
|
#!/home/anuj/virtualenvforest/globalvenv/bin/python
import networkx as nx
def main():
    """For each keep-fraction (10%..100% in steps of 10), write a pruned
    copy of graph.txt.

    For every node, only the strongest ``step`` fraction of its incident
    edges (sorted by weight, descending) is kept, plus a unit-weight
    self-loop for the node itself.
    """
    basegraph = nx.Graph()
    with open('graph.txt', 'r') as f:
        for line in f.readlines():
            # Each line is "<node_a> <node_b> <weight>".
            a, b, c = line.strip().split()
            basegraph.add_edge(int(a), int(b), weight=float(c))
    #basegraph is the networkx graph generated from the original graph.txt file
    for stepval in range(10, 101, 10):
        step = stepval/100  # fraction of each node's edges to keep
        all_edges = []
        for node in basegraph.nodes():
            # Neighbors of this node sorted by edge weight, strongest first.
            edges = [
                (k,v['weight']) for k,v in sorted(
                    basegraph[node].items(), reverse=True,
                    key=lambda item:item[1]['weight']
                )
            ]
            keepedges = edges[:int(len(edges)*step)]
            for edge in keepedges:
                all_edges.append(
                    "{} {} {}\n".format(node, edge[0], edge[1])
                )
            # Always append a unit-weight self-loop for the node.
            all_edges.append("{} {} {}\n".format(node,node, 1))
        with open("strategy_two_graph_{}_percent.txt".format(stepval), "w") as f:
            f.writelines(all_edges)
# Run only when executed directly, not on import.
if __name__ == '__main__':
    main()
|
[
"abgodase@iu.edu"
] |
abgodase@iu.edu
|
5e5690e04aa90942ebf8fe480955b7c04ff6e383
|
a78f5fd783acad55e97916114e3ccb0d817e8ae3
|
/2021/12/day12.py
|
99b2835aae66c8fe2c4262b65d9ad70308275469
|
[] |
no_license
|
fredrik-aschehoug/AdventOfCode
|
3853c1f947ebd0fbec6232674e258763a99775da
|
f44d15bf1167dcbbcb15c5a317f72e45ba560316
|
refs/heads/master
| 2023-06-22T23:04:40.670487
| 2023-06-13T11:21:08
| 2023-06-13T11:21:08
| 225,464,307
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 544
|
py
|
from utils import get_network
from PathFinder import PathFinder1, PathFinder2
def main():
    """Build the network from the puzzle input and print the number of
    distinct paths for both parts."""
    with open("12/input.txt", encoding="UTF-8") as handle:
        lines = handle.read().splitlines()
    part1_paths = PathFinder1(get_network(lines)).get_distinct_paths()
    print("Part 1: ", len(part1_paths))
    part2_paths = PathFinder2(get_network(lines, part2=True)).get_distinct_paths()
    print("Part 2: ", len(part2_paths))
# Script entry point.
if __name__ == "__main__":
    main()
|
[
"15358786+fredrik-aschehoug@users.noreply.github.com"
] |
15358786+fredrik-aschehoug@users.noreply.github.com
|
c280ee3b854a4f6043932dbcd3aa1b31846f9e2c
|
47988c4d1e2c07cd2465da204890f481d59dbd4b
|
/src/tests/ftest/util/command_utils_base.py
|
e25429c501973d0d6e453644fdfcba2b0cf4b268
|
[
"BSD-2-Clause",
"BSD-2-Clause-Patent"
] |
permissive
|
dsikich/daos
|
974000a2e9a37c2edc994007f864ab69afe347e3
|
13385f8eb3209dfe9f63772a68a3bb8cadaf2e23
|
refs/heads/master
| 2022-07-07T05:46:07.074084
| 2022-06-29T13:01:52
| 2022-06-29T13:01:52
| 242,208,796
| 0
| 0
|
NOASSERTION
| 2021-12-07T21:17:27
| 2020-02-21T18:50:31
|
C
|
UTF-8
|
Python
| false
| false
| 26,663
|
py
|
#!/usr/bin/python
"""
(C) Copyright 2020-2022 Intel Corporation.
SPDX-License-Identifier: BSD-2-Clause-Patent
"""
from logging import getLogger
import os
import yaml
from exception_utils import CommandFailure
class BasicParameter():
    """A class for parameters whose values are read from a yaml file."""
    def __init__(self, value, default=None, yaml_key=None):
        """Create a BasicParameter object.
        Normal use includes assigning this object to an attribute name that
        matches the test yaml file key used to assign its value. If the
        variable name will conflict with another class attribute, e.g. self.log,
        then the `yaml_key` argument can be used to define the test yaml file
        key independently of the attribute name.
        Args:
            value (object): initial value for the parameter
            default (object, optional): default value. Defaults to None.
            yaml_key (str, optional): the yaml key name to use when finding the
                value to assign from the test yaml file. Default is None which
                will use the object's variable name as the yaml key.
        """
        self._value = value if value is not None else default
        self._default = default
        self._yaml_key = yaml_key
        self.log = getLogger(__name__)
        # Flag used to indicate if a parameter value has or has not been updated
        self.updated = True
    def __str__(self):
        """Convert this BasicParameter into a string.
        Returns:
            str: the string version of the parameter's value
        """
        return str(self.value) if self.value is not None else ""
    @property
    def value(self):
        """Get the value of this setting.
        Returns:
            object: value currently assigned to the setting
        """
        return self._value
    @value.setter
    def value(self, item):
        """Set the value of this setting.
        Args:
            item (object): value to assign for the setting
        """
        # Only flag an update when the value actually changes.
        if item != self._value:
            self._value = item
            self.updated = True
    def get_yaml_value(self, name, test, path):
        """Get the value for the parameter from the test case's yaml file.
        Args:
            name (str): name of the value in the yaml file
            test (Test): avocado Test object to use to read the yaml file
            path (str): yaml path where the name is to be found
        """
        if self._yaml_key is not None:
            # Use the yaml key name instead of the variable name
            name = self._yaml_key
        # A test-provided config object takes precedence over avocado params.
        if hasattr(test, "config") and test.config is not None:
            self.value = test.config.get(name, path, self._default)
        else:
            self.value = test.params.get(name, path, self._default)
    def update(self, value, name=None, append=False):
        """Update the value of the parameter.
        Args:
            value (object): value to assign
            name (str, optional): name of the parameter which, if provided, is
                used to display the update. Defaults to None.
            append (bool, optional): append/extend/update the current list/dict
                with the provided value. Defaults to False - override the
                current value.
        """
        if append and isinstance(self.value, list):
            if isinstance(value, list):
                # Add the new list of value to the existing list
                self.value.extend(value)
            else:
                # Add the new value to the existing list
                self.value.append(value)
            # In-place mutation bypasses the value setter, so flag explicitly.
            self.updated = True
        elif append and isinstance(self.value, dict):
            # Update the dictionary with the new key/value pairs
            self.value.update(value)
            self.updated = True
        else:
            # Override the current value with the new value
            self.value = value
        if name is not None:
            self.log.debug("Updated param %s => %s", name, self.value)
    def update_default(self, value):
        """Update the BasicParameter default value.
        Args:
            value (object): new default value
        """
        # Note: only affects future get_yaml_value() fallbacks; the current
        # value is left unchanged.
        self._default = value
class FormattedParameter(BasicParameter):
    # pylint: disable=too-few-public-methods
    """A class for test parameters whose values are read from a yaml file."""
    def __init__(self, str_format, default=None, yaml_key=None):
        """Create a FormattedParameter object.
        Normal use includes assigning this object to an attribute name that
        matches the test yaml file key used to assign its value. If the
        variable name will conflict with another class attribute, e.g. self.log,
        then the `yaml_key` argument can be used to define the test yaml file
        key independently of the attribute name.
        Args:
            str_format (str): format string used to convert the value into an
                command line argument string
            default (object): default value for the param
            yaml_key (str, optional): alternative yaml key name to use when
                assigning the value from a yaml file. Default is None which
                will use the object's variable name as the yaml key.
        """
        super().__init__(default, default)
        self._str_format = str_format
        self._yaml_key = yaml_key
    def __str__(self):
        """Return a FormattedParameter object as a string.
        Returns:
            str: if defined, the parameter, otherwise an empty string
        """
        parameter = ""
        # A bool default marks a flag-style option: emit the bare format
        # string when the value is truthy, nothing otherwise.
        if isinstance(self._default, bool) and self.value:
            parameter = self._str_format
        elif not isinstance(self._default, bool) and self.value is not None:
            if isinstance(self.value, dict):
                # Each key/value pair becomes 'key "value"' in the format.
                parameter = " ".join([
                    self._str_format.format(
                        "{} \"{}\"".format(key, self.value[key]))
                    for key in self.value])
            elif isinstance(self.value, (list, tuple)):
                # Repeat the formatted option once per entry.
                parameter = " ".join(
                    [self._str_format.format(value) for value in self.value])
            else:
                parameter = self._str_format.format(self.value)
        return parameter
    def get_yaml_value(self, name, test, path):
        """Get the value for the parameter from the test case's yaml file.
        Args:
            name (str): name of the value in the yaml file - not used
            test (Test): avocado Test object to use to read the yaml file
            path (str): yaml path where the name is to be found
        """
        if self._yaml_key is not None:
            # Use the yaml key name instead of the variable name
            # (the base class performs the same substitution; harmless here).
            name = self._yaml_key
        return super().get_yaml_value(name, test, path)
class LogParameter(FormattedParameter):
    """A class for a test log file parameter which is read from a yaml file."""
    def __init__(self, directory, str_format, default=None):
        """Create a LogParameter object.
        Args:
            directory (str): fixed location for the log file name specified by
                the yaml file
            str_format (str): format string used to convert the value into an
                command line argument string
            default (object): default value for the param
        """
        super().__init__(str_format, default)
        self._directory = directory
        self._add_directory()
    def _add_directory(self):
        """Add the directory to the log file name assignment.
        The initial value is restricted to just the log file name as the
        location (directory) of the file is fixed. This method updates the
        initial log file value (just the log file name) to include the directory
        and name for the log file.
        """
        if isinstance(self.value, str):
            # Strip any caller-supplied path, then anchor the name to the
            # fixed directory.
            name = os.path.basename(self.value)
            self.value = os.path.join(self._directory, name)
        elif self.value is not None:
            # Non-string, non-None values cannot be joined; warn and keep.
            self.log.info(
                "Warning: '%s' not added to '%s' due to incompatible type: %s",
                self._directory, self.value, type(self.value))
    def get_yaml_value(self, name, test, path):
        """Get the value for the parameter from the test case's yaml file.
        Args:
            name (str): name of the value in the yaml file
            test (Test): avocado Test object to use to read the yaml file
            path (str): yaml path where the name is to be found
        """
        super().get_yaml_value(name, test, path)
        # Re-anchor after every assignment from the yaml file.
        self._add_directory()
        self.log.debug(" Added the directory: %s => %s", name, self.value)
    def update(self, value, name=None, append=False):
        """Update the value of the parameter.
        Args:
            value (object): value to assign
            name (str, optional): name of the parameter which, if provided, is
                used to display the update. Defaults to None.
            append (bool, optional): append/extend/update the current list/dict
                with the provided value. Defaults to False - override the
                current value.
        """
        super().update(value, name, append)
        # Re-anchor after every direct update as well.
        self._add_directory()
        self.log.debug(" Added the directory: %s => %s", name, self.value)
class ObjectWithParameters():
    """A class for an object with parameters."""
    def __init__(self, namespace):
        """Create a ObjectWithParameters object.
        Args:
            namespace (str): yaml namespace (path to parameters)
        """
        self.namespace = namespace
        self.log = getLogger(__name__)
    def get_attribute_names(self, attr_type=None):
        """Get a sorted list of the names of the attr_type attributes.
        Args:
            attr_type(object, optional): A single object type or tuple of
                object types used to filter class attributes by their type.
                Defaults to None.
        Returns:
            list: a list of class attribute names used to define parameters
        """
        # attr_type=None selects every instance attribute.
        return [
            name for name in sorted(self.__dict__.keys())
            if attr_type is None or isinstance(getattr(self, name), attr_type)]
    def get_param_names(self):
        """Get a sorted list of the names of the BasicParameter attributes.
        Note: Override this method to change the order or inclusion of a
            command parameter in the get_params() method.
        Returns:
            list: a list of class attribute names used to define parameters
        """
        return self.get_attribute_names(BasicParameter)
    def get_params(self, test):
        """Get values for all of the command params from the yaml file.
        Sets each BasicParameter object's value to the yaml key that matches
        the assigned name of the BasicParameter object in this class. For
        example, the self.block_size.value will be set to the value in the yaml
        file with the key 'block_size'.
        If no key matches are found in the yaml file the BasicParameter object
        will be set to its default value.
        Args:
            test (Test): avocado Test object
        """
        for name in self.get_param_names():
            getattr(self, name).get_yaml_value(name, test, self.namespace)
    def update_params(self, **params):
        """Update each of provided parameter name and value pairs."""
        for name, value in params.items():
            try:
                # Raises AttributeError when the attribute is missing or is
                # not a parameter object with an update() method.
                getattr(self, name).update(value, name)
            except AttributeError as error:
                raise CommandFailure("Unknown parameter: {}".format(name)) from error
class CommandWithParameters(ObjectWithParameters):
    """A class for command with parameters."""
    def __init__(self, namespace, command, path=""):
        """Create a CommandWithParameters object.
        Uses Avocado's utils.process module to run a command str provided.
        Args:
            namespace (str): yaml namespace (path to parameters)
            command (str): string of the command to be executed.
            path (str, optional): path to location of command binary file.
                Defaults to "".
        """
        super().__init__(namespace)
        self._command = command
        self._path = path
        # Optional command string to prepend (e.g. env/sudo wrappers).
        self._pre_command = None
    @property
    def command(self):
        """Get the command without its parameters."""
        return self._command
    @property
    def command_path(self):
        """Get the path used for the command."""
        return self._path
    def __str__(self):
        """Return the command with all of its defined parameters as a string.
        Returns:
            str: the command with all the defined parameters
        """
        # Join all the parameters that have been assigned a value with the
        # command to create the command string
        params = []
        for name in self.get_str_param_names():
            value = str(getattr(self, name))
            if value != "":
                params.append(value)
        # Append the path to the command and prepend it with any other
        # specified commands
        command_list = [] if self._pre_command is None else [self._pre_command]
        command_list.append(os.path.join(self._path, self._command))
        # Return the command and its parameters
        return " ".join(command_list + params)
    def get_str_param_names(self):
        """Get a sorted list of the names of the command attributes.
        Returns:
            list: a list of class attribute names used to define parameters
            for the command.
        """
        return self.get_param_names()
class YamlParameters(ObjectWithParameters):
    """A class of parameters used to create a yaml file."""
    def __init__(self, namespace, filename=None, title=None, other_params=None):
        """Create a YamlParameters object.
        Args:
            namespace (str): yaml namespace (path to parameters)
            filename (str): the yaml file to generate with the parameters
            title (str, optional): namespace under which to place the
                parameters when creating the yaml file. Defaults to None.
            other_params (YamlParameters, optional): yaml parameters to
                include with these yaml parameters. Defaults to None.
        """
        super().__init__(namespace)
        self.filename = filename
        self.title = title
        self.other_params = other_params
    def get_params(self, test):
        """Get values for the yaml parameters from the test yaml file.
        Args:
            test (Test): avocado Test object
        """
        # Get the values for the yaml parameters defined by this class
        super().get_params(test)
        # Get the values for the yaml parameters defined by the other class
        if self.other_params is not None:
            self.other_params.get_params(test)
    def get_yaml_data(self):
        """Convert the parameters into a dictionary to use to write a yaml file.
        Returns:
            dict: a dictionary of parameter name keys and values
        """
        # Start from the nested parameters' data so this object's values
        # take precedence on key collisions.
        if (self.other_params is not None and
                hasattr(self.other_params, "get_yaml_data")):
            yaml_data = self.other_params.get_yaml_data()
        else:
            yaml_data = {}
        for name in self.get_param_names():
            value = getattr(self, name).value
            if value is not None:
                yaml_data[name] = value
        return yaml_data if self.title is None else {self.title: yaml_data}
    def is_yaml_data_updated(self):
        """Determine if any of the yaml file parameters have been updated.
        Returns:
            bool: whether or not a yaml file parameter has been updated
        """
        yaml_data_updated = False
        if (self.other_params is not None and
                hasattr(self.other_params, "is_yaml_data_updated")):
            yaml_data_updated = self.other_params.is_yaml_data_updated()
        if not yaml_data_updated:
            for name in self.get_param_names():
                if getattr(self, name).updated:
                    yaml_data_updated = True
                    break
        return yaml_data_updated
    def reset_yaml_data_updated(self):
        """Reset each yaml file parameter updated state to False."""
        if (self.other_params is not None and
                hasattr(self.other_params, "reset_yaml_data_updated")):
            self.other_params.reset_yaml_data_updated()
        for name in self.get_param_names():
            getattr(self, name).updated = False
    def create_yaml(self, filename=None):
        """Create a yaml file from the parameter values.
        A yaml file will only be created if at least one of its parameter values
        have be updated (BasicParameter.updated = True).
        Args:
            filename (str, optional): the yaml file to generate with the
                parameters. Defaults to None, which uses self.filename.
        Raises:
            CommandFailure: if there is an error creating the yaml file
        Returns:
            bool: whether or not an updated yaml file was created
        """
        create_yaml = self.is_yaml_data_updated()
        if create_yaml:
            # Write a new yaml file if any of the parameters have been updated
            if filename is None:
                filename = self.filename
            yaml_data = self.get_yaml_data()
            self.log.info("Writing yaml configuration file %s", filename)
            try:
                # NOTE(review): no encoding= is passed, so the file is
                # written in the platform default encoding — confirm UTF-8.
                with open(filename, 'w') as write_file:
                    yaml.dump(yaml_data, write_file, default_flow_style=False)
            except Exception as error:
                raise CommandFailure(
                    "Error writing the yaml file {}: {}".format(
                        filename, error)) from error
            self.reset_yaml_data_updated()
        return create_yaml
    def set_value(self, name, value):
        """Set the value for a specified attribute name.
        Args:
            name (str): name of the attribute for which to set the value
            value (object): the value to set
        Returns:
            bool: if the attribute name was found and the value was set
        """
        status = False
        setting = getattr(self, name, None)
        if setting is not None and hasattr(setting, "update"):
            # Parameter objects are updated through their own API.
            setting.update(value, name)
            status = True
        elif setting is not None:
            # Plain attributes are overwritten directly.
            setattr(self, name, value)
            self.log.debug("Updated param %s => %s", name, value)
            status = True
        elif self.other_params is not None:
            # Fall back to the nested parameters object.
            status = self.other_params.set_value(name, value)
        return status
    def get_value(self, name):
        """Get the value of the specified attribute name.
        Args:
            name (str): name of the attribute from which to get the value
        Returns:
            object: the object's value referenced by the attribute name
        """
        setting = getattr(self, name, None)
        if setting is not None and hasattr(setting, "value"):
            value = setting.value
        elif setting is not None:
            value = setting
        elif self.other_params is not None:
            value = self.other_params.get_value(name)
        else:
            value = None
        return value
class TransportCredentials(YamlParameters):
    """Transport credentials listing certificates for secure communication."""
    def __init__(self, namespace, title, log_dir):
        """Initialize a TransportConfig object.
        Args:
            namespace (str): yaml namespace (path to parameters)
            title (str, optional): namespace under which to place the
                parameters when creating the yaml file. Defaults to None.
            log_dir (str): fixed directory for the certificate file parameter
        """
        super().__init__(namespace, None, title)
        # Insecure mode defaults to the DAOS_INSECURE_MODE env var (or True).
        default_insecure = str(os.environ.get("DAOS_INSECURE_MODE", True))
        default_insecure = default_insecure.lower() == "true"
        self.ca_cert = LogParameter(log_dir, None, "daosCA.crt")
        self.allow_insecure = BasicParameter(None, default_insecure)
    def get_yaml_data(self):
        """Convert the parameters into a dictionary to use to write a yaml file.
        Returns:
            dict: a dictionary of parameter name keys and values
        """
        yaml_data = super().get_yaml_data()
        # Always emit the allow_insecure entry explicitly, even when it is
        # False/None (the base class omits None-valued parameters).
        if self.title is not None:
            yaml_data[self.title]["allow_insecure"] = self.allow_insecure.value
        else:
            yaml_data["allow_insecure"] = self.allow_insecure.value
        return yaml_data
    def get_certificate_data(self, name_list):
        """Get certificate data by name_list.
        Args:
            name_list (list): list of certificate attribute names.
        Returns:
            data (dict): a dictionary of parameter directory name keys and
                value.
        """
        data = {}
        # Certificates are only collected in secure mode.
        if not self.allow_insecure.value:
            for name in name_list:
                value = getattr(self, name).value
                if isinstance(value, str):
                    # Group the certificate file names by their directory.
                    dir_name, file_name = os.path.split(value)
                    if dir_name not in data:
                        data[dir_name] = [file_name]
                    else:
                        data[dir_name].append(file_name)
        return data
class CommonConfig(YamlParameters):
    """Defines common daos_agent and daos_server configuration file parameters.
    Includes:
        - the daos system name (name)
        - a list of access point nodes (access_points)
        - the default port number (port)
        - the transport credentials
    """
    def __init__(self, name, transport):
        """Initialize a CommonConfig object.
        Args:
            name (str): default value for the name configuration parameter
            transport (TransportCredentials): transport credentials
        """
        super().__init__(
            "/run/common_config/*", None, None, transport)
        # Common configuration parameters
        # - name: <str>, e.g. "daos_server"
        # Name associated with the DAOS system.
        #
        # - access_points: <list>, e.g. ["hostname1:10001"]
        # Hosts can be specified with or without port, default port below
        # assumed if not specified. Defaults to the hostname of this node
        # at the default port for local testing
        #
        # - port: <int>, e.g. 10001
        # Default port number with which to bind the daos_server. This
        # will also be used when connecting to access points if the list
        # only contains host names.
        #
        self.name = BasicParameter(None, name)
        self.access_points = BasicParameter(None, ["localhost"])
        self.port = BasicParameter(None, 10001)
class EnvironmentVariables(dict):
    """Dictionary of environment variable keys and values.

    A value of None represents a variable that is exported without an
    explicit assignment (bare "KEY").
    """
    def copy(self):
        """Return a shallow copy of this object.
        Returns:
            EnvironmentVariables: a copy of this object
        """
        return EnvironmentVariables(self)
    def get_list(self):
        """Get a list of environment variable assignments.
        Returns:
            list: a list of environment variable assignment (key=value) strings
        """
        assignments = []
        for variable, setting in self.items():
            if setting is None:
                assignments.append(variable)
            else:
                assignments.append("{}={}".format(variable, setting))
        return assignments
    def get_export_str(self, separator=";"):
        """Get the command to export all of the environment variables.
        Args:
            separator (str, optional): export command separator.
                Defaults to ";".
        Returns:
            str: a string of export commands for each environment variable
        """
        commands = ["export {}".format(entry) for entry in self.get_list()]
        if not commands:
            return ""
        # Include a trailing separator so another command can be appended.
        return separator.join(commands) + separator
class PositionalParameter(BasicParameter):
    """Parameter that defines position.
    Used to support positional parameters for dmg and daos.
    """
    def __init__(self, position, default=None):
        """Create a PositionalParameter object.
        Args:
            position (int): argument position/order
            default (object, optional): default value for the param. Defaults to
                None.
        """
        super().__init__(default, default)
        self._position = position
    @property
    def position(self):
        """Position property that defines the position of the parameter."""
        return self._position
    def __lt__(self, other):
        # Ordering considers only the position, not the parameter value.
        return self.position < other.position
    def __gt__(self, other):
        return self.position > other.position
    def __eq__(self, other):
        # NOTE: equality is position-only; two parameters with different
        # values but the same position compare equal (and hash equal).
        return self.position == other.position
    def __hash__(self):
        """Returns self.position as the hash of the class.
        This is used in CommandWithPositionalParameters.get_attribute_names()
        where we use this object as the key for a dictionary.
        """
        return self.position
class CommandWithPositionalParameters(CommandWithParameters):
    """Command that uses positional parameters.
    Used to support positional parameters for dmg and daos.
    """
    def get_attribute_names(self, attr_type=None):
        """Get a sorted list of the names of the attr_type attributes.
        The list has the ordered positional parameters first, then
        non-positional parameters.
        Args:
            attr_type(object, optional): A single object type or tuple of
                object types used to filter class attributes by their type.
                Defaults to None.
        Returns:
            list: a list of class attribute names used to define parameters
        """
        positional = {}
        non_positional = []
        for name in sorted(list(self.__dict__)):
            attr = getattr(self, name)
            # Match the base-class contract: attr_type=None selects every
            # attribute.  The original isinstance(attr, None) call raised
            # TypeError whenever the documented default was used.
            if attr_type is None or isinstance(attr, attr_type):
                if hasattr(attr, "position"):
                    # PositionalParameter objects sort and hash by position,
                    # so they can be used directly as dictionary keys here.
                    positional[attr] = name
                else:
                    non_positional.append(name)
        return [positional[key] for key in sorted(positional)] + non_positional
|
[
"noreply@github.com"
] |
noreply@github.com
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.