text stringlengths 8 6.05M |
|---|
from ..FeatureExtractor import InterExtractor
from common_functions.plot_methods import plot_vs_frequencies
class pct_80_montecarlo_extractor(plot_vs_frequencies, InterExtractor):
    """Select the montecarlo curve that corresponds to the desired
    percent significance from the sorted stack of montecarlo spectra."""

    active = True                       # extractor is enabled
    extname = 'pct_80_montecarlo' #extractor's name
    percent_significance = 80           # desired significance, in percent

    def extract(self):
        """Fetch the montecarlo spectra and return the one at the
        significance-matched index; error out when the requested certainty
        exceeds what the iteration count can support."""
        self.spectra = self.fetch_extr('montecarlo')
        position = self.calc_index()
        if position == len(self.spectra):
            self.ex_error("too high degree of certainty for this number of montecarlo iteration")
        return self.spectra[position]

    def calc_index(self):
        """Map `percent_significance` onto an index into the sorted spectra array."""
        total = self.spectra.shape[0]
        return int(round(float(self.percent_significance) / 100.0 * total))
class pct_90_montecarlo_extractor(pct_80_montecarlo_extractor):
    """Same curve-selection logic as the 80% extractor, at the 90% level."""
    active = True
    extname = 'pct_90_montecarlo' #extractor's name
    percent_significance = 90
class pct_95_montecarlo_extractor(pct_80_montecarlo_extractor):
    """Same curve-selection logic as the 80% extractor, at the 95% level."""
    active = True
    extname = 'pct_95_montecarlo' #extractor's name
    percent_significance = 95
class pct_99_montecarlo_extractor(pct_80_montecarlo_extractor):
    """Same curve-selection logic as the 80% extractor, at the 99% level."""
    active = True
    extname = 'pct_99_montecarlo' #extractor's name
    percent_significance = 99
|
# Read two integers and print their largest common divisor found strictly
# below the second value (0 when none exists).
# NOTE(review): range(2, second) excludes `second` itself, so when the second
# number divides the first the true gcd is not reported -- confirm intent.
first, second = map(int, input().split())
largest = 0
for divisor in range(2, second):
    if first % divisor == 0 and second % divisor == 0:
        largest = divisor
print(largest)
|
'''
Created on Nov 17, 2010
@author: Jason Huang
'''
import Marker
from datetime import datetime
class SaveMarker():
    """Persist a new Marker for a trip and refresh the trip's update time."""

    @staticmethod
    def save(trip, type, description, latitude, longtidue):
        # NOTE(review): `import Marker` binds a module, so `Marker()` calls a
        # module object and raises TypeError -- presumably this should be
        # `Marker.Marker()` or `from Marker import Marker`; confirm.
        tripMarker = Marker()
        tripMarker.type = type
        # NOTE(review): the `description` parameter is never stored -- looks
        # like a missing `tripMarker.description = description`; confirm.
        tripMarker.latitude = latitude
        # 'longtidue' (sic) kept as-is; the stored property name presumably
        # shares the typo -- verify against the Marker model before renaming.
        tripMarker.longtidue = longtidue
        tripMarker.put()
        # link the new marker to the trip and bump its modification time
        trip.markerGroup.append(tripMarker.key())
        trip.updateTime = datetime.now()
        trip.put()
|
#!/usr/bin/python
from __future__ import division
import numpy as np
import pandas as pd
import random
import sys
import csv
from sklearn.metrics import matthews_corrcoef
from sklearn.metrics import classification_report
def count_frame(file):
    """Print and return the fraction of rows where 'class' equals 'predicted1'.

    file: path to a CSV containing 'class' and 'predicted1' columns.
    Returns the accuracy as a float (0.0 for an empty file; the original
    raised ZeroDivisionError in that case).
    """
    data_frame = pd.read_csv(file)
    data_frame_len = len(data_frame)
    if data_frame_len == 0:
        # guard against division by zero on an empty input file
        print("Accuracy: 0.0")
        return 0.0
    matches = data_frame[data_frame['class'] == data_frame['predicted1']]
    acc = len(matches) / data_frame_len
    print("Accuracy: " + str(acc))
    return acc
def compute_mcc(file):
    """Print a per-class classification report and the Matthews correlation
    coefficient for the 'class' vs 'predicted1' columns of the given CSV."""
    frame = pd.read_csv(file)
    actual = frame['class'].values
    predicted = frame['predicted1'].values
    coefficient = matthews_corrcoef(actual, predicted)
    print(classification_report(actual, predicted, target_names=['-1', '1']))
    print('Correlation: ' + str(coefficient))
# Guard the script entry point so importing this module no longer runs the
# reports as a side effect; both read the CSV named by the first CLI argument.
if __name__ == "__main__":
    compute_mcc(sys.argv[1])
    count_frame(sys.argv[1])
|
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
    """Initial schema for the admin app: aliases, alias recipients, domains,
    domain aliases, mailboxes, mailbox operations, shared date tracking and
    per-user quotas, plus the FK wiring between them."""

    dependencies = [
        # Mailbox.user below targets the (possibly swapped) auth user model.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Alias',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('address', models.CharField(help_text='The alias address.', max_length=254, verbose_name='address')),
                ('enabled', models.BooleanField(default=True, help_text='Check to activate this alias', verbose_name='enabled')),
                ('internal', models.BooleanField(default=False)),
            ],
            options={
                'ordering': ['address'],
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='AliasRecipient',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('address', models.EmailField(max_length=75)),
                ('alias', models.ForeignKey(to='admin.Alias', on_delete=models.CASCADE)),
                # r_alias: optional link when the recipient is itself an alias
                ('r_alias', models.ForeignKey(related_name='alias_recipient_aliases', blank=True, to='admin.Alias', null=True, on_delete=models.CASCADE)),
            ],
            options={
                'db_table': 'modoboa_admin_aliasrecipient',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Domain',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(help_text='The domain name', unique=True, max_length=100, verbose_name='name')),
                ('quota', models.IntegerField()),
                ('enabled', models.BooleanField(default=True, help_text='Check to activate this domain', verbose_name='enabled')),
                ('type', models.CharField(default=b'domain', max_length=20)),
            ],
            options={
                'ordering': ['name'],
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='DomainAlias',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(help_text='The alias name', unique=True, max_length=100, verbose_name='name')),
                ('enabled', models.BooleanField(default=True, help_text='Check to activate this alias', verbose_name='enabled')),
            ],
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Mailbox',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('address', models.CharField(help_text='Mailbox address (without the @domain.tld part)', max_length=252, verbose_name='address')),
                ('quota', models.PositiveIntegerField()),
                ('use_domain_quota', models.BooleanField(default=False)),
            ],
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='MailboxOperation',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('type', models.CharField(max_length=20, choices=[(b'rename', b'rename'), (b'delete', b'delete')])),
                ('argument', models.TextField()),
                ('mailbox', models.ForeignKey(blank=True, to='admin.Mailbox', null=True, on_delete=models.CASCADE)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='ObjectDates',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('creation', models.DateTimeField(auto_now_add=True)),
                ('last_modification', models.DateTimeField(auto_now=True)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Quota',
            fields=[
                # keyed directly on the mailbox address, no surrogate id
                ('username', models.EmailField(max_length=254, serialize=False, primary_key=True)),
                ('bytes', models.BigIntegerField(default=0)),
                ('messages', models.IntegerField(default=0)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        # FK fields added after model creation to avoid forward references.
        migrations.AddField(
            model_name='mailbox',
            name='dates',
            field=models.ForeignKey(to='admin.ObjectDates', on_delete=models.CASCADE),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='mailbox',
            name='domain',
            field=models.ForeignKey(to='admin.Domain', on_delete=models.CASCADE),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='mailbox',
            name='user',
            field=models.OneToOneField(to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='domainalias',
            name='dates',
            field=models.ForeignKey(to='admin.ObjectDates', on_delete=models.CASCADE),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='domainalias',
            name='target',
            field=models.ForeignKey(verbose_name='target', to='admin.Domain', help_text='The domain this alias points to', on_delete=models.CASCADE),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='domain',
            name='dates',
            field=models.ForeignKey(to='admin.ObjectDates', on_delete=models.CASCADE),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='aliasrecipient',
            name='r_mailbox',
            field=models.ForeignKey(blank=True, to='admin.Mailbox', null=True, on_delete=models.CASCADE),
            preserve_default=True,
        ),
        migrations.AlterUniqueTogether(
            name='aliasrecipient',
            unique_together=set([('alias', 'r_alias'), ('alias', 'r_mailbox')]),
        ),
        migrations.AddField(
            model_name='alias',
            name='dates',
            field=models.ForeignKey(to='admin.ObjectDates', on_delete=models.CASCADE),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='alias',
            name='domain',
            field=models.ForeignKey(to='admin.Domain', null=True, on_delete=models.CASCADE),
            preserve_default=True,
        ),
        migrations.AlterUniqueTogether(
            name='alias',
            unique_together=set([('address', 'internal')]),
        ),
    ]
|
#!/usr/bin/env python3
import _thread
import time
def print_time(threadName, delay):
    """Print `threadName` and the current wall-clock time five times,
    sleeping `delay` seconds before each print."""
    for _ in range(5):
        time.sleep(delay)
        print("%s: %s" % (threadName, time.ctime(time.time())))
# Launch the two worker threads; _thread offers no join, so the main thread
# must stay alive for them to finish.
try:
    _thread.start_new_thread(print_time, ("Thread-1", 2, ))
    _thread.start_new_thread(print_time, ("Thread-2", 4, ))
except Exception as e:
    print("Error: can not start the thread", e)

# Keep the main thread alive. The original `while 1: pass` busy-waited at
# 100% CPU; sleeping is equivalent for keeping the process up.
while True:
    time.sleep(1)
import time
import tornado
from tornado.web import RequestHandler
from utils import config
class RenderException(Exception):
    """Exception carrying an HTTP-style code plus optional template
    information so the handler can render a friendly error page."""

    def __init__(self, code, message, update_user=True, template=None, template_args=None):
        super(RenderException, self).__init__(message)
        self.code = code
        self.message = message
        self.update_user = update_user      # whether to refresh user state on error
        self.template = template            # optional template to render
        self.template_args = template_args  # args passed to that template
class BaseHandler(tornado.web.RequestHandler):
    """Common handler base: Google-user cookie/session helpers shared by
    the application's request handlers."""

    def __init__(self, application, request, **kwargs):
        # set handler state before RequestHandler.__init__ runs
        self.providers = dict()
        self.logger = application.logger
        self.data = application.data
        super(BaseHandler, self).__init__(application, request, **kwargs)

    def get_google_user(self):
        """Return (gid, user_info) read from the secure cookies, or
        (None, None) when either cookie is missing."""
        gid = self.get_secure_cookie(config.USER_ID_COOKIE_NAME)
        session_id = self.get_secure_cookie(config.USER_SESSION_COOKIE_NAME)
        # both cookies must be present to continue
        if not gid or not session_id:
            return None, None
        return gid, self.data.get_gid_info(gid)

    def get_gl_user(self):
        """Return the validated Google user info, or None after clearing a
        stale/inconsistent session."""
        gid, gl_user = self.get_google_user()
        is_valid = gl_user and 'id' in gl_user and gl_user['id'] and gl_user['id'] == gid
        if is_valid:
            return gl_user
        # clear cookies and let user to re-sign in
        self.clear_current_user_session()
        return None

    def set_current_user_session(self, gid):
        """Store the user id (1-day expiry) and a session-start timestamp."""
        self.set_secure_cookie(config.USER_ID_COOKIE_NAME, gid, expires_days=1)
        self.set_secure_cookie(config.USER_SESSION_COOKIE_NAME, str(int(time.time())))

    def clear_current_user_session(self):
        """Drop all provider sessions for the cookie's user and clear both cookies."""
        gid = self.get_secure_cookie(config.USER_ID_COOKIE_NAME)
        self.data.del_all_provider_sessions(gid)
        self.clear_cookie(config.USER_ID_COOKIE_NAME)
        self.clear_cookie(config.USER_SESSION_COOKIE_NAME)

    def error_redirect(self, code=0, message=''):
        pass

    def data_received(self, chunk):
        pass

    def selector_redirect(self, provider):
        """Send the client to the provider-selector page for `provider`."""
        base = self.settings['auth_redirects']['selector']
        self.redirect(base + '?p=' + provider)

    def get_redirect_url(self):
        """Build the OAuth redirect URL -- always https, regardless of the
        incoming request's protocol."""
        redirect_url = 'https://' + self.request.host + self.settings['api_path'] + self.request.path
        self.logger.info('AuthLoginHandler: redirect_uri = [{0}]'.format(redirect_url))
        return redirect_url
#Joseph Harrison 2020
#solve linear diophantine equations in 2 variables
import gcdbez
import timeit
def main():
    """Solve ax + by = c over the integers and report the solution family."""
    print('solve linear diophantine equations of the form:\n')
    print('  ax + by = c\n')
    print('by finding an integer solution (x, y)\n')
    # read the three coefficients
    a = gcdbez.get_int_input('a: ')
    b = gcdbez.get_int_input('b: ')
    c = gcdbez.get_int_input('c: ')

    start = timeit.default_timer()
    # d = gcd(a, b); u, v are Bezout coefficients satisfying au + bv = d
    d, u, v = gcdbez.gcd_bez(a, b)
    # scale the Bezout pair by c / d to get one particular solution (x0, y0)
    x0 = c // d * u
    y0 = c // d * v
    end = timeit.default_timer()

    # output the solutions (if they exist)
    print('\nequation and solutions:\n')
    print(f'  {a}x + {b}y = {c}\n')
    if c % d != 0:
        print(f"no integer solutions, because {d} doesn't divide {c}")
    else:
        print(f'x = {x0} - {b // d}n and y = {y0} + {a // d}n for any integer n')
    print(f'finished in {round(end - start, 4)}s (4d.p)')


if __name__ == '__main__':
    main()
from collections import Counter
from string import ascii_lowercase
def decrypt(test_key):
    """Return the 26 concatenated per-letter counts of *test_key*,
    case-insensitively, in a-z order (zero for absent letters)."""
    lowered = test_key.lower()
    return ''.join(str(lowered.count(letter)) for letter in ascii_lowercase)
|
#!/usr/bin/env /data/mta/Script/Python3.8/envs/ska3-shiny/bin/python
#################################################################################################
# #
# extract_soloar_panel_data.py: extract soloar panel related msid data #
# #
# author: t. isobe (tisobe@cfa.harvard.edu) #
# #
# last update: Mar 12, 2021 #
# #
#################################################################################################
import os
import sys
import re
import string
import math
import time
import random
import numpy
import astropy.io.fits as pyfits
import unittest
import Chandra.Time
import Ska.engarchive.fetch as fetch
#
#--- from ska
#
from Ska.Shell import getenv, bash
# ascds shell environment used to run dataseeker under tcsh
ascdsenv = getenv('source /home/ascds/.ascrc -r release; punlearn dataseeker ', shell='tcsh')
#
#--- reading directory list
#
path = '/data/mta/Script/Sol_panel/Scripts/house_keeping/dir_list'
with open(path, 'r') as f:
    data = [line.strip() for line in f.readlines()]
for ent in data:
    atemp = re.split(':', ent)
    var = atemp[1].strip()
    line = atemp[0].strip()
    # NOTE(review): exec on file contents defines module-level names used
    # below (bin_dir, mta_dir, data_dir, house_keeping, ...); anything placed
    # in dir_list executes as code -- the file must be trusted.
    exec("%s = %s" %(var, line))
#
#--- append pathes to private folders to a python directory
#
sys.path.append(bin_dir)
sys.path.append(mta_dir)
#
#--- import several functions
#
import mta_common_functions as mcf      #---- contains other functions commonly used in MTA scripts
#
#--- temp writing file name
#
rtail = int(time.time() * random.random())
zspace = '/tmp/zspace' + str(rtail)
#
#--- set a few lists
#
msid_list = ['tmysada', 'tpysada', 'tsamyt', 'tsapyt', 'tfssbkt1', 'tfssbkt2', 'tpc_fsse']
dcol_list = ['obattpwr', 'ohrmapwr', 'oobapwr']
angle_list = [40, 60, 80, 100, 120, 140, 160]   #--- lower edges of the 20-degree sun-angle bins
#
#--- some other definitions
#
header = 'time\tsuncent\ttmysada\ttpysada\ttsamyt\ttsapyt\ttfssbkt1\ttfssbkt2'
header = header + '\ttpc_fsse\telbi\telbv\tobattpwr\tohrmapwr\toobapwr'
solor_file = 'solar_panel_all_data'
orb_data = '/data/mta/DataSeeker/data/repository/orb_angle.rdb'
#---------------------------------------------------------------------------------------
#-- extract_soloar_panel_data: extract sun cneter angle and soloar panel ---
#---------------------------------------------------------------------------------------
def extract_soloar_panel_data(tstart='', tstop=''):
    """
    extract sun center angle and solar panel related msid quantities to make a data table
    input:  tstart  --- starting time (seconds from 1998.1.1, or '' for "since last entry")
            tstop   --- stopping time (seconds from 1998.1.1, or '' for "up to today")
    output: <data_dir>/<solor_file>, and per-angle files via separate_data_into_angle_step
    """
#
#--- if starting time is not given, find the last entry time
#
    if tstart == '':
        tstart = find_last_entry_time()
    tbegin = Chandra.Time.DateTime(tstart).date
#
#--- if stopping time is not given, use today's date
#
    if tstop == '':
        tend = time.strftime("%Y:%j:00:00:00", time.gmtime())
        tstop = Chandra.Time.DateTime(tend).secs
    else:
        tend = Chandra.Time.DateTime(tstop).date
    print("Period: " + tbegin + '<--->' + tend)
#
#--- extract sun cent angle data
#
    [stime, suncent] = get_sunangle_data(tstart, tstop)
    all_data = [stime, suncent]
#
#--- extract first data, and match which elements match with those of sun center data
#--- and create a list of indicies which will be used for other msid data
#
    [mtime, data] = get_data_from_ska(msid_list[0], tstart, tstop)
    index = match_to_suncent(stime, mtime)
    data = select_by_index(data, index)
    all_data.append(data)
#
#--- run the rest (same index is reused: the ska msids share a time grid)
#
    for msid in msid_list[1:]:
        [mtime, data] = get_data_from_ska(msid, tstart, tstop)
        data = select_by_index(data, index)
        all_data.append(data)
#
#--- elbi and elbv have 100 times more dense data; so use only 1 in 100 data points
#
    [mtime, data] = get_data_from_ska('elbi', tstart, tstop)
    mtime = mtime[::100]
    data = data[::100]
    index = match_to_suncent(stime, mtime)
    data = select_by_index(data, index)
    all_data.append(data)

    [mtime, data] = get_data_from_ska('elbv', tstart, tstop)
    data = data[::100]
    # NOTE(review): reuses the index built from elbi's (decimated) times;
    # assumes elbv shares the same time grid -- confirm.
    data = select_by_index(data, index)
    all_data.append(data)
#
#--- dataseeker results
#
    [dtime, obattpwr, ohrmapwr, oobapwr] = get_data_with_dataseeker(tstart, tstop, dcol_list)
    index = match_to_suncent(stime, dtime)
    data = select_by_index(obattpwr, index)
    all_data.append(data)
    data = select_by_index(ohrmapwr, index)
    all_data.append(data)
    data = select_by_index(oobapwr, index)
    all_data.append(data)
#
#--- initialize data saver
#
    sdata = []
    for k in range(0, len(all_data)):
        sdata.append([])
#
#--- build one tab-separated text row per time entry; column 0 is integer
#--- time, all other columns are written with 3-decimal precision
#
    line = ''
    for k in range(0, len(all_data[0])):
        val = all_data[0][k]
        line = line + "%d" % val
        sdata[0].append(int(val))
        for m in range(1, len(all_data)):
            val = "%3.3f" % all_data[m][k]
            line = line + "\t" + val
            sdata[m].append(float(val))
        line = line + '\n'
#
#--- write out data
#
    outname = data_dir + solor_file
    if os.path.isfile(outname):
        with open(outname, 'a') as fo:
            fo.write(line)
#
#--- if this is the first time, add the header
#
    else:
        aline = "#" + header + '\n'
        aline = aline + "#" + '-'*120 + '\n'
        aline = aline + line
        with open(outname, 'w') as fo:
            fo.write(aline)
#
#--- separate data into several angle interval files
#
    separate_data_into_angle_step(sdata, tstart)
#---------------------------------------------------------------------------------------
#-- find_last_entry_time: find the last data entry time --
#---------------------------------------------------------------------------------------
def find_last_entry_time():
    """
    find the last data entry time
    input:  none, but read <data_dir>/<solor_file>
    output: ltime   --- the last entry time in seconds from 1998.1.1
            (falls back to 2000:001:00:00:00 when the file is missing,
             empty, or its last row is malformed)
    """
    try:
        ifile = data_dir + solor_file
        data = mcf.read_data_file(ifile)
        # first column of the last row is the newest timestamp
        atemp = re.split(r'\s+', data[-1])
        ltime = int(float(atemp[0]))
    # the original bare `except:` also swallowed SystemExit/KeyboardInterrupt;
    # Exception keeps the deliberate best-effort fallback without that.
    except Exception:
        ltime = 63071999    #--- 2000:001:00:00:00
    return ltime
#---------------------------------------------------------------------------------------
#-- select_by_index: select data elements by a list of indicies --
#---------------------------------------------------------------------------------------
def select_by_index(data, index):
    """
    select data elements by a list of indicies
    input:  data    --- data list
            index   --- a list of indicies of which elements will be selected
    output: the selected elements, as a plain list
    """
    # numpy fancy indexing picks all requested positions in one shot
    return list(numpy.array(data)[index])
#---------------------------------------------------------------------------------------
#-- get_sunangle_data: read sunangle data --
#---------------------------------------------------------------------------------------
def get_sunangle_data(tstart, tstop):
    """
    read sunangle data
    input:  tstart  --- starting time in seconds from 1998.1.1
            tstop   --- stopping time in seconds from 1998.1.1
    output  stime   --- a list of time
            suncent --- a list of sun center angle
    NOTE(review): on failure this calls exit(1), terminating the whole
    process instead of raising -- callers cannot recover.
    """
#
#--- sun center angle
#
    data = mcf.read_data_file(orb_data)
    data = mcf.separate_data_to_arrays(data)
#
#--- first two rows are headers; skip them
#
    stime = data[0][2:]
    suncent = data[1][2:]
#
#--- locate the slice [istart:istop] covering (tstart, tstop];
#--- istart == -999 flags "not found yet"
#
    istart = -999
    istop = len(stime)
    for k in range(0, len(stime)):
        if (istart < 0) and (stime[k] > tstart):
            istart = k
        elif stime[k] > tstop:
            istop = k
            break
    if istart < 0:
        istart = 0
    try:
        stime = stime[istart:istop]
        suncent = suncent[istart:istop]
    except:
        # NOTE(review): bare except + exit(1) hides the real error; slicing
        # lists with valid ints should not raise -- confirm this is reachable.
        exit(1)
    if len(stime) < 1:
        exit(1)
    return [stime, suncent]
#---------------------------------------------------------------------------------------
#-- get_data_from_ska: extract data from ska database --
#---------------------------------------------------------------------------------------
def get_data_from_ska(msid, tstart, tstop):
    """
    extract data from ska database
    input:  msid    --- msid
            tstart  --- starting time in seconds from 1998.1.1
            tstop   --- stopping time in seconds from 1998.1.1
    output: [times, values] for the requested msid
    """
    result = fetch.MSID(msid, tstart, tstop)
    # renamed locals: the original `time` shadowed the time module
    return [result.times, result.vals]
#---------------------------------------------------------------------------------------
#-- match_to_suncent: create an index list which indicates which elements match with the suncent entries
#---------------------------------------------------------------------------------------
def match_to_suncent(stime, mtime):
    """
    create an index list which indicates which elements match with the suncent entries
    input:  stime   --- a list of time from suncent
            mtime   --- a list of time in which index will be selected
    output: save    --- a list of indices which match the suncent time
    NOTE(review): assumes both time lists are ascending -- confirm; the
    backtracking branch below suggests near-sorted input was expected.
    """
    slen = len(stime)
    mlen = len(mtime)
    save = []
    m = 0                       # search start position in mtime, advanced as matches are found
    for k in range(0, slen):
        kup = 0                 # set to 1 once an index was appended for stime[k]
        for n in range(m, mlen-1):
            # stime[k] falls inside [mtime[n], mtime[n+1]): take n
            if (stime[k] >= mtime[n]) and (stime[k] < mtime[n+1]):
                save.append(n)
                kup = 1
                m = n
                break
            # stime[k] is before the current mtime interval
            elif (stime[k] < mtime[n]):
                if n >= mlen-2:
                    m = mlen -2
                    save.append(m)
                    continue
                save.append(n)
                kup = 1
                # NOTE(review): no break here, so the inner scan keeps running
                # and can append several indices for the same stime[k] --
                # confirm whether a `break` was intended.
            # stime[k] is beyond the current interval: back up and retry
            elif stime[k] > mtime[n+1]:
                m -= 5
                if m < 0:
                    m = 0
                if stime[k] < mtime[m]:
                    save.append(m)
                    kup = 1
                    break
                else:
                    continue
        if kup == 1:
            continue
    return save
#---------------------------------------------------------------------------------------
#-- get_data_with_dataseeker: extract data using dataseeker ---
#---------------------------------------------------------------------------------------
def get_data_with_dataseeker(tstart, tstop, col_list):
    """
    extract data using dataseeker
    input:  tstart   --- starting time in seconds from 1998.1.1
            tstop    --- stopping time in seconds from 1998.1.1
            col_list --- data name to be extracted (without _ or _avg part)
    output: save     --- a list of lists of data, including time list
    Side effects: creates/removes 'test' and 'temp.fits' in the CWD and
    shells out to dataseeker.pl under the ascds environment.
    """
#
#--- check whether an empty command file exists; if not, create it
#--- (dataseeker requires an infile even when it is empty)
#
    if not os.path.isfile('test'):
        cmd = 'touch test'
        os.system(cmd)
#
#--- create dataseeker command
#
    cmd1 = '/usr/bin/env PERL5LIB="" '
    cmd2 = 'dataseeker.pl infile=test outfile=temp.fits '
    cmd2 = cmd2 + 'search_crit="columns='
#
#--- column name start with '_' and end '_avg'
#
    for k in range(0, len(col_list)):
        col = col_list[k]
        if k == 0 :
            acol = '_' + col + '_avg'
        else:
            acol = ',_' + col + '_avg'
        cmd2 = cmd2 + acol
    cmd2 = cmd2 + ' timestart=' + str(tstart) + ' timestop=' + str(tstop) + '"'
    cmd2 = cmd2 + ' loginFile=' + house_keeping + 'loginfile '
#
#--- run the dataseeker command under ascds environment
#
    cmd = cmd1 + cmd2
    bash(cmd, env=ascdsenv)
#
#--- read the data and create a list of lists
#
    hrd = pyfits.open('temp.fits')
    data = hrd[1].data
    hrd.close()
    dtime = data['time']
    save = [dtime]
    for col in col_list:
        acol = col + '_avg'
        save.append(data[acol])
#
#--- clean up
#
    mcf.rm_files('test')
    mcf.rm_files('temp.fits')
    return save
#---------------------------------------------------------------------------------------
#-- separate_data_into_angle_step: separate a full data set into several angle interval data sets
#---------------------------------------------------------------------------------------
def separate_data_into_angle_step(data, tstart=0):
    """
    separate a full data set into several angle interval data sets
    input:  data    --- data matrix of <col numbers> x <data length>
            tstart  --- starting time in seconds from 1998.1.1
    output: <data_dir>/solar_panel_angle_<angle>
    """
#
#--- set a few things
#
    alen = len(angle_list)          #--- the numbers of angle intervals
    clen = len(data)                #--- the numbers of data columns
    save = []                       #--- a list of lists to save the data
    for k in range(0, alen):
        save.append([])
#
#--- go through all time entries, but ignore time before tstart
#
    for k in range(0, len(data[0])):
        if data[0][k] < tstart:
            continue
        for m in range(0, alen):
#
#--- set angle interval; data[1] is the column to keep sun center angle
#--- each bin is [angle, angle + 20)
#
            abeg = angle_list[m]
            aend = abeg + 20
            if (data[1][k] >= abeg) and (data[1][k] < aend):
                line = create_data_line(data, clen, k)
                save[m].append(line)
                break
#
#--- create/update the data file for each angle interval
#
    for k in range(0, alen):
        outname = data_dir + 'solar_panel_angle_' + str(angle_list[k])
#
#--- print the data (skip bins with nothing new)
#
        if len(save[k]) == 0:
            continue
        line = ''
        for ent in save[k]:
            line = line + ent + '\n'
        if os.path.isfile(outname):
            with open(outname, 'a') as fo:
                fo.write(line)
#
#--- if this is the first time, add the header
#
        else:
            aline = "#" + header + '\n'
            aline = aline + '#' + '-'*120 + '\n'
            aline = aline + line
            with open(outname, 'w') as fo:
                fo.write(aline)
#---------------------------------------------------------------------------------------
#-- create_data_line: create output data line --
#---------------------------------------------------------------------------------------
def create_data_line(data, clen, k):
    """
    create output data line
    input:  data    --- data matrix of clen x len(data[0])
            clen    --- number of columns to emit
            k       --- row index into each column
    output: line    --- a tab-separated line of clen elements
    """
    # idiomatic join replaces the manual string concatenation loop
    return '\t'.join(str(data[m][k]) for m in range(clen))
#---------------------------------------------------------------------------------------
#-- TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST --
#---------------------------------------------------------------------------------------
class TestFunctions(unittest.TestCase):
    """
    testing functions
    NOTE: these are smoke tests -- they hit the live archives (ska,
    dataseeker) and print results instead of asserting on them.
    """
#---------------------------------------------------------------------------------------
    def test_get_sunangle_data(self):
        # fixed one-plus-week window, seconds from 1998.1.1
        tstart = 631151994
        tstop = 631929594
        out = get_sunangle_data(tstart, tstop)
        print("get_sunangle_data: " + str(out[0][0]) + '\t' + str(out[1][0]) + '\n')
#---------------------------------------------------------------------------------------
    def test_match_to_suncent(self):
        # synthetic grids: every 1s matched against every 4s
        stime = range(0, 30)
        mtime = range(0, 30, 4)
        index = match_to_suncent(stime, mtime)
        print("match_to_suncent:" + str(index))
#---------------------------------------------------------------------------------------
    def test_get_data_from_ska(self):
        msid = 'tmysada'
        tstart = 631151994
        tstop = 631929594
        out = get_data_from_ska(msid, tstart, tstop)
        print("get_data_from_ska:" + str(out[0][0]) + '\t' + str(out[1][0]) + '\n')
#---------------------------------------------------------------------------------------
    def test_get_data_with_dataseeker(self):
        print("I AM HERE RUNNIGN Dataseeker")
        tstart = 631151994
        tstop = 631929594
        col_list = dcol_list
        out = get_data_with_dataseeker(tstart, tstop, col_list)
        print("get_data_with_dataseeker: " + str(out[0][0]) + '\t' + str(out[1][0]) + '\t' + str(out[2][0]) + '\t' + str(out[3][0]) + '\n')
#---------------------------------------------------------------------------------------
if __name__ == "__main__":
    if len(sys.argv) >= 2:
        if sys.argv[1].lower() == 'test':
            #
            #--TEST TEST TEST TEST TEST TEST ----------------------------
            #
            sys.argv = [sys.argv[0]]        # unittest must not see our args
            unittest.main()
        #
        #-- REGULAR RUN ----------------------------
        #
        else:
            # BUG FIX: the original `if len == 2 / if len > 2 / else` chain
            # clobbered tstart with '' whenever exactly one time was given.
            if len(sys.argv) > 2:
                tstart = float(sys.argv[1])
                tstop = float(sys.argv[2])
            elif len(sys.argv) == 2:
                tstart = float(sys.argv[1])
                tstop = ''                  # let the extractor default to "today"
            else:
                tstart = ''
                tstop = ''
            extract_soloar_panel_data(tstart, tstop)
    else:
        extract_soloar_panel_data()
    exit(1)
    #
    #-- DATA RE-RUN ----------------------------
    # NOTE(review): everything below is unreachable because of exit(1) above;
    # kept for reference. `file` -> `ifile` fixed (NameError in Python 3).
    #
    ifile = data_dir + solor_file
    cmd = 'mv ' + ifile + ' ' + ifile + '~'
    os.system(cmd)
    ystart = ['001', '090', '180', '270']
    ystop = ['090', '180', '270', '001']
    for year in range(2000, 2019):
        for k in range(0, 4):
            if year == 2018 and k == 2:
                break
            nyear = year
            if k == 3:
                nyear += 1
            tstart = str(year) + ':' + ystart[k] + ':00:00:00'
            tstop = str(nyear) + ':' + ystop[k] + ':00:00:00'
            print("Period: " + tstart + '<-->' + tstop)
            tstart = Chandra.Time.DateTime(tstart).secs
            tstop = Chandra.Time.DateTime(tstop).secs
            print("\t" + str(tstart) + '<-->' + str(tstop))
            extract_soloar_panel_data(tstart, tstop)
|
import smtplib,os
from email.header import Header
from email.utils import parseaddr, formataddr
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.mime.base import MIMEBase
from email import encoders
from emailServer import EmailService
class EmailSend(object):
    """Static helpers for composing and sending mail through the SMTP server
    that EmailService resolves from the sender's mail provider."""

    @staticmethod
    def format_header_addr(s):
        '''
        :param s: a "name <address>" string, or a list of such strings
        :return: header-encoded address string (comma-joined for a list);
                 returns None for any other input type
        '''
        if isinstance(s,str):
            emname, emaddr = parseaddr(s)
            return formataddr((Header(emname, 'utf-8').encode(), emaddr))
        elif isinstance(s,list):
            res=[]
            for li in s:
                emname, emaddr = parseaddr(li)
                res.append(formataddr((Header(emname, 'utf-8').encode(), emaddr)))
            return ",".join(res)

    @staticmethod
    def get_email_addr(s):
        '''
        :param s: a "name <address>" string, or a list of such strings
        :return: the bare address, or a list of bare addresses
        '''
        if isinstance(s,str):
            emname, emaddr = parseaddr(s)
            return emaddr
        elif isinstance(s,list):
            res=[]
            for li in s:
                emname, emaddr = parseaddr(li)
                res.append(emaddr)
            return res

    @staticmethod
    def get_email_type(s):
        '''
        :param s: a "name <address>" string, or a list of such strings
        :return: upper-cased mail provider name(s), e.g. QQ for foo@qq.com
        '''
        if isinstance(s,str):
            emname, emaddr = parseaddr(s)
            return emaddr.split('@')[-1].split('.')[0].upper()
        elif isinstance(s,list):
            res=[]
            for li in s:
                emname, emaddr = parseaddr(li)
                res.append(emaddr.split('@')[-1].split('.')[0].upper())
            return res

    @staticmethod
    def sendEmail(from_addr,key,to_addr,title,content,files=None):
        '''
        :param from_addr: sender, e.g. "Sender <47029316@qq.com>"
        :param key: mail authorization code, e.g. "key": rvueixdphgjdbjeb
        :param to_addr: recipient list, e.g. ["R1 <445789@qq.com>", "R2 <8888@qq.com>"]
        :param title: mail subject
        :param content: mail body (plain text)
        :param files: optional list of attachment file paths
        :return: True on success, False on smtplib.SMTPException
        '''
        message = MIMEMultipart()
        message['From'] = EmailSend.format_header_addr(from_addr)   # sender header
        message['To'] = EmailSend.format_header_addr(to_addr)       # recipient header
        message['Subject'] = Header(title, 'utf-8')                 # subject
        message.attach(MIMEText(content, 'plain', 'utf-8'))         # body
        if files is not None:                                       # attachments
            for file in files:
                with open(file,'rb') as f:
                    # wrap the attachment payload in a MIMEBase container
                    attachment = MIMEBase('application', 'octet-stream')
                    attachment.set_payload(f.read())
                    # advertise the original file name to the client
                    attachment.add_header('Content-Disposition', 'attachment', filename=os.path.basename(file))
                    # base64-encode the raw bytes
                    encoders.encode_base64(attachment)
                    message.attach(attachment)
        try:
            # server tuple: (host, port, use_implicit_ssl)
            server = EmailService.getSMTPServer(EmailSend.get_email_type(from_addr))
            if server[2]:
                smtpObj = smtplib.SMTP_SSL(server[0], server[1])
            else:
                # plain connection upgraded with STARTTLS
                smtpObj = smtplib.SMTP(server[0],server[1])
                smtpObj.starttls()
            smtpObj.set_debuglevel(1)
            smtpObj.login(EmailSend.get_email_addr(from_addr), key)
            # send the message
            smtpObj.sendmail(from_addr=EmailSend.get_email_addr(from_addr), to_addrs=EmailSend.get_email_addr(to_addr), msg=message.as_string())
            # done
            smtpObj.quit()
            return True
        except smtplib.SMTPException as e:
            print(e)
            return False
if __name__ == '__main__':
    # Manual smoke test: sends a real mail using the QQ account below.
    print(EmailSend().sendEmail("测试发送 <2507461149@qq.com>",'luminati error',["测试接收1 <782992280@qq.com>","测试接收2 <2507461149@qq.com>"],
          "标题","luminati error"))
# SECURITY NOTE(review): the string below embeds what appear to be real
# mailbox credentials; they should be removed from source control and rotated.
'''
GnkuirBpdwhud@outlook.com bY7HX9Y9Qkii
JamarcusBarkerMT@aol.com W6CfVP9c
SusanMaynardr@aol.com OT071NO2
RolandoSmithS@aol.com BtFr09X1
MaxRudolfzR@yahoo.com v7nn21yAa
ElmerAshburbBr@yahoo.com k2J3cw1VI
ChristWendelliNc@yahoo.com rXxINc69l
LyndonAlgernongA@yahoo.com jzf13eXB0
LenMarthaxHw@yahoo.com p8i0X1Ol7
NuwdmbmEwobzbc@hotmail.com tUAbFDxE0NUl
YehrftfrAflcvjtj@hotmail.com aQLKD1PsqxK0
XnovlPrkinu@hotmail.com jl3UxUH5k9L8
JmhwqlqRfcrjgv@hotmail.com 9IEh391J4DU2
'''
# Variation in speed of sound with temperature
def speed_of_sound():
    """Prompt for an air temperature (degrees C) and return the speed of
    sound in m/s, or None when the input is outside [0, 50].

    Fixes two defects in the original: `speed` was returned without being
    assigned on the out-of-range path (UnboundLocalError), and the strict
    `0 < temp < 50` check rejected the boundary values the prompt allows.
    """
    temp = int(input("Enter a temp. between 0 and 50: "))
    if 0 <= temp <= 50:
        # empirical linear model: 331 m/s at 0 C, +0.6 m/s per degree
        return 331 + 0.6 * temp
    print("Temperature out of range.")
    return None
import datetime
class Person:
    """A named person with an age, constructible directly or from a birth year."""

    def __init__(self, name, age):
        self.name = name
        self.age = age

    @classmethod
    def create_person(cls, name, year_of_birth):
        """Alternate constructor: derive the age from the current year."""
        current_year = datetime.datetime.now().year
        return cls(name, current_year - year_of_birth)

    def __str__(self):
        return f'{self.name} is {self.age} years old.'
# Demo: build two Person instances from birth years and print them.
John = Person.create_person('Jan', 1987)
Cindy = Person.create_person('Cindy', 1977)
print(John)
print(Cindy)
|
import numpy as np
import cv2
from matplotlib import pyplot as plt
# First pass: rectangle-initialized GrabCut segmentation of the source image.
img = cv2.imread('dep.jpg')
mask = np.zeros(img.shape[:2], np.uint8)
# GrabCut model buffers must be 1x65 float64 arrays (internal GMM state).
bgdModel = np.zeros((1, 65), np.float64)
fgdModel = np.zeros((1, 65), np.float64)
rect = (10, 10, 375, 500)   # (x, y, w, h) region assumed to contain the foreground
cv2.grabCut(img, mask, rect, bgdModel, fgdModel, 5, cv2.GC_INIT_WITH_RECT)
# collapse mask labels: background (0) / probable background (2) -> 0, else 1
mask2 = np.where((mask==2) | (mask==0), 0, 1).astype('uint8')
img = img*mask2[:,:,np.newaxis]
# cv2.imwrite('newmask.png', img)
# cv2.imshow('img', img)
# newmask is the mask image I manually labelled
newmask = cv2.imread("newmask.png", 0)
img1 = cv2.imread('dep.jpg')
# whereever it is marked white (sure foreground), change mask=1
# whereever it is marked black (sure background), change mask=0
mask[newmask == 0] = 0
mask[newmask == 255] = 1
cv2.imshow("mask", mask)
# Second pass: refine using the hand-labelled mask instead of a rectangle.
mask, bgdModel, fgdModel = cv2.grabCut(img1, mask, None, bgdModel, fgdModel, 5, cv2.GC_INIT_WITH_MASK)
mask = np.where((mask==2)|(mask==0),0,1).astype("uint8")
img1 = img1*mask[:,:,np.newaxis]
# cv2.imshow('img1', img1)
cv2.waitKey()
cv2.destroyAllWindows()
# plt.imshow(img),plt.colorbar(),plt.show()
# plt.imshow(img),plt.colorbar(),plt.show()
# cv2.imshow("kết quả", img)
# cv2.waitKey()
# cv2.destroyAllWindows()
from signalr_aio import Connection
from base64 import b64decode
from zlib import decompress, MAX_WBITS
from requests import Session
import json
import asyncio
# Module-level connection state; populated in the __main__ block and
# reused by the async handlers below.
hub = None;
appHub = None;
connection = None;
def process_message(message):
    """Log a raw hub message and return None.

    Deflate/base64 decoding of the payload is currently disabled.
    """
    print(message)
# Create debug message handler.
async def on_debug(**msg):
    """Handle the reply to 'GetAPIToken': once the token arrives, build a
    second, token-authenticated connection to the data hub and request
    the instrument list.
    """
    # 'R' carries the invocation result; a bool R is just a completion ack.
    if 'R' in msg and type(msg['R']) is not bool:
        token = msg['R']
        print('Token is: ', token)
        print('-------------------------')
        # BUG FIX: the original assigned to a local `server_url` built from
        # itself (UnboundLocalError) and used JS-style "${token}", which
        # str.format leaves untouched; use the module-level URL plus a
        # proper {token} placeholder.
        hub_url = server_url + "?token={token}".format(token=token)
        conn = Connection(hub_url, session)
        appHub = conn.register_hub('omsclienthub')
        conn.received += on_recieved
        conn.error += on_error
        companies = appHub.server.invoke('GetInstrumentList')
        # drop the token connection before starting the data connection
        connection.close()
        conn.start()
async def on_recieved(**msg):
    """'received' callback for the data connection: bind the invocation
    result (the instrument list payload) locally."""
    instrument_payload = msg['R']
# Create error handler
async def on_error(msg):
    """Log any SignalR transport or hub error."""
    print(msg)
# Create hub message handler
async def on_message(msg):
    """Decode (currently: just log) the first element of a hub message,
    then print the decoder's return value."""
    print(process_message(msg[0]))
if __name__ == "__main__":
    # Bootstrap: open the token hub, request an API token, and let the
    # on_debug handler build the authenticated data connection.
    server_url = 'http://firouzex.exphoenixtrade.com/realtime';
    # Create a connection
    session = Session();
    connection = Connection(server_url, session)
    hub = connection.register_hub('omsclienttokenhub')
    appHub = connection.register_hub('omsclienthub');
    connection.received += on_debug
    connection.error += on_error
    # queue the token request before entering the receive loop
    hub.server.invoke('GetAPIToken', 'fTTTTTT', 'XXXXX');
    connection.start()
|
#!/usr/bin/env python
import urllib2, os, subprocess, shutil, time, re, sys
from sys import argv, exit
from distutils.version import LooseVersion
script, log_file = argv
class install(object):
def __init__(self):
self.app_name = "Silverlight"
self.latest_version = "5.1.20913.0"
self.url = "http://www.microsoft.com/getsilverlight/handlers/getsilverlight.ashx"
self.dmg_path = "/tmp/silverlight.dmg"
self.mnt_path = "/Volumes/Silverlight"
self.inst_base = "/Library/Internet Plug-Ins/Silverlight.plugin"
self.pkg = '/Volumes/Silverlight/Silverlight.pkg'
self.inst_inf = self.inst_base+"/Contents/Info.plist"
self.user_agent = [('User-agent', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_3) AppleWebKit/536.28.10 (KHTML, like Gecko) Version/6.0.3 Safari/536.28.10')]
def check_inst(self):
return os.path.isdir(self.inst_base)
def convert_plist(self, fmt, path):
cmd = ["plutil", "-convert", fmt, path]
subprocess.call(cmd)
def check_version(self):
self.convert_plist("xml1", self.inst_inf)
a = open(self.inst_inf, "r")
lines = a.readlines()
r = re.compile(r'CFBundleVersion')
for i in range(len(lines)):
if r.search(lines[i]):
c = lines[max(0, i+1)]
d = c.split('<string>')
e = ''.join(d[1])
f = e.split('</string>')
g = f[0]
self.convert_plist("binary1", self.inst_inf)
return LooseVersion(g) >= LooseVersion(self.latest_version), g
def download(self):
opener = urllib2.build_opener()
opener.addheaders = self.user_agent
f = opener.open(self.url)
local_file = open(self.dmg_path, "w") # open the file to write to
local_file.write(f.read()) # write the download to the local file
local_file.close() # close the file
a = os.path.isfile(self.dmg_path)
if a == True:
return "%s was downloaded.\n" % self.app_name
elif a == False:
print "Something went wrong and %s wasn't downloaded, exiting." % self.app_name
exit(1)
else:
print "[Error] at download function, exiting."
exit(1)
def mount(self):
cmd = ['/usr/bin/hdiutil', 'attach', '-nobrowse', self.dmg_path]
subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
return self.check()
def check(self):
a = False
b = 0
while (a == False) and (b < 60):
time.sleep(1)
a = os.path.isdir(self.mnt_path)
print "..."
b += 1
if a == False:
print "%s wasn't mounted." % self.app_name
exit(1)
elif a == True:
return "%s was mounted.\n" % self.app_name
else:
print "[Error] at mount/check functions, exiting."
exit(1)
def inst(self):
cmd = ['/usr/sbin/installer', '-pkg', self.pkg, '-target', '/']
subprocess.call(cmd)#, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
a = os.path.isdir(self.inst_base)
if a == True:
vers = self.check_version()
if vers[0] == True:
notice = "%(x)s version %(y)s was installed.\n" % {"x" : self.app_name, "y" : vers[1]}
elif vers[0] == False:
notice = "%s is installed but is still not up-to-date. current version: %s" % self.app_name, vers
else:
print "[Error] at install success version function/statement, exiting."
print a
print vers
exit(1)
return notice, vers
elif a == False:
return "%s wasn't installed, exiting." % self.app_name
else:
print "[Error] at install function, exiting."
exit(1)
def unmount(self):
cmd = ['/usr/bin/hdiutil', 'detach', '-force', self.mnt_path]
subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
a = os.path.isdir(self.mnt_path)
b = 0
while (a == True) and (b < 30):
time.sleep(1)
a = os.path.isdir(self.mnt_path)
print "..."
b += 1
if a == False:
return "Unmounted %s.\n" % self.app_name
elif a == True:
return "[Error] couldn't unmount %s, exiting." % self.app_name
else:
print "[Error] at unmount function, exiting."
exit(1)
def clean(self):
os.unlink(self.dmg_path)
aa = os.path.isfile(self.dmg_path)
bb = 0
while (aa == True) and (bb < 30):
time.sleep(1)
aa = os.path.isdir(self.mnt_path)
print "..."
bb += 1
if bb == False:
y = "%s dmg has been deleted.\n" % self.app_name
return y
elif bb == True:
return "[Error] couldn't delete the %s DMG, exiting." % self.app_name
else:
return "[Error] at clean function, exiting."
exit(1)
def verify(self):
verify_inst = os.path.isdir(self.inst_base)
verify_vers = self.check_version()
verify_unmount = os.path.isdir(self.mnt_path)
if verify_unmount == True:
verify_unmount = False
elif verify_unmount == False:
verify_unmount = True
else:
print "[Error] at verify_unmount, exiting."
exit(1)
# check that the dmg was deleted
verify_rmdmg = os.path.isfile(self.dmg_path)
if verify_rmdmg == True:
verify_rmdmg = False
elif verify_rmdmg == False:
verify_rmdmg = True
else:
print "[Error] at verify_rmdmg, exiting."
exit(1)
# report
verified = verify_inst, verify_vers, verify_unmount, verify_rmdmg
if False in verified:
a = "[Error] Something couldn't complete:\n"
b = "Install: %s\n" % verify_inst
c = "Updated: %(x)s %(y)s\n" % {"x":verify_vers[0], "y":verify_vers[1]}
d = "Unmounted: %s\n" % verify_unmount
e = "Removed DMG: %s\n" % verify_rmdmg
return a, b, c, d, e
elif False not in verified:
a = "[Success], %s has been installed.\n" % self.app_name
b = "Install: %s\n" % verify_inst
c = "Updated: %s\n" % verify_vers[0]
d = "Unmounted: %s\n" % verify_unmount
e = "Removed DMG: %s\n" % verify_rmdmg
return a, b, c, d, e
else:
print "[Error] at verification return, exiting."
exit(1)
def the_log(self, log_file, log_out):
for i in log_out:
print i
a = open(log_file, "a")
for i in log_out:
a.write(i)
a.close()
print "Local log file found at: %s" % log_file
# Entry point: install Silverlight when missing, update when outdated.
the_run = install()
var = the_run.check_inst()
if var == True:
    # check version
    ver, g = the_run.check_version()
    if ver == True:
        print "%s is already up-to-date, exiting." % the_run.app_name
        print g
    elif ver == False:
        print "%s needs to update, starting the updater." % the_run.app_name
        print g+" > "+the_run.latest_version
        print the_run.download()
        print the_run.mount()
        print the_run.inst()
        print the_run.unmount()
        print the_run.clean()
        log_out = the_run.verify()
        the_run.the_log(log_file, log_out)
elif var == False:
    print "%s is not installed, starting the installer." % the_run.app_name
    # need to get the latest version,
    # download the latest version,
    # mount and then check the info.plist,
    # maybe try to find a better way
    print the_run.download()
    print the_run.mount()
    print the_run.inst()
    print the_run.unmount()
    print the_run.clean()
    log_out = the_run.verify()
    the_run.the_log(log_file, log_out)
else:
    print "?"
# sudo python Desktop/inst_flash.py /Users/Shared/.labtech/logs/`date +%m-%d-%Y_%H%M%S`.txt |
print("Enter a number between 1 and 100")
# BUG FIX: the original used placeholder "??" operators (a syntax error)
# and compared the raw input string; convert to int and use </> tests.
x = int(input())
if x > 100:
    print("That number is too big!")
elif x < 1:
    print("That number is too small!")
else:
    print("{} is a good number.".format(x))
|
"""
T1: Implementati un sistem criptografic bazat pe functia XOR.
https://github.com/ucv-cs/Securitatea-sistemelor-informatice
"""
import sys
# setarea implicită de afișare
hex_output = True
def encrypt(text, key):
    """XOR-encrypt *text* with *key* (the key repeats cyclically).

    When the module-level `hex_output` flag is set, each output byte is
    rendered as two lowercase hex digits; otherwise raw characters are
    returned.

    @param text
    @param key
    @returns the encrypted text
    """
    xored = [ord(ch) ^ ord(key[idx % len(key)]) for idx, ch in enumerate(text)]
    if hex_output:
        return "".join(hex(value)[2:].zfill(2) for value in xored)
    return "".join(chr(value) for value in xored)
def decrypt(text, key):
    """Decrypt a text with a key, using the xor operation.

    In hex mode the input is first parsed as pairs of hex digits (one
    byte per original character) before the xor is undone; a trailing
    unpaired character is ignored, as before.

    @param text
    @param key
    @returns the decrypted text
    """
    result = ""
    if hex_output:
        # parse two hex digits per original character; int(s, 16)
        # replaces the original eval() call (same result, no arbitrary
        # code execution on attacker-controlled ciphertext)
        decoded = ""
        for i in range(1, len(text), 2):
            decoded += chr(int(text[i - 1] + text[i], 16))
        text = decoded
    for i in range(len(text)):
        result += chr(ord(text[i]) ^ ord(key[i % len(key)]))
    return result
def main():
    """Interactive driver: read plaintext and key, show the encrypted and
    round-tripped text, and repeat until the user declines."""
    # "-a" switches the output from hex (default) to raw ASCII
    # usage: py -i affine.py -a
    if "-a" in str(sys.argv):
        global hex_output
        hex_output = False
    separator = "\n----------------------"
    keep_going = True
    while keep_going:
        plain_text = input("[*] Scrie textul de criptat: ")
        # insist on a non-empty key
        key = ""
        while not key:
            key = input("[*] Scrie cheia: ")
        # output: ciphertext plus a decryption round-trip as a sanity check
        cipher_text = encrypt(plain_text, key)
        print(f"\n[*] Text criptat: {cipher_text}")
        print(f"[*] Text decriptat: {decrypt(cipher_text, key)}")
        keep_going = input(
            "\n[?] Continuăm? (d / n): ").lower() == "d"
        print(separator)
if __name__ == "__main__":
    main()  # run the interactive loop only when executed as a script
fruit = "banana"
# str.find returns the index of the first match, or -1 when absent.
for needle in ("na", "z"):
    pos = fruit.find(needle)
    print(pos)
|
#calculate primes to a given range
def isPrime(num):
    """Return True when num is prime.

    Fixes the original, which returned True for 0 and 1 (its trial
    range was empty), and only divides up to sqrt(num) instead of num.
    """
    if num < 2:
        return False
    for i in range(2, int(num ** 0.5) + 1):
        if num % i == 0:
            return False
    return True
def getPrimes(max_number):
    """Return all primes in [2, max_number)."""
    return [candidate for candidate in range(2, max_number) if isPrime(candidate)]
# Interactive driver: list every prime strictly below the given bound.
max_num_to_check = int(input("Search primes up to :"))
list_of_primes = getPrimes(max_num_to_check)
for prime in list_of_primes:
    print(prime)
|
#!/usr/bin/env python
import flickrquery
import argparse
import os.path, os
import subprocess, math
# CLI: one output directory plus one or more list files of image ids/urls.
parser = argparse.ArgumentParser()
parser.add_argument("output_dir", help="output directory where images will be stored")
parser.add_argument("input_lists", nargs='+', help="input files containing images to be downloaded")
args = parser.parse_args()
def check_image(filename):
    """Return True when `jhead` reports a positive Resolution for filename.

    Any failure (jhead missing, non-image file, nonzero exit status)
    counts as an invalid image.
    """
    try:
        jhead_output = subprocess.check_output(['jhead', filename])
    except Exception:
        # narrowed from a bare except: so SystemExit/KeyboardInterrupt
        # are no longer swallowed
        return False
    for line in jhead_output.splitlines():
        tokens = line.split()
        # expected form: "Resolution : W x H"
        if len(tokens) == 5 and tokens[0] == 'Resolution' and int(tokens[2]) > 0 and int(tokens[4]) > 0:
            return True
    return False
def download_image(urls, filename):
    """Try each URL in turn with wget; keep the file only when it is a
    valid image.  Returns True on success, False when every URL failed
    (any partial download is removed).
    """
    for url in urls:
        cmd = 'wget -t 3 -T 5 --quiet --max-redirect 0 %s -O %s' % (url, filename)
        res = os.system(cmd)
        if res == 0:
            if check_image(filename):
                return True
    # All tries failed, clean up.
    # BUG FIX: the original referenced the module-level `fname` here
    # instead of the `filename` parameter.
    if os.path.exists(filename):
        os.remove(filename)
    return False
# Merge all input lists, de-duplicating by image id (first column).
images = { }
dups = 0
for f in args.input_lists:
    lines = open(f, 'r').readlines()
    for l in lines:
        tokens = l.strip().split(' ')
        if tokens[0] in images:
            dups = dups + 1
        else:
            images[tokens[0]] = tokens[1:]
print 'Total images to download: %d, dups %d' % (len(images), dups)
if not os.path.exists(args.output_dir):
    os.mkdir(args.output_dir)
downloaded_images = 0
skipped_images = 0
i = 0
for im, urls in images.items():
    fname = os.path.join(args.output_dir, '%s.jpg' % im)
    # skip files already on disk from a previous run
    if os.path.exists(fname):
        skipped_images = skipped_images + 1
        continue
    # NOTE(review): only the 2nd and 3rd url columns are tried — confirm
    # the column layout of the input lists.
    if download_image([urls[1], urls[2]], fname):
        downloaded_images = downloaded_images + 1
    i = i + 1
    if i % 100 == 0: print 'Processed %d / %d images.' % (i, len(images))
print 'Downloaded %d images, skipped %d existing images.' % (downloaded_images, skipped_images)
|
from django.urls import path
from . import views
# Namespace for reverse() lookups, e.g. reverse('apiv1:...').
app_name = 'apiv1'
urlpatterns = [
    path('cats/', views.CatPhotoListRegisterView.as_view()),   # list / register cat photos
    path('cat/<uuid:pk>/', views.CatPhotoCRUDView.as_view()),  # retrieve/update/delete one photo
]
|
import os
import tempfile
class File:
    """A file wrapper supporting append-writing, concatenation with `+`,
    line iteration and str() -> path.
    """
    # 1. Initialize with the full path
    def __init__(self, path):
        self.path_to = path    # full file name
        self.current_line = 1  # 1-based pointer to the next line for iteration
        # Snapshot the current contents (used by __add__).
        # BUG FIX: the original opened with "w+", which truncated the
        # file on construction; "a+" still creates a missing file but
        # preserves existing data.
        with open(self.path_to, "a+") as f:
            f.seek(0)
            self.value = f.readlines()
    # 2. Method write
    def write(self, line):
        """Append `line` to the file (note: does not refresh self.value)."""
        with open(self.path_to, "a+") as fw:
            fw.write(line)
    # 3. Adding (__add__)
    def __add__(self, obj):
        """Write the construction-time contents of both files into a temp
        file 'third.txt' and return it wrapped in a new File."""
        third = os.path.join(tempfile.gettempdir(), 'third.txt')
        with open(third, "w+") as fd:
            fd.writelines(self.value)
            fd.writelines(obj.value)
        return File(third)
    # 4. Iteration (__iter__)
    def __iter__(self):
        return self
    def __next__(self):
        """Yield the next line, re-opening the file on each call (keeps
        the handle closed between steps at the cost of re-reading)."""
        with open(self.path_to) as fr:
            # skip the lines already consumed
            for _ in range(1, self.current_line):
                fr.readline()
            line = fr.readline()
            # EOF marker: nothing was read
            if not line:
                raise StopIteration
            self.current_line += 1
            return line
    # 5. print(obj)
    def __str__(self):
        return self.path_to
def _main():
    """Smoke test: create two files, append one line to each, combine them
    via File.__add__ and print the first file's path."""
    tmp_dir = tempfile.gettempdir()
    first = File(os.path.join(tmp_dir, 'first.txt'))
    first.write("Trying123...\n")
    second = File(os.path.join(tmp_dir, 'second.txt'))
    second.write("Trying213...\n")
    combined = first + second  # exercises __add__
    # for line in combined:
    #     print(line)
    print(first)


if __name__ == "__main__":
    _main()
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Jan 13 19:44:11 2020
@author: nick
this will email myself.
its simply a test
"""
import ezgmail
try:
    ezgmail.send("nickallee8529@gmail.com","this was sent from python code", "the body of this message is pretty amazing")
    print("email sent")
except Exception as exc:
    # narrowed from a bare except:, and surface the failure reason
    # instead of silently printing only "failed"
    print("failed")
    print(exc)
|
# -*- coding: utf-8 -*-
n = int(input())  # read an integer from stdin
# Next even number strictly after n: even inputs jump by 2, odd by 1.
print(n + 2 if n % 2 == 0 else n + 1)
|
#!/usr/bin/env python3
from lilaclib import *
build_prefix = 'extra-x86_64'
def pre_build():
    """Rewrite the official 'festival' PKGBUILD into festival-gcc5:
    rename the package, force the gcc-5 toolchain, and install the
    extra text2utt helper.  edit_file() yields lines; print() emits the
    replacement line(s)."""
    # obtain base PKGBUILD, e.g.
    download_official_pkgbuild('festival')
    for line in edit_file('PKGBUILD'):
        # edit PKGBUILD
        if 'pkgname=festival' in line:
            print('pkgname=festival-gcc5')
            print('_'+line)
        elif 'pkgdesc' in line:
            # line[:-1] drops the closing quote so the suffix can be appended
            print(line[:-1]+' (built with gcc5 and add text2utt)"')
        elif 'depends=' in line:
            print(line.replace(')',' gcc5)'))
            print('provides=(\'festival\')')
            print('conflicts=(\'festival\')')
        elif 'pkgname' in line:
            print(line.replace("pkgname","_pkgname"))
        elif 'sed -i "s#examples bin doc#examples#" festival/Makefile' in line:
            print(line)
            print('    sed -e "s/CC=gcc/CC=gcc-5/" -e "s/CXX=gcc/CXX=gcc-5/" -i speech_tools/config/compilers/gcc_defaults.mak')
        elif 'text2wave' in line:
            print('    install -m755 examples/text2utt "$pkgdir"/usr/bin')
            print(line)
        else:
            print(line)
def post_build():
    # do something after the package has successfully been built:
    # stage the edited PKGBUILD and patches, then commit.
    git_add_files('PKGBUILD')
    git_add_files('*.patch')
    git_commit()
# do some cleanup here after building the package, regardless of result
# def post_build_always(success):
#     pass
if __name__ == '__main__':
    single_main()
|
import json
from http.server import BaseHTTPRequestHandler, HTTPServer
import socket
import time
# sys.path.append(os.path.realpath(__file__))
import jedi
class http_completion(BaseHTTPRequestHandler):
    """
    Completion handler which returns the completions for a given source,
    line and cursor positon.
    """
    def _set_headers(self):
        """set the standard headers for a JSON response"""
        self.send_response(200)
        self.send_header("Content-type", "application/json")
        self.end_headers()
    def do_POST(self):
        """
        Payload to receive:
            source: whole source to parse
            line / column : current line and column
        Returns:
            array with dictionaries in it (name, description, docstring)
        """
        self._set_headers()
        # Content-Length tells us how much of the body to read
        content_length = self.headers.get('Content-Length')
        length = int(content_length)
        read = self.rfile.read(length).decode('utf-8')
        read = json.loads(read)
        # dispatch on the request type: goto-definition vs completion
        if read["type"] == "goto":
            payload = goto_def(read["source"], read["line"], read["column"], read["path"])
            payload = json.dumps(payload)
        else:
            payload = completions(read["source"], read["line"], read["column"], read["path"])
            payload = json.dumps(payload)
        self.wfile.write(bytes(payload,'utf-8'))
        return
def run_server():
    """run the httpd (blocks forever; Ctrl-C / KeyboardInterrupt stops it)"""
    address = ('127.0.0.1', 7777)
    while True:
        try:
            print("Starting httpd")
            httpd = HTTPServer(address, http_completion)
            httpd.serve_forever()
        except (socket.error, KeyboardInterrupt) as exc:
            if exc.__class__ == KeyboardInterrupt:
                break
            # If we cannot bind to the port sleep wait 1 second and retry.
            # This can happen when reloading Atom e.x.
            time.sleep(1)
def completions(source, line, column, path):
    """Return jedi completions at (line, column) of `source`.

    The editor sends 0-based lines while jedi expects 1-based, hence the
    +1.  Returns a list of {name, description, type} dicts, or [] when
    jedi fails on the (possibly malformed, mid-edit) source.
    """
    script = jedi.api.Script(
        source=source,
        line=line + 1,
        column=column,
        path=path,
    )
    try:
        found = script.completions()
    except Exception:
        # narrowed from a bare except:; jedi can raise many things on
        # half-typed code, and "no completions" is the right fallback
        return []
    return [
        {"name": c.name, "description": c.description, "type": c.type}
        for c in found
    ]
def goto_def(source, line, column, path):
    """Resolve the definition under the cursor with jedi.

    Returns a list holding a single definition dict (the original broke
    out of its loop after the first entry anyway), or [] on jedi errors
    and when nothing resolves — the original fell through and returned
    None in that last case, which json-serializes inconsistently.
    """
    try:
        script = jedi.api.Script(source, line, column, path)
        defs = script.goto_definitions()
    except Exception:  # narrowed from a bare except:
        return []
    if not defs:
        return []
    # Reuse the already-computed result instead of re-running
    # goto_definitions() twice more as the original did.
    first = defs[0]
    return [{
        "type": first.type,
        "description": first.description,
        "module_path": first.module_path,
        "line": first.line,
        "column": first.column,
        "is_built_in": first.in_builtin_module(),
        "module_name": first.module_name,
    }]
if __name__ == "__main__":
    run_server()  # start the completion HTTP server (blocks)
|
class Main:
t=int(input())
while(t>0):
t-=1
s=raw_input().split(" ")
a=int(s[0])%10
b=int(s[1])
d=1
if(a==1 or b==0):
d=0
print 1
elif(a==2):
l=[2,4,6,8]
y=b%4-1
elif(a==3):
l=[3,9,7,1]
y=b%4-1
elif(a==4):
l=[4,6]
y=b%2-1
elif(a==5 or a==6 or a==0):
d=0
print a
elif(a==7):
l=[7,9,3,1]
y=b%4-1
elif(a==8):
l=[8,4,2,6]
y=b%4-1
elif(a==9):
l=[9,1]
y=b%2-1
if(d==1):
print l[y]
|
from flask import Blueprint, request, jsonify, Response
from ..controller import Pekerjaan
from flask_cors import cross_origin
import json
pekerjaan_routes = Blueprint('Pekerjaan', __name__)
@pekerjaan_routes.route("/all", methods=['GET'])
@cross_origin()
def get_all():
    """Return every pekerjaan (job) record as JSON."""
    pekerjaan = Pekerjaan.get_all()
    return jsonify(pekerjaan)
@pekerjaan_routes.route("/id/<id>", methods=['GET'])
@cross_origin()
def get_by_id(id:int):
    """Return the pekerjaan with the given id as JSON.

    Uses the path parameter Flask already passes in instead of
    re-reading request.view_args (same value, less indirection).
    NOTE: <id> is an untyped converter, so `id` arrives as a str.
    """
    pekerjaan = Pekerjaan.get_by_id(id)
    return jsonify(pekerjaan)
@pekerjaan_routes.route("/nama/<nama>", methods=['GET'])
@cross_origin()
def get_by_nama(nama:str):
    """Return the pekerjaan records matching `nama` (name) as JSON.

    Uses the path parameter directly instead of request.view_args,
    matching the style of the other routes in this blueprint.
    """
    pekerjaan = Pekerjaan.get_by_nama(nama)
    return jsonify(pekerjaan)
@pekerjaan_routes.route("/add", methods=['POST'])
@cross_origin()
def add():
    """Create a pekerjaan from the 'nama' field of the JSON body."""
    nama = request.json.get('nama')
    pekerjaan = Pekerjaan.add(nama)
    return jsonify(pekerjaan)
@pekerjaan_routes.route("/delete", methods=['POST'])
@cross_origin()
def delete_by_id():#delete the record matching the required ID
    """Delete a pekerjaan; the target id comes from the JSON body."""
    id = request.json.get('id') # read the target id from the request JSON
    pekerjaan = Pekerjaan.delete_by_id(id)
    return jsonify(pekerjaan)
@pekerjaan_routes.route("/update", methods=['PUT'])
@cross_origin()
def update_by_id():#update the record found via the required ID
    """Rename a pekerjaan; id and the new nama come from the JSON body."""
    id = request.json.get('id') # target record's id from the JSON body
    nama = request.json.get('nama') # replacement name from the JSON body
    pekerjaan = Pekerjaan.update_by_id(id,nama)
    return jsonify(pekerjaan)
|
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APITestCase
from django.contrib.auth.models import User, Group
class UserTests(APITestCase):
    """API tests for the /api/users/ list endpoint."""
    def test_user_list(self):
        # must be rejected without validation
        response = self.client.get('/api/users/', {}, format='json')
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
        # must be success
        # NOTE(review): objects.create stores the raw password unhashed;
        # harmless here because we authenticate via force_authenticate.
        user = User.objects.create(username='user', email='user@example.com', password='user123', is_staff=True)
        self.client.force_authenticate(user=user)
        response = self.client.get('/api/users/', {}, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        # paginated payload must contain exactly the one user created above
        self.assertEqual(response.data['count'], 1)
        actual = response.data['results'][0]
        self.assertEqual(actual['username'], user.username)
        self.assertEqual(actual['email'], user.email)
|
__author__ = 'alex-gugz'
import socket
import sys
import Common
def create_soc():
    """Create and return a TCP (IPv4, stream) socket."""
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    print('socket created')
    return sock
def bind_soc(s):
    """Bind the socket to Common.host/Common.port and listen for one
    incoming connection; close and exit on failure."""
    try:
        s.bind((Common.host, Common.port))
        print('binding socket to port ' + str(Common.port))
        # Start listening to the port
        s.listen(1)
        print('socket listening for 1 incoming connection')
    except socket.error as msg:
        # BUG FIX: socket.error is OSError in Python 3 and is not
        # subscriptable — str(msg[1]) raised TypeError inside the handler.
        print("bind failed. Error: " + str(msg))
        s.close()
        sys.exit()
def accept_soc(s):
    """Accept up to three connections, each with a 10s timeout, printing
    the peer details; timeouts are reported and the loop continues."""
    # Talk with client
    for i in range(3):
        try:
            # loop count
            print('\n')
            # check timeout statement
            print('socket default timeout: "' + str(s.gettimeout()) + '"')
            s.settimeout(10)
            print('set this socket timeout')
            print('new socket timeout: "' + str(s.gettimeout()) + '"')
            # accept connection
            conn, addr = s.accept()
            print("connected with '" + str(addr[0]) + "':" + str(addr[1]))
            print('conn = ' + str(conn) + '\n\n' + 'addr = ' + str(addr))
        except socket.timeout:
            print('Server connection timed out.')
    # Close the open socket
    s.close()
    print('\n>>>DEAD')
# Script entry: 20s default timeout for new sockets, then serve.
socket.setdefaulttimeout(20)
s = create_soc()
bind_soc(s)
accept_soc(s) |
try:
    import RPi.GPIO as GPIO
except RuntimeError:
    print("Error importing RPi.GPIO! Make sure module is installed and try running as root.")
import time
import pygame
# use GPIO header pin numbers
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BOARD)
# set up pins as outputs
# step / direction / enable pins for each stepper driver (BOARD numbering)
m1step = 8
m1dir = 10
m1en = 12
m2step = 16
m2dir = 18
m2en = 22
outputPins = [m1step, m1dir, m1en, m2step, m2dir, m2en]
GPIO.setup(outputPins, GPIO.OUT)
GPIO.output(outputPins, 0)
# set up arrays for motor information
# To add additional (or remove) motors, update motor count,
# add a new motor definition, and add an element to each list
motorCount = 2
motor1 = 0
motor2 = 1
#motor3 = 2
# format for pins list:
# [[Motor 1 Step, Motor 1 Direction, Motor 1 Enable], [Motor 2 Step, Motor 2 Direction, Motor 2 Enable], ... ]
motorPins = [[m1step, m1dir, m1en], [m2step, m2dir, m2en]]
# add or remove an element from each of these for additional motors
stepTime = [0, 0]   # seconds between steps per motor (0 == stopped)
oldTime = [0, 0]    # timestamp of each motor's last step
direction = [0, 0]  # per-motor direction flag: 0 = reverse, 1 = forward
position = [0, 0]   # net step count per motor
# initialize pygame and joystick
pygame.init()
pygame.joystick.init()
joystick = pygame.joystick.Joystick(0)
joystick.init()
# function for updating speed array
def updateSpeedVals(motorNum, speed):
    """Translate a joystick speed in [-1, 1] into a direction flag and a
    per-step period (seconds) stored in the module-level arrays."""
    maxStepRate = 1000  # steps per second at full deflection
    # negative speed -> reverse (0); zero or positive -> forward (1)
    direction[motorNum] = 0 if speed < 0 else 1
    # map the speed magnitude onto the step rate
    stepRate = remap(abs(speed), 0, 1, 0, maxStepRate)
    # period between steps; 0 encodes "stopped"
    stepTime[motorNum] = 0 if stepRate == 0 else float(1 / stepRate)
# end updateSpeed
# function for updating only the speed of the motor
def updateMotorSpeed():
    """Service every motor once: raise the step pin when a full period
    stepTime[i] has elapsed (also tracking position), and lower it at
    the half-period.  Must be called in a tight loop to step smoothly."""
    # get current time
    newTime = time.time()
    for i in range(motorCount):
        # set direction pin
        GPIO.output(motorPins[i][1], direction[i])
        # check time
        if (newTime - oldTime[i] > stepTime[i]):
            oldTime[i] = newTime
            # update position array (only while actually stepping)
            if ((direction[i] == 1) and (stepTime[i] != 0)):
                position[i] += 1
            elif ((direction[i] == 0) and (stepTime[i] != 0)):
                position[i] -= 1
            GPIO.output(motorPins[i][0], 1)
        elif (newTime - oldTime[i] > (stepTime[i] / 2)):
            GPIO.output(motorPins[i][0], 0)
# end updateMotorSpeed
# function to set absolute angle (assuming no steps are lost)
def setAngle(motorNum, speed, angle):
    """Step toward `angle` (degrees; 1.8 deg per step).  Returns 1 when
    the target position is reached, 0 while still moving — call
    repeatedly until it reports 1."""
    updateSpeedVals(motorNum, speed)
    newPosition = int(angle / 1.8)
    if (position[motorNum] != newPosition):
        updateMotorSpeed()
        return 0
    return 1
# end setAngle
# function to set motor speed
def setSpeed(motorNum, speed):
    """Set the motor's speed (-1..1) and service the step pins once."""
    updateSpeedVals(motorNum, speed)
    updateMotorSpeed()
# end setSpeed
# function to disable motor control
def disableMotors():
    """Drive every enable pin high, disabling the stepper drivers."""
    for i in range(motorCount):
        GPIO.output(motorPins[i][2], 1)
# end disableMotors
# function to enable motor control
def enableMotors():
    """Drive every enable pin low, enabling the stepper drivers."""
    for i in range(motorCount):
        GPIO.output(motorPins[i][2], 0)
# end enableMotors
# simple function to re-map a range of values
def remap(value, fromLow, fromHigh, toLow, toHigh):
    """Linearly map `value` from [fromLow, fromHigh] onto [toLow, toHigh]."""
    # normalize into 0-1 within the source range, then scale to the target
    normalized = float(value - fromLow) / float(fromHigh - fromLow)
    return toLow + normalized * (toHigh - toLow)
# end remap()
loopRunning = 0
disabled = 0
# Teleop loop: drive both motors from the joystick until button 9 is
# pressed, then fall through to GPIO cleanup.
while not loopRunning:
    # do this thing for some reason
    axis0 = joystick.get_axis(0)
    axis1 = -1 * joystick.get_axis(1)  # invert so pushing forward is positive
    loopRunning = joystick.get_button(9)
    for event in pygame.event.get():
        if event.type == pygame.JOYBUTTONDOWN:
            if joystick.get_button(0):
                # button 0 toggles the motor-driver enable lines
                disabled = not disabled
                if disabled:
                    disableMotors()
                    print("Motors disabled!")
                else:
                    enableMotors()
                    print("Motors enabled!")
    # scale the +-1.0 joystick range down to +-0.75 of full speed
    m1Speed = remap(axis0, -1.0, 1.0, -0.75, 0.75)
    m2Speed = remap(axis1, -1.0, 1.0, -0.75, 0.75)
    setSpeed(motor1, m1Speed)
    setSpeed(motor2, m2Speed)
# clean up gpio pins
GPIO.cleanup()
|
# Copyright (c) 2013, Indictrans and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
def execute(filters=None):
    """Report entry point: return (columns, data) for the given filters."""
    return get_colums(), get_data(filters)
def get_data(filters):
    """Fetch checklist rows (name, start, end, count) ordered by start
    date, most recent first; returns [] when no filters are supplied.

    NOTE(security): get_conditions interpolates raw filter values into
    the SQL string — see the note on that function."""
    if filters:
        result = frappe.db.sql("""select checklist_name,expected_start_date,end_date,count from `tabChecklist Requisition`
		{0} order by expected_start_date desc""".format(get_conditions(filters)),as_list=1)
        return result
    else:
        result = []
        return result
def get_conditions(filters):
    """Build the WHERE clause for get_data from the report filters.

    NOTE(security): values are format()-ed directly into SQL; this is
    only acceptable because they come from the report's own filter
    controls — prefer frappe.db.sql parameter binding."""
    cond = ''
    if filters.get('checklist') and filters.get("checklist_status"):
        cond = "where name = '{0}' and checklist_status = '{1}'".format(filters.get('checklist'),filters.get('checklist_status'))
    elif filters.get('checklist'):
        cond = "where name ='{0}'".format(filters.get("checklist"))
    elif filters.get("checklist_status"):
        cond = "where checklist_status ='{0}'".format(filters.get("checklist_status"))
    return cond
def get_colums():
    """Report column definitions as "Label:Type:Width" strings."""
    return [
        _("Checklist") + ":Data:250",
        _("Start Date") + ":Date:250",
        _("End Date") + ":Date:250",
        _("Actual Time(In Days)") + ":Int:150",
    ]
|
from django.shortcuts import render
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from .apps import WebappConfig
from .forms import NameForm
def index(request):
    """Review form view: on POST, classify the submitted review text and
    predict a 1-10 rating with the models pre-loaded in WebappConfig;
    on GET, render the empty form."""
    if request.method == "POST":
        review = request.POST.get("your_review")
        # vectorize with the fitted vectorizer loaded at app startup
        vec = WebappConfig.vectorizer.transform([review])
        film_class = WebappConfig.logreg.predict(vec)
        rating = float(WebappConfig.linreg.predict(vec))
        # clamp/round the regression output onto the 1-10 scale
        if rating > 8:
            rating = 10
        elif rating < 1:
            rating = 1
        elif rating < 4.5:
            rating = round(rating)
        else:
            # NOTE(review): mid-to-high predictions get a +2 bump before
            # rounding — presumably model calibration; confirm intent.
            rating = round(rating+2)
        if film_class == 1:
            film_type = 'positive'
        else:
            film_type = 'negative'
        return HttpResponse("<h2>Type of review: {0}. Rating: {1}</h2>".format(film_type, rating))
    else:
        userform = NameForm()
        return render(request, "index.html", {"form": userform})
|
# Generated by Django 3.0.6 on 2020-05-10 23:13
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated schema migration: rename Beer.style -> Beer.styles."""
    dependencies = [
        ('beers', '0004_auto_20200510_1604'),
    ]
    operations = [
        migrations.RenameField(
            model_name='beer',
            old_name='style',
            new_name='styles',
        ),
    ]
|
import numpy as np
import scipy.spatial.distance as sd
from scipy.stats.mstats import gmean
from ._object_properties import _get_objects_property
def scai(
    object_labels,
    min_area=0,
    periodic_domain=False,
    return_nn_dist=False,
    reference_lengthscale=1000,
    dx=1,
):
    """
    compute the Simple Convective Aggregation Index (SCAI)
    (Tobin et al 2012, https://doi.org/10.1175/JCLI-D-11-00258.1)
    from a (cloud) mask, assuming distances are in pixels
    NB: SCAI isn't resolution independent. Instead, from the same domain
    sampled at two different resolutions the value of SCAI will increase with
    dx^2. To correct for you should scale the SCAI value by 1/dx^2. See
    detailed discussion in
    https://github.com/cloudsci/cloudmetrics/pull/42#issuecomment-1021156313
    Parameters
    ----------
    object_labels : numpy array of shape (npx,npx) - npx is number of pixels
        2D Field with numbered object labels.
    min_area : int
        Minimum area (in pixels) an object must have. Default is 0.
    periodic_domain : bool (optional)
        Flag for whether to copute the measure with periodic boundary conditions.
        Default is False
    return_nn_dist : bool
        Flag for whether to return the (geometric) mean nearest neighbour distance
        between object. Default is False.
    reference_lengthscale : float (optional)
        reference scale (in pixels) to divide the returned scai through. For
        similar interpretations as Tobin et al. (2012), set to the domain's
        characteristic length. Default is 1000
    dx : float (optional)
        Pixel size, for computing physical D0. Default is 1.
    Returns
    -------
    D0 : float
        Mean geometric nearest neighbour distance between objects. Only returned
        if return_nn_dist is True.
    scai : float
        Simple Convective Aggregation Index.
    """
    area = _get_objects_property(object_labels=object_labels, property_name="area")
    centroids = _get_objects_property(
        object_labels=object_labels, property_name="centroid"
    )
    # NOTE(review): the SCAI formula below uses num_objects (all labelled
    # objects), while D0 is computed over the min_area-filtered subset —
    # confirm this mixture is intended.
    num_objects = len(area)
    idx_large_objects = area > min_area
    if np.count_nonzero(idx_large_objects) == 0:
        D0 = scai = np.nan
    else:
        area = area[idx_large_objects] * dx**2
        pos = centroids[idx_large_objects, :] * dx
        nCl = len(area)
        if periodic_domain:
            # accumulate squared per-dimension distances so the final
            # array matches sd.pdist's condensed layout
            dist_sq = np.zeros(nCl * (nCl - 1) // 2)  # to match the result of pdist
            for d in range(object_labels.ndim):
                # NOTE(review): `box` is set to *half* the domain length,
                # yet components larger than box * 0.5 are wrapped by box
                # — verify the intended minimum-image wrap length.
                box = object_labels.shape[d] * dx // 2
                pos_1d = pos[:, d][:, np.newaxis]
                dist_1d = sd.pdist(pos_1d)
                dist_1d[dist_1d > box * 0.5] -= box
                dist_sq += dist_1d**2
            dist = np.sqrt(dist_sq)
        else:
            dist = sd.pdist(pos)
        # NOTE(review): gmean is taken over *all* pairwise distances, not
        # strictly nearest-neighbour distances as the docstring suggests.
        D0 = gmean(dist)
        Nmax = object_labels.shape[0] * object_labels.shape[1] / 2
        scai = num_objects / Nmax * D0 / reference_lengthscale * 1000
        # Force SCAI to zero if there is only 1 region (completely aggregated)
        # This is not strictly consistent with the metric (as D0 is
        # objectively undefined), but is consistent with its spirit
        if pos.shape[0] == 1:
            scai = 0
    if return_nn_dist:
        return scai, D0
    return scai
|
from django.core.urlresolvers import reverse, NoReverseMatch
from django import template
register = template.Library()
@register.simple_tag()
def edit_link(obj):
    """ Return link to admin site for given object """
    try:
        # BUG FIX: admin URL names are "admin:<app_label>_<model_name>_change";
        # the original used obj._meta.db_table, which only matches when the
        # table name happens to equal "<app>_<model>" (breaks for models
        # with a custom db_table).
        return reverse(
            "admin:%s_%s_change" % (obj._meta.app_label, obj._meta.model_name),
            args=[obj.pk],
        )
    except (AttributeError, NoReverseMatch):
        return ''
|
def collatz(limit=1000000):
    """Return the starting number below `limit` with the longest Collatz chain.

    The original block was non-Python pseudo-code (C-style `++`, `!flag`,
    `def collatz:` without parentheses, a dangling `else`) and never
    implemented the 3n+1 branch; this is a working, memoized
    implementation of the apparent intent (longest hailstone sequence
    below one million, cf. the classic Project Euler problem 14), with
    the bound generalized into a parameter.
    """
    steps = {1: 0}  # memo: start value -> chain length down to 1
    best_start = 1
    best_len = 0
    for i in range(2, limit):
        # walk forward until we reach a value whose length is known
        path = []
        x = i
        while x not in steps:
            path.append(x)
            x = x // 2 if x % 2 == 0 else 3 * x + 1
        # unwind, filling in the memo for every value on the path
        n = steps[x]
        for v in reversed(path):
            n += 1
            steps[v] = n
        if steps[i] > best_len:
            best_start, best_len = i, steps[i]
    return best_start
|
from tkinter import *
from tkinter.font import Font
from tkinter.filedialog import askopenfile
from tkinter import filedialog as fd
import os
import webbrowser
from tkinter import scrolledtext
##########################__View__###############################
def view_tree():
    """Open the generated tree file in gedit (blocks until the editor exits).

    NOTE(review): requires gedit on PATH; the os.system exit code is ignored.
    """
    os.system("gedit temp/temp_tree.txt")
##########################__Browse/Upload__######################
def browse_file():
    """Let the user pick a file, show its path in the GUI, and copy its
    contents to temp/temp_data.txt for the later PhyloBit run."""
    file_n.delete(1.0,END)
    b_file = fd.askopenfilename()
    file_n.insert(INSERT, b_file)
    file_n.pack(side = TOP)
    file_path.set(b_file)
    # Context managers close both handles even on error; the original
    # leaked the read handle and the write handle.
    with open(file_path.get()) as file_open:
        temp_data = file_open.read()
    with open("temp/temp_data.txt", "w+") as file_write:
        file_write.write(temp_data)
############################__Submit__############################
def submit_file():
    """Run PhyloBit.py on the previously uploaded data and report progress
    in the scrolled status box."""
    status_message.set("\nRunning Phylobit...\n")
    status.insert(INSERT, status_message.get())
    status.pack(side = TOP)
    # Blocks the GUI event loop until tree construction finishes.
    os.system("python PhyloBit.py temp/temp_data.txt")
    status_message.set("\nTree construction complete...\n")
    status.insert(END, status_message.get())
    status.pack(side = TOP)
##########################__PhyloBit__############################
def PhyloBit():
    """Open the "PhyloBit" description window.

    Uses Toplevel attached to the running root instead of a second Tk()
    instance (a second root creates an independent interpreter and can
    break images/StringVars), and drops the nested mainloop() call.
    """
    pb_win = Toplevel()
    pb_win.geometry("400x400")
    pb_win.maxsize(400,300)
    pb_win.title("PhyloBit")
    Label(pb_win, text = "PhyloBit", font = "Arial 20 bold", bg = "spring green",fg = "black").pack(padx = 20, pady = 20)
    Label(pb_win,
    text =
    """
	PhyloBit is a Phylogenetic tree making
	tool based on Distance-matrix method
	and use GUI(Graphical User Interface)
	as a interface between the user and
	the program.
	It is developed by Rajan (M.Sc. Student)
	of University of Delhi, Delhi, India.
	It has been developed as an independent
	project just to learn about phylogenetic
	tree.
	""", font ="Arial 11 bold", bg = "white").pack(padx = 10)
###########################__About__#############################
def about():
    """Open the small "About" version window.

    Toplevel instead of a second Tk() root, and no nested mainloop()
    (the root's mainloop already services this window).
    """
    about_win = Toplevel()
    about_win.geometry("300x200")
    about_win.maxsize(300,200)
    about_win.title("About")
    Label(about_win,
    text =
    """
	PhyloBit_GUI 1.1
	""", font ="Arial 14 bold", bg = "white").pack(pady = 30,padx = 10)
############################__Help__#############################
def help():
    """Open the project's GitHub page in the default browser.

    NOTE(review): shadows the builtin help() at module level; the name is
    kept because the Help-menu wiring below references it.
    """
    webbrowser.open("https://github.com/rajanbit")
###########################__Support__###########################
def support():
    """Open the window listing supported platforms/dependencies.

    Toplevel instead of a second Tk() root, and no nested mainloop().
    """
    sup_win = Toplevel()
    sup_win.geometry("300x200")
    sup_win.maxsize(300,200)
    sup_win.title("Support")
    Label(sup_win,
    text =
    """
	Linux OS
	Python 3.7 or above
	NumPy [Matrix handling]
	Tkinter [G.U.I]
	""", font ="Arial 14 bold", bg = "white").pack(pady = 30,padx = 10)
#################################################################
# Configuration -- fixed-size root window with a menu bar.
gui = Tk()
gui.geometry("500x650")
gui.maxsize(500,650)
gui.title("PhyloBit")
menu = Menu(gui)
gui.config(menu=menu)
###############################################################
# Variables shared with the callback functions above.
status_message = StringVar(gui,"")
status_message.set(" ")
file_path = StringVar(gui,"")
file_path.set("No file selected...")
##################################################################
# FRAME_1
frame1 = Frame(gui, bg = "ghost white", height = 640, width = 490)
frame1.pack(side = TOP, pady = 20)
# File Menu
f_menu = Menu(menu, tearoff = 0)
f_menu.add_command(label = "Upload", command = lambda:browse_file())
f_menu.add_separator()
f_menu.add_command(label = "Exit", command = gui.quit)
menu.add_cascade(label = "File", menu = f_menu)
# Tools Menu
t_menu = Menu(menu, tearoff = 0)
t_menu.add_command(label = "PhyloBit", command =lambda:PhyloBit())
menu.add_cascade(label = "Tools", menu = t_menu)
# Help Menu
h_menu = Menu(menu, tearoff = 0)
h_menu.add_command(label = "Help", command = lambda:help())
h_menu.add_command(label = "Support", command = lambda:support())
menu.add_cascade(label = "Help", menu = h_menu)
# About Menu
a_menu = Menu(menu, tearoff = 0)
a_menu.add_command(label = "About", command = lambda:about())
menu.add_cascade(label = "About", menu = a_menu)
# Header
# NOTE(review): assumes the script is launched from a ".../lib" directory so
# the sibling ".../images" directory resolves -- confirm the deployed layout.
rt_init = os.getcwd()
rt_images = rt_init.replace("lib", "images")
name = PhotoImage(file = rt_images+"/n.png")
logo = PhotoImage(file= rt_images+"/s.png")
Label(frame1,
image = name).pack(pady = 10)
Label(frame1,
image = logo).pack()
Label(frame1,
text="""A tool for constructing Phylogenetic Tree
based on Distance-matrix method.""",
fg = "black",
bg = "ghost white",
font = "Arial 12 bold").pack(padx = 20,pady = 10)
# FRAME_2
frame2 = Frame(frame1, bg = "pale green", height = 300, width = 420)
frame2.pack(side = TOP, pady = 15)
# Upload Button
# NOTE(review): browse/submit/view are always None because .pack() returns
# None; the assignments are kept only as labels for the widget lines.
browse = Button(frame2, text = "Browse", font = "Arial 15 bold", command = lambda:browse_file()).pack(pady = 10, padx = 160)
file_n = Text(frame2, height = 2, width = 55,bg = "white")
file_n.insert(INSERT, file_path.get())
file_n.pack(side = TOP)
# Submit Button
submit = Button(frame2, text = "Submit", font = "Arial 15 bold", command = lambda:submit_file()).pack(pady = 10)
status = scrolledtext.ScrolledText(frame2,height = 8, width = 55,bg = "white")
status.insert(INSERT, status_message.get())
status.pack(side = TOP)
# View Button
view = Button(frame2, text = " View ", font = "Arial 15 bold", command = lambda:view_tree()).pack(pady = 10)
mainloop()
|
import dash_bootstrap_components as dbc
# Shared item list: every menu below reuses the same three entries.
items = [
    dbc.DropdownMenuItem("First"),
    dbc.DropdownMenuItem(divider=True),
    dbc.DropdownMenuItem("Second"),
]

# One (label, direction) pair per demo menu.
_MENUS = [
    ("Dropdown (default)", "down"),
    ("Dropstart", "start"),
    ("Dropend", "end"),
    ("Dropup", "up"),
]

# A row of auto-width columns, one dropdown per direction.
dropdown = dbc.Row(
    [
        dbc.Col(
            dbc.DropdownMenu(label=label, children=items, direction=direction),
            width="auto",
        )
        for label, direction in _MENUS
    ],
    justify="between",
)
|
from _typeshed import Incomplete
from collections.abc import Generator
# Typing stubs for networkx's weighted shortest-path algorithms (the Dijkstra
# and Bellman-Ford families plus Goldberg-Radzik and Johnson). Bodies are
# intentionally elided (stub file); `Incomplete` marks parameters whose
# precise type has not been annotated yet.
def dijkstra_path(G, source, target, weight: str = "weight"): ...
def dijkstra_path_length(G, source, target, weight: str = "weight"): ...
def single_source_dijkstra_path(
    G, source, cutoff: Incomplete | None = None, weight: str = "weight"
): ...
def single_source_dijkstra_path_length(
    G, source, cutoff: Incomplete | None = None, weight: str = "weight"
): ...
def single_source_dijkstra(
    G,
    source,
    target: Incomplete | None = None,
    cutoff: Incomplete | None = None,
    weight: str = "weight",
): ...
def multi_source_dijkstra_path(
    G, sources, cutoff: Incomplete | None = None, weight: str = "weight"
): ...
def multi_source_dijkstra_path_length(
    G, sources, cutoff: Incomplete | None = None, weight: str = "weight"
): ...
def multi_source_dijkstra(
    G,
    sources,
    target: Incomplete | None = None,
    cutoff: Incomplete | None = None,
    weight: str = "weight",
): ...
def dijkstra_predecessor_and_distance(
    G, source, cutoff: Incomplete | None = None, weight: str = "weight"
): ...
def all_pairs_dijkstra(
    G, cutoff: Incomplete | None = None, weight: str = "weight"
) -> Generator[Incomplete, None, None]: ...
def all_pairs_dijkstra_path_length(
    G, cutoff: Incomplete | None = None, weight: str = "weight"
) -> Generator[Incomplete, None, None]: ...
def all_pairs_dijkstra_path(
    G, cutoff: Incomplete | None = None, weight: str = "weight"
) -> Generator[Incomplete, None, None]: ...
def bellman_ford_predecessor_and_distance(
    G,
    source,
    target: Incomplete | None = None,
    weight: str = "weight",
    heuristic: bool = False,
): ...
def bellman_ford_path(G, source, target, weight: str = "weight"): ...
def bellman_ford_path_length(G, source, target, weight: str = "weight"): ...
def single_source_bellman_ford_path(G, source, weight: str = "weight"): ...
def single_source_bellman_ford_path_length(G, source, weight: str = "weight"): ...
def single_source_bellman_ford(
    G, source, target: Incomplete | None = None, weight: str = "weight"
): ...
def all_pairs_bellman_ford_path_length(
    G, weight: str = "weight"
) -> Generator[Incomplete, None, None]: ...
def all_pairs_bellman_ford_path(
    G, weight: str = "weight"
) -> Generator[Incomplete, None, None]: ...
def goldberg_radzik(G, source, weight: str = "weight"): ...
def negative_edge_cycle(G, weight: str = "weight", heuristic: bool = True): ...
def find_negative_cycle(G, source, weight: str = "weight"): ...
def bidirectional_dijkstra(G, source, target, weight: str = "weight"): ...
def johnson(G, weight: str = "weight"): ...
|
#Author: James Nicholson
#Date: 6/10/2018
#Write a program that creates a list of 5 to 15 numbers from 1 to 75.
#Write one line of Python that takes this list a and makes a new list that has only the even elements of this list in it.
import random

# Build a list of 5-15 random numbers in 1..75 in one comprehension; the
# RNG call sequence (length first, then each element) matches the original
# append loop exactly.
a = [random.randint(1, 75) for _ in range(random.randint(5, 15))]
# One-liner requested by the exercise: keep only the even elements.
even_list = [number for number in a if number % 2 == 0]
print(a)
print(even_list)
|
from django.conf.urls import url, include
from rest_framework.routers import DefaultRouter
from .views import GroupOfGeneralProfCompetencesInGeneralCharacteristicsSet, \
GeneralProfCompetencesInGroupOfGeneralCharacteristicSet, IndicatorGroupOfGeneralProfCompetencesInGeneralCharacteristicSet
router = DefaultRouter()
# Viewset routes for the "general characteristics" competence hierarchy:
# group -> competence -> indicator. The most specific prefixes are
# registered first.
router.register(r'api/general_ch/group_of_general_prof_competence/competence/indicator',
                IndicatorGroupOfGeneralProfCompetencesInGeneralCharacteristicSet,
                basename='indicator-in-general-prof-competences-in-pkgroup-in-gh')
router.register(r'api/general_ch/group_of_general_prof_competence/competence',
                GeneralProfCompetencesInGroupOfGeneralCharacteristicSet, basename='general-prof-competences-in-pkgroup-in-ghh')
router.register(r'api/general_ch/group_of_general_prof_competence',
                GroupOfGeneralProfCompetencesInGeneralCharacteristicsSet, basename='pkgroup-in-gh')
urlpatterns = [
    url(r'^', include(router.urls))
]
from common.run_method import RunMethod
import allure
@allure.step("员工手册/列表查询")
def regime_listRegimes_get(params=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "员工手册/列表查询"
url = f"/api-periodical/regime/listRegimes"
res = RunMethod.run_request("GET", url, params=params, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("员工手册/内容查询")
def regime_listRegimeContents_get(params=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "员工手册/内容查询"
url = f"/api-periodical/regime/listRegimeContents"
res = RunMethod.run_request("GET", url, params=params, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("员工手册/书架")
def regime_listRegimeOnReadNum_get(params=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "员工手册/书架"
url = f"/api-periodical/regime/listRegimeOnReadNum"
res = RunMethod.run_request("GET", url, params=params, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("员工手册/埋点记录/每页时长")
def regime_log_content_duration_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "员工手册/埋点记录/每页时长"
url = f"/api-periodical/regime/log/content/duration"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("员工手册/埋点记录/缩略图跳转")
def regime_log_thumbnail_from_to_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "员工手册/埋点记录/缩略图跳转"
url = f"/api-periodical/regime/log/thumbnail/from/to"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("员工手册/埋点记录/发送通知")
def regime_log_send_notice_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "员工手册/埋点记录/发送通知"
url = f"/api-periodical/regime/log/send/notice"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("员工手册/埋点记录/通知进入")
def regime_log_visit_from_notice_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "员工手册/埋点记录/通知进入"
url = f"/api-periodical/regime/log/visit/from/notice"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("员工手册/埋点记录/访问页面")
def regime_log_visit_content_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "员工手册/埋点记录/访问页面"
url = f"/api-periodical/regime/log/visit/content"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("员工手册/埋点记录/关闭页面")
def regime_log_leave_content_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "员工手册/埋点记录/关闭页面"
url = f"/api-periodical/regime/log/leave/content"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
|
#small =-1
#for thenum in [9,41,12,3,74,15]:
# if thenum < small:
# small=thenum
#print(small)
#n=50
#while n>0:
# print(n)
#print('all done')
#count= 0
#for thing in[9,41,3,74,15]:
# count=count +thing
#print('perfectplanb',count)
n = 0
# NOTE(review): with n = 0 the condition is immediately false, so this loop
# body never runs and the script prints nothing (unlike the commented-out
# n = 50 variant above, which would loop forever since n never changes).
while n>0:
    print('perfect')
    print('plan')
    print('b')
|
# Copyright 2020 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
import os
import signal
from typing import Mapping
import pytest
from workunit_logger.register import FINISHED_SUCCESSFULLY
from pants.testutil.pants_integration_test import (
PantsResult,
run_pants,
setup_tmpdir,
temporary_workdir,
)
from pants.util.dirutil import maybe_read_file
from pants_test.pantsd.pantsd_integration_test_base import attempts, launch_waiter
def workunit_logger_config(log_dest: str, *, pantsd: bool = True) -> Mapping:
    """Build a pants options dict that enables the workunit-logger backend
    and points its output at *log_dest*."""
    global_options = {
        "pantsd": pantsd,
        "backend_packages.add": ["workunit_logger", "pants.backend.python"],
    }
    return {
        "GLOBAL": global_options,
        "workunit-logger": {"dest": log_dest},
        "python": {"interpreter_constraints": "['>=3.7,<3.10']"},
    }
def run(
    args: list[str], success: bool = True, *, files: dict[str, str] | None = None
) -> tuple[PantsResult, str | None]:
    """Run pants with the workunit logger enabled; return (result, log text).

    *args* may contain '{tmpdir}' placeholders, which are substituted with
    the sandbox directory that also holds *files*. The workunit log lives
    inside that sandbox, so it is read before the tmpdir context exits.
    """
    with setup_tmpdir(files or {}) as tmpdir:
        dest = os.path.join(tmpdir, "dest.log")
        normalized_args = [arg.format(tmpdir=tmpdir) for arg in args]
        pants_run = run_pants(normalized_args, config=workunit_logger_config(dest))
        if success:
            pants_run.assert_success()
            # The logger may flush after the run returns; poll for the marker.
            confirm_eventual_success(dest)
        else:
            pants_run.assert_failure()
        return pants_run, maybe_read_file(dest)
def confirm_eventual_success(log_dest: str) -> None:
    """Poll *log_dest* until the workunit logger's success marker appears.

    attempts() retries with delays; presumably it fails the test when the
    marker never shows up -- see pantsd_integration_test_base (TODO confirm).
    """
    for _ in attempts("The log should eventually show that the SWH shut down."):
        content = maybe_read_file(log_dest)
        if content and FINISHED_SUCCESSFULLY in content:
            break
def test_list() -> None:
    """`pants list` over a minimal BUILD file logs workunits successfully."""
    run(["list", "{tmpdir}/foo::"], files={"foo/BUILD": "target()"})
def test_help() -> None:
    """Goal-less invocations (help/version) still drive the logger cleanly."""
    run(["help"])
    run(["--version"])
@pytest.mark.parametrize("pantsd", [True, False])
def test_ctrl_c(pantsd: bool) -> None:
    """SIGINT on the client must still let the logger finish (both with and
    without pantsd)."""
    with temporary_workdir() as workdir:
        dest = os.path.join(workdir, "dest.log")
        # Start a pantsd run that will wait forever, then kill the pantsd client.
        client_handle, _, _, _ = launch_waiter(
            workdir=workdir, config=workunit_logger_config(dest, pantsd=pantsd)
        )
        client_pid = client_handle.process.pid
        os.kill(client_pid, signal.SIGINT)
        # Confirm that finish is still called (even though it may be backgrounded in the server).
        confirm_eventual_success(dest)
def test_restart() -> None:
    """The logger survives a pantsd restart (forced via a tiny memory cap)."""
    # Will trigger a restart
    run(["--pantsd-max-memory-usage=1", "roots"])
|
# Read whitespace-separated tokens, sort them in descending (reverse
# lexicographic) order and print them concatenated.
# NOTE(review): this is string ordering, not numeric -- for multi-digit
# tokens it does not necessarily form the largest possible number.
input_list = input().split()
input_list.sort(reverse=True)
# input_list.reverse() (after a plain sort) could also be used for the reverse order
print("".join(input_list))
import setuptools
with open("README.md", "r") as fh:
readme = fh.read()
requires = ['nltk',
'numpy',
'matplotlib',
'pandas',
'scipy']
setuptools.setup(
name="Conceptual dependency",
version="0.0.1",
author="Neneka",
author_email="makiasagawa@gmail.com",
description="Something I wrote for the MARGIE text generator efficiency test ",
long_description=readme,
long_description_content_type="text/markdown",
url="https://github.com/ningkko/ConceptualDependency",
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires='>=3.6',
install_requires=requires) |
import numpy as np
def int_quad(n, m):
    """Return a (2n, 2m) int array made of four constant quadrants:
    0 top-left, 1 top-right, 2 bottom-left, 3 bottom-right."""
    quadrants = [
        [np.full((n, m), value, dtype=int) for value in row]
        for row in ((0, 1), (2, 3))
    ]
    # np.block assembles the 2x2 grid in one call (hstack+vstack equivalent).
    return np.block(quadrants)
from django.urls import path, re_path
from django.conf.urls import include
from django.contrib.auth.models import User
from Profile import views
urlpatterns = [
    # Profile listing plus its lookup-table endpoints (city, gender,
    # occupation, state, marital status), all served by class-based views.
    re_path(r'ProfileList/$', views.ProfileList.as_view()),
    re_path(r'ProfileList/CiudadList/$', views.CiudadList.as_view()),
    re_path(r'ProfileList/GeneroList/$', views.GeneroList.as_view()),
    re_path(r'ProfileList/OcupacionList/$', views.OcupacionList.as_view()),
    re_path(r'ProfileList/EstadoList/$', views.EstadoList.as_view()),
    re_path(r'ProfileList/EstadoCivilList/$', views.EstadoCivilList.as_view()),
]
# Python 2 script (raw_input). Per test case it reads "n limit" and then a
# line of n ints, and appears to report how many leading items can be taken
# before the running sum exceeds the limit -- TODO confirm intended problem.
x = raw_input()
casen = 1
# NOTE(review): x is a string here, so `x != 0` is always true until the
# except-branch below explicitly sets x = 0 to end the loop.
while x != 0:
    ans = 0
    x = x.split(" ")
    ins = raw_input()
    ins = ins.split(" ")
    s = 0
    # take items while under both the count (x[0]) and sum (x[1]) limits
    while ans<int(x[0]) and s<int(x[1]):
        s+=int(ins[ans])
        ans+=1
    # the last item overshot the sum limit -- do not count it
    if s>int(x[1]):
        ans-=1
    print("Case " + str(casen)+": " +str(ans))
    casen+=1
    x = 0
    # continue only if another parseable header line is available
    try:
        x = raw_input()
        int(x.split(" ")[0])
    except:
        x = 0
        pass
# Generated by Django 2.0 on 2018-04-29 06:43
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: redeclare Time.room FK with related_name='times' and a
    Korean verbose_name ('interview room'). Do not edit by hand beyond docs."""

    dependencies = [
        ('room', '0002_auto_20180426_2356'),
    ]

    operations = [
        migrations.AlterField(
            model_name='time',
            name='room',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='times', to='room.Room', verbose_name='면접실'),
        ),
    ]
|
import math

# Read an angle in degrees and report its sine, cosine and tangent.
ang = float(input('Qual o valor do ângulo? '))
rad = math.radians(ang)  # convert once; all three functions take radians
s = math.sin(rad)
c = math.cos(rad)
t = math.tan(rad)
print('O seno de {} é {:.2f}\nO cosseno {:.2f}\ne a tangente {:.2f}'.format(ang, s, c, t))
'''
Alessia Pizzoccheri - CS 5001 02
'''
MODULUS = 10   # a valid UPC total is a multiple of 10
MULTIPLY = 3   # weight applied to every second digit
ZERO = 0
MIN = 2        # minimum number of digits for a code to be considered
# Pre-check used by is_valid_upc: reject codes made up entirely of zeroes.
def check_zeroes(lst):
    '''
    Function check_zeroes
    Input: lst -- list of non-negative digit ints
    Returns: bool -- True when the digits sum to zero (i.e. all zeroes)
    '''
    # sum() replaces the manual accumulation loop; for non-negative
    # digits a zero total means every digit is zero.
    return sum(lst) == ZERO
def is_valid_upc(upc_code):
    '''
    Function is_valid_upc
    Input: upc_code -- list of single-digit ints
    Returns: bool -- True when the code passes the UPC checksum

    Every second digit, counting backwards starting from the
    second-to-last, is tripled; the remaining digits (last, third-to-last,
    ...) are added as-is. The code is valid when the total is a multiple
    of MODULUS. Codes shorter than MIN digits, or consisting only of
    zeroes, are rejected up front.

    Test Case #1: 0 7 3 8 5 4 0 0 8 0 8 9 -> True
    Test Case #2: 0 0 0 0 0               -> False
    Test Case #3: 9                       -> False
    Test Case #4: 0 8 2 8 4 5 9 0 7 0 9 1 -> False
    Test Case #5: 1 9 4 0 9 5 5 8 3 3 0 9 -> True
    '''
    # Reject too-short or all-zero codes before doing any arithmetic.
    if len(upc_code) < MIN or check_zeroes(upc_code):
        return False
    # Slices replace the two index loops: [-1::-2] walks the last digit and
    # every second one before it; [-2::-2] walks the digits in between.
    plain = sum(upc_code[-1::-2])
    tripled = sum(upc_code[-2::-2]) * MULTIPLY
    return (plain + tripled) % MODULUS == ZERO
import matplotlib.pyplot as plt
import numpy as np
def show_img(img):
    """Display one normalized image tensor with matplotlib.

    img: image tensor with a .numpy() method (e.g. torch), assumed (C, H, W)
    given the transpose below -- TODO confirm. The /5 + 0.47 presumably
    undoes the dataset's normalization constants; verify against the
    transform pipeline.
    """
    img = img / 5 + 0.47  # unnormalize
    npimg = img.numpy()
    # matplotlib expects channels last: (H, W, C)
    plt.imshow(np.transpose(npimg, (1, 2, 0)))
    plt.show()
def show_grid(imgs):
    """Display a row of normalized image tensors side by side.

    imgs: sequence of image tensors with .numpy() (e.g. torch), assumed
    (C, H, W) -- TODO confirm. Previously the subplot count was hard-coded
    to 5, so more than 5 images raised IndexError and fewer left empty axes;
    now one column is created per image.
    """
    plt.figure()
    plt.axis('off')
    # squeeze=False keeps axarr 2-D and indexable even for a single image.
    f, axarr = plt.subplots(1, len(imgs), squeeze=False)
    for idx, img in enumerate(imgs):
        img = img / 5 + 0.47  # undo normalization (same constants as show_img)
        img = np.transpose(img.numpy(), (1, 2, 0))  # (H, W, C) for imshow
        axarr[0, idx].axis('off')
        axarr[0, idx].imshow(img)
    plt.show()
|
# Exercise 05: write a function
#   def extract_characters(*file)
# that returns the set of characters contained in the given files
def extract_characters(*file):
    """Print and return the set of characters found across the given files.

    Each path is echoed, its per-file character set printed (as before),
    and the union over all files is returned -- the exercise statement asks
    for the set to be returned, but the original returned None.

    :param file: one or more paths to UTF-8 text files
    :return: set of all characters appearing in any of the files
    """
    all_chars = set()
    for path in file:  # iterate paths directly instead of range(len(...))
        print(path)
        with open(path, 'r', encoding='utf-8') as text:
            data = text.read()
        chars = set(data)
        print(chars)
        all_chars |= chars
    return all_chars
# Demo invocation -- expects the three sample files to exist under ./text/.
extract_characters('text/text.txt','text/text2.txt','text/text3.txt')
from django.shortcuts import render, get_object_or_404
from django.urls import reverse_lazy,reverse
from .models import Category,Product
from .serializers import CategorySerializer,ProductSerializer
from .forms import CategoryForm,ProductForm
from rest_framework import permissions,viewsets
from rest_framework.decorators import permission_classes
# from django_filters.rest_framework import DjangoFilterBackend
from rest_framework.filters import SearchFilter, OrderingFilter
from django.views.generic import ListView,DetailView
from django.views.generic.edit import CreateView,UpdateView,DeleteView
#*** DRF ENDPOINTS***
class CategoryList(viewsets.ModelViewSet):
    """Unauthenticated CRUD API endpoint for categories.

    NOTE(review): search_fields = '__all__' is not a form DRF's SearchFilter
    accepts (it expects explicit field names) -- confirm the configured
    filter backends before relying on search here.
    """
    permission_classes = (permissions.AllowAny,)
    queryset = Category.objects.all()
    serializer_class = CategorySerializer
    filterset_fields = '__all__'
    search_fields = '__all__'
    ordering = ['name']
class ProductList(viewsets.ModelViewSet):
    """Unauthenticated CRUD API endpoint for products.

    NOTE(review): same caveat as CategoryList -- '__all__' is not a valid
    SearchFilter field spec; verify search actually works.
    """
    permission_classes = (permissions.AllowAny,)
    queryset = Product.objects.all()
    serializer_class = ProductSerializer
    filterset_fields = '__all__'
    search_fields = '__all__'
    ordering = ['name']
#****TEMPLATE VIEWS***************
class CategoryView(ListView):
    """List all categories and handle inline category creation via POST."""
    template_name= "product/index.html"

    def get_queryset(self):
        return Category.objects.all()

    def get_context_data(self, **kwargs):
        # Expose a blank form alongside the listing.
        context = super().get_context_data(**kwargs)
        context['form'] = CategoryForm
        return context

    def post(self, request, *args, **kwargs):
        """Save a valid form, then re-render the listing either way.

        Fixes the invalid-form branch, which called get_queryset() (a
        QuerySet, not a context dict), subscripted it, and never set
        self.object_list -- so any invalid submission crashed.
        """
        form = CategoryForm(request.POST)
        if form.is_valid():
            form.save()
        # ListView requires self.object_list before get_context_data().
        self.object_list = self.get_queryset()
        context = super().get_context_data(**kwargs)
        context['form'] = CategoryForm
        return self.render_to_response(context=context)
class CategroyDetailView(ListView):
    """Paginated product listing for one category, with inline creation.

    (Class-name typo "Categroy" is kept for URLconf compatibility.)
    """
    model = Product
    template_name ="product/detail.html"
    paginate_by = 5

    def get_queryset(self):
        # 404 when the category named in the URL does not exist.
        category_name = get_object_or_404(Category, name=self.kwargs.get('name'))
        return Product.objects.filter(category = category_name)

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context['form'] = ProductForm
        return context

    def post(self, request, *args, **kwargs):
        """Save a valid product form, then re-render the listing either way.

        The two branches were identical except for the accidental
        "context = context = ..." double assignment; they are merged.
        """
        form = ProductForm(request.POST)
        if form.is_valid():
            form.save()
        self.object_list = self.get_queryset()
        context = super().get_context_data(**kwargs)
        context['form'] = ProductForm
        return self.render_to_response(context=context)
class CategoryCreate(CreateView):
    """Standalone category creation form.

    NOTE(review): no success_url, so Django falls back to the model's
    get_absolute_url() -- confirm Category defines it, otherwise saving
    raises ImproperlyConfigured.
    """
    model = Category
    fields = ["name"]
    template_name='product/category_form.html'
class ProductCreate(CreateView):
    """Standalone product creation form; redirects to the category list."""
    model = Product
    fields = "__all__"
    template_name='product/product_form.html'
    success_url = reverse_lazy("Product:category-list")
class ProductUpdate(UpdateView):
    """Edit an existing product; redirects to the category list."""
    model = Product
    fields = '__all__'
    template_name='product/product_update.html'
    success_url = reverse_lazy("Product:category-list")
class ProductDelete(DeleteView):
    """Confirm-and-delete view for a product; redirects to the category list."""
    model = Product
    template_name='product/product_delete.html'
    success_url = reverse_lazy("Product:category-list")
# path
PATH_INPUT_STYLE = 'input/style/'      # style reference images
PATH_INPUT_CONTENT = 'input/content/'  # content images to be re-styled
PATH_OUTPUT = 'output/'                # generated results
# pre-trained network data
TRAINED_NETWORK_DATA = 'imagenet-vgg-verydeep-19.mat'  # VGG-19 weights (MatConvNet .mat format)
|
from psana.psexp import DataSourceBase
from psana.dgrmdsource import DgrmDsource
class DrpDataSource(DataSourceBase):
    """DataSource variant used inside the DRP: reads dgrams from shared
    memory rather than from xtc2 files, exposing a single pseudo-run (0).

    NOTE(review): DgramManager is referenced in _setup_run() but never
    imported in this module -- confirm it is available at runtime (e.g.
    re-exported via psana.psexp) or add the missing import.
    """
    def __init__(self, *args, **kwargs):
        super(DrpDataSource, self).__init__(**kwargs)
        # Shmem streams have no run files, so there is one pseudo-run, 0.
        self.runnum_list = [0]
        self.runnum_list_index = 0
        self._setup_run()
        super(). _start_prometheus_client()
    def __del__(self):
        super(). _end_prometheus_client()
    def runs(self):
        # Only the single pseudo-run is ever yielded.
        yield 0
    def is_mpi(self):
        return False
    def _setup_run(self):
        # Advance to the next run number; False once all runs are consumed.
        if self.runnum_list_index == len(self.runnum_list):
            return False
        runnum = self.runnum_list[self.runnum_list_index]
        self.dgds = DgrmDsource()
        # Both SingleFile and Shmem DataSource
        self.dm = DgramManager(['shmem'], tag=self.tag)
        self._configs = self.dm.configs
        super()._setup_det_class_table()
        super()._set_configinfo()
        self.runnum_list_index += 1
        return True
|
from flexp.flow.flow import *
|
# -*- coding: utf-8 -*-
"""
Created on Sun Apr 19 11:38:40 2020
@author: pgood
"""
import dash
import dash_core_components as dcc
import dash_html_components as html
import plotly.graph_objs as go
import dash_table
from dash.dependencies import Input, Output
import pandas as pd
from pymongo import MongoClient
#define the mongo connection
# SECURITY NOTE(review): database credentials are hard-coded in source;
# move them to environment variables / a secrets store.
connection = MongoClient('ds145952.mlab.com', 45952, retryWrites = False)
db = connection['capstone']
db.authenticate('cuny', 'datasci3nce')
#Extract some of the larger DFs upfront so we don't need to compute these
#on every client request
raw_skill_data = []
iterator = db.full_details.find()
for row in iterator:
    raw_skill_data.append(row)
raw_skill_df = pd.DataFrame(raw_skill_data)
# Dropdown option lists are built once at startup from the full_details data.
all_titles = [{'label' : row['job_title'], 'value' : row['dice_id']}
    for index, row in raw_skill_df[['job_title', 'dice_id']].drop_duplicates().iterrows()]
all_skills = [{'label' : row['skill'], 'value' : row['skill']}
    for index, row in raw_skill_df[['skill']].drop_duplicates().iterrows()]
cateogries_df = (raw_skill_df[['topic', 'top_num']]
    .groupby('topic')
    .size()
    .reset_index()
    .sort_values(by='topic')
    )
# Category labels carry the per-topic row count, e.g. "SomeTopic (42)".
all_categories = [{'label' : row['topic'] + ' ({})'.format(row[0]), 'value' : row['topic']}
    for index, row in cateogries_df.iterrows()]
def get_cos():
    """Fetch every document of the company_pct collection as a DataFrame."""
    return pd.DataFrame(list(db.company_pct.find()))
def company_list():
    """Sorted unique company names, with a 'Select All' sentinel first."""
    names = get_cos()['company'].drop_duplicates().sort_values().to_list()
    return ['Select All'] + names
def raw_data():
    """Fetch every document of the dice_id_pcts collection as a DataFrame."""
    return pd.DataFrame(list(db.dice_id_pcts.find()))
def table_data_default():
    """Fetch every document of the pivot_words collection as a DataFrame."""
    return pd.DataFrame(list(db.pivot_words.find()))
# --- figure builders used by the callbacks below ---
def word_graph(word_probs, topic):
    """Bar-chart figure of word saliency for one topic."""
    bar = go.Bar(
        x=word_probs['word'],
        y=word_probs['saliency'],
        marker={'color': 'rgba(55, 128, 191, 0.7)'},
    )
    return {
        'data': [bar],
        'layout': {'title': 'Topic "{}" '.format(topic) + 'Word Relevance'},
    }
def company_portions(co_tops, topic):
    """Horizontal bar chart: each company's share of postings in one topic."""
    bars = go.Bar(
        y=co_tops['company'],
        x=co_tops['percentage'],
        orientation='h',
        text=co_tops['company'],
        marker={'color': 'rgba(55, 128, 191, 0.7)'},
    )
    layout = {
        'xaxis': {'tickformat': ',.0%', 'range': [0, 10]},
        'yaxis': {'automargin': True},
        'title': 'Topic "{}" '.format(topic) + 'Companies',
    }
    return {'data': [bars], 'layout': layout}
def scatter_graph(graph_data):
    """Bubble scatter of topics; marker area encodes the topic percentage."""
    # Scale marker areas so the largest bubble has roughly a 40px diameter
    # (plotly's recommended sizeref formula).
    sizeref = 2.*max(graph_data['percentage'])/(40.**2)
    points = go.Scatter(
        x=graph_data['pc1'],
        y=graph_data['pc2'],
        mode='markers',
        marker={
            'size': graph_data['percentage'],
            'sizemode': 'area',
            'sizeref': sizeref,
            'sizemin': 4,
        },
        text=graph_data['top_num'],
        customdata=graph_data['topic'],
        hovertemplate="Category: %{customdata} <br>Category Number: %{text}",
    )
    return {
        'data': [points],
        'layout': {
            'xaxis': {'showticklabels' :False},
            'yaxis': {'showticklabels' :False},
            'title': 'Topic Explorer',
        },
    }
#########################Styling##############################################
# Absolute-positioned layout blocks; sizes/offsets are hand-tuned in px/%.
side_style = {
    'height': '8%',
    'width' : '15%',
    'position': 'absolute',
    'z-index': 1,
    'top': 0,
    'left': 0,
    'padding-top': '10px',
}
graph_style = {
    'margin-left': '20%',
    'padding': '0px 10px'
}
# Overview tab: word-relevance bar (1), topic scatter (2), company bars (3).
graph_1_style = {'height': '35%', 'width': '70%', 'bottom': 0, 'padding-top': '10px',
                'padding-left': '10px', 'position': 'absolute', 'left': '400px'}
graph_2_style = {'height': '65%', 'width': '70%', 'top' : 0,'padding-top': '20px',
                 'position' : 'absolute'}
graph_3_style = {'height': '65%', 'width': '25%', 'bottom': 0, 'padding-top': '10px',
                'padding-left': '1px', 'left': '60px', 'position' : 'absolute'}
graph_4_style = {
        'bottom': 0,
        'margin-left':'60%',
        'position' : 'absolute'
        }
# Search tab: scrolling raw job-listing panel.
raw_style = {
    'border': 'thin lightgrey solid',
    'overflowY': 'scroll',
    'height' : '80%', 'width': '40%', 'bottom' : '0px', 'left' : '0px' , 'padding-left': '10px',
    'position': 'absolute'}
dd_style = {'width' : '50%', 'right': '0px', 'top': '0px', 'position': 'absolute', 'padding-top': '5px'}
sector_title = {'width' : '30%', 'left': '0px', 'top': '75px', 'position': 'absolute', 'padding-top': '5px'}
sector_style = {'width' : '20%', 'left': '0px', 'top': '120px', 'position': 'absolute', 'padding-top': '5px'}
# Tab chrome.
tabs_styles = {
    'height': '44px'
}
tab_style = {
    'borderBottom': '1px solid #d6d6d6',
    'padding': '6px',
    'fontWeight': 'bold'
}
tab_selected_style = {
    'borderTop': '1px solid #d6d6d6',
    'borderBottom': '1px solid #d6d6d6',
    'backgroundColor': '#119DFF',
    'color': 'white',
    'padding': '6px'
}
table_style = {
        'top': '120px',
        'right': '15px',
        'position': 'absolute',
        'font-size': '80%'
        }
st_style = {
        'top': '250px',
        'right': '600px',
        'position': 'absolute',
        'font-size': '80%'
        }
# App construction; callbacks reference IDs created later, hence the
# suppress_callback_exceptions flag.
external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css' ]
app = dash.Dash(__name__, external_stylesheets=external_stylesheets)
app.config['suppress_callback_exceptions'] = True
################################App Layout#####################################
# Static shell: sidebar with two tabs; tab bodies are filled in dynamically
# by the render_tabs callback via the 'tabs_content' container.
app.layout = html.Div(children = [
    html.Div(style = side_style, children = [
        html.H3('Skills Market'),
        dcc.Tabs(id="tabs_input", value='tab1', children=[
            dcc.Tab(label='Overview', value='tab1', style = tab_style, selected_style = tab_selected_style),
            dcc.Tab(label='Search', value='tab2', style = tab_style, selected_style = tab_selected_style)
        ])
    ]),
    html.Div(id='tabs_content')
])
@app.callback(Output('tabs_content', 'children'),
              [Input('tabs_input', 'value')])
def render_tabs(tabname):
    """Build the body of the selected tab.

    Queries Mongo on every tab switch to seed the default selections:
    the highest-percentage topic (used as initial clickData for the
    Overview scatter) and the company list for the sector dropdown.
    """
    raw = []
    docs = db.coords.find()
    for item in docs:
        raw.append(item)
    topics = pd.DataFrame(raw)
    # Topic with the largest share becomes the initially "clicked" point.
    best_top = topics.sort_values('percentage').tail(1)
    best_num = best_top['top_num'].values[0]
    best_name = best_top['topic'].values[0]
    cos_raw = []
    docs = db.company_pct.find()
    for item in docs:
        cos_raw.append(item)
    cos = pd.DataFrame(cos_raw).sort_values('percentage').tail(1)
    #best_co = cos['company'].values[0]
    cos = cos[['company']]
    #co_vals = cos.to_dict("rows")
    #cols = [{"name": i, "id": i} for i in cos.columns]
    cos_list = company_list()
    if tabname == 'tab1':
        # Overview: company selector + topic scatter + linked detail graphs.
        return html.Div(children=[
                html.Div(style = sector_style, children = [
                    html.H5('Choose Competitor:'),
                    dcc.Dropdown(
                    id = 'sector',
                    options = [{'label' : company, 'value': company}
                        for company in cos_list
                    ],
                    value = 'Select All'
                    )]
                ),
                html.Div(style = graph_style, children=[
                    dcc.Graph(
                        id='topic_explorer', style = graph_2_style,
                        clickData={'points': [{'text': best_num,
                                               'customdata': best_name}]},
                        ),
                    dcc.Graph(id='word_probs', style = graph_1_style ),
                    dcc.Graph(id = 'company_score', style = graph_3_style)
                ])
        ])
    elif tabname == 'tab2':
        # Search: category/skill/title dropdowns, company breakdown,
        # raw listing text, and skill tables.
        return html.Div(children=[
            html.Div(style=dd_style, children=[
                dcc.Dropdown(options=all_categories, id='dd_cat'),
                dcc.Dropdown(options=all_skills, id='dd_skill'),
                dcc.Dropdown(id = 'dd_title', options=all_titles)
            ]),
            dcc.Graph(id = 'company_break', style = graph_4_style,
                      clickData={'points': [{ 'label': '7'}]}),
            html.Div(style = raw_style, children = [
                html.H2('Job Listing (skill words in bold)'),
                html.P(id = 'raw_text')
            ]),
            html.Div(id = 'table', style = table_style),
            html.Div(style = st_style, id = 'skill_table', children=[
                dash_table.DataTable(id='skill_table',
                columns=[{'name': 'skill', 'id': 'skill'}],
                page_size=13
                )
            ])
        ])
###################################Callbacks#################################
##############Market Tab##############
@app.callback(Output('topic_explorer', 'figure'),
              [Input('sector', 'value')]
              )
def topic_exlorer(company_choice):
    """Scatter figure of topics, optionally filtered to one company.

    'Select All' uses the aggregate coords collection; any other value
    uses the per-company coords_cos collection.
    """
    # Removed a redundant function-local `import pandas as pd`; pd is
    # already imported at module level (used by the other callbacks).
    if company_choice == 'Select All':
        docs = db.coords.find()
    else:
        docs = db.coords_cos.find({'company': company_choice})
    graph_data = pd.DataFrame(list(docs))
    return scatter_graph(graph_data)
@app.callback(
    Output('word_probs', 'figure'),
    [Input('topic_explorer', 'clickData')]
)
def update_word_probs(clickData):
    """Refresh the word-frequency graph for the topic clicked in the explorer."""
    point = clickData['points'][0]
    topic = int(point['text'])
    name = point['customdata']
    docs = list(db.top_words.find({'top_num': topic}))
    topic_words = pd.DataFrame(docs).sort_values(by='saliency', ascending=False)
    return word_graph(topic_words, name)
@app.callback(
    Output('company_score', 'figure'),
    [Input('topic_explorer', 'clickData')]
)
def update_cos(clickData):
    """Company-breakdown graph for the topic clicked in the explorer."""
    point = clickData['points'][0]
    topic = int(point['text'])
    name = point['customdata']
    frame = pd.DataFrame(list(db.company_pct.find({'top_num': topic})))
    frame.sort_values(by='percentage', inplace=True, ascending=False)
    # Show at most the top 15 companies.
    company_topics = frame.head(min(15, len(frame.index)))
    return company_portions(company_topics, name)
@app.callback(
    Output('dd_skill', 'options'),
    [Input('dd_cat', 'value')]
)
############Search Tab############
def filter_skill(value):
    """Dropdown options: skills in the chosen category, labeled with their counts."""
    mask = (raw_skill_df.topic == value) & (raw_skill_df.percentage > .3)
    counts = (raw_skill_df.loc[mask, ['skill', 'topic']]
              .groupby('skill')
              .size()
              .sort_values(ascending=False)
              .reset_index()
              )
    return [{'label': row['skill'] + ' ({})'.format(row[0]), 'value': row['skill']}
            for _, row in counts.iterrows()]
@app.callback(
    Output('dd_title', 'options'),
    [Input('dd_skill', 'value'), Input('dd_cat', 'value')]
)
def filter_title(skill, cat):
    """Dropdown options: job titles matching both the chosen skill and category."""
    mask = (raw_skill_df['skill'] == skill) & (raw_skill_df['topic'] == cat)
    listings = (raw_skill_df.loc[mask, ['dice_id', 'job_title']]
                .drop_duplicates()
                .sort_values(by='job_title')
                )
    return [{'label': row['job_title'], 'value': row['dice_id']}
            for _, row in listings.iterrows()]
@app.callback(
    Output('company_break', 'figure'),
    [Input('dd_title', 'value')]
)
def pie_graph(value):
    """Pie chart of category percentages for the selected job listing."""
    frame = pd.DataFrame(list(db.dice_id_pcts.find({'dice_id': value})))
    pie = go.Pie(labels=frame['top_num'],
                 values=frame['percentage'],
                 hoverinfo='text+percent',
                 text=frame['topic'])
    return {
        'data': [pie],
        'layout': {
            'title': '{} Category Breakdown'.format(value),
            'showlegend': False
        }
    }
@app.callback(
    Output('raw_text', 'children'),
    [Input('company_break', 'clickData'),
     Input('dd_title', 'value')]
)
def highlight_text(clickData, value):
    """Render the job description with the clicked topic's words in bold.

    Words are matched case-insensitively against the topic's word list
    and wrapped in Markdown ** markers.
    """
    topic = int(clickData['points'][0]['label'])
    page = db.pages_raw.find_one({'dice_id': value})['job_desc']
    # Set membership makes the per-word lookup O(1).
    top_words = {item['word'] for item in db.top_words.find({'top_num': topic})}
    # (Removed: a debug print and a dead triple-quoted code block.)
    my_string = ''
    for word in page.split():
        if word.lower() in top_words:
            my_string += '**{}**'.format(word) + ' '
        else:
            my_string += word + ' '
    return dcc.Markdown(my_string)
@app.callback(
    Output('skill_table', 'data'),
    [Input('dd_title', 'value')]
)
def skill_table(value):
    """Rows for the skill DataTable: distinct skills in the selected listing."""
    skills = (raw_skill_df.loc[raw_skill_df.dice_id == value]['skill']
              .drop_duplicates()
              .sort_values()
              )
    # (Removed: a debug print of the frame.)
    return pd.DataFrame(skills).to_dict('records')
@app.callback(
    Output('table', 'children'),
    [Input('dd_title', 'value')]
)
def attr_table(value):
    """HTML table of headline attributes for the selected listing."""
    rows = [[doc['job_title'], doc['company'], doc['job_attrs'][2], doc['job_attrs'][3]]
            for doc in db.pages_raw.find({'dice_id': value})]
    frame = pd.DataFrame(rows, columns=['Title', 'Company', 'Pay', 'WHF'])
    header = [html.Tr([html.Th(col) for col in frame.columns])]
    body = [html.Tr([html.Td(frame.iloc[i][col]) for col in frame.columns])
            for i in range(len(frame))]
    return html.Table(header + body)
# Run the Dash development server when executed as a script.
if __name__ == '__main__':
    app.run_server()
# Keep prompting until the user supplies a name and password that differ.
while True:
    name = input("Informe seu nome: ")
    password = input("Informe sua senha: ")
    if name != password:
        break
    print("Nome e senha não podem ser iguais!")
    print("Informe as informações novamente")
print(f"Acesso autorizado, {name}")
|
import os
from PIL import Image
from pylab import *
from numpy import *
def imresize(im, sz):
    """Resize an image array via PIL.

    im: image as a numpy array (converted to uint8 for PIL).
    sz: target size — a (width, height) tuple as PIL's Image.resize
        expects (assumed; confirm callers pass (w, h), not (h, w)).
    Returns the resized image as a numpy array.
    """
    pil_im = Image.fromarray(uint8(im))
    return array(pil_im.resize(sz))
|
import matplotlib.pyplot as plt
import numpy as np
import sys
import os
import tensorflow as tf
import mnist_cnn
from PIL import Image
# Paths for the input sprite sheet and the TensorBoard summary directory.
imageDir = '/home/mhkim/data/images'
summary = '/home/mhkim/data/summaries/image2'
# Start every run with a clean summary directory (TF 1.x gfile API).
if tf.gfile.Exists(summary):
    tf.gfile.DeleteRecursively(summary)
tf.gfile.MakeDirs(summary)
#if os.path.exists(summary) == False : os.mkdir(summary)
img1 = Image.open(os.path.join(imageDir, 'number_font.png'))
SEED = 66478  # Set to None for random seed.
NUM_LABELS = 10
_width = img1.size[0]
_height = img1.size[1]
# Tile size = smaller image dimension; the image is padded below so it
# can be cut into whole tiles.
_basis = np.min(img1.size)
_widthPadding = 0
_heightPadding = 0
# NOTE(review): this pads by (size // basis) pixels, which does not
# round the size up to an exact multiple of _basis — looks like it
# should be (_basis - _width % _basis); confirm intent.
if _width % _basis != 0 :
    _widthPadding = _width // _basis
if _height % _basis != 0 :
    _heightPadding = _height // _basis
_im = Image.new("RGB", (_width + _widthPadding, _height + _heightPadding), "white")
_im.paste(img1, (0,0))
_pix = _im.load()
_width = _im.size[0]
_height = _im.size[1]
_shiftWidth = int(_width / _basis)
_shiftHeight = int(_height / _basis)
_batchSize = _shiftWidth * _shiftHeight
images = []
# Slice the sheet into _basis x _basis tiles and convert each tile to a
# single-channel list using the BT.709 luma weights (0.2126/0.7152/0.0722).
for row in range(_shiftHeight) :
    for cell in range(_shiftWidth):
        cropImage = _im.crop((cell * _basis, row * _basis , (cell+1)*_basis , (row + 1) * _basis))
        pixel = cropImage.load()
        cropImage = []  # rebound from PIL image to a nested list of pixels
        for x in range(_basis) :
            cropImage.append([ [round(0.2126 * pixel[y, x][0] + 0.7152 * pixel[y, x][1] + 0.0722 * pixel[y, x][2])] for y in range(_basis)])
        images.append(cropImage)
# Graph inputs and (mostly unused) LeNet-style parameters.
X = tf.placeholder(tf.float32 , name='image_node')
W1 = tf.Variable(tf.truncated_normal(shape=[5, 5, 1, 32], stddev=0.1, seed=SEED, dtype=tf.float32), name='weight_1')
B1 = tf.Variable(tf.zeros(shape=[32], dtype=tf.float32), name='bias_1')
W2 = tf.Variable(tf.truncated_normal(shape=[5, 5, 32, 64], stddev=0.1, seed=SEED, dtype=tf.float32), name='weight_2')
B2 = tf.Variable(tf.constant(0.1, shape=[64], dtype=tf.float32), name='bias_2')
fc1_weight = tf.Variable(tf.truncated_normal(shape=[_basis // 4 * _basis // 4 * 64, 512], stddev=0.1 , seed=SEED, dtype=tf.float32), name='fc1_weight')
fc1_bias = tf.Variable(tf.constant(0.1, shape=[512], dtype=tf.float32), name='fc1_bias')
# NOTE(review): fc2_weight is 1-D ([NUM_LABELS]); a matmul layer would
# need [512, NUM_LABELS]. Only reachable in the commented-out code.
fc2_weight = tf.Variable(tf.truncated_normal(shape=[NUM_LABELS], stddev=0.1 , seed=SEED, dtype=tf.float32), name='fc2_weight')
fc2_bias = tf.Variable(tf.constant(0.1, shape=[NUM_LABELS], dtype=tf.float32) , name='fc2_bias')
with tf.name_scope('model') :
    # Single conv -> relu -> 2x2 max-pool; the second conv block and the
    # fully-connected head are commented out.
    conv1 = tf.nn.conv2d(X , W1, strides=[1,1,1,1], padding='SAME', name='conv1')
    relu1 = tf.nn.relu(tf.nn.bias_add(conv1, B1), name='relu1')
    pool1 = tf.nn.max_pool(relu1, ksize=[1,2,2,1], strides=[1,2,2,1],padding='SAME', name='pool2')
    # conv2 = tf.nn.conv2d(X, W2, strides=[1, 1, 1, 1], padding='SAME', name='conv2')
    # relu2 = tf.nn.relu(tf.nn.bias_add(conv2, B2), name='relu1')
    # pool2 = tf.nn.max_pool(relu2, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME', name='pool2')
    pool_shape = pool1.get_shape().as_list()
    # reshape = tf.reshape(pool2, [pool_shape[0] , pool_shape[1] * pool_shape[2] * pool_shape[3]])
    # hidden = tf.nn.relu(tf.matmul(reshape, fc1_weight) + fc1_bias)
    # hidden = tf.nn.dropout(hidden, 1. , seed=SEED)
    # logits = tf.matmul(hidden, fc2_weight) + fc2_bias
    eval = tf.nn.softmax(pool1, name='eval')
# NOTE(review): tf.summary.scalar expects a scalar tensor; `eval` is a
# 4-D feature map, so this likely fails — tf.summary.histogram may be
# what was intended. Confirm.
tf.summary.scalar('eval_2', eval)
sess = tf.InteractiveSession()
merged = tf.summary.merge_all()
writer = tf.summary.FileWriter(summary, sess.graph)
tf.global_variables_initializer().run()
tfImg = tf.summary.image('image1', X)
# NOTE(review): `summary` here rebinds the directory-path variable to a
# summary protobuf — confusing but harmless at this point in the script.
summaryEval , summary = sess.run([ eval , tfImg ], feed_dict={X:images})
writer.add_summary(summary)
writer.add_summary(summaryEval)
writer.close()
sess.close()
|
import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import tensorflow as tf
# https://www.tensorflow.org/tutorials/structured_data/time_series
# Plot the frequency spectrum of the Jena temperature series.
# (From the TF structured-data time-series tutorial.)
mpl.rcParams['figure.figsize'] = (8, 6)
mpl.rcParams['axes.grid'] = False
df = pd.read_csv("jena_climate_2009_2016_02.csv")
# Fourier Transform
# RFFT = FFT without computing the symmetric (conjugate) half, i.e. an
# FFT with the redundant coefficients dropped.
fft = tf.signal.rfft(df['T (degC)'])
f_per_dataset = np.arange(0, len(fft))
# Convert bin index to cycles per year: samples are hourly, so the
# dataset spans n_samples_h / hours_per_year years.
n_samples_h = len(df['T (degC)'])
hours_per_year = 24*365.2524
years_per_dataset = n_samples_h/hours_per_year
f_per_year = f_per_dataset/years_per_dataset
plt.step(f_per_year, np.abs(fft))
plt.xscale('log')
plt.ylim(0, 400000)
plt.xlim([0.1, max(plt.xlim())])
plt.xticks([1, 365.2524], labels=['1/Year', '1/day'])
_ = plt.xlabel('Frequency (log scale)')
plt.show()
# Temperature shows clear periodic peaks at 1/day and 1/year.
|
class Symbol:
    """A card suit: a color name paired with its unicode glyph."""

    # Reference lists of the four suits; note these class attributes are
    # shadowed by the per-instance values assigned in __init__.
    color = ["Hearts", "Diamonds", "Clubs", "Spades"]
    icon = ["♥", "♦", "♣", "♠"]

    def __init__(self, color, icon):
        self.color, self.icon = color, icon

    def __str__(self):
        return self.icon
class Card (Symbol):
    """A playing card: a face value attached to a Symbol (suit)."""

    # Class-level list of valid faces (shadowed by the instance value).
    value = ['A, 2, 3, 4, 5, 6, 7, 8, 9, 10, J, Q, K']

    def __init__(self, value, color, icon):
        super(Card, self).__init__(color, icon)
        self.value = value

    def __str__(self):
        # Face value followed by the suit glyph, e.g. "A♠".
        return self.value + self.icon
# Demo: prints "A♠". Note the color/icon mismatch ("Hearts" with the
# spade glyph) passes through unchecked — Symbol does no validation.
new_card = Card('A', "Hearts", "♠")
print(new_card)
|
import os
def rename_files():
    """Strip all digits from every filename in the prank folder.

    Restores the original working directory even if a rename fails.
    """
    # (1) get files from the folder
    folder = r"/Users/navdeepsingh/Projects/udacity/python/prank/"
    file_list = os.listdir(folder)
    saved_path = os.getcwd()
    os.chdir(folder)
    # BUG FIX: str.maketrans("0123456789", " ", ...) raised ValueError
    # because the first two arguments must be equal length. To delete
    # digits, pass them only as the third (delete) argument. The table
    # is loop-invariant, so build it once.
    translation_table = str.maketrans("", "", "0123456789")
    try:
        # (2) rename those files
        for filename in file_list:
            newfilename = filename.translate(translation_table)
            os.rename(filename, newfilename)
            print(newfilename)
    finally:
        # Leave the process cwd as we found it.
        os.chdir(saved_path)

rename_files()
|
from dataclasses import dataclass
from enum import Enum
from http import HTTPStatus
import logging
from typing import Any, List, Optional
from core.env import Environment
import requests
from core.exceptions import NoTokensException
from core.models import ChatResponse, UserPreference, UserPreferencePatch
class EventType(Enum):
    """Kinds of events forwarded to the hub listener's /api/event endpoint."""
    Discord_UserJoined = 1
    Discord_UserLeft = 2
    VK = 3
    TelegramMessage = 4
@dataclass
class TelegramClient:
    """Identity of a Telegram user within a specific chat."""
    user_id: int    # Telegram user id
    user_name: str  # display name of the user
    chat_id: int    # Telegram chat id
    chat_name: str  # display name of the chat
@dataclass
class ClientContext:
    """Which client an event/request originated from (Telegram only, so far)."""
    telegram: TelegramClient

    def to_json(self) -> Any:
        """Serialize to the nested-dict shape the hub API expects."""
        return {
            'telegram': {
                'user_id': self.telegram.user_id,
                'user_name': self.telegram.user_name,
                'chat_id': self.telegram.chat_id,
                'chat_name': self.telegram.chat_name,
            }
        }

    @staticmethod
    def from_json(json: Any):
        """Inverse of to_json: rebuild a ClientContext from the wire format."""
        telegram = TelegramClient(
            user_id=json['telegram']['user_id'],
            user_name=json['telegram']['user_name'],
            chat_id=json['telegram']['chat_id'],
            chat_name=json['telegram']['chat_name'],
        )
        return ClientContext(telegram)

    def get_scope(self):
        # Scope string used to namespace per-chat state on the hub side.
        return f'telegram_chat/{self.telegram.chat_id}'
class EventHub:
    """HTTP client for the hub listener's internal API.

    Every request authenticates with the environment's local basic-auth
    token. The from_* event senders are fire-and-forget (failures are
    logged and swallowed); the remaining helpers log and re-raise.
    """

    def __init__(self, env: Environment) -> None:
        self._env = env

    def _auth_headers(self):
        # Shared Authorization header for every hub request.
        return {'Authorization': f'Basic {self._env.hub_local_auth_token}'}

    def from_discord(
        self,
        type: EventType,
        discord_user_id: int,
        member_name: str,
        channel_name: str,
        channel_category_id: Optional[str],
        member_count: int
    ):
        """Forward a Discord member join/leave event to the hub (best effort)."""
        try:
            requests.post(f'{self._env.hub_listener}/api/event', json={
                'type': type.value,
                'discord_user_id': discord_user_id,
                'member_name': member_name,
                'channel_name': channel_name,
                # currently not used, will be required after moving channel blacklisting to hub
                'channel_category_id': channel_category_id,
                'member_count': member_count,
            }, headers=self._auth_headers())
        except Exception as e:
            logging.error(f"Error occurred during discord event sending: {e}")

    def from_vk(self, vk_payload):
        """Forward a batch of VK events to the hub (best effort)."""
        try:
            requests.post(f'{self._env.hub_listener}/api/event', json={
                'type': EventType.VK.value,
                'events': vk_payload
            }, headers=self._auth_headers())
        except Exception as e:
            # BUG FIX: previously logged "discord_user_joined", copy-pasted
            # from from_discord.
            logging.error(f"Error occurred during vk event sending: {e}")

    def from_telegram(self, client: ClientContext, message: str):
        """Forward a Telegram chat message to the hub (best effort)."""
        try:
            requests.post(f'{self._env.hub_listener}/api/event', json={
                'type': EventType.TelegramMessage.value,
                'client': client.to_json(),
                'message': message
            }, headers=self._auth_headers())
        except Exception as e:
            # BUG FIX: copy-pasted "discord_user_joined" message corrected.
            logging.error(f"Error occurred during telegram event sending: {e}")

    def create_telegram_chat_token(self, name: str, chat_id: int, telegram_user_id: int):
        """Create a message token for a Telegram chat; returns the hub's JSON."""
        try:
            response = requests.post(f'{self._env.hub_listener}/api/message/token', json={
                'name': name,
                'chat_id': chat_id,
                'telegram_user_id': telegram_user_id
            }, headers=self._auth_headers())
            return response.json()
        except Exception as e:
            logging.error(f"Error occurred during telegram chat token creation: {e}")
            raise

    def chatgpt_question(self, client: ClientContext, question: str, force_new: bool) -> ChatResponse:
        """Ask ChatGPT via the hub.

        Raises NoTokensException on HTTP 402 (out of tokens) and a
        generic Exception for any other non-OK status.
        """
        try:
            payload = {
                'client': client.to_json(),
                'question': question,
                'force_new': force_new,
            }
            response = requests.post(f'{self._env.hub_listener}/api/openai/chatgpt',
                                     json=payload, headers=self._auth_headers())
            if response.ok:
                return ChatResponse.from_json(response.json())
            if response.status_code == HTTPStatus.PAYMENT_REQUIRED:
                raise NoTokensException.from_json(response.json())
            raise Exception(response.text)
        except Exception as e:
            logging.error(f"Error occurred during openai prompt: {e}")
            raise

    def load_preference(self, client: ClientContext) -> UserPreference:
        """Load the user's preferences from the hub."""
        try:
            payload = {'client': client.to_json()}
            # NOTE(review): GET with a JSON body is unconventional and some
            # proxies drop it; the hub appears to rely on it — confirm.
            response = requests.get(f'{self._env.hub_listener}/api/user/preference',
                                    json=payload, headers=self._auth_headers())
            if response.ok:
                return UserPreference.from_json(response.json())
            raise Exception(response.text)
        except Exception as e:
            logging.error(f"Error occurred during preference loading: {e}")
            raise

    def update_preference(self, client: ClientContext, patch: UserPreferencePatch) -> UserPreference:
        """Apply a preference patch and return the updated preferences."""
        try:
            payload = {
                'client': client.to_json(),
                'patch': patch.to_json(),
            }
            response = requests.patch(f'{self._env.hub_listener}/api/user/preference',
                                      json=payload, headers=self._auth_headers())
            if response.ok:
                return UserPreference.from_json(response.json())
            raise Exception(response.text)
        except Exception as e:
            logging.error(f"Error occurred during preference patch: {e}")
            raise

    def get_chat_presets(self, client: ClientContext) -> List[str]:
        """List the chat's configured presets."""
        try:
            payload = {'client': client.to_json()}
            response = requests.get(f'{self._env.hub_listener}/api/chat/preset',
                                    json=payload, headers=self._auth_headers())
            if response.ok:
                return response.json()
            raise Exception(response.text)
        except Exception as e:
            logging.error(f"Error occurred during chat presets loading: {e}")
            raise
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-04-14 10:18
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: adds track/video models and M2M links.

    Generated by Django's makemigrations; applied migrations should not
    be hand-edited.
    """

    dependencies = [
        ('application', '0011_auto_20170413_1548'),
    ]

    operations = [
        migrations.CreateModel(
            name='track',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=500)),
                ('cover_image', models.TextField()),
                ('mp3_url', models.TextField()),
                ('m_order', models.IntegerField(default=0)),
                ('activeyn', models.BooleanField()),
                ('updated_at', models.DateTimeField(auto_now=True)),
            ],
        ),
        migrations.CreateModel(
            name='video',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=500)),
                ('cover_image', models.TextField()),
                ('mpd_url', models.TextField()),
                ('mp4_url', models.TextField()),
                ('type', models.IntegerField(default=0)),
                ('activeyn', models.BooleanField()),
                ('updated_at', models.DateTimeField(auto_now=True)),
            ],
        ),
        migrations.AddField(
            model_name='track',
            name='video',
            field=models.ManyToManyField(to='application.video'),
        ),
        migrations.AddField(
            model_name='album',
            name='tracks',
            field=models.ManyToManyField(to='application.track'),
        ),
        migrations.AddField(
            model_name='album',
            name='trailer',
            field=models.ManyToManyField(to='application.video'),
        ),
    ]
|
import numpy as np
import math
from random import randint
from sklearn.metrics import confusion_matrix
################################################################################
# Loads a data file into numpy array
#
# Bug fixed. Previous indices should be taken into account.
################################################################################
def load_file(path, max_col_prior=0):
    """Load a libsvm-style sparse data file into dense numpy arrays.

    Each line is "<label> <idx>:<value> ..." with 1-based, strictly
    increasing indices. Missing indices are zero-filled and every row is
    right-padded to the widest row seen (or to max_col_prior, whichever
    is larger) so the matrix is rectangular — max_col_prior lets a train
    and test file share the same width.

    Returns (matrix, labels, max_col_cnt) as (ndarray, ndarray, int).
    """
    matrix = []
    label = []
    max_col_cnt = 0
    with open(path) as f:
        for line in f:
            data = line.split()
            label.append(float(data[0]))  # first token is the label
            row = []
            prev_idx = 0
            for i, (idx, value) in enumerate(item.split(':') for item in data[1:]):
                curr_idx = int(idx) - 1  # file indices are 1-based
                # Zero-fill the gap since the previously stored index.
                gap = curr_idx if i == 0 else curr_idx - (prev_idx + 1)
                prev_idx = curr_idx
                row.extend([0] * gap)
                row.append(float(value))
            matrix.append(row)
            max_col_cnt = max(max_col_cnt, len(row))
    # Honor a wider column count carried over from a previous file.
    max_col_cnt = max(max_col_cnt, max_col_prior)
    # Right-pad every row with zeros to the common width.
    for row in matrix:
        row.extend([0] * (max_col_cnt - len(row)))
    return np.array(matrix), np.array(label), max_col_cnt
################################################################################
# Gets accuracy from ground truth and predictions
################################################################################
def get_accuracy(ground, predicted):
    """Percentage of positions where predicted equals ground.

    Returns 0.0 (after printing a warning) when the arrays differ in length.
    """
    if ground.shape[0] != predicted.shape[0]:
        print("Array sizes do not match")
        return 0.0
    matches = np.sum(ground == predicted)
    return float(matches) * 100 / ground.shape[0]
################################################################################
# Gets Precision, Recall and F-scores ground truth and predictions
################################################################################
def get_f_scores(ground, predicted):
    """Precision, recall and F1 for binary labels.

    Returns np.array([precision, recall, f1]); a zero vector (after a
    printed warning) on size mismatch. eps guards the divisions when a
    class is entirely absent. (Removed an unused `correct` local.)
    """
    if ground.shape[0] != predicted.shape[0]:
        print("Array sizes do not match")
        return np.zeros((3,))
    # sklearn orders the confusion matrix by increasing label value:
    #   [[TN, FP],
    #    [FN, TP]]
    mat = confusion_matrix(ground, predicted)
    eps = 1e-8
    p = float(mat[1][1]) / (mat[0][1] + mat[1][1] + eps)
    r = float(mat[1][1]) / (mat[1][0] + mat[1][1] + eps)
    f = 2 * p * r / (p + r + eps)
    return np.array([p, r, f])
|
# -*- coding: utf-8 -*-
# author: Luke
# 1. frozenset: an immutable, deduplicated set
l = [1,2,3,4,1,2,4]
fz = frozenset(l)
print(fz)

def multipliers():
    return [lambda x: x * i for i in range(4)]
print([m(2) for m in multipliers()])
# Late binding: each lambda closes over the *variable* i, not its value.
# The closures read i only when called, after the loop has finished and
# i == 3, so every call returns 6.

def multipliers():
    return [lambda x, i=i: i * x for i in range(4)]
print([m(2) for m in multipliers()])
# Fix 1: bind the current i as a default argument, which is evaluated
# at definition time — prints [0, 2, 4, 6].

def multipliers():
    for i in range(4):
        yield lambda x: x * i
print([m(2) for m in multipliers()])
# Fix 2: a generator yields each lambda while i still holds the current
# value, and the list comprehension calls it before the next iteration.
|
import requests
class Board(object):
    """Read-only client for one board of the 4chan JSON API."""

    def __init__(self, board):
        self.board = board
        # Base URL already ends with a trailing slash.
        self.base = "https://a.4cdn.org/{}/".format(board)

    def get_catalog(self):
        """
        A JSON representation of all thread OPs (and the replies shown on indexes)
        """
        r = requests.get(self.base + "catalog.json")
        return r.json()

    def get_threads(self):
        """
        A list of thread IDs, their modification times, and respective pages
        """
        r = requests.get(self.base + "threads.json")
        return r.json()

    def get_archive(self):
        """IDs of this board's archived threads."""
        # BUG FIX: self.base already ends with '/', so the previous
        # "/archive.json" produced a double slash in the URL.
        r = requests.get(self.base + "archive.json")
        return r.json()

    def get_index(self):
        """First index page of the board."""
        r = requests.get(self.base + "1.json")
        return r.json()

    def get_page(self, page_number):
        """An arbitrary index page of the board."""
        r = requests.get(self.base + "{}.json".format(page_number))
        return r.json()

    def get_thread(self, thread_number):
        """Fetch one thread on this board.

        BUG FIX: the arguments were previously swapped; Thread's
        signature is (board, thread_number).
        """
        return Thread(self.board, thread_number)
class Thread(object):
    """A single thread, fetched eagerly over HTTP at construction time."""
    def __init__(self, board, thread_number):
        self.board = board
        self.thread_number = thread_number
        self.url = "https://a.4cdn.org/{}/thread/{}.json".format(self.board, self.thread_number)
        # Download the whole thread now and index posts by post number.
        r = requests.get(self.url)
        json = r.json()
        self.posts = {post['no']: Post(post) for post in json['posts']}
    def get_posts(self):
        """Mapping of post number -> Post for this thread."""
        return self.posts
class Post(object):
    """Typed view over one post dict from the 4chan API.

    Required keys are read directly; optional ones via .get() (None or
    False when absent). File-related attributes are only set when the
    post has an attachment ('tim' present).
    """

    def __init__(self, post):
        self.number = post['no']
        self.replies = post['resto']  # 0 for an OP, else the OP's post number
        self.sticky = bool(post.get('sticky'))
        self.closed = bool(post.get('closed'))
        self.archived = bool(post.get('archived'))
        self.now = post['now']
        self.time = post['time']
        self.name = post['name']
        self.trip = post.get('trip')
        self.id = post.get('id')
        self.capcode = post.get('capcode')
        self.country = post.get('country')
        self.country_name = post.get('country_name')
        self.subject = post.get('sub')
        self.comment = post.get('com')
        if post.get('tim'):
            self.renamed_file = post['tim']
            self.original_file = post['filename']
            self.file_ext = post['ext']
            self.file_size = post['fsize']
            self.md5 = post['md5']
            self.width = post['w']
            self.height = post['h']
            self.tn_w = post['tn_w']
            self.tn_h = post['tn_h']
            self.file_deleted = bool(post.get('filedeleted'))
        # Attribute name keeps its historical misspelling for backward
        # compatibility with existing callers.
        self.ommitted_posts = post.get('omitted_posts')
        # BUG FIX: the API key is 'omitted_images'; the misspelled
        # 'ommited_images' lookup always returned None.
        self.omitted_images = post.get('omitted_images')
        self.replies_total = post.get('replies')
        self.images_total = post.get('images')
        self.bump_limit = bool(post.get('bumplimit'))
        self.image_limit = bool(post.get('imagelimit'))
        self.last_modified = post.get('last_modified')
        self.thread_url = post.get('semantic_url')

    def get_replies(self):
        """Return the parent post number (resto), or None for an OP."""
        if self.replies:
            return self.replies
        return None
# Ad-hoc smoke test: fetch a /g/ thread and print its Post objects.
# NOTE(review): the thread is downloaded twice — once here and again
# inside Thread.__init__; the first fetch is unused apart from `json`.
r = requests.get("https://a.4cdn.org/g/thread/50575159.json")
json = r.json()
#posts = {post['no']: Post(post) for post in json['posts']}
thread = Thread('g', 50575159)
posts = thread.get_posts()
for post in posts:
    print(posts[post])
|
from django.shortcuts import render,redirect
from .models import Product,Order,Cart
from .forms import CreateProductForm
from .forms import UserRegistrationForm,LoginForm,OrderForm,CartForm
from django.contrib.auth import authenticate,login,logout
from .decorators import login_required,admin_only
# Create your views here.
def index(request):
    """Landing page (base template)."""
    return render(request,"mobile/base.html")
@login_required
def list_mobiles(request):
    """List every product in the catalogue."""
    context = {"mobiles": Product.objects.all()}
    return render(request, "mobile/listmobiles.html", context)
@admin_only
def add_product(request):
    """Admin-only product creation form.

    On a valid POST the product is saved and we redirect to the index.
    BUG FIX: an invalid POST now re-renders the *bound* form so its
    validation errors are shown; previously the blank form was rendered
    and all errors were silently lost.
    """
    form = CreateProductForm()
    if request.method == "POST":
        form = CreateProductForm(request.POST, files=request.FILES)
        if form.is_valid():
            form.save()
            return redirect("index")
    return render(request, "mobile/createmobile.html", {"form": form})
def get_mobile_object(id):
    """Fetch a Product by primary key (raises Product.DoesNotExist if absent)."""
    return Product.objects.get(id=id)
@login_required
def mobile_detail(request, id):
    """Detail page for a single product."""
    context = {"mobile": get_mobile_object(id)}
    return render(request, "mobile/mobiledetail.html", context)
@admin_only
def mobile_delete(request,id):
    """Delete a product and return to the index.

    NOTE(review): deletion happens on a plain GET with no confirmation
    step — consider requiring POST for this destructive action.
    """
    mobile = get_mobile_object(id)
    mobile.delete()
    return redirect("index")
@admin_only
def update(request, id):
    """Admin-only edit form for an existing product.

    FIXES: files=request.FILES is now passed on POST (consistent with
    add_product, so image uploads work on edit too), and an invalid
    POST re-renders the bound form so validation errors are visible.
    """
    mobile = get_mobile_object(id)
    form = CreateProductForm(instance=mobile)
    if request.method == "POST":
        form = CreateProductForm(instance=mobile, data=request.POST, files=request.FILES)
        if form.is_valid():
            form.save()
            return redirect("index")
    return render(request, "mobile/mobileupdate.html", {"form": form})
def registration(request):
    """Sign-up view.

    Valid POST: create the user and show the login page.
    BUG FIX: an invalid POST previously redirected to the login view,
    discarding all form errors (after a dead rebind of the form); it now
    re-renders the registration template with the bound form so field
    errors are displayed.
    """
    form = UserRegistrationForm()
    if request.method == "POST":
        form = UserRegistrationForm(request.POST)
        if form.is_valid():
            form.save()
            return render(request, "mobile/login.html")
    return render(request, "mobile/registration.html", {"form": form})
def login_user(request):
    """Authenticate and log a user in.

    NOTE(review): on success this renders base.html directly instead of
    redirecting (no Post/Redirect/Get), so a browser refresh re-submits
    the credentials — consider redirect("index") instead.
    """
    context={}
    form=LoginForm()
    context["form"]=form
    if request.method=="POST":
        form=LoginForm(request.POST)
        if form.is_valid():
            username=form.cleaned_data.get("username")
            password=form.cleaned_data.get("password")
            user=authenticate(request,username=username,password=password)
            if user:
                login(request,user)
                return render(request,"mobile/base.html")
            else:
                # Invalid credentials: re-render with the bound form.
                context["form"]=form
                return render(request,"mobile/login.html",context)
    return render(request,"mobile/login.html",context)
def signout(request):
    """Log the current user out and send them to the login page."""
    logout(request)
    return redirect("userlogin")
@login_required
def item_order(request, id):
    """Order form for one product, pre-filled with the user and product."""
    product = get_mobile_object(id)
    context = {"form": OrderForm(initial={'user': request.user, 'product': product})}
    if request.method == "POST":
        bound = OrderForm(request.POST)
        if bound.is_valid():
            bound.save()
            return redirect("index")
        # Invalid submission: show the bound form with its errors.
        context["form"] = bound
    return render(request, "mobile/ordereditem.html", context)
@login_required
def view_my_orders(request):
    """All orders placed by the signed-in user."""
    context = {"orders": Order.objects.filter(user=request.user)}
    return render(request, "mobile/vieworders.html", context)
@login_required
def order_cancel(request,id):
    """Soft-cancel an order (the row is kept, status changed).

    NOTE(review): the order is fetched by id without an ownership check,
    so any logged-in user can cancel any order — confirm intent. The
    "cancelled" string must match the Order model's status choices.
    """
    order=Order.objects.get(id=id)
    order.status="cancelled"
    order.save()
    return redirect("vieworder")
@login_required
def add_to_cart(request, id):
    """Add a product to the user's cart via a pre-filled CartForm."""
    product = get_mobile_object(id)
    context = {"form": CartForm(initial={'user': request.user, 'product': product})}
    if request.method == "POST":
        bound = CartForm(request.POST)
        if bound.is_valid():
            bound.save()
            return redirect("listmobile")
        # Invalid submission: show the bound form with its errors.
        context["form"] = bound
    return render(request, "mobile/cartitem.html", context)
@login_required
def view_my_cart(request):
    """Show the signed-in user's cart items."""
    context = {'carts': Cart.objects.filter(user=request.user)}
    return render(request, 'mobile/viewcart.html', context)
@login_required
def delete_cart_item(request,id):
    """Remove one cart row and return to the cart view.

    Also called directly from cart_order after a successful order (its
    redirect return value is discarded there).
    """
    carts = Cart.objects.get(id=id)
    carts.delete()
    return redirect('viewcart')
@login_required
def cart_order(request,id):
    """Turn one cart row into an order.

    On a valid POST the order is saved, the cart row is removed by
    reusing delete_cart_item (its redirect return value is ignored),
    and the user is sent back to the cart view.
    """
    carts=Cart.objects.get(id=id)
    form=OrderForm(initial={'user':request.user,'product':carts.product})
    context={}
    context['form']=form
    if request.method=='POST':
        form=OrderForm(request.POST)
        if form.is_valid():
            form.save()
            delete_cart_item(request,id)
            return redirect('viewcart')
        else:
            # Invalid submission: re-render with the bound form.
            context['form']=form
            return render(request,'mobile/ordereditem.html',context)
    return render(request, 'mobile/ordereditem.html', context)
|
import api.parsers.fls980
import api.analysis
# Simple CLI: parse an FLS980 export and run a Stern-Volmer analysis.
file_name = input("Enter csv file name: ")
data = api.parsers.fls980.read_csv(file_name)
# NOTE(review): the meaning of the two numeric arguments (1.0, 0.5) is
# not visible here — confirm against api.analysis.stern_volmer's signature.
result = api.analysis.stern_volmer(1.0, 0.5, data)
print(result)
|
from django.shortcuts import render
from django.views.decorators.csrf import csrf_exempt
from rest_framework.authtoken.models import Token
from rest_framework.parsers import JSONParser
from rest_framework.permissions import IsAuthenticated
from django.http.response import JsonResponse
from django.core.files.storage import default_storage
from django.conf import settings
import threading
from apicontent.models import File
from apicontent.serializers import FileSerializer
@csrf_exempt
def fileUpload(request):
    """Store an uploaded file, start background processing, return its name.

    BUG FIX: threading.Thread(fileUpload.process()) *called* process()
    synchronously and handed its return value to the Thread constructor;
    the callable must be passed via target= so it runs on the new thread.
    Also removed an unused JsonResponse local and renamed the record
    variable, which shadowed this function's own name.
    """
    file = request.FILES['uploadedFile']
    file_name = default_storage.save(file.name, file)
    upload = File(
        fileName = file_name
    )
    upload.save()
    thread = threading.Thread(target=upload.process)
    thread.start()
    return JsonResponse(file_name, safe = False)
|
from configuration import cityDict, refreshFrequency
from mysite.celery import app as celery_app
from django.utils import timezone
from mornings.models import City
from decouple import config
import requests
@celery_app.on_after_finalize.connect
def setup_periodic_tasks(sender, **kwargs):
    """Register the recurring weather refresh once Celery is configured."""
    # Calls update_city_data every refreshFrequency seconds with the full city map.
    sender.add_periodic_task(refreshFrequency, update_city_data.s(cityDict), name='update')
# function calls api function and updates data to the database
# function calls api function and updates data to the database
@celery_app.task
def update_city_data(cities):
    """Refresh the weather row of every configured city.

    NOTE(review): a single failed API call returns early and skips all
    remaining cities. The inline comment says this is deliberate, but a
    `continue` may be what's actually wanted — confirm.
    """
    for city in cities:
        city_id = cityDict[city]
        # Takes weather api call output and assigns it to data
        data = weather_api_call(city_id)
        # If API call fails, ends function early without hitting database
        if data is None:
            return None
        # IF API call is successful, updates database
        else:
            # Retrieve temp and humidity data and assign to readable variables
            temp = data['temp']
            humidity = data['humidity']
            weather = data['weather']
            update = timezone.now()
            # Creates city if it does not exist. Updates if it does exist
            City.objects.update_or_create(
                city_id=city_id, defaults={'city_id': city_id, 'name': city, 'temperature': temp, 'humidity': humidity,
                                           'weather': weather, 'last_update': update}
            )
    # print() returns None, so the task result is None; the message is
    # only for the worker log.
    return print("updated server successfully")
# make API call to retrieve weather data
def weather_api_call(city_id):
    """Fetch current weather for one OpenWeatherMap city id.

    Returns a dict with 'temp' (Fahrenheit), 'humidity' and 'weather'
    (description string), or None on any request failure.
    """
    # imperial gives us temp in fahrenheit
    units = "imperial"
    # Timeout parameter in seconds
    timeout = 5
    # API Key
    api_key = config('api_key')
    try:
        # IMPROVEMENT: let requests build and URL-encode the query string
        # instead of interpolating raw values into the URL by hand.
        response = requests.get(
            "http://api.openweathermap.org/data/2.5/weather",
            params={"id": city_id, "appid": api_key, "units": units},
            timeout=timeout)
        # Raises error if the API response is an error code
        response.raise_for_status()
    # Catches and prints errors and returns None so program continues running
    except requests.exceptions.HTTPError as errh:
        print("Http Error:", errh)
        return None
    except requests.exceptions.ConnectionError as errc:
        print("Error Connecting:", errc)
        return None
    except requests.exceptions.Timeout as errt:
        print("Timeout Error:", errt)
        return None
    except requests.exceptions.RequestException as err:
        print("OOps: Something Else", err)
        return None
    response_data = response.json()
    # Populate the output dictionary from the JSON payload
    output = {}
    output['temp'] = response_data['main']['temp']
    output['humidity'] = response_data['main']['humidity']
    output['weather'] = response_data['weather'][0]['description']
    return output
# Copyright (C) 2014-2016 Cuckoo Foundation.
# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
# See the file 'docs/LICENSE' for copying permission.
# Originally contributed by Check Point Software Technologies, Ltd.
from lib.common.abstracts import Package
from lib.api.adb import execute_browser
class default_browser(Package):
    """Default Browser analysis package."""

    def __init__(self, options=None):
        # BUG FIX: avoid a mutable default argument ({}); a shared dict
        # would leak state across instances if mutated downstream.
        super(default_browser, self).__init__({} if options is None else options)

    def start(self, target):
        # Launch the target URL/file in the device's default browser.
        execute_browser(target)

    def check(self):
        # No in-analysis polling needed for this package.
        return True

    def finish(self):
        # No cleanup needed.
        return True
|
from collections import defaultdict, OrderedDict
from olutils import countiter, display
from warnings import warn
EMPTY_VAL = None
def compute_colstats(data, fill_thld=0.1, empty_val=EMPTY_VAL, as_list=False,
                     verbose=False):
    """Compute statistics for each column of a dataframe.

    Args:
        data (pandas.DataFrame)
        fill_thld (float): min filling ratio for sufficient quality
        empty_val (scalar-object): default value when no number can be computed
        as_list (bool): return the stats as a list of rows instead of a dict
        verbose (int): level of verbose; 0 for none, n>0 updates the
            iteration counter every n iterations

    Return:
        When as_list is False, a dict mapping each column to an OrderedDict
        of indicators ('empty_row_nb', 'total_usage', 'filling_ratio',
        'uniq_val_nb', '95prc_usage_val_nb', 'sufficient_quality',
        'comment', 'max_occurrences_value', 'max_occurrences_count',
        'min_occurrences_value', 'min_occurrences_count', 'max_value',
        'min_value').  When as_list is True, a list of OrderedDict rows
        prefixed with 'order' and 'column' keys.
    """
    vbatch = verbose if verbose else None
    display(". compute root column stats", v=verbose)
    stats_by_col = root_colstats(data, empty_val=empty_val, vbatch=vbatch)
    display(". enrich column stats", v=verbose)
    stats_by_col = enrich_colstats(
        stats_by_col, len(data), fill_thld=fill_thld, verbose=verbose
    )
    if not as_list:
        return stats_by_col
    display(". convert stats dictionary to list", v=verbose)
    result = []
    for order, (col_name, indicators) in enumerate(stats_by_col.items(), 1):
        entry = OrderedDict([('order', order), ('column', col_name)])
        entry.update(indicators)
        result.append(entry)
    return result
def root_colstats(data, empty_val=EMPTY_VAL, vbatch=1):
    """Build root statistics for each column of *data*.

    Args:
        data (pandas.DataFrame)
        empty_val (scalar-object): default value when no number can be computed
        vbatch (int): number of iterations on columns b/w each display

    Return:
        (OrderedDict): per-column OrderedDict with keys 'empty',
        'occurrences', 'filling_ratio', 'values', 'val_occ_max',
        'val_occ_min'
    """
    row_n = len(data)
    stats_by_col = OrderedDict()
    for column in countiter(data.columns.values, vbatch=vbatch):
        series = data[column]
        values_nb = series.count()  # non-NA cell count
        # BUG FIX: Series.iteritems() was deprecated and removed in
        # pandas 2.0; .items() is the equivalent, available since 0.25.
        # value_counts() sorts by decreasing frequency, so the first item
        # is the most common value and the last the least common.
        occurrences = list(series.value_counts().items())
        values = len(occurrences)
        stats_by_col[column] = OrderedDict([
            ('empty', row_n - values_nb),
            ('occurrences', occurrences),
            ('filling_ratio', values_nb / row_n),
            ('values', values),
            ('val_occ_max', occurrences[0] if occurrences else (empty_val, 0)),
            ('val_occ_min', occurrences[-1] if occurrences else (empty_val, 0)),
        ])
    return stats_by_col
def enrich_colstats(stats_by_col, row_n, fill_thld=0.1, verbose=None):
    """Enrich stats created by root_colstats
    Args:
        stats_by_col (dict): stat per column (output of root_colstats)
        row_n (int): number of rows in original data (for ratio computation)
        fill_thld (float): min tolerated filling ratio for sufficient quality
        verbose (int|None): when truthy, print a low-quality column summary
    Return:
        (dict): enriched stats of each column in OrderedDict
    """
    ind_by_col = OrderedDict()
    counter = defaultdict(int)
    for col, colstats in stats_by_col.items():
        # Usage
        total_usage = row_n - colstats['empty']
        values = [val for val, _ in colstats['occurrences']]
        # Group of values with same number of uses; occurrences is sorted
        # by decreasing count, so equal counts are adjacent.
        val_grps = []  # List of [nb of vals, nb of usage per vals]
        for _, val_usage in colstats['occurrences']:
            if not val_grps:
                val_grps.append([1, val_usage])
                continue
            lst_grp = val_grps[-1]
            if val_usage == lst_grp[1]:
                lst_grp[0] += 1
            else:
                val_grps.append([1, val_usage])
        # val_95prc_usage is the number of values required to reach 95% usage;
        # values with identical usage counts are all counted at once.
        val_95prc_usage = 0
        usage = 0
        for val_nb, val_usage in val_grps:
            if usage >= (0.95 * total_usage):
                break
            usage += val_nb * val_usage
            val_95prc_usage += val_nb
        # Quality: a column is flagged when too sparse or constant.
        reason = EMPTY_VAL
        if colstats['filling_ratio'] < fill_thld:
            reason = f"filling_ratio < {fill_thld}"
        elif colstats['values'] == 1:
            reason = "only one value"
        # EMPTY_VAL is None, so `reason is None` means "no problem found".
        is_sufficient = int(reason is None)
        counter['insufficient'] += (1 - is_sufficient)
        # Compute max, min (may raise TypeError on mixed/unorderable types)
        try:
            max_value = max(values) if values else EMPTY_VAL
            min_value = min(values) if values else EMPTY_VAL
        except TypeError as err:
            warn(f"TypeError when building min, max value of column '{col}': {err}")
            min_value, max_value = None, None
        # Filling stats
        ind_by_col[col] = OrderedDict([
            # Filling
            ('empty_row_nb', colstats['empty']),
            ('total_usage', total_usage),
            ('filling_ratio', colstats['filling_ratio']),
            # Usage
            ('uniq_val_nb', colstats['values']),
            # ('uniq_val_95prc_usage_nb', uniq_val_95prc_usage),
            ('95prc_usage_val_nb', val_95prc_usage),
            ('sufficient_quality', is_sufficient),
            ('comment', reason),
            ('max_occurrences_value', colstats['val_occ_max'][0]),
            ('max_occurrences_count', colstats['val_occ_max'][1]),
            ('min_occurrences_value', colstats['val_occ_min'][0]),
            ('min_occurrences_count', colstats['val_occ_min'][1]),
            ('max_value', max_value),
            ('min_value', min_value),
        ])
    if verbose:
        col_nb = len(ind_by_col)
        count = counter['insufficient']
        prc = 100 * count / col_nb
        print(f"{prc:0.2f}% ({count}/{col_nb}) columns have not enough data")
    return ind_by_col
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
FILE $Id$
AUTHOR Ksenia Shakurova <ksenia.shakurova@firma.seznam.cz>
Copyright (c) 2017 Seznam.cz, a.s.
All rights reserved.
"""
import argparse
import common.flow.flog as flog
from common.flow import Chain
from flexp import flexp
from flexp.flow.cache import PickleCache
log = flog.Log(__name__)
from flexp.flow import cache
# define module with some attributes
class TestModule:
    """Toy chain module whose third constructor argument is excluded
    from the PickleCache hash via PickleCacheBlackList."""

    # Attribute names listed here are skipped when PickleCache hashes
    # the module's state.
    PickleCacheBlackList = ['attr3']

    def __init__(self, attr1, attr2, attr3):
        self.attr1 = attr1
        self.attr2 = attr2
        # Excluded from the cache hash (see PickleCacheBlackList above).
        self.attr3 = attr3

    def process(self, data):
        """No-op processing step (demo module)."""
        pass
class FlowData(object):
    """Mapping-like container passed through the flow chain.

    Exposes its instance attributes through dict-style iteration,
    items(), and item access.
    """

    def __init__(self):
        self.id = ""
        self.attrs = [1, 2, 3]

    def __iter__(self):
        # Iterate over attribute names, mimicking dict key iteration.
        return iter(vars(self))

    def items(self):
        """Return (name, value) pairs for every instance attribute."""
        return [(name, getattr(self, name)) for name in vars(self)]

    def __setitem__(self, key, value):
        setattr(self, key, value)

    def __getitem__(self, key):
        return getattr(self, key)
def main():
    """Demo: run the same chain twice to show PickleCache hashing with a
    black-listed constructor attribute.

    The two PickleCache instances hash to the same value because the only
    differing TestModule argument (attr3: 18 vs 20) is black-listed.
    """
    flexp.setup("experiments/", "exp01", False)
    flog.setup("debug", path=flexp.get_file_path("experiment.log.txt"))  # , disable_stderr=not cfg.SHORT_RUN)
    log.debug('Starting.')
    data = FlowData()
    data.id = "a"
    # debug level 2 - all detailes will be printed
    data_chain = PickleCache("cached_pkls", "id", chain=[TestModule(12, 14, 18)], debug_level=2)
    data_chain.process(data)
    # hash of this and previous are same
    data_chain = PickleCache("cached_pkls", "id", chain=[TestModule(12, 14, 20)], debug_level=1)
    data_chain.process(data)


if __name__ == "__main__":
    main()
|
def open_file(filename):
    """Open *filename* with the platform's default application and block
    until the user confirms they are done editing it.

    Relies on module-level imports of sys, os and subprocess.
    """
    print('Opening ' + filename)
    if sys.platform == "win32":
        # Windows: delegate to the shell's file-type association.
        os.startfile(filename)
    else:
        # macOS uses `open`; other POSIX systems use `xdg-open`.
        opener ="open" if sys.platform == "darwin" else "xdg-open"
        subprocess.call([opener, filename])
    input("Press the Enter key in the Python IDLE once you are finished editing the text file. Remember to save.")
def skip_trailing(file):
    """Read *file* (without modifying it) and return its lines normalized.

    Lines with real content are returned stripped of surrounding
    whitespace and their newline; blank lines (whitespace only) become a
    lone '\n' entry so paragraph breaks survive, e.g.
    ['Hello', 'There', '\n', 'General', '\n', 'kenobi'] instead of
    ['Hello \n', 'There\n', ' \n', 'General\n', '\n', 'kenobi'].
    """
    with open(file, 'r') as f:
        lines = f.readlines()
    result = []
    for line in lines:
        stripped = line.strip()
        if stripped:
            # Real content: keep it without surrounding whitespace
            # (strip() already removes the newline; the original's extra
            # .strip('\n') was redundant).
            result.append(stripped)
        else:
            # Whitespace-only line.  FIX: the original only collapsed
            # space-padded lines ending in '\n'; tab-padded or
            # newline-less blank lines slipped through unchanged.
            result.append('\n')
    return result
def remove_trailing(file):
    """Rewrite *file* in place with trailing whitespace stripped from each
    line and no newline after the final line."""
    lines = skip_trailing(file)  # normalized lines ('\n' == blank line)
    with open(file, 'w') as f:   # truncates existing contents
        last = len(lines) - 1
        for i, line in enumerate(lines):
            if line == '\n':
                f.write('\n')
            elif i != last:
                # BUG FIX: the original compared i != len(lines), which is
                # always true for enumerate indices, so a newline was
                # appended even after the last line.
                f.write(line + '\n')
            else:
                # Last line: no trailing newline.
                f.write(line)
def replace_line_in_file(query, replacement, file):
    """Replace every line exactly equal to *query* with *replacement*.

    *query* should have no trailing whitespace (lines are compared after
    skip_trailing() normalization).  The replacement is space-padded to at
    least the query's length so the in-place rewrite cannot leave stale
    bytes behind; remove_trailing() strips that padding afterwards.
    """
    pad = max(0, len(query) - len(replacement))
    replacement += ' ' * pad
    lines = skip_trailing(file)
    with open(file, 'r+') as f:
        f.seek(0)  # rewrite from the start of the file
        last = len(lines) - 1
        for i, line in enumerate(lines):
            if line == query:
                f.write(replacement + '\n')
            elif i != last:
                # BUG FIX: the original tested i != len(lines) (always
                # true for enumerate indices), so the final line always
                # gained a spurious newline.
                f.write(line + '\n')
            else:
                f.write(line)  # last line: no trailing newline
        f.truncate()  # drop leftover bytes if the file shrank
    remove_trailing(file)  # strip the padding added above
def check_line_in_file(query, file):
    """Return True if *query* occurs as a substring of any normalized
    line of *file* (see skip_trailing)."""
    return any(query in line for line in skip_trailing(file))
def read_after_line(query, file):
    """Return all normalized lines after the first line containing *query*.

    Lines come from skip_trailing(), so each one has no trailing
    whitespace and the query can be compared against full content lines.
    Returns an empty list when no line matches.
    """
    lines = skip_trailing(file)
    cut = 0
    for cut, line in enumerate(lines, 1):
        if query in line:
            break
    # When no line matched, cut == len(lines) and the slice is empty.
    return lines[cut:]
|
from django.conf import settings
from django.core.context_processors import csrf
from django.conf import settings
from django.contrib.auth.models import User
from django.shortcuts import HttpResponseRedirect
from ctypes import *
from mapFriends.models import UserProfile
import urllib
import urllib2
import urlparse
import json
import re
def get_authorization_url(request):
    """Build the Facebook OAuth dialog URL for the login flow (Python 2).

    Stores a CSRF token in the session as 'facebook_state' so the
    redirect can later be validated by verify().
    """
    # URL to where Facebook will redirect to
    redirect_url = urllib.quote_plus(settings.SITE_URL)
    request.session['facebook_state'] = unicode(csrf(request)['csrf_token'])
    # redirect to facebook for approval
    url = 'https://www.facebook.com/dialog/oauth?' \
        + 'client_id=' + settings.FACEBOOK_APP_ID \
        + '&redirect_uri=' + redirect_url \
        + '&scope=email,public_profile,user_friends,user_hometown,user_location,user_about_me' \
        + '&state=' + request.session['facebook_state']
    return url
def verify(request):
    """Validate Facebook's OAuth redirect parameters.

    Facebook redirects back with ?state=...&code=... .  Returns True only
    when a session state exists, a code is present (absent when the user
    denied the application), and the returned state matches the session
    state (CSRF check).
    """
    has_all = (
        'facebook_state' in request.session
        and 'state' in request.GET
        and 'code' in request.GET
    )
    return has_all and request.session['facebook_state'] == request.GET['state']
def get_token(request):
    """Exchange the OAuth code from the redirect for an access token.

    Returns the parsed query-string parameters of Facebook's response as
    a dict of lists (e.g. params['access_token'][0]).
    """
    redirect_url = urllib.quote_plus(settings.SITE_URL)
    url = 'https://graph.facebook.com/oauth/access_token?' \
        + 'client_id=' + settings.FACEBOOK_APP_ID \
        + '&redirect_uri=' + redirect_url \
        + '&client_secret=' + settings.FACEBOOK_API_SECRET \
        + '&code=' + request.GET['code']
    response = urllib2.urlopen(url).read()
    params = urlparse.parse_qs(response)
    return params
def get_code(request):
    """Build the Facebook OAuth dialog URL for the map page.

    Same flow as get_authorization_url() but redirects back to
    settings.SITE_URL_MAP instead of settings.SITE_URL.
    """
    # URL to where Facebook will redirect to
    redirect_url = urllib.quote_plus(settings.SITE_URL_MAP)
    request.session['facebook_state'] = unicode(csrf(request)['csrf_token'])
    # redirect to facebook for approval
    url = 'https://www.facebook.com/dialog/oauth?' \
        + 'client_id=' + settings.FACEBOOK_APP_ID \
        + '&redirect_uri=' + redirect_url \
        + '&scope=email,public_profile,user_friends,user_hometown,user_location,user_about_me' \
        + '&state=' + request.session['facebook_state']
    return url
def update_token(request):
    """Exchange a fresh OAuth code for a new access token.

    Same flow as get_token() but uses the SITE_URL_MAP redirect URI.
    Returns the parsed response as a dict of lists.
    """
    redirect_url = urllib.quote_plus(settings.SITE_URL_MAP)
    url = 'https://graph.facebook.com/oauth/access_token?' \
        + 'client_id=' + settings.FACEBOOK_APP_ID \
        + '&redirect_uri=' + redirect_url \
        + '&client_secret=' + settings.FACEBOOK_API_SECRET \
        + '&code=' + request.GET['code']
    response = urllib2.urlopen(url).read()
    params = urlparse.parse_qs(response)
    return params
def test_token(request, token):
    """Check whether *token* is still valid against the Graph API (Python 2).

    Returns None when the token works (only prints a confirmation), a
    Facebook login URL when a new code must be requested, '/map' after a
    new token has been obtained and stored on the user's profile, or ''
    on an unrecognized error.
    """
    graph_url = 'https://graph.facebook.com/me?' \
        + 'access_token=' + token
    try:
        response = urllib2.urlopen(graph_url)
        print "[test_token] El token funciona"
    except urllib2.HTTPError, error:
        con = error.read()
        print con
        data = json.loads(con)
        error = data['error']['message']
        # Matches e.g. "Invalid OAuth access token."
        regix = '^Invalid\s\D*\stoken\.$' #Invalid OAuth access token.
        if re.match(regix, error):
            print "[test_token] El token no funciona"
            if 'code' not in request.GET: # check whether we already have a code
                print "[test_token] new code"
                return get_code(request)
            print "[test] Obteniendo nuevo token"
            token = update_token(request)
            print token
            user = User.objects.get(username=request.user)
            print user
            profile = UserProfile.objects.get(user=user)
            # parse_qs returns lists; keep the first access_token value
            profile.access_token = token['access_token'][0]
            profile.save()
            print "[test_token] Guardando en nuevo token"
            return "/map"
        else:
            print "[test] Error desconocido"
            return ""
def get_user_data(request, token):
    """Fetch the logged-in user's name, id, email and profile picture.

    Returns a dict with keys 'name', 'id', 'email' and 'picture';
    'picture' is '' when the user has no custom photo (Facebook reports
    a silhouette).
    """
    print "[get_user_data] Obteniendo informacion de uno mismo"
    data = {}
    graph_url = 'https://graph.facebook.com/me?' \
        + 'access_token=' + token
    # get the user's data from facebook
    response = urllib2.urlopen(graph_url).read()
    user = json.loads(response)
    data['name'] = user['name']
    data['id'] = user['id']
    data['email'] = user['email']
    graph_url = 'https://graph.facebook.com/me/picture?' \
        + 'type=normal' \
        + '&redirect=false' \
        + '&access_token=' + token
    response = urllib2.urlopen(graph_url).read()
    picture = json.loads(response)
    # is_silhouette means the user has no real profile photo.
    if not picture['data']['is_silhouette']:
        data['picture'] = picture['data']['url']
    else:
        data['picture'] = ''
    return data
def get_user_friends(request, token):
    """Fetch the user's friends with location/hometown ids and pictures.

    Returns (data, sites_list): data is a list of per-friend dicts with
    'name', 'id', 'location', 'hometown' and 'picture'; sites_list
    collects every distinct location/hometown id seen (may contain None).
    """
    data = []
    sites_list = []
    print "[get_user_friends] Obteniendo amigos"
    graph_url = 'https://graph.facebook.com/me/friends?' \
        + 'access_token=' + token
    response = urllib2.urlopen(graph_url).read()
    friends = json.loads(response)
    print "[get_user_friends] Obteniendo lugares e imagenes"
    # Loop over every friend entry returned by the Graph API
    for friend in friends['data']:
        dicc = {}
        dicc['name'] = str(friend['name'])
        dicc['id'] = str(friend['id'])
        graph_url = 'https://graph.facebook.com/' \
            + dicc['id'] \
            + '?access_token=' + token
        response = urllib2.urlopen(graph_url).read()
        user = json.loads(response)
        if user.has_key('location'):
            dicc['location'] = str(user['location']['id'])
        else:
            dicc['location'] = None
        if user.has_key('hometown'):
            dicc['hometown'] = str(user['hometown']['id'])
        else:
            dicc['hometown'] = None
        # Collect distinct place ids for the later coordinate lookup
        if dicc['hometown'] not in sites_list:
            sites_list.append(dicc['hometown'])
        if dicc['location'] not in sites_list:
            sites_list.append(dicc['location'])
        graph_url = 'https://graph.facebook.com/' \
            + dicc['id'] \
            + '/picture?' \
            + 'type=square' \
            + '&redirect=false' \
            + '&access_token=' + token
        response = urllib2.urlopen(graph_url).read()
        picture = json.loads(response)
        if not picture['data']['is_silhouette']:
            dicc['picture'] = str(picture['data']['url'])
        else:
            dicc['picture'] = ''
        data.append(dicc)
    return data, sites_list
def get_coordinates(request, sites, token):
    """Resolve each non-None place id in *sites* to coordinates.

    Returns a list of {'id', 'longitude', 'latitude'} dicts built from
    the Graph API's location data for each place.
    """
    data = []
    print "[get_coordinates]"
    for site in sites:
        # sites_list from get_user_friends may contain None entries
        if not site is None:
            position = {}
            graph_url = 'https://graph.facebook.com/' \
                + site \
                + '?access_token=' + token
            response = urllib2.urlopen(graph_url).read()
            coordinates = json.loads(response)
            position['id'] = str(site)
            position['longitude'] = coordinates['location']['longitude']
            position['latitude'] = coordinates['location']['latitude']
            data.append(position)
    return data
# WS server example that synchronizes state across clients
import asyncio
import json
import logging
import websockets
import buddy_manager
logging.basicConfig()

STATE = {"value": 0}
# Shared state broadcast to every connected client.
JSON = {"wingle": "middle", "wing": "low", "heart": False, "color": "NOCOLOUR", "value": 0}
# Currently connected websocket clients.
USERS = set()
# FIX: removed a module-level `global websockets` statement; `global` is a
# no-op at module scope and only obscured the `websockets` import above.
def state_event():
    """Serialize the current shared state as a 'state' message."""
    payload = {"type": "state"}
    payload.update(JSON)
    return json.dumps(payload)
def users_event():
    """Serialize the current connection count as a 'users' message."""
    return json.dumps(dict(type="users", count=len(USERS)))
async def notify_state():
    """Broadcast the current state to every connected client."""
    if USERS:  # skip serialization entirely when nobody is connected
        message = state_event()
        print(message)
        # BUG FIX: asyncio.wait() no longer accepts bare coroutines
        # (deprecated in 3.8, removed in 3.11); gather() wraps them in
        # tasks itself and awaits them all.
        await asyncio.gather(*(user.send(message) for user in USERS))
async def notify_users():
    """Broadcast the current user count to every connected client."""
    if USERS:  # skip serialization entirely when nobody is connected
        message = users_event()
        # BUG FIX: asyncio.wait() no longer accepts bare coroutines
        # (deprecated in 3.8, removed in 3.11); use gather() instead.
        await asyncio.gather(*(user.send(message) for user in USERS))
async def register(websocket):
    """Track a newly connected client and broadcast the new user count."""
    USERS.add(websocket)
    await notify_users()
async def unregister(websocket):
    """Drop a disconnected client and broadcast the new user count.

    NOTE(review): set.remove raises KeyError for an unregistered socket;
    counter() only calls this after register(), so that cannot happen in
    the current flow.
    """
    USERS.remove(websocket)
    await notify_users()
async def counter(websocket, path):
    """Per-connection handler: sync state to the new client, then apply
    its plus/minus actions, broadcasting after each change."""
    # register(websocket) sends user_event() to websocket
    await register(websocket)
    try:
        await websocket.send(state_event())  # initial state sync
        async for message in websocket:
            data = json.loads(message)
            if data["action"] == "minus":
                JSON["value"] -= 1
                await notify_state()
            elif data["action"] == "plus":
                JSON["value"] += 1
                await notify_state()
            else:
                # BUG FIX: logging uses %-style placeholders, not
                # str.format; the original printed "{}" literally and
                # dropped `data` from the message.
                logging.error("unsupported event: %s", data)
    finally:
        # Always deregister, even if the connection errors out.
        await unregister(websocket)
def stsrv():
    """Start the websocket server on 192.168.0.60:6789 and block forever."""
    start_server = websockets.serve(counter, "192.168.0.60", 6789)
    # NOTE(review): get_event_loop() + run_forever() is the legacy pattern;
    # newer Pythons prefer asyncio.run with an async serve context manager.
    loop = asyncio.get_event_loop()
    loop.run_until_complete(start_server)
    loop.run_forever()
|
from .CMdApi import MdApi as MiniMdApi
from .CTdApi import TdApi as MiniTdApi
from .mini_constant import *
|
import pygame
import random
from pygame.locals import*
# Imports everything from pygame.
from sprite_loader import SpriteSheet
from cat import Cat
pygame.init()
# Initializes pygame
screen_info = pygame.display.Info()
# Gets information about user's screen (currently unused below).
size = (width, height) = (800, 600)
# Window size; also binds width/height for positioning the cat.
screen = pygame.display.set_mode(size)
# Creates the display surface everything is drawn onto.
clock = pygame.time.Clock()
# Clock used to cap the frame rate in main().
color = (0, 147, 255)
# Background color as an RGB tuple.
cat_images = []
# Empty list that get_cat_images() fills with animation frames.
def get_cat_images():
    """Slice the running-cat sprite sheet into frames and append them to
    the module-level cat_images list; the last frame is down-scaled."""
    cat_sheet = SpriteSheet("runningcat.png")
    # Full sprite-sheet dimensions in pixels.
    cat_sheet_width = cat_sheet.sprite_sheet.get_rect().width
    cat_sheet_height = cat_sheet.sprite_sheet.get_rect().height
    nrows = 4  # sheet layout: 4 rows ...
    ncols = 2  # ... by 2 columns of frames
    # BUG FIX: "/" yields floats in Python 3, but pygame surface/rect
    # coordinates must be integers -- use floor division per frame.
    cat_width = cat_sheet_width // ncols
    cat_height = cat_sheet_height // nrows
    for row in range(nrows):
        for col in range(ncols):
            cat_images.append(
                cat_sheet.get_image(
                    col * cat_width,
                    row * cat_height,
                    cat_width,
                    cat_height
                )
            )
    # Smooth-scale the most recently appended frame down by half.
    # NOTE(review): only the LAST frame is scaled -- confirm whether every
    # frame was meant to be scaled for a consistent animation size.
    scale = 0.5
    cat_images[-1] = pygame.transform.smoothscale(
        cat_images[-1],
        (int(cat_width * scale),
         int(cat_height * scale)
         )
    )
def main():
    """Game loop: animate a cat sprite over a solid background at 60 FPS."""
    get_cat_images()
    # Start the cat off-screen left at a random vertical position.
    cat = Cat((-90, random.randint(50, (height - 50))), cat_images)
    while True:
        # Creates an infinite loop.
        clock.tick(60)
        # for event in pygame.event.get():
        #     if event.type == QUIT:
        #         pygame.exit()
        # Makes sure that if a user presses any keys or moves the mouse the
        # program doesnt crash.
        # NOTE(review): with the event loop above commented out, window
        # close events are never processed -- confirm this is intended.
        cat.update()
        screen.fill(color)
        cat.draw(screen)
        pygame.display.flip()
        # Fills the screen with color and flips the display.


if __name__ == "__main__":
    main()
|
def custom_sort(a, b):
    """Return the larger of *a* and *b*."""
    return a if a > b else b


my_list = [22, 1, 2, 6, 4, 26, 23, 15, 14]

# BUG FIX: the original made a single pass that called custom_sort() and
# discarded its return value, so my_list was printed unchanged.  Bubble-sort
# the list in place instead: each pass pushes the current maximum (picked
# via custom_sort) toward the end of the unsorted prefix.
for end in range(len(my_list) - 1, 0, -1):
    for i in range(end):
        if custom_sort(my_list[i], my_list[i + 1]) == my_list[i]:
            # The left element is the larger of the pair: swap it rightwards.
            my_list[i], my_list[i + 1] = my_list[i + 1], my_list[i]
print(my_list)
# -*- coding: utf-8 -*-
# Generated by Django 1.11.25 on 2019-10-08 03:35
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration: rename GameAssignments.exe to exec_people."""

    dependencies = [
        ('main', '0009_auto_20191007_2320'),
    ]

    operations = [
        migrations.RenameField(
            model_name='gameassignments',
            old_name='exe',
            new_name='exec_people',
        ),
    ]
|
from django import forms
from .models import UserInfo
class UserRegisterForm(forms.ModelForm):
    """Registration form exposing only the zip code and preferred
    temperature unit fields of the UserInfo model."""
    class Meta:
        model = UserInfo
        fields = ['zip_code','unit_of_temperature']
from pyspark.sql.functions import udf
from pyspark.sql.types import StringType
from ctutil import convert
def regist_datetime_int_format_udf(ss, logger):
    '''
    Register the datetime_int_format UDF on the Spark session.

    :param ss: spark session
    :param logger: structured logger used to record the registration event
    :return: the registered UDF object (also callable in SQL as
             "datetime_int_format_udf")
    '''
    logger.info("注册datetime_int_format_udf UDF", event="regist_udf")
    # Wraps ctutil's convert.datetime_int_format(dateint, timeint) -> str.
    datetime_int_format_udf = udf(lambda dateint, timeint: convert.datetime_int_format(dateint, timeint), StringType())
    ss.udf.register("datetime_int_format_udf", datetime_int_format_udf)
    return datetime_int_format_udf
from datetime import datetime
from architect import utils
from django.db import models
from django.contrib.postgres.fields import JSONField
from django.utils.safestring import mark_safe
class Repository(models.Model):
    """An image repository handled by a build engine (packer by default)."""
    name = models.CharField(max_length=255)
    description = models.TextField(blank=True, null=True)
    engine = models.CharField(max_length=32, default='packer')
    metadata = JSONField(blank=True, null=True)
    cache = JSONField(blank=True, null=True)
    status = models.CharField(max_length=32, default='unknown')

    def client(self):
        """Instantiate the engine-specific repository client."""
        client_class = utils.get_module(self.engine, 'repository')
        return client_class(**{
            'name': self.name,
            'engine': self.engine,
            'metadata': self.metadata})

    def class_list(self, resource=None):
        # BUG FIX: the caller's resource argument was ignored
        # (resource=None was always passed through); forward it.
        return self.client().class_list(resource=resource)

    def resource_count(self, resource=None):
        # BUG FIX: same ignored-argument bug as class_list.
        return len(self.client().inventory(resource=resource))

    def color(self):
        """Bootstrap CSS class name for the current status."""
        if self.status == 'active':
            return 'success'
        if self.status == 'error':
            return 'danger'
        if self.status == 'build':
            return 'info'
        return 'warning'

    def get_images(self):
        """HTML list of this repository's image hostnames, sorted."""
        images = {}
        output = ''
        for image in self.images.all():
            # metadata is a nullable JSONField; guard before membership test.
            if image.metadata and 'hostname' in image.metadata:
                images[image.metadata['hostname']] = image.name
        for hostname, _name in sorted(images.items()):
            output += '{}<br>'.format(hostname)
        return mark_safe(output)

    def conn_detail(self):
        """Human-readable connection summary for supported engines."""
        if self.metadata is None:
            return '-'
        elif self.engine in ['rpi23', 'bbb']:
            return mark_safe('Manager: {}<br>Inventories: {}'.format(
                self.metadata.get('manager', '-'),
                ', '.join(self.metadata.get('inventories', [])))
            )
        else:
            return '-'

    def __str__(self):
        return self.name

    class Meta:
        verbose_name_plural = "Repositories"
        ordering = ['name']
class Resource(models.Model):
    """A single image/artifact belonging to a Repository."""
    uid = models.CharField(max_length=511)
    name = models.CharField(max_length=511)
    repository = models.ForeignKey(Repository,
                                   on_delete=models.CASCADE,
                                   related_name='images')
    kind = models.CharField(max_length=32)
    size = models.IntegerField(default=0)
    metadata = JSONField(blank=True, null=True)
    cache = JSONField(blank=True, null=True)
    status = models.CharField(max_length=32, default='unknown')

    def __str__(self):
        return '{} {}'.format(self.kind, self.name)

    def get_created(self):
        """Creation time from metadata, or None when unavailable."""
        # ROBUSTNESS: metadata is nullable; `'x' in None` raises TypeError.
        if self.metadata and 'create_time' in self.metadata:
            return datetime.fromtimestamp(self.metadata['create_time'])
        return None

    def get_platform(self):
        """Display name of this image's type, or None when unknown."""
        # ROBUSTNESS: guard the nullable metadata field here too.
        if self.metadata and 'type' in self.metadata:
            for image_type in self.repository.client().get_image_types():
                if image_type[0] == self.metadata['type']:
                    return image_type[1]
        return None

    def color(self):
        """Bootstrap CSS class name for the current status."""
        if self.status == 'active':
            return 'success'
        if self.status == 'error':
            return 'danger'
        if self.status == 'build':
            return 'info'
        return 'warning'

    class Meta:
        ordering = ['-id']
|
from flask import Flask, request, jsonify, render_template
from flask_cors import CORS
from werkzeug.exceptions import abort
from exceptions.invalid_prameter import InvalidParameter
from exceptions.resource_not_found import ResourceNotFound
from exceptions.invalid_prameter import InvalidParameter
from daos.employee_dao import EmployeeDAO
from daos.employee_dao_postgres import EmployeeDaoPostgres
from services.employee_service import EmployeeService
from services.erequest_service_impl import ErequestServiceImpl
from entities.erequest import Erequest
from daos.erequest_dao import ErequestDAO
from daos.erequest_dao_postgres import ErequestDaoPostgres
from services.erequest_service import ErequestService
from services.erequest_service_impl import ErequestServiceImpl
# Module-level wiring: one Postgres-backed DAO/service pair shared by all routes.
erequest_dao: ErequestDAO = ErequestDaoPostgres()
erequest_service = ErequestServiceImpl(erequest_dao)
def create_erequest_route(app: Flask):
    """Register the expense-request routes on *app* (CORS-enabled)."""
    CORS(app)

    @app.route("/requests/<employee_id>", methods=["GET"])
    def get_all_requests_by_eid(employee_id: str):
        """Get all requests for one particular employee."""
        try:
            if not employee_id.isnumeric():
                raise InvalidParameter
            records = erequest_service.get_all_requests_by_eid(employee_id)
            app.logger.info(f'Retrieve all requests related to the employee with ID: {employee_id}')
            return jsonify([r.as_json_dic() for r in records]), 200
        except ResourceNotFound as e:
            return str(e), 404
        except InvalidParameter as e:
            return str(e), 404

    @app.route("/requests", methods=["POST"])
    def post_request():
        """Create a new expense request from the JSON body."""
        try:
            erequest = Erequest.deserialize(request.json)
            erequest_service.create_request(erequest)
            app.logger.info(f'An expense request created with ID: {erequest.erequest_id}')
            return jsonify(erequest.as_json_dic()), 201
        except ResourceNotFound as e:
            return str(e), 404

    # to render to html
    @app.route("/requests", methods=["GET"])
    def get_all_requests():
        """List every expense request."""
        try:
            erequests = erequest_service.get_all_requests()
            app.logger.info(f'A total of {len(erequests)} requested retrieved successfully')
            jsonized = [e.as_json_dic() for e in erequests]
            return jsonify(jsonized), 200
        except ResourceNotFound as e:
            return str(e), 404

    @app.route("/requests/id/<erequest_id>/status/<rstatus>/comment/<message>", methods=["PATCH"])
    def update_request(erequest_id: str, rstatus: str, message: str):
        """Update one request's status and reviewer comment."""
        try:
            if not erequest_id.isnumeric() or not rstatus:
                raise InvalidParameter
            erequest = erequest_service.update_request(int(erequest_id), rstatus, message)
            app.logger.info(f'Request with ID: {erequest_id} updated successfully')
            return jsonify(erequest.as_json_dic()), 200
        except ResourceNotFound as e:
            return str(e), 404
        except InvalidParameter as e:
            return str(e), 404

    # for statistics
    @app.route("/reports", methods=["GET"])
    def get_reports_for_all():
        """Aggregate statistics over all reimbursement requests."""
        try:
            reports = erequest_service.get_report_for_all()
            app.logger.info(f'Get all expense {len(reports)} reimbursement requests')
            return jsonify(reports), 200
        except ResourceNotFound as e:
            # BUG FIX: the status code was truncated to 40; it must be 404.
            return str(e), 404
|
# -*- coding: utf-8 -*-
from cliff.command import Command
try:
import configparser as ConfigParser
except ImportError:
import ConfigParser
import os
class Config(Command):
    'Set config.'

    def get_parser(self, prog_name):
        parser = super(Config, self).get_parser(prog_name)
        parser.add_argument('-u', '--username', help='username(email)')
        parser.add_argument('-p', '--password', help='password')
        return parser

    def take_action(self, parsed_args):
        """Persist username/password into ~/.kddcup2015-cli/config."""
        config_dir = os.path.expanduser('~/.kddcup2015-cli')
        if not os.path.isdir(config_dir):
            os.mkdir(config_dir)
        config = ConfigParser.ConfigParser(allow_no_value=True)
        config_path = os.path.join(config_dir, 'config')
        if os.path.isfile(config_path):
            # FIX: config.read() replaces the deprecated readfp() and the
            # leaked file handle from open() without a close.
            config.read(config_path)
        if not config.has_section('user'):
            config.add_section('user')
        if parsed_args.username:
            config.set('user', 'username', parsed_args.username)
        if parsed_args.password:
            config.set('user', 'password', parsed_args.password)
        # FIX: close the output handle deterministically (was leaked).
        with open(config_path, 'w') as config_file:
            config.write(config_file)
|
# https://developers.facebook.com/docs/facebook-login/access-tokens/
# https://developers.facebook.com/docs/facebook-login/manually-build-a-login-flow
# create an app ID https://developers.facebook.com/docs/apps/register
# register as dev, then select "basic app" to create new app ID. i used msan692-testing name
# ugh. had to drop my mobile and add again to get phone to verify my accout to get app ID
# browser will attempt to log the user in and FB does a redirect back to
# http://localhost:8000/?code=XXX
import sys
import BaseHTTPServer
import urlparse
import urllib
import urllib2
import json
import webbrowser
APP_CODE = None
APP_ACCESS_TOKEN = None
# Do $ python -m SimpleHTTPServer or do this:
def _wait_for_user_to_enter_browser():
    """Serve a single HTTP request on localhost:8000 and capture Facebook's
    redirect parameters into the APP_CODE / APP_ACCESS_TOKEN globals
    (Python 2)."""
    class MyHandler(BaseHTTPServer.BaseHTTPRequestHandler):
        def do_GET(self):
            global APP_CODE, APP_ACCESS_TOKEN
            # Split path from query string, e.g. /login?code=XXX
            p = self.path.split('?')
            if len(p) > 1:
                params = urlparse.parse_qs(p[1], True, True)
                if p[0]=='/login':
                    # /login?code=... : first leg of the OAuth flow
                    APP_CODE = params['code'][0]
                    self.send_response(200)
                    self.end_headers()
                    self.wfile.write("You logged in!")
                elif p[0]=='/exchange':
                    # /exchange?access_token=... : token-exchange leg
                    APP_ACCESS_TOKEN = params['access_token'][0]
                    self.send_response(200)
                    self.end_headers()
                    self.wfile.write("Access token obtained!")
    server_address = ('', 8000)
    httpd = BaseHTTPServer.HTTPServer(server_address, MyHandler)
    # handle_request() serves exactly one request, then returns.
    httpd.handle_request()
# Usage: python script.py <app_id> <app_secret>
APP_ID = sys.argv[1]
APP_SECRET = sys.argv[2]
# Step 1: open the FB login dialog; the redirect back carries ?code=...
LOGIN_URL = "https://www.facebook.com/dialog/oauth?client_id=%s&redirect_uri=http://localhost:8000/login"
webbrowser.open_new_tab(LOGIN_URL % APP_ID)
_wait_for_user_to_enter_browser()
# print "App code is", APP_CODE
# Step 2: exchange the code for an access token.
EXCH_URL = "https://graph.facebook.com/v2.3/oauth/access_token" \
           "?client_id=%s" \
           "&redirect_uri=http://localhost:8000/login" \
           "&client_secret=%s" \
           "&code=%s"
URL = EXCH_URL % (APP_ID, urllib.quote(APP_SECRET), urllib.quote(APP_CODE))
# print URL
#webbrowser.open_new_tab(URL)
response = urllib2.urlopen(URL)
jsondata = response.read()
json_dict = json.loads(jsondata)
ACCESS_TOKEN = json_dict['access_token']
# print ACCESS_TOKEN
# Step 3: pull the page's feed with the token.
# https://developers.facebook.com/docs/graph-api/using-graph-api/
FEED_URL = "https://graph.facebook.com/%s/feed?access_token=%s"
who = "whitehouse" # works for pages (like msnbc) but not users
FEED_URL = FEED_URL % (who, ACCESS_TOKEN)
response = urllib2.urlopen(FEED_URL)
jsondata = response.read()
json_dict = json.loads(jsondata)
# Print a link and an 80-char preview for every story with a message.
for story in json_dict['data']:
    if "message" in story:
        print "http://www.facebook.com/"+story['id']
        print story["message"][0:80]
        print
|
"""
Check if Palindrome - Checks if the string entered
by the user is a palindrome. That is that it reads
the same forwards as backwards like "racecar"
"""
string = raw_input('Enter a string: ').lower()
if string == string[::-1]:
print '%s is a palindrome' % string
else:
print '%s is not a palindrome' % string
|
#!/bin/python
'''
Script to select valid observations from data downloaded by get_flux.R.
Intended to be used on data from a single year.
Writes valid observations to a csv called
`SITE/flux_observations/flux_observations_YYYY.csv`
where YYYY is the year.
usage:
python src/select_complete_observations.py --site=TALL --file_path=TALL/filesToStack00200/
or in container:
docker run -ti --rm -v "$PWD":/home/docker -w /home/docker --user $(id -u):$(id -g) quay.io/kulpojke/neon-timeseries:py-shadd689ac python src/select_complete_observations.py --site=TALL --file_path=/home/docker/TALL/filesToStack00200/
'''
import os
import pandas as pd
pd.options.mode.chained_assignment = None
import argparse
from tqdm import tqdm
def parse_arguments():
    '''Build the command-line parser and return the parsed args Namespace.'''
    parser = argparse.ArgumentParser()
    # Required NEON site code, e.g. "TEAK".
    parser.add_argument(
        '--site',
        type=str,
        required=True,
        help='NEON site abreviation, e.g. "TEAK"'
    )
    # Required path to the stacked flux data directory.
    parser.add_argument(
        '--file_path',
        type=str,
        required=True,
        help='Path to `filesToStack00200` directory containing flux data.'
    )
    return parser.parse_args()
def get_valid_observations(site, file_path):
    '''
    Goes through all HDF5 files in file_path and returns a df of valid
    observations. Valid means that they exist and have a passing final
    QF flag (qfFinl == 0) for CO2, H2O, temperature and footprint
    turbulence.

    parameters:
        site - str - Four letter code of NEON site being considered, e.g. 'TALL'
        file_path - str - Path to `filesToStack00200` directory containing flux data.

    returns:
        pd.DataFrame of footprint-input stats merged (on timeBgn) with the
        CO2/H2O/temperature flux variables for every fully-good half hour.
    '''
    # make empty list for dfs
    dfs = []
    # make list of the files for the site
    files = [
        os.path.join(file_path, f)
        for f
        in os.listdir(file_path)
        if ('.h5' in f)
    ]
    for f in tqdm(files):
        # open the hdf
        hdf = pd.HDFStore(f)
        try:
            # get the flux quality flags
            qfqm_CO2 = hdf.get(f'{site}/dp04/qfqm/fluxCo2/nsae')
            qfqm_H2O = hdf.get(f'{site}/dp04/qfqm/fluxH2o/nsae')
            qfqm_T = hdf.get(f'{site}/dp04/qfqm/fluxTemp/nsae')
            qfqm_foot = hdf.get(f'{site}/dp04/qfqm/foot/turb')
            # Select observations with no bad flags
            qfqm_CO2 = qfqm_CO2.loc[qfqm_CO2.qfFinl == 0]
            qfqm_H2O = qfqm_H2O.loc[qfqm_H2O.qfFinl == 0]
            qfqm_T = qfqm_T.loc[qfqm_T.qfFinl == 0]
            qfqm_foot = qfqm_foot.loc[qfqm_foot.qfFinl == 0]
            # get the footprint input stats
            stat = hdf.get(f'{site}/dp04/data/foot/stat/')
            # get indices of the dfs from above
            istat = stat.set_index('timeBgn').index
            iqfqmC = qfqm_CO2.set_index('timeBgn').index
            iqfqmH = qfqm_H2O.set_index('timeBgn').index
            iqfqmT = qfqm_T.set_index('timeBgn').index
            iqfqmf = qfqm_foot.set_index('timeBgn').index
            # keep only entries in stat which correspond to good
            # qfqm flags for all variables
            stat = stat[
                (istat.isin(iqfqmC)) &
                (istat.isin(iqfqmH)) &
                (istat.isin(iqfqmT)) &
                (istat.isin(iqfqmf))
            ]
            # get the flux data (timeEnd dropped so timeBgn is the sole key)
            fluxCo2 = hdf.get(f'{site}/dp04/data/fluxCo2/nsae').drop('timeEnd', axis=1)
            fluxH2o = hdf.get(f'{site}/dp04/data/fluxH2o/nsae').drop('timeEnd', axis=1)
            fluxTemp = hdf.get(f'{site}/dp04/data/fluxTemp/nsae').drop('timeEnd', axis=1)
            # now merge dfs onto stat; suffixes disambiguate duplicate columns
            stat = stat.merge(fluxCo2, how='left', on='timeBgn', suffixes=('_stat', ''))
            stat = stat.merge(fluxH2o, how='left', on='timeBgn', suffixes=('_CO2', ''))
            stat = stat.merge(fluxTemp, how='left', on='timeBgn', suffixes=('_H20', '_T'))
            dfs.append(stat)
        except KeyError:
            # File lacks one of the expected datasets; skip it.
            pass
        finally:
            # Bug fix: the original closed the store inside the try block,
            # leaking the file handle whenever a KeyError was raised.
            hdf.close()
    df = pd.concat(dfs)
    return df
if __name__ == '__main__':
    # Parse command-line arguments.
    args = parse_arguments()
    # Create a 'flux_observations' directory alongside the input directory.
    results = os.path.join(os.path.dirname(args.file_path), 'flux_observations')
    os.makedirs(results, exist_ok=True)
    # Feedback before the (slow) scan.
    print()
    print('Finding valid observations ...')
    df = get_valid_observations(args.site, args.file_path)
    # The earliest timestamp determines the year used in the output name.
    year = pd.to_datetime(df.timeBgn).min().year
    print()
    print(f'Found {len(df)} valid observations for {args.site}-{year}.')
    # Persist the observations as CSV inside the results directory.
    csv_path = os.path.join(results, f'flux_observations_{year}.csv')
    df.to_csv(csv_path, index=False)
import numpy as np
# Application for list
def nested_sum(t):
    """Return the sum of all numbers in a (possibly nested) list.

    Nested lists are flattened recursively, e.g.
    nested_sum([[1, 2], [3], [4, 5, 6]]) == 21.

    Fixes over the original: iterates items directly instead of an
    xrange/index loop (also Python-3 compatible) and no longer shadows
    the builtin `sum`.
    """
    total = 0
    for item in t:
        if isinstance(item, list):
            total += nested_sum(item)
        else:
            total += item
    return total
# t1 = [[1, 2], [3], [4, 5, 6]]
# print nested_sum(t1)
def cumsum(t):
    """Return the cumulative sums of t as a float numpy array.

    cumsum([1, 2, 3]) -> array([1., 3., 6.]); an empty input yields an
    empty float array, matching the original np.zeros(len(t)) loop.

    Fix: the original re-implemented np.cumsum with a Python-2-only
    xrange index loop; delegate to numpy's C implementation instead.
    """
    return np.cumsum(np.asarray(t, dtype=float))
# t2 = range(10)
# print cumsum(t2)
def get_dict_with_str(str):
    """Return a dict mapping each character of `str` to its count.

    Also prints the dict (debug behaviour kept from the original).
    NOTE(review): the parameter shadows the builtin ``str``; kept as-is
    so keyword callers are not broken.

    Fixes: drops the needless list(str) copy (strings iterate directly),
    uses dict.get instead of a membership test, and uses print() so the
    function is valid on Python 3 as well as 2.
    """
    dic = {}
    for ch in str:
        dic[ch] = dic.get(ch, 0) + 1
    print(dic)
    return dic
def is_anagram(str1, str2):
    """Return True if str1 and str2 contain the same characters with the
    same multiplicities (i.e. they are anagrams).

    Fix: the original walked dic1's keys with a flag variable; given the
    lengths are equal, that subset-with-equal-counts check is exactly
    dict equality, so compare the two count dicts directly.
    """
    if len(str1) != len(str2):
        return False
    return get_dict_with_str(str1) == get_dict_with_str(str2)
# str1 = 'aaaasdqwe'
# str2 = 'aqswadeaa'
# print is_anagram(str1, str2) |
# This program is an implementation of the class function to find information about users, using objects.
class physicalFeatures:
    """Prompt-and-echo helpers for a user's physical features.

    Each method asks for one attribute on stdin, echoes it back to
    stdout, and returns 0 (kept for backward compatibility with any
    caller checking the return value).
    """

    def printEyes(self):
        """Ask for and echo the user's eye color."""
        eyes = input("Enter your eye color: ")
        print ("Your eye color is: " + eyes)
        return 0

    def printHeight(self):
        """Ask for and echo the user's height."""
        height = input("Enter your height: ")
        print ("Your height is: " + height)
        return 0

    def printWeight(self):
        """Ask for and echo the user's weight."""
        weight = input("Enter your weight: ")
        # Bug fix: the original printed "Your weight is:" with no space
        # before the value, unlike every sibling method.
        print ("Your weight is: " + weight)
        return 0

    def printHair(self):
        """Ask for and echo the user's hair color."""
        hair = input("Enter your hair color: ")
        print ("Your hair is: " + hair)
        return 0
# Interactive driver: for each feature, ask y/n and run the matching
# method.  The original repeated the same ask/branch block four times;
# it is factored into one helper with identical prompts and output.
Loisa = physicalFeatures() # Object inheriting the data of class "physicalFeatures".


def _ask_and_run(question, action):
    """Print `question`, read y/n, run `action` on 'y', acknowledge on 'n'.

    Any other answer falls through silently, matching the original
    if/elif chains.
    """
    print (question)
    answer = input("y/n: ")
    print ("\n")
    if answer == "y":
        action()
        print ("\n")
    elif answer == "n":
        print ("Ok! we shall skip that...")
        print ("\n")


print ("Type y for YES and n for NO.")
print ("\n")
_ask_and_run("Do you want to print your eye color?", Loisa.printEyes)
_ask_and_run("Do you want to print your height?", Loisa.printHeight)
_ask_and_run("Do you want to print your weight?", Loisa.printWeight)
_ask_and_run("Do you want to print your hair?", Loisa.printHair)
from networkx.algorithms.community.asyn_fluid import *
from networkx.algorithms.community.centrality import *
from networkx.algorithms.community.community_utils import *
from networkx.algorithms.community.kclique import *
from networkx.algorithms.community.kernighan_lin import *
from networkx.algorithms.community.label_propagation import *
from networkx.algorithms.community.louvain import *
from networkx.algorithms.community.lukes import *
from networkx.algorithms.community.modularity_max import *
from networkx.algorithms.community.quality import *
|
from django.db import models
from django.contrib.auth.models import AbstractUser
class User(AbstractUser):
    """Custom user model that authenticates with email and stores shipping details."""

    # Unique email address; used as the login identifier (see USERNAME_FIELD).
    email = models.EmailField(max_length=1024, unique=True)
    # Optional shipping address fields; blank=True makes them optional in forms.
    shipping_first_name = models.CharField(max_length=256, blank=True)
    shipping_last_name = models.CharField(max_length=256, blank=True)
    shipping_country = models.CharField(max_length=256, blank=True)
    shipping_state = models.CharField(max_length=256, blank=True)
    shipping_city = models.CharField(max_length=256, blank=True)
    shipping_street = models.CharField(max_length=256, blank=True)
    shipping_zip = models.CharField(max_length=256, blank=True)
    shipping_phone = models.CharField(max_length=256, blank=True)
    # Log in with email; username is still prompted for by createsuperuser.
    USERNAME_FIELD = 'email'
    REQUIRED_FIELDS = ['username',]
#### Class 04
#### Parsing HTML
## Parsing HTML ------------------------------------------------
## pip install beautifulsoup4
from bs4 import BeautifulSoup
import urllib2
import random
import time
import os
## Teaching walkthrough (Python 2): fetch a faculty page and explore it
## with BeautifulSoup -- tags, attributes, and the element tree.
## Open a web page
web_address = 'https://polisci.wustl.edu/faculty/specialization'
web_page = urllib2.urlopen(web_address)
## Parse it
## NOTE(review): no parser argument -- bs4 will pick one and may warn;
## pass e.g. 'html.parser' explicitly to silence it.
soup = BeautifulSoup(web_page.read())
soup.prettify()
## Find all cases of a certain tag
## Returns a list... remember this!
soup.find_all('a')
soup.find_all('h3')
## Get the script of a certain tag
fields = soup.find_all('h3') ## list of html entries
[i.text for i in fields] ## grab just the text from each one
# Get the attributes
all_a_tags = soup.find_all('a')
all_a_tags[22]
all_a_tags[22].attrs ## a dictionary with the attributes
l = {"class" : [], "href" : []}
for p in [22,23]:
    l["class"].append(all_a_tags[p].attrs["class"])
all_a_tags[22].attrs.keys()
all_a_tags[22]['href']
all_a_tags[22]['class']
## Use this info about HTML elements to grab them
soup.find_all('a', {'class' : "person-view-primary-field"})
## There may be tags within tags
sections = soup.find_all('div')
len(sections)
sections[2].a ## FIRST 'a' tag within the 'div' tag
sections[2].find_all('a') ## ALL 'a' tags within the 'div' tag
sections[3].a
## Creating a tree of objects
all_fields = soup.find_all('div')
randy = all_fields[50]
randy.find_all("a")
randy.contents ## Gives a list of all children
randy.children ## Creates an iterator for children
for i, child in enumerate(randy.children):
    print "Child %d: %s" % (i,child)
for sib in randy.next_siblings:
    print sib
for sib in randy.previous_siblings:
    print sib
# Other methods to check family:
# parent
# parents
# next_siblings
# previous_siblings
# descendants
# Beautiful Soup documentation
# http://www.crummy.com/software/BeautifulSoup/bs4/doc/
## Function to save a web page ------------------------------------------------
def download_page(address, filename, wait = 5):
    """Politely download `address` and save the body to `filename`.

    Sleeps a random 0..wait seconds first (crude rate limiting), then
    fetches the page and writes it out.  Refuses to overwrite an
    existing file. (Python 2 script: uses urllib2.)
    """
    time.sleep(random.uniform(0, wait))
    page = urllib2.urlopen(address)
    page_content = page.read()
    if os.path.exists(filename):
        # Bug fix: original message ran the text straight into the
        # filename ("...fileX.html"); add the missing space.  print()
        # with a single argument behaves the same on Python 2 and 3.
        print("Can't overwrite file " + filename)
    else:
        with open(filename, 'w') as p_html:
            p_html.write(page_content)
# Fetch the faculty page once and save it locally (no overwrite).
download_page('https://polisci.wustl.edu/faculty/specialization', "polisci_ppl.html")
## You can also parse a page that is saved on your computer
## Useful to scrape now, parse later.
with open('polisci_ppl.html') as f:
    myfile = f.read()
# NOTE(review): no explicit parser passed to BeautifulSoup (see above).
soup = BeautifulSoup(myfile)
soup.prettify()
|
from loginWidget import*
from ugoChat import*
from registrationPage import *
from registrationPage import Ui_registrationWindow
import sys, sqlite3, time, textwrap, os
from userNotFound import Ui_userNotFoundForm
from loginSuccess import Ui_loginSuccess
from ugoChat import Ui_MainWindow
from socket import AF_INET, socket, SOCK_STREAM
from PyQt5.QtCore import QThread, QObject, pyqtSignal, pyqtSlot, QRectF, QRect
from PyQt5.QtGui import *
import datetime
class myWin(QtWidgets.QMainWindow): # to create and use objects pertaining to the login screen
    """Login window: wires the login/registration UI to the chat server socket.

    Relies on the module-level globals created in __main__ (myRegi, myError,
    Rerror, Rsuccess, mySuccess, myChat) and on the connected client_socket.
    """

    def __init__(self, parent=None): # function to initialize widget
        """Build the login form and connect its buttons to handlers."""
        QtWidgets.QWidget.__init__(self, parent)
        self.ui = Ui_Form()
        self.ui.setupUi(self)
        self.ui.REGISTER.clicked.connect(self.openRegistration)
        self.ui.LOGIN.clicked.connect(self.loginCheck)
        # Pressing Enter in the password field triggers the LOGIN button.
        self.ui.lineEdit.returnPressed.connect(self.ui.LOGIN.click)

    def openRegistration(self): # opens registration page and hides login screen
        """Show the registration window and hook up its buttons."""
        myRegi.show() # show registration page
        Rerror.hide() # If error registering window is displayed then hide it
        myapp.hide() # hide main login screen
        myRegi.ui.returnButton.clicked.connect(self.returnToLogin) # event to trigger the login screen
        myRegi.ui.confirmButton.clicked.connect(self.confirmReg) # event to try logging in
        myRegi.ui.lineEdit_3.returnPressed.connect(myRegi.ui.confirmButton.click) # When pressing enter initiates the clicking of confirm button

    def confirmReg(self): # Function to store username and password into server-side database
        """Validate the registration form and submit it to the server.

        Wire protocol (see server): command "2", then username, then
        password; the server answers "Success" or "Username Taken!".
        """
        inputUser = myRegi.ui.lineEdit.text() # retrieve text from username textbox
        inputPass = myRegi.ui.lineEdit_2.text() # retrieve text from password textbox
        confirmPass = myRegi.ui.lineEdit_3.text() # retrieve text from confirm password textbox
        # Reject mismatched or empty fields locally before contacting the server.
        if inputPass != confirmPass or inputPass == "" or inputUser == "" or confirmPass == "":
            Rerror.show()
            myRegi.ui.lineEdit.clear() # clear username
            myRegi.ui.lineEdit_2.clear() # clear password inputs
            myRegi.ui.lineEdit_3.clear() # clear password inputs
            Rerror.error.userNotFoundButton.clicked.connect(self.openRegistration)
        else:
            send("2") # invoke command number 2 to prompt the server for registration
            # NOTE(review): sleeps appear to separate sends so the server
            # receives them as distinct messages -- fragile; confirm protocol.
            time.sleep(0.3)
            send(inputUser)
            time.sleep(0.3)
            send(inputPass)
            serverResponse = client_socket.recv(BUFSIZ).decode("utf8")
            if serverResponse == "Success":
                Rsuccess.show()
                myRegi.ui.lineEdit.clear() # clear username input text box
                myRegi.ui.lineEdit_2.clear() # clear password input text box
                myRegi.ui.lineEdit_3.clear() # clear confirm password text box
                Rsuccess.success.loginSuccessButton.clicked.connect(self.returnToLogin)
                Rsuccess.success.loginSuccessButton.autoDefault()
            elif serverResponse == "Username Taken!":
                Rerror.show()
                myRegi.ui.lineEdit.clear() # clear username input text box
                myRegi.ui.lineEdit_2.clear() # clear password input text box
                myRegi.ui.lineEdit_3.clear() # clear confirm password text box
                Rerror.error.userNotFoundButton.clicked.connect(self.openRegistration)
            else:
                # Any other response is treated as a generic registration error.
                Rerror.show()
                myRegi.ui.lineEdit.clear() # clear username input text box
                myRegi.ui.lineEdit_2.clear() # clear password input text box
                myRegi.ui.lineEdit_3.clear() # clear confirm password text box
                Rerror.error.userNotFoundButton.clicked.connect(self.openRegistration)

    def loginCheck(self):
        """Send credentials to the server; on success, pull the friends list.

        Protocol: command "1", username, password; server answers
        "Success" then streams friend name / status pairs until "FINISHED".
        """
        global username
        username = self.ui.UNbox.text()
        password = self.ui.lineEdit.text()
        if not username or not password:
            myError.show()
            self.ui.lineEdit.clear()
            self.ui.UNbox.clear()
            myError.er.userNotFoundButton.clicked.connect(self.returnToLogin)
        else:
            send("1") # signals a login attempt to the server
            send(username)
            time.sleep(0.2)
            send(password)
            time.sleep(0.2)
            receiveMes = client_socket.recv(BUFSIZ).decode("utf8")
            if receiveMes == "Success":
                friendIndex = 0
                while receiveMes != "FINISHED": # populate friends list from servers database
                    receiveMes = client_socket.recv(BUFSIZ).decode("utf8")
                    if receiveMes != "FINISHED":
                        # Friends are keyed by insertion order in module dicts.
                        userFriends[friendIndex] = receiveMes
                        friendStatus = client_socket.recv(BUFSIZ).decode("utf8")
                        if friendStatus == "ONLINE":
                            userFriendsOnline[friendIndex] = "ONLINE"
                        else:
                            userFriendsOnline[friendIndex] = "OFFLINE"
                        friendIndex += 1
                        time.sleep(0.2)
                mySuccess.show() # make the object active on the screen
                mySuccess.su.loginSuccessButton.clicked.connect(self.appInitialize)
            else:
                myError.show()
                self.ui.lineEdit.clear()
                self.ui.UNbox.clear()
                myError.er.userNotFoundButton.clicked.connect(self.returnToLogin)

    def returnToLogin(self): # The back button registration page to go back to login screen
        """Hide popups/registration and show the login window again."""
        myapp.show()
        myError.hide()
        Rsuccess.hide()
        myRegi.close()

    def appInitialize(self): # Start the main chat application after connecting to server
        """Close the login flow, show the chat window, and start the receiver thread."""
        myChat.show()
        myRegi.close() # will not be needed anymore
        myError.close() # will not be needed anymore
        mySuccess.close() # will not be needed anymore
        myapp.close() # will not be needed anymore
        self.receiveMessages = receiverThread(username, userFriends, userFriendsOnline)
        self.receiveMessages.start() # start thread to receive messages in pseudo parallel to running the gui
        myChat.mainChat.addFriend_Button.clicked.connect(self.addingFriend) # handles addfriend button clicked event
        myChat.mainChat.delFriend_Button.clicked.connect(self.deleteFriend) # handles delete friend button clicked event
        myChat.mainChat.sendMessage_Button.clicked.connect(self.sendMessage)
        myChat.mainChat.sendMessage_LineEdit.returnPressed.connect(myChat.mainChat.sendMessage_Button.click)

    def sendMessage(self): # Sends message to server and server then displays it in the global chat
        """Send the typed message to the server and clear the input box."""
        messageToSend = myChat.mainChat.sendMessage_LineEdit.text()
        myChat.mainChat.sendMessage_LineEdit.clear()
        send(messageToSend, None)

    def addingFriend(self):
        """Ask the server to add the typed username as a friend."""
        newFriend = myChat.mainChat.addFriend_LineEdit.text()
        myChat.mainChat.addFriend_LineEdit.clear()
        if newFriend:
            send("//VERIFY ADD FRIEND:"+newFriend)
            newFriend = ""

    def deleteFriend(self):
        """Ask the server to remove the typed username from the friends list."""
        # NOTE(review): reads addFriend_LineEdit (same box as add) -- confirm intended.
        oldFriend = myChat.mainChat.addFriend_LineEdit.text()
        myChat.mainChat.addFriend_LineEdit.clear()
        if oldFriend:
            send("//VERIFY DEL FRIEND:"+oldFriend)
            oldFriend = ""
def send(msg, event=None):  # event is passed by binders.
    """Encode msg as UTF-8 and push it to the server over client_socket."""
    client_socket.send(msg.encode("utf8"))
# ***********************************************************************************************************************
# Classes to instantiate gui objects##
# ***********************************************************************************************************************
class myReg(QtWidgets.QMainWindow): # class to create and use objects pertaining to the registration page
    """Registration window wrapper around the generated Ui_registrationWindow."""

    def __init__(self, parent=None): # function to initialize widget
        QtWidgets.QWidget.__init__(self, parent)
        self.ui = Ui_registrationWindow()
        self.ui.setupUi(self)
class myErr(QtWidgets.QMainWindow): # class used to create error message window for login screen
    """Frameless, translucent 'user not found' popup for the login screen."""

    def __init__(self, parent=None): # function to initialize widget
        QtWidgets.QWidget.__init__(self, parent)
        self.er = Ui_userNotFoundForm()
        self.er.setupUi(self)
        self.setWindowFlags(QtCore.Qt.FramelessWindowHint)  # no title bar
        self.setWindowOpacity(0.9)  # slightly translucent popup
class RegError(QtWidgets.QMainWindow):
    """Frameless popup shown when registration fails.

    Reuses the 'user not found' form but relabels it 'Error Registering!'.
    """

    def __init__(self, parent=None): # function to initialize widget
        QtWidgets.QWidget.__init__(self, parent)
        self.error = Ui_userNotFoundForm()
        self.error.setupUi(self)
        # Reposition and retext the label for the registration-error case.
        self.error.userNotFoundLabel.setGeometry(QtCore.QRect(50, 40, 154, 24))
        self.error.userNotFoundLabel.setText("Error Registering!")
        # Fix: the original called setWindowFlags(FramelessWindowHint) twice
        # (before and after setupUi) with the same argument; once is enough.
        self.setWindowFlags(QtCore.Qt.FramelessWindowHint)
        self.setWindowOpacity(0.9)
class RegSuccess(QtWidgets.QMainWindow):
    """Frameless popup confirming a successful registration (reuses the login-success form)."""

    def __init__(self, parent=None): # function to initialize widget
        QtWidgets.QWidget.__init__(self, parent)
        self.success = Ui_loginSuccess()
        self.success.setupUi(self) # setup the user interface
        self.success.loginSuccessLabel.setGeometry(QtCore.QRect(15,40,215,24)) # set x,y,width,and height parameters for label
        self.success.loginSuccessLabel.setText("Registration Successful!") # set the text inside the label
        self.setWindowFlags(QtCore.Qt.FramelessWindowHint)  # no title bar
        self.setWindowOpacity(0.9)  # slightly translucent popup
class mySuc(QtWidgets.QMainWindow): # class used to create successful login popup window for login screen
    """Frameless 'login successful' popup for the login screen."""

    def __init__(self, parent=None): # function to initialize widget
        QtWidgets.QWidget.__init__(self, parent)
        self.su = Ui_loginSuccess()
        self.su.setupUi(self)
        self.setWindowFlags(QtCore.Qt.FramelessWindowHint)  # no title bar
        self.setWindowOpacity(0.9)  # slightly translucent popup
class mainChat(QtWidgets.QMainWindow): # class used to create the main chat widget after logging in successfully
    """Main chat window; tells the server we left and tears down on close."""

    def __init__(self, parent=None): # function to initialize widget
        QtWidgets.QWidget.__init__(self, parent)
        self.mainChat = Ui_MainWindow()
        self.mainChat.setupUi(self)

    def closeEvent(self, event):
        """Notify the server, close the socket, and terminate the process."""
        send("//exit")  # tell the server we are disconnecting
        client_socket.close() # close the socket connection
        # NOTE(review): uses the global myChat and the site builtin exit();
        # event.accept()/sys.exit() would be the conventional route -- confirm.
        myChat.close()
        exit()
# ----------------------------------------------------------------------------------------------------------------------
# Other Functions and threads
# ----------------------------------------------------------------------------------------------------------------------
class receiverThread(QThread):
    """Background thread that receives server messages and updates the chat UI.

    Also spawns a friendSync thread and forwards friend add/delete/status
    commands to it via its flag attributes.
    """

    def __init__(self, name, friendsOfUser = {}, isOnline = {}, parent=None):
        # NOTE(review): mutable default arguments are shared across calls;
        # harmless here (always called with explicit dicts) but fragile.
        super(receiverThread, self).__init__(parent)
        self.friendsList = friendsOfUser   # index -> friend name
        self.friendsOnline = isOnline      # index -> "ONLINE"/"OFFLINE"
        self.yourMes = name                # this client's username

    def run(self):
        """Receive-loop: dispatch control messages, render chat messages."""
        synchronizeFriends = friendSync(self.friendsList, self.friendsOnline)
        synchronizeFriends.start()
        msg = "Admin: Treat each other with respect"
        # NOTE(review): encode() result is discarded here -- this line is a no-op.
        msg.encode("utf8")
        currTime = time.time()
        stringTime = datetime.datetime.fromtimestamp(currTime).strftime('%H:%M:%S __ %m-%d-%Y ---')
        msg = '<p style="background-color: #586e84"><font color="white">%s <br><span>---TimeStamp: %s</span></font></p>' % (msg, stringTime)
        myChat.mainChat.textBrowser.append(msg)
        while True:
            try:
                msg = client_socket.recv(BUFSIZ).decode("utf8") # receive messages handled by server
                # `not msg.find(x)` is True only when msg starts with x (find == 0).
                if "//CANNOT ADD FRIEND" in msg and not msg.find("//CANNOT ADD FRIEND"):
                    print("user does not exit or is already a friend")
                elif "//VERIFY ADD FRIEND:" in msg and not msg.find("//VERIFY ADD FRIEND:"): # server verifying the addition of a new friend
                    # Payload layout: "//VERIFY ADD FRIEND:<name><0|1>"; the
                    # trailing character encodes the friend's online status.
                    synchronizeFriends.friendToAdd = msg[20:len(msg)-1]
                    if "0" in msg[len(msg)-1:len(msg)]:
                        synchronizeFriends.friendToAddStatus = "OFFLINE"
                    else:
                        synchronizeFriends.friendToAddStatus = "ONLINE"
                    synchronizeFriends.performAdd = True
                elif "//VERIFY DEL FRIEND:" in msg and not msg.find("//VERIFY DEL FRIEND:"): # server sent permission to delete friend
                    synchronizeFriends.friendToDel = msg[20:len(msg)]
                    synchronizeFriends.performDel = True
                elif "//CANNOT DEL FRIEND:" in msg and not msg.find("//CANNOT DEL FRIEND"):
                    print("user cannot be deleted")
                elif ":" not in msg: # update the online status of a friend
                    if msg not in self.yourMes:
                        synchronizeFriends.friendToUpdate = msg
                        synchronizeFriends.performSync = True
                elif not msg.find(self.yourMes+":"):
                    # Our own message echoed back: render with the "self" color.
                    currTime = time.time()
                    stringTime = datetime.datetime.fromtimestamp(currTime).strftime('%H:%M:%S __ %m-%d-%Y ---')
                    msg = '<p style="background-color: #01bdc4">%s <br><span style="background-color:#01bdc4">---TimeStamp: %s</span></p>' % (msg, stringTime)
                    myChat.mainChat.textBrowser.append(msg) # append to chat box
                    myChat.mainChat.textBrowser.moveCursor(QTextCursor.End)
                else:
                    # Message from another user: render with the "other" color.
                    currTime = time.time()
                    stringTime = datetime.datetime.fromtimestamp(currTime).strftime('%H:%M:%S __ %m-%d-%Y ---')
                    msg = '<p style="background-color: #11ad16">%s <br><span style="background-color:#11ad16">---TimeStamp: %s</span></p>' % (msg, stringTime)
                    myChat.mainChat.textBrowser.append(msg) # append to chat box
                    myChat.mainChat.textBrowser.moveCursor(QTextCursor.End)
            except OSError: # catch operating system errors (socket closed); exit the loop.
                break
class friendSync(QThread):
    """Thread that mirrors the friends dicts into the QListView model.

    receiverThread sets the perform* flags and friendTo* payloads; this
    thread polls the flags and applies the add/delete/status changes.
    NOTE(review): the run() loop is a busy-wait with no sleep (burns CPU)
    and the flag handshake is unsynchronized -- confirm acceptable.
    """

    def __init__(self, userFriends = [], userFriendsOnline = [], parent=None):
        # NOTE(review): defaults are lists but the real arguments are
        # dicts keyed by int index; treated as dicts throughout.
        super(friendSync, self).__init__(parent)
        self.friendsList = userFriends # list of friends passed into thread
        self.friendsOnline = userFriendsOnline # list holding status of each friend passed into thread
        self.performSync = False # status flag to perform a synchronization of list with servers up to date list
        self.performAdd = False # status flag to perform add friend operation
        self.performDel = False # status flag to perform del friend operation
        self.friendToAdd = "" # friend that will be added to the list
        self.friendToDel = "" # friend that will be deleted from list
        self.friendToAddStatus = "" # online status of friend being added to the list
        self.friendToUpdate = "" # current friend that has either logged in or out recently
        self.friendship = QStandardItemModel(myChat.mainChat.listView) # item model to display friends on QlistView

    def run(self):
        """Populate the list view, then poll the flags and apply updates."""
        # Initial population from the dicts filled in during login.
        for user in self.friendsList:
            if self.friendsOnline[user] == "ONLINE":
                self.friend = QStandardItem(QtGui.QIcon('userOnline.png'), self.friendsList[user])
                self.friend.setBackground(QColor('#42f7d3'))
            else:
                self.friend = QStandardItem(QtGui.QIcon('userOffline.png'), self.friendsList[user])
                self.friend.setBackground(QColor('#f77e41'))
            self.friendship.appendRow(self.friend)
        myChat.mainChat.listView.setModel(self.friendship)
        while True:
            if self.performSync:
                # Toggle the online/offline status of friendToUpdate.
                for friend in self.friendsList:
                    if self.friendsList[friend] == self.friendToUpdate:
                        if self.friendsOnline[friend] == "ONLINE":
                            self.friendsOnline[friend] = "OFFLINE"
                            newItem = QStandardItem(QtGui.QIcon('userOffline.png'), self.friendToUpdate)
                            newItem.setBackground(QColor('#f77e41'))
                            self.friendship.setItem(friend, newItem)
                        else:
                            self.friendsOnline[friend] = "ONLINE"
                            newItem = QStandardItem(QtGui.QIcon('userOnline.png'), self.friendToUpdate)
                            newItem.setBackground(QColor('#41f7b4'))
                            self.friendship.setItem(friend, newItem)
                        self.performSync = False
                        break
            if self.performAdd:
                # Append friendToAdd at the next integer key.
                self.friendsList[len(self.friendsList)] = self.friendToAdd
                if "OFFLINE" in self.friendToAddStatus:
                    newItem = QStandardItem(QtGui.QIcon('userOffline.png'), self.friendToAdd)
                    newItem.setBackground(QColor('#f77e41'))
                    self.friendship.appendRow(newItem)
                    self.friendsOnline[len(self.friendsOnline)] = "OFFLINE"
                else:
                    newItem = QStandardItem(QtGui.QIcon('userOnline.png'), self.friendToAdd)
                    newItem.setBackground(QColor('#41f7b4'))
                    self.friendship.appendRow(newItem)
                    self.friendsOnline[len(self.friendsOnline)] = "ONLINE"
                self.performAdd = False
            if self.performDel:
                # Remove friendToDel from both dicts and the view model.
                for friend in self.friendsList:
                    if self.friendsList[friend] == self.friendToDel:
                        self.friendship.removeRow(friend)
                        del self.friendsList[friend]
                        del self.friendsOnline[friend]
                        break
                self.performDel = False
# ----------------------------------------------------------------------------------------------------------------------
# SOCKET SECTION OF THE PROGRAM TO CONNECT TO SERVER
# ----------------------------------------------------------------------------------------------------------------------
# Module-level connection state shared by the GUI classes above.
# NOTE(review): `global` at module level is a no-op statement; kept as-is.
global haveLoggedIn
haveLoggedIn = False
userFriends = {}        # index -> friend name, filled on login
userFriendsOnline = {}  # index -> "ONLINE"/"OFFLINE", parallel to userFriends
#host = '73.235.230.212'
host = '127.0.0.1'      # chat server address (local by default)
port = 50000
BUFSIZ = 1024           # recv buffer size used throughout
ADDR = (host, port)
# Connect to the chat server immediately at import time.
client_socket = socket(AF_INET, SOCK_STREAM)
client_socket.connect(ADDR)
# ----------------------------------------------------------------------------------------------------------------------
# GUI SECTION OF THE PROGRAM TO INITIALIZE DIFFERENT GUI OBJECTS BEFORE HAVING TO DISPLAY THEM LATER IN THE PROGRAM
# ----------------------------------------------------------------------------------------------------------------------
if __name__ == "__main__": # Main GUI program execution starts here
    # Instantiate every window up front; they are shown/hidden as the
    # login -> registration -> chat flow progresses.
    app = QtWidgets.QApplication(sys.argv)
    myapp = myWin()
    myRegi = myReg()
    myError = myErr()
    Rerror = RegError()
    Rsuccess = RegSuccess()
    mySuccess = mySuc()
    myChat = mainChat()
    myapp.show()  # start at the login screen
    sys.exit(app.exec_())
# User by Luis Prado from the Noun Project (ugo chat online/ offline icon)
# User by Wilson Joseph from the Noun Project(message received icon)
# add by Roselin Christina.S from the Noun Project(add friend icon)
# Delete by Setyo Ari Wibowo from the Noun Project(delete friend icon)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.