blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2 values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M โ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23 values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 220 values | src_encoding stringclasses 30 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 2 10.3M | extension stringclasses 257 values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
c1315a888e3fa400a7a29daa027a4b8545ff0168 | f3aecb7313ad69241782bd95d2a492299ee554cb | /i3status/py3status/chargestatus.py | 940f90e2e4c47ac252a6d10ff8824134d15115ca | [] | no_license | Kalior/dotfiles | 24357f70eb301503b1cfe4f194a9ecd5853e4eed | ad3a3797ed13c7009ffd39f9bf635aeff3a0f6a2 | refs/heads/master | 2022-04-28T15:54:44.673206 | 2022-04-01T18:40:33 | 2022-04-01T18:40:33 | 51,951,816 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,731 | py | #! /usr/bin/python
# -*- coding: utf-8 -*-
"""
Display the battery level.
Configuration parameters:
- color_* : None means - get it from i3status config
- format : text with "text" mode. percentage with % replaces {}
- hide_when_full : hide any information when battery is fully charged
- mode : for primitive-one-char bar, or "text" for text percentage ouput
Requires:
- the 'acpi' command line
@author shadowprince, AdamBSteele
@license Eclipse Public License
"""
from __future__ import division # python2 compatibility
from time import time
import math
import subprocess
BLOCKS = ["๏", "๏", "๏", "๏", "๏", "๏", "๏", "๏", "๏"]
CHARGING_CHARACTER = "๏ฆ"
EMPTY_BLOCK_CHARGING = '|'
EMPTY_BLOCK_DISCHARGING = 'โ'
FULL_BLOCK = '๏'
class Py3status:
"""
"""
# available configuration parameters
cache_timeout = 30
color_bad = None
color_charging = "#C6572F"
color_degraded = None
color_good = "#52C62F"
format = "Battery: {}"
hide_when_full = False
mode = "bar"
notification = True
def battery_level(self, i3s_output_list, i3s_config):
response = {}
# Example acpi raw output: "Battery 0: Discharging, 43%, 00:59:20 remaining"
acpi_raw = subprocess.check_output(["acpi"], stderr=subprocess.STDOUT)
acpi_unicode = acpi_raw.decode("UTF-8")
# Example list: ['Battery', '0:', 'Discharging', '43%', '00:59:20', 'remaining']
acpi_list = acpi_unicode.split(' ')
charging = True if acpi_list[2][:8] == "Charging" else False
percent_charged = int(acpi_list[3][:-2])
self.time_remaining = ' ' + acpi_list[4] if len(acpi_list) > 4 else ""
battery_full = False
if self.mode == "bar":
if charging:
full_text = CHARGING_CHARACTER
#+ ' ' + str(percent_charged) + "%" + str(self.time_remaining)
else:
full_text = BLOCKS[int(math.ceil(percent_charged/100*(len(BLOCKS) - 1)))]
#+ ' ' + str(percent_charged) + "%" + str(self.time_remaining)
elif self.mode == "ascii_bar":
full_part = FULL_BLOCK * int(percent_charged/10)
if charging:
empty_part = EMPTY_BLOCK_CHARGING * (10 - int(percent_charged/10))
else:
empty_part = EMPTY_BLOCK_DISCHARGING * (10 - int(percent_charged/10))
full_text = full_part + empty_part
else:
full_text = self.format.format(str(percent_charged) + "%")
response["full_text"] = full_text
if percent_charged < 10:
response["color"] = (
self.color_bad
if self.color_bad
else i3s_config['color_bad']
)
if percent_charged < 30:
response["color"] = (
self.color_degraded
if self.color_degraded
else "#FF0000"
)
else:
response["color"] = (
self.color_good
if self.color_good
else i3s_config['color_good']
)
if percent_charged < 7 and not charging and self.notification:
subprocess.call(['twmnc',
'-t', 'Warning Low Battery', '-c', str(percent_charged) + '%',
'--bg',"#C02510",
'-d', '3000',
'-i', '~/.config/twmn/crit.png'],
stdout=open('/dev/null', 'w'),
stderr=open('/dev/null', 'w'))
if battery_full:
response["color"] = (
self.color_good
if self.color_good
else i3s_config['color_good']
)
response["full_text"] = "" if self.hide_when_full else BLOCKS[-1]
elif charging:
response["color"] = self.color_charging
#response["cached_until"] = time() + self.cache_timeout
return response
def on_click(self, i3s_output_list, i3s_config, event):
"""
Display a notification with the remaining charge time.
"""
if self.notification and self.time_remaining:
subprocess.call(
['notify-send', '{}'.format(self.time_remaining), '-t', '4000'],
stdout=open('/dev/null', 'w'),
stderr=open('/dev/null', 'w')
)
if __name__ == "__main__":
from time import sleep
x = Py3status()
config = {
'color_good': '#00FF00',
'color_bad': '#FF0000',
}
while True:
print(x.battery_level([], config))
sleep(1) | [
"kalioragus@gmail.com"
] | kalioragus@gmail.com |
a16fd8e50b9c997067a44669d605721cbf30a699 | c82b0584f91a7a130718273ecf72039e2d5f9ab1 | /polyaxon_deploy/schemas/security_context.py | a6ce5946b5aed47c96e476bc8c5a116f43003948 | [
"MIT"
] | permissive | todokku/polyaxon-deploy | 7af770dac9fb9797b86e3bf6b5f1da477a751ba0 | 77828e028670c43cc74704a4d7b9ec2e661e10a4 | refs/heads/master | 2021-02-15T16:02:13.468664 | 2020-03-04T09:37:06 | 2020-03-04T09:37:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,137 | py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from marshmallow import ValidationError, fields, validates_schema
from polyaxon_deploy.schemas.base import BaseConfig, BaseSchema
def validate_security_context(user, group):
if any([user, group]) and not all([user, group]):
raise ValidationError(
"Security context requires both `user` and `group` or none.")
class SecurityContextSchema(BaseSchema):
enabled = fields.Bool(allow_none=True)
user = fields.Int(allow_none=True)
group = fields.Int(allow_none=True)
@staticmethod
def schema_config():
return SecurityContextConfig
@validates_schema
def validate_security_context(self, data):
validate_security_context(data.get('user'), data.get('group'))
class SecurityContextConfig(BaseConfig):
SCHEMA = SecurityContextSchema
REDUCED_ATTRIBUTES = ['enabled', 'user', 'group']
def __init__(self, enabled=None, user=None, group=None):
validate_security_context(user, group)
self.enabled = enabled
self.user = user
self.group = group
| [
"mouradmourafiq@gmail.com"
] | mouradmourafiq@gmail.com |
1c73574065a164f757249a8c0128113bc3fccdae | 837a430acf6deaf94784bcaec496bbfac5affff7 | /venv/bin/pip3 | acec7bf0fb4b56136916c07617d638c89df28d3e | [] | no_license | meghamanohar/Pythontests | d8a474050b150c2d80690ce71e615306163b50c3 | 93153019e8a35c567d9954b6ba9edda2c3132e92 | refs/heads/master | 2022-06-29T16:34:56.397756 | 2020-05-14T02:27:38 | 2020-05-14T02:27:38 | 263,792,067 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 279 | #!/Users/krishnayadappanavar/PycharmProjects/FirstProject/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"megha.manohar@gmail.com"
] | megha.manohar@gmail.com | |
a7a58e508b74821e3264af3384076b5e256be0cd | c58bc356bce1d1c2f0b16357bdd4a193930e29b3 | /ddan/ddcn.py | 755ea2661e8328e5d2b0591e9c44d52f39e9c2f3 | [
"MIT"
] | permissive | spandandey21/ddan | f54067e2b92a52bc6250aac8c51aa358f9e9d5ba | 842fd4dc2d0be0d841863d98df4fc131deff4787 | refs/heads/master | 2022-02-19T14:23:00.050886 | 2019-09-09T16:08:18 | 2019-09-09T16:08:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,975 | py | import tensorflow as tf
import numpy as np
import pandas as pd
from sklearn.metrics import log_loss
from tensorflow.python.framework import ops
from keras import backend as K
from keras.layers import Dense, Dropout, Activation, GaussianNoise
from keras.layers.normalization import BatchNormalization
from keras.layers.advanced_activations import PReLU, ELU, LeakyReLU
from utils import shuffle_aligned_list, batch_gen, val_batch_gen
from mmd import maximum_mean_discrepancy
class DDCNModel(object):
def __init__(self, nfeatures=50, arch=[8, 'act'], mmd_layer_idx=[1],
batch_size=16, supervised=False, confusion=0.0, confusion_incr=1e-3, confusion_max=1,
val_data=None, validate_every=1,
activations='relu', epochs=1000, optimizer=None, noise=0.0, droprate=0.0, verbose=True):
self.batch_size = batch_size
self.epochs = epochs
self.validate_every = validate_every
self.supervised = supervised
self.verbose = verbose
if val_data is None:
self.validate_every = 0
else:
self.Xval = val_data[0]
self.yval = val_data[1]
self._build_model(nfeatures, arch, supervised, confusion, confusion_incr,
confusion_max, activations, noise, droprate, mmd_layer_idx, optimizer)
self.sess = tf.Session()
K.set_session(self.sess)
self.sess.run(tf.global_variables_initializer())
def _build_model(self, nfeatures, architecture, supervised, confusion, confusion_incr, confusion_max,
activations, noise, droprate, mmd_layer_idx, optimizer):
self.inp_a = tf.placeholder(tf.float32, shape=(None, nfeatures))
self.inp_b = tf.placeholder(tf.float32, shape=(None, nfeatures))
self.labels_a = tf.placeholder(tf.float32, shape=(None, 1))
nlayers = len(architecture)
layers_a = [self.inp_a]
layers_b = [self.inp_b]
for i, nunits in enumerate(architecture):
print nunits,
if i in mmd_layer_idx: print '(MMD)'
else: print
if isinstance(nunits, int):
shared_layer = Dense(nunits, activation='linear')
elif nunits == 'noise':
shared_layer = GaussianNoise(noise)
elif nunits == 'bn':
shared_layer = BatchNormalization()
elif nunits == 'drop':
shared_layer = Dropout(droprate)
elif nunits == 'act':
if activations == 'prelu':
shared_layer = PReLU()
elif activations == 'elu':
shared_layer = ELU()
elif activations == 'leakyrelu':
shared_layer = LeakyReLU()
else:
shared_layer = Activation(activations)
layers_a += [shared_layer(layers_a[-1])]
layers_b += [shared_layer(layers_b[-1])]
y_logits = Dense(1, activation='linear', name='a_output')(layers_a[-1])
self.y_clf = Activation('sigmoid')(y_logits)
# Sum the losses from both branches...
self.xe_loss = tf.reduce_mean(
tf.nn.sigmoid_cross_entropy_with_logits(labels=self.labels_a, logits=y_logits))
self.mmd_losses = []
for idx in mmd_layer_idx:
self.mmd_losses += [maximum_mean_discrepancy(layers_a[idx], layers_b[idx])]
self.domain_loss = tf.reduce_sum(self.mmd_losses)
self.confusion = tf.Variable(float(confusion), trainable=False, dtype=tf.float32)
conf_incr = tf.cond(self.confusion < confusion_max, lambda: float(confusion_incr), lambda: 0.)
self.increment_confusion = tf.assign(self.confusion, self.confusion + conf_incr)
self.total_loss = tf.add(self.confusion*self.domain_loss, self.xe_loss)
if supervised:
self.labels_b = tf.placeholder(tf.float32, shape=(None, 1))
b_logits = Dense(1, activation='linear', name='b_output')(layers_b[-1])
self.bloss = tf.reduce_mean(
tf.nn.sigmoid_cross_entropy_with_logits(labels=self.labels_b, logits=b_logits))
self.total_loss = tf.add(self.total_loss, self.bloss)
if optimizer is None:
self.train_step = tf.train.MomentumOptimizer(1e-3, 0.9)
else:
self.train_step = optimizer
self.train_step = self.train_step.minimize(self.total_loss)
def predict_proba(self, X, batch_size=None):
if batch_size is None: batch_size = self.batch_size
yprobs = np.zeros((X.shape[0]), dtype=float)
idx = np.arange(X.shape[0])
vbatch = val_batch_gen([idx, X], batch_size)
for i, (thisidx, thisX) in enumerate(vbatch):
yprobs[thisidx] = self.sess.run(self.y_clf,
feed_dict={self.inp_a: thisX, K.learning_phase(): 0}).flatten()
return yprobs
def evaluate(self, X, y, batch_size=None):
yprobs = self.predict_proba(X, batch_size)
return log_loss(y, yprobs)
def fit(self, Xs, ys, Xt, yt=None, Xval=None, yval=None,
epochs=None, batch_size=None, verbose=None):
if epochs is None: epochs = self.epochs
if batch_size is None: batch_size = self.batch_size
if Xval is None:
Xval = self.Xval
yval = self.yval
if verbose is None: verbose = self.verbose
S_batches = batch_gen([Xs, ys], batch_size=batch_size)
if yt is None: yt = np.ones(Xt.shape[0])
T_batches = batch_gen([Xt, yt], batch_size=batch_size)
self.history = {'source_loss': [], 'target_loss': [], 'val_loss': [], 'domain_loss': []}
for i in range(epochs):
Xsource, ysource = S_batches.next()
Xtarget, ytarget = T_batches.next()
feed_dict = {self.inp_a: Xsource, self.inp_b: Xtarget,
self.labels_a: ysource.reshape(-1, 1), K.learning_phase(): 1}
if self.supervised:
feed_dict[self.labels_b] = ytarget.reshape(-1, 1)
# train
_, _, confusion, xeloss, dloss, tloss = self.sess.run([
self.train_step,
self.increment_confusion,
self.confusion,
self.xe_loss,
self.domain_loss,
self.total_loss],
feed_dict=feed_dict)
if self.validate_every > 0 and i % self.validate_every == 0:
if i == 0:
print 'Epoch confusion dloss sloss tloss vloss'
self.history['source_loss'] += [self.evaluate(Xs, ys)]
self.history['target_loss'] += [self.evaluate(Xt, yt)]
self.history['val_loss'] += [self.evaluate(Xval, yval)]
self.history['domain_loss'] += [dloss]
print '{:04d} {:.2f} {:.4f} {:.4f} {:.5f} {:.5f} {:.5f} '.format(i, confusion, dloss, tloss,
self.history['source_loss'][-1], self.history['target_loss'][-1], self.history['val_loss'][-1])
if __name__ == '__main__':
from sklearn.datasets import make_blobs
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import seaborn as sns; sns.set()
batch_size = 200
Xs, ys = make_blobs(300, centers=[[0, 0], [0, 1]], cluster_std=0.2)
Xt, yt = make_blobs(300, centers=[[1, -1], [1, 0]], cluster_std=0.2)
Xall = np.vstack([Xs, Xt])
yall = np.hstack( [ys, yt])
plt.scatter(Xall[:, 0], Xall[:, 1], c=yall)
plt.savefig('blobs.png')
plt.close()
print 'MMD:', compute_mmd_on_samples(Xs, Xt)
| [
"erlend.davidson@gmail.com"
] | erlend.davidson@gmail.com |
93981d5496bc781858932e1163011673c668b86b | 67769384e54979a058a939d067c813d6ba84e213 | /alphaml/datasets/cls_dataset/sector.py | 31e7198fd548de25beb9398b18c050c77c974921 | [
"BSD-3-Clause"
] | permissive | dingdian110/alpha-ml | f53e1c847c9d327691e77dcb3edab8ca51520d50 | d6a7a8a8a3452a7e3362bf0ef32b9ac5fe215fde | refs/heads/master | 2020-09-14T17:08:02.229245 | 2019-11-21T11:32:56 | 2019-11-21T11:32:56 | 223,195,139 | 1 | 0 | BSD-3-Clause | 2019-11-21T14:39:39 | 2019-11-21T14:39:38 | null | UTF-8 | Python | false | false | 700 | py | import pandas as pd
def load_sector():
L = []
file_path = 'data/xgb_dataset/sector/sector.txt'
with open(file_path, 'r') as f:
for line in f.readlines():
items = line.strip().split('\n')[0].split(' ')
d ={}
d['label'] = int(items[0]) - 1
del items[0]
for item in items:
key, value = item.split(':')
d[key] = float(value)
L.append(d)
df = pd.DataFrame(L)
y = df['label'].values
del df['label']
df.fillna(0,inplace=True)
X = df.values
return X, y
if __name__ == '__main__':
X, y = load_sector()
print(X)
print(set(y)) | [
"32727236+salty-fish-97@users.noreply.github.com"
] | 32727236+salty-fish-97@users.noreply.github.com |
015f28cff9057185f32b9aa80589b0f4ae92b00a | b1a7fce60e8935592d07323222212d132eedb407 | /Raspi/Confirm.py | a4d5142e76c993a17e454a2068f3e4dc046cbad7 | [] | no_license | Namlitruong/Capstone-ModularRobot | d0922030a8ee0af7a06667ea5f333b19e1bbb070 | e23b07b260a7bfef9a0ef07bb74816cf64cc6a56 | refs/heads/master | 2022-12-17T23:07:07.952625 | 2020-08-17T00:41:11 | 2020-08-17T00:41:11 | 273,672,058 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,766 | py | import CANbus
import can
import csv
#############################--INTERRUPT--######################################
import time
import os, signal
import RPi.GPIO as GPIO
GPIO.setmode(GPIO.BCM)
GPIO.setup(13, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
def interrupt_handler(channel):
ID = os.getppid()
print(ID)
pid = os.popen("ps aux | grep 'python3 Confirm.py' | awk '{print $2}'").readlines()
print ("Length: ", len(pid))
for i in range (len(pid)):
print (pid[i])
os.system ('sudo kill -9 '+ pid[i])
print("####################################")
GPIO.add_event_detect(13, GPIO.RISING,
callback=interrupt_handler,
bouncetime=500)
###################################################################################
actuatorID = []
sensorID = []
def wriToFile (aID, sID):
f = open ('config.csv', 'w')
with f:
writer = csv.writer(f, delimiter = ";")
writer.writerow (aID)
writer.writerow (sID)
def classifier (msg):
subID = 0
mType = 0
if (msg.arbitration_id == 0x1A0):
print ("Module detected !!!")
subID = 0x1A0
mType = 'A'
elif (msg.arbitration_id == 0x1F0):
#print ("Sensor module detected !!!")
subID = 0x1F0
mType = 'S'
return subID, mType
def searchValidID (IDlist, tempModule):
for i in range (1, 16):
flag = False
tempModule.ID = tempModule.ID + 1
if (len(IDlist) == 0):
break
for j in range (len(IDlist)):
if (IDlist[j].ID == tempModule.ID):
flag = True
break
if (flag == False and j+1 == len(IDlist)):
break
IDlist.append (tempModule)
print ("Assign new ID: ", hex(tempModule.ID))
return tempModule.ID
def verifyID (IDlist):
activeList = []
for i in range (len(IDlist)):
while (True):
CANbus.send((IDlist[i].ID - 0x100), [0x00])
msg = CANbus.receiveNonBlocking(0.1)
if (IDlist[i].timeout == 5):
break
if (msg == None):
IDlist[i].timeout = IDlist[i].timeout + 1
else:
activeList.append (IDlist[i])
break
return activeList
def printAvailableID (msg, module):
IDlist =[]
print (msg)
for i in range (len(module)):
print (module[i].ID, " ", i)
IDlist.append (module[i].ID)
return IDlist
if __name__ == "__main__":
while (True):
while (True):
print ("Waiting for connecting modules")
msg = CANbus.receive()
tempID, mType = classifier (msg)
if (msg.arbitration_id == tempID):
break
tempModule = CANbus.module(msg.arbitration_id)
if (mType == 'A'):
tempID = searchValidID (actuatorID, tempModule)
CANbus.send (0x0A0, [(tempID - 0x1A0)])
elif (mType == 'S'):
tempID = searchValidID (sensorID, tempModule)
CANbus.send (0x0F0, [(tempID - 0x1F0)])
#CANbus.send (0x0A0, [(tempID - 0x1A0)])
print ("Sending Confirmation", tempID - 0x100)
while (True):
msg = CANbus.receive()
if (msg.arbitration_id == tempID):
break
print ("Confirmation Complete")
#Verify modules
print ("Verifying existing modules")
actuatorID = verifyID (actuatorID)
sensorID = verifyID (sensorID)
aID = printAvailableID ("Available Module: ", actuatorID)
#sID = printAvailableID ("Available Sensor: ", sensorID)
sID = printAvailableID (" ", sensorID)
wriToFile (aID, sID) | [
"pi@raspberrypi"
] | pi@raspberrypi |
22e1df1c15e86bdfdede5035f91968705c0d6f0b | b9a5336cb031d74b80395a5f6482661330516fb7 | /reviews/migrations/0001_initial.py | d9211574aeae7e61c1eeaf6a36084bc56655bb00 | [] | no_license | OMDraz/BookrApp | 124863c0eeb7d381ac6e7aa117b953d3f7448bd5 | 0b202ddc8bb9635f503f4725ae1a2e8fdecf5091 | refs/heads/master | 2023-08-07T03:01:26.932390 | 2021-09-23T05:18:25 | 2021-09-23T05:18:25 | 394,129,767 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 719 | py | # Generated by Django 3.2.3 on 2021-08-09 02:43
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Publisher',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(help_text='The name of the publisher', max_length=50)),
('website', models.URLField(help_text="The publisher's website")),
('email', models.EmailField(help_text="The publisher's email address.", max_length=254)),
],
),
]
| [
"omardraz94@gmail.com"
] | omardraz94@gmail.com |
1ae71121fe67533c75e20874fc8ff41f033c1d67 | a9243f735f6bb113b18aa939898a97725c358a6d | /0.16/_downloads/plot_artifacts_detection.py | 86f915a1f8213e207c582dae54ccbc31f59c58bd | [] | permissive | massich/mne-tools.github.io | 9eaf5edccb4c35831400b03278bb8c2321774ef2 | 95650593ba0eca4ff8257ebcbdf05731038d8d4e | refs/heads/master | 2020-04-07T08:55:46.850530 | 2019-09-24T12:26:02 | 2019-09-24T12:26:02 | 158,233,630 | 0 | 0 | BSD-3-Clause | 2018-11-19T14:06:16 | 2018-11-19T14:06:16 | null | UTF-8 | Python | false | false | 5,773 | py | """
Introduction to artifacts and artifact detection
================================================
Since MNE supports the data of many different acquisition systems, the
particular artifacts in your data might behave very differently from the
artifacts you can observe in our tutorials and examples.
Therefore you should be aware of the different approaches and of
the variability of artifact rejection (automatic/manual) procedures described
onwards. At the end consider always to visually inspect your data
after artifact rejection or correction.
Background: what is an artifact?
--------------------------------
Artifacts are signal interference that can be
endogenous (biological) and exogenous (environmental).
Typical biological artifacts are head movements, eye blinks
or eye movements, heart beats. The most common environmental
artifact is due to the power line, the so-called *line noise*.
How to handle artifacts?
------------------------
MNE deals with artifacts by first identifying them, and subsequently removing
them. Detection of artifacts can be done visually, or using automatic routines
(or a combination of both). After you know what the artifacts are, you need
remove them. This can be done by:
- *ignoring* the piece of corrupted data
- *fixing* the corrupted data
For the artifact detection the functions MNE provides depend on whether
your data is continuous (Raw) or epoch-based (Epochs) and depending on
whether your data is stored on disk or already in memory.
Detecting the artifacts without reading the complete data into memory allows
you to work with datasets that are too large to fit in memory all at once.
Detecting the artifacts in continuous data allows you to apply filters
(e.g. a band-pass filter to zoom in on the muscle artifacts on the temporal
channels) without having to worry about edge effects due to the filter
(i.e. filter ringing). Having the data in memory after segmenting/epoching is
however a very efficient way of browsing through the data which helps
in visualizing. So to conclude, there is not a single most optimal manner
to detect the artifacts: it just depends on the data properties and your
own preferences.
In this tutorial we show how to detect artifacts visually and automatically.
For how to correct artifacts by rejection see
:ref:`sphx_glr_auto_tutorials_plot_artifacts_correction_rejection.py`.
To discover how to correct certain artifacts by filtering see
:ref:`sphx_glr_auto_tutorials_plot_artifacts_correction_filtering.py`
and to learn how to correct artifacts
with subspace methods like SSP and ICA see
:ref:`sphx_glr_auto_tutorials_plot_artifacts_correction_ssp.py`
and :ref:`sphx_glr_auto_tutorials_plot_artifacts_correction_ica.py`.
Artifacts Detection
-------------------
This tutorial discusses a couple of major artifacts that most analyses
have to deal with and demonstrates how to detect them.
"""
import numpy as np
import mne
from mne.datasets import sample
from mne.preprocessing import create_ecg_epochs, create_eog_epochs
# getting some data ready
data_path = sample.data_path()
raw_fname = data_path + '/MEG/sample/sample_audvis_raw.fif'
raw = mne.io.read_raw_fif(raw_fname, preload=True)
###############################################################################
# Low frequency drifts and line noise
(raw.copy().pick_types(meg='mag')
.del_proj(0)
.plot(duration=60, n_channels=100, remove_dc=False))
###############################################################################
# we see high amplitude undulations in low frequencies, spanning across tens of
# seconds
raw.plot_psd(tmax=np.inf, fmax=250)
###############################################################################
# On MEG sensors we see narrow frequency peaks at 60, 120, 180, 240 Hz,
# related to line noise.
# But also some high amplitude signals between 25 and 32 Hz, hinting at other
# biological artifacts such as ECG. These can be most easily detected in the
# time domain using MNE helper functions
#
# See :ref:`sphx_glr_auto_tutorials_plot_artifacts_correction_filtering.py`.
###############################################################################
# ECG
# ---
#
# finds ECG events, creates epochs, averages and plots
average_ecg = create_ecg_epochs(raw).average()
print('We found %i ECG events' % average_ecg.nave)
joint_kwargs = dict(ts_args=dict(time_unit='s'),
topomap_args=dict(time_unit='s'))
average_ecg.plot_joint(**joint_kwargs)
###############################################################################
# we can see typical time courses and non dipolar topographies
# not the order of magnitude of the average artifact related signal and
# compare this to what you observe for brain signals
###############################################################################
# EOG
# ---
average_eog = create_eog_epochs(raw).average()
print('We found %i EOG events' % average_eog.nave)
average_eog.plot_joint(**joint_kwargs)
###############################################################################
# Knowing these artifact patterns is of paramount importance when
# judging about the quality of artifact removal techniques such as SSP or ICA.
# As a rule of thumb you need artifact amplitudes orders of magnitude higher
# than your signal of interest and you need a few of such events in order
# to find decompositions that allow you to estimate and remove patterns related
# to artifacts.
#
# Consider the following tutorials for correcting this class of artifacts:
# - :ref:`sphx_glr_auto_tutorials_plot_artifacts_correction_filtering.py`
# - :ref:`sphx_glr_auto_tutorials_plot_artifacts_correction_ica.py`
# - :ref:`sphx_glr_auto_tutorials_plot_artifacts_correction_ssp.py`
| [
"larson.eric.d@gmail.com"
] | larson.eric.d@gmail.com |
41240f0a6dd4ad176be9cc6e8b9eb2e11cf60e08 | e7b0547134291e4707f9760d4c4ce7bf678b2e7a | /block.py | 6017f3d3de2325fadaecaf921e1a4ba28c148138 | [] | no_license | ThisLiftIsGoingDown/Garden-Railway-Control | 154c51c321360c5289077ed10ff3a80b55b2210e | f66ff0f7750400151e688cab23d1316a4c2909f0 | refs/heads/main | 2023-07-15T07:41:59.640067 | 2021-08-24T07:52:42 | 2021-08-24T07:52:42 | 373,764,989 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 351 | py | from enum import Enum
class State(Enum):
vacant = 0
occupied = 1
outOfService = 2
class Block:
def __init__(self, node , startState = State.vacant):
self.node = node
self.state = startState
def checkState(self):
return self.state
def updateState(self, newState):
self.state = newState
| [
"david.bartsch@bluewin.ch"
] | david.bartsch@bluewin.ch |
64cbbf12cccecdd79098ee784933598a826d5869 | b9f7c7a87292c1a9c231ce89933ae9d4bc51f487 | /src/sst/elements/simpleElementExample/tests/basicStatistics0.py | 3ea5c138cfc1b3558768044804877e0a4e49d5e9 | [
"BSD-3-Clause"
] | permissive | sstsimulator/sst-elements | 3a8db475a7a6cbd4c2a5d737c32718752da9797a | 68cdb3ac843750705805653b3fdcd4b015e84089 | refs/heads/master | 2023-08-17T03:30:24.145168 | 2023-08-16T13:58:07 | 2023-08-16T13:58:07 | 43,475,440 | 85 | 145 | NOASSERTION | 2023-09-12T13:59:11 | 2015-10-01T02:57:18 | C++ | UTF-8 | Python | false | false | 3,054 | py | # Import the SST module
import sst
# The basicStatisticsX.py scripts demonstrate user-side configuration of statistics.
# Each one focuses on a different aspect of user-side configuration
#
# This example demonstrates:
# 1. Default output behavior (reporting statistics at the end of simulation)
# 2. Various output formats for statistics
#
# This component has no links and SST will produce a warning because that is an unusual configuration
# that often points to a mis-configuration. For this simulation, the warning can be ignored.
#
# Relevant code:
# simpleElementExample/basicStatistics.h
# simpleElementExample/basicStatistics.cc
# simpleElementExample/basicEvent.h
#
# Output:
# simpleElementExample/tests/refFiles/basicStatistics0.out
# simpleElementExample/tests/refFiles/basicStatistics0.csv
#
### Create two components (to compare different components' output in the CSV file)
component0 = sst.Component("StatisticComponent0", "simpleElementExample.basicStatistics")
component1 = sst.Component("StatisticComponent1", "simpleElementExample.basicStatistics")
### Parameterize the components.
# Run 'sst-info simpleElementExample.basicStatistics' at the command line
# to see parameter documentation
params0 = {
"marsagliaZ" : 438, # Seed for Marsaglia RNG
"marsagliaW" : 9375794, # Seed for Marsaglia RNG
"mersenne" : 102485, # Seed for Mersenne RNG
"run_cycles" : 1000, # Number of cycles to run for
"subids" : 3 # Number of SUBID_statistic instances
}
component0.addParams(params0)
params1 = {
"marsagliaZ" : 957537, # Seed for Marsaglia RNG
"marsagliaW" : 5857, # Seed for Marsaglia RNG
"mersenne" : 860, # Seed for Mersenne RNG
"run_cycles" : 1200, # Number of cycles to run for
"subids" : 6 # Number of SUBID_statistic instances
}
component1.addParams(params1)
### Enable statistics
## Limit the verbosity of statistics to any with a load level from 0-4
# This component's statistics range from 1-4 (see sst-info)
sst.setStatisticLoadLevel(4)
## Determine where statistics should be sent. By default this script uses CSV, other options are
# commented out below. Output locations are case-insensitive (e.g., statOutputCSV = statoutputcsv).
# Default: Output to CSV. Filename and separator can be specified
sst.setStatisticOutput("sst.statOutputCSV", { "filepath" : "./basicStatistics0.csv", "separator" : "," } )
# Option: Output to the terminal
#sst.setStatisticOutput("sst.statoutputconsole")
# Option: Output to a text file
#sst.setStatisticOutput("sst.statOutputTXT", { "filepath" : "./basicStatistics0.txt" } )
# Option: Output to HDF5. Requires sst-core to be configured with HDF5 library.
#sst.setStatisticOutput("sst.statoutputhd5f")
# Option: Output to JSON
#sst.setStatisticOutput("sst.statOutputJSON", { "filepath" : "./basicStatistics0.json" } )
## Enable statistics on the components
sst.enableAllStatisticsForComponentType("simpleElementExample.basicStatistics")
| [
"grvosku@sandia.gov"
] | grvosku@sandia.gov |
2fd1b907e6eff215b937433a3f361834b3dd96ec | a355b16b9b4cebdd39beb69a6c5aa4e175ae52f6 | /phytosanitary/urls/links.py | 8d16c92f08f546895ad6e4779cd0a8695434b8ee | [] | no_license | hypertexthero/Phytosanitary | e2ba31116b432a8623b332e53a390ff31c24fc10 | 4f001436c90de7a64649e82089e577af6981b793 | refs/heads/master | 2016-09-05T09:47:01.448846 | 2012-11-28T16:34:03 | 2012-11-28T16:34:03 | 3,460,559 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 807 | py | from django.conf.urls.defaults import *
from phytosanitary.models import Link
# Shared kwargs for Django's generic date-based views: which queryset to
# page through and which model field carries the publication date.
link_info_dict = {
    'queryset': Link.objects.all(),
    'date_field': 'pub_date',
}
# Old-style (Django <= 1.5) string-view url patterns: each tuple is
# (regex, view name under the given prefix, extra kwargs, url name).
# Routes: index -> year -> month -> day archives, then a detail page
# addressed by date + slug.
urlpatterns = patterns('django.views.generic.date_based',
    (r'^$', 'archive_index', link_info_dict, 'phytosanitary_link_archive_index'),
    (r'^(?P<year>\d{4})/$', 'archive_year', link_info_dict, 'phytosanitary_link_archive_year'),
    (r'^(?P<year>\d{4})/(?P<month>\w{3})/$', 'archive_month', link_info_dict, 'phytosanitary_link_archive_month'),
    (r'^(?P<year>\d{4})/(?P<month>\w{3})/(?P<day>\d{2})/$', 'archive_day', link_info_dict, 'phytosanitary_link_archive_day'),
    (r'^(?P<year>\d{4})/(?P<month>\w{3})/(?P<day>\d{2})/(?P<slug>[-\w]+)/$', 'object_detail', link_info_dict, 'phytosanitary_link_detail'),
)
"simon@hypertexthero.com"
] | simon@hypertexthero.com |
def factorio(num):
    """Return num! (the factorial of num).

    For num <= 0 the empty product is returned, i.e. 1.
    """
    result = 1
    for factor in range(2, num + 1):
        result *= factor
    return result

print(factorio(4))
"guthixx23@gmail.com"
] | guthixx23@gmail.com |
72a48d03a7274c7f1ba3ddca5b8865827f62836e | 144df6ebbae1caf145c868579b335f579bf81357 | /test.py | 3986f037d12b93923af6b2accd4074cccbba40a2 | [] | no_license | xsnk/GreyHatPythonRead | f78645c00bb60e812bdefb5091971a2f94ccfa61 | 83cd36be8d5145be3b8f327f97619cb25110b774 | refs/heads/master | 2020-03-17T17:53:57.062965 | 2018-05-17T12:01:19 | 2018-05-17T12:01:19 | 133,806,664 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 96 | py | import zdebug
# Gray Hat Python exercise: instantiate the custom debugger and load the
# Windows calculator as the debuggee process.
debugger = zdebug.Debugger()
# NOTE(review): path is hard-coded and Windows-only; load() presumably
# spawns the target under debug control -- confirm against zdebug.
debugger.load("c:/Windows/System32/calc.exe")
| [
"noreply@github.com"
] | xsnk.noreply@github.com |
48be6df478a910b9d3c13cffb98277f3082c0fe2 | d41aa512f8ad7a28121121cf96f2286abc5391c3 | /scrape_argos/settings.py | e1d3d896b509fcae2e0caab6b9ec23a69f38a0a7 | [
"MIT"
] | permissive | andyregan/scrape_argos | 8b1757819b013bbdb0d0c67ee6b205455aff5ea7 | a3cb44f29173cb4b64e8d73204aecfb40b9edfd9 | refs/heads/master | 2021-01-01T06:50:54.760280 | 2013-05-11T10:08:43 | 2013-05-11T10:08:43 | 9,894,606 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 478 | py | # Scrapy settings for scrape_argos project
#
# For simplicity, this file contains only the most important settings by
# default. All the other settings are documented here:
#
# http://doc.scrapy.org/topics/settings.html
#
# Name Scrapy reports in logs and uses as the default User-Agent token.
BOT_NAME = 'scrape_argos'
# Package(s) Scrapy scans for spider classes.
SPIDER_MODULES = ['scrape_argos.spiders']
# Where `scrapy genspider` places newly generated spiders.
NEWSPIDER_MODULE = 'scrape_argos.spiders'
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'scrape_argos (+http://www.yourdomain.com)'
| [
"andrewjregan@gmail.com"
] | andrewjregan@gmail.com |
acdeccf893f67cfeddb6d3a93bcc2068e4d109c4 | 25d02ff4a1c5375321943b6830d9f9386010a76b | /relayer.py | 9eea4b27caadf47cfaa57449da44b676d06db116 | [
"MIT"
] | permissive | minddrive/image-relayer | a50219ef147238281799e6dc7e0bb62a35eaa09f | ff1d0cc39e93e44eae92b8fc1a1c73139b56d202 | refs/heads/main | 2023-04-29T13:48:25.969809 | 2021-05-21T05:42:51 | 2021-05-21T05:42:51 | 369,377,213 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,420 | py | #!/usr/bin/env python
import json
import logging
import re
import cloudscraper
import discord
import yaml
# File logger for the bot; mode="w" truncates relayer.log on every start,
# so only the current session is kept.
logger = logging.getLogger("relayer")
logger.setLevel(logging.DEBUG)
handler = logging.FileHandler(filename="relayer.log", encoding="utf-8", mode="w")
handler.setFormatter(logging.Formatter("%(asctime)s:%(levelname)s:%(name)s: %(message)s"))
logger.addHandler(handler)
# Per-site link patterns. Each regex yields (full link, submission id);
# the dict key maps to a handler class name via snake_case -> CamelCase
# (e.g. "fur_affinity" -> FurAffinity) in RelayerClient.on_message.
site_regexes = {
    "fur_affinity": r"(furaffinity\.net/view/(\d+))",
    "weasyl": r"(weasyl\.com/~\w+/submissions/(\d+))"
}
def read_config():
    """Load bot settings (Discord token, site API keys) from relayer.yml."""
    with open('relayer.yml') as config_file:
        return yaml.full_load(config_file)
class SiteBase:
    """Common holder for scraped submission metadata.

    Subclasses set ``url``/``headers`` in their constructor and fill the
    remaining fields in ``gather_info`` from the site's API response.
    """

    def __init__(self):
        self.url = None           # API endpoint for the submission
        self.headers = None       # extra HTTP headers, if the site needs any
        self.author = True        # whether to render an author line in the embed
        self.author_name = None
        self.author_icon = None
        self.image = None         # submission title
        self.image_url = None     # direct link to the artwork
class FurAffinity(SiteBase):
    """Fur Affinity submission, fetched through the bawk.space proxy API."""

    def __init__(self, site_id):
        super().__init__()
        self.url = "https://bawk.space/fapi/submission/{}".format(site_id)

    def gather_info(self, data):
        """Fill title/image/author fields from the API response dict."""
        self.image = data["title"]
        self.image_url = data["image_url"]
        self.author_name = data["author"]
        self.author_icon = data["avatar"]
class Weasyl(SiteBase):
    """Weasyl submission, fetched via the official Weasyl API."""

    def __init__(self, site_id):
        super().__init__()
        self.url = "https://www.weasyl.com/api/submissions/{}/view".format(site_id)
        # Weasyl requires an API key; it comes from the loaded YAML config.
        self.headers = {'X-Weasyl-API-Key': relayer_config["weasyl_api_key"]}

    def gather_info(self, data):
        """Fill title/image/author fields from the API response dict."""
        self.image = data["title"]
        self.image_url = data["media"]["submission"][0]["links"]["cover"][0]["url"]
        self.author_name = data["owner"]
        self.author_icon = data["owner_media"]["avatar"][0]["url"]
class RelayerClient(discord.Client):
    """Discord client that watches messages for known art-site links and
    re-posts each linked submission as a rich embed."""

    def __init__(self, **options):
        super().__init__(**options)
        # cloudscraper bypasses Cloudflare challenges on the scraped sites.
        self.scraper = cloudscraper.create_scraper()

    @staticmethod
    def log_details(message, content):
        # Log who posted what, and where, alongside the resolved image URL.
        logger.info(
            f"{message.author.name}#{message.author.discriminator}@"
            f"{message.guild.name}:{message.channel.name}: {content}"
        )

    async def on_ready(self):
        logger.info(f"Logged in as {self.user}")

    async def on_message(self, message):
        # Bot should not reply to itself
        # NOTE(review): compares against the module-level `client` instead of
        # self.user; works because only one client is created, but self.user
        # would be safer -- confirm before changing.
        if message.author == client.user:
            return
        for name, regex in site_regexes.items():
            # Map the regex key to its handler class: "fur_affinity" -> "FurAffinity".
            site_class = ''.join(w.capitalize() for w in name.split('_'))
            comp_regex = re.compile(regex)
            links = comp_regex.findall(message.content)
            logger.debug(links)
            # Each match is (full link, submission id) per the regex groups.
            for link, site_id in links:
                site = globals()[site_class](site_id)
                # If no response, just skip
                if not (resp := self.scraper.get(site.url, headers=site.headers)):
                    continue
                data = json.loads(resp.text)
                site.gather_info(data)
                self.log_details(message, site.image_url)
                embed = discord.Embed(title=site.image)
                embed.set_image(url=site.image_url)
                if site.author:
                    embed.set_author(
                        name=site.author_name,
                        icon_url=site.author_icon
                    )
                await message.channel.send(embed=embed)
# Bootstrap: load config, create the client (Weasyl reads relayer_config
# at construction time of each submission object), and block on the bot.
relayer_config = read_config()
client = RelayerClient()
client.run(relayer_config["discord_token"])
| [
"elessar@numenor.org"
] | elessar@numenor.org |
3160ede5e603262448964d8dc9e3a89b58592466 | 60d5ea4f007d49768d250ef394003f554003e4d0 | /python/Depth-first Search/111.Minimum Depth of Binary Tree.py | 28976c05b41b56e4880a2b5192eea9b5868c08e4 | [] | no_license | EvanJamesMG/Leetcode | dd7771beb119ea1250dbb3b147a09053298cd63b | fa638c7fda3802e9f4e0751a2c4c084edf09a441 | refs/heads/master | 2021-01-10T17:11:10.896393 | 2017-12-01T16:04:44 | 2017-12-01T16:04:44 | 46,968,756 | 5 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,470 | py | # coding=utf-8
# Definition for singly-linked list.
class TreeNode(object):
    """Binary-tree node: a value plus left/right child links."""

    def __init__(self, x):
        self.val = x
        self.left = self.right = None
class ListNode(object):
    """Singly-linked-list node: a value plus a next pointer."""

    def __init__(self, x):
        self.val = x
        self.next = None
'''
่งฃ้ขๆ่ทฏ๏ผ
้ๅฝ
ๅๅ ็งๆ
ๅต่่๏ผ
1๏ผๆ ไธบ็ฉบ๏ผๅไธบ0ใ
2๏ผๆ น่็นๅฆๆๅชๅญๅจๅทฆๅญๆ ๆ่
ๅชๅญๅจๅณๅญๆ ๏ผๅ่ฟๅๅผๅบไธบๅทฆๅญๆ ๆ่
ๅณๅญๆ ็๏ผๆๅฐๆทฑๅบฆ+1๏ผใ
3๏ผๅฆๆๆ น่็น็ๅทฆๅญๆ ๅๅณๅญๆ ้ฝๅญๅจ๏ผๅ่ฟๅๅผไธบ๏ผๅทฆๅณๅญๆ ็ๆๅฐๆทฑๅบฆ็่พๅฐๅผ+1๏ผใ
'''
class Solution(object):
    def minDepth(self, root):
        """Return the minimum root-to-leaf depth of a binary tree.

        An empty tree has depth 0; a node with a single child must
        recurse into that child (the missing side does not count as a leaf).
        :type root: TreeNode
        :rtype: int
        """
        if root is None:
            return 0
        left, right = root.left, root.right
        if left and right:
            return 1 + min(self.minDepth(left), self.minDepth(right))
        if left:
            return 1 + self.minDepth(left)
        if right:
            return 1 + self.minDepth(right)
        return 1
# Your Codec object will be instantiated and called as such:
# codec = Codec()
# codec.deserialize(codec.serialize(root))
#
if __name__ == "__main__":
    # Fixed: the old harness built a ListNode chain and called
    # Solution().rotateRight(...), a method that does not exist in this
    # file (AttributeError). Exercise minDepth on a small tree instead:
    #        3
    #       / \
    #      5   6
    #           \
    #            7
    root = TreeNode(3)
    root.left = TreeNode(5)
    root.right = TreeNode(6)
    root.right.right = TreeNode(7)
    # Shortest root-to-leaf path is 3 -> 5, so this prints 2.
    print(Solution().minDepth(root))
| [
"Evan123mg@gmail.com"
] | Evan123mg@gmail.com |
9ef94e2e4d69efad94f09beea5a420f9acda3202 | c1654d09c1eccf17d105d31c62bbf4106feb89d8 | /resolution-mylar.py | 4d6222a94a7d894fdaa9fbff4e10052cca671b70 | [] | no_license | piti118/crystal-length-study-for-mu2e | 142be2f059299c9902706b50d375fda01e651ead | a0287d2676fef33c15298caf432b0d5b38443bd1 | refs/heads/master | 2016-09-11T09:12:07.118526 | 2012-05-14T05:26:27 | 2012-05-14T05:26:27 | 3,666,252 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,825 | py | # -*- coding: utf-8 -*-
# <nbformat>3</nbformat>
# <codecell>
from root_numpy import *
from dist_fit import *
from cithep import *
from h5py import *
sample='mylar'
# <codecell>
class Hitmap:
    """Energy-deposit accumulator over a (numrow x numcol) crystal grid.

    Hits are addressed by centered offsets (l, k) relative to the middle
    of the grid and translated to array indices by lk2ij.
    """
    def __init__(self, numrow=21, numcol=21):
        self.hmap = np.zeros([numrow, numcol])
        self.numrow = numrow
        self.numcol = numcol
    def acc(self, l, k, E):
        """Add energy E to the cell addressed by centered offsets (l, k)."""
        i, j = self.lk2ij(l, k)
        self.hmap[i, j] += E
    def lk2ij(self, l, k):
        """Map centered offsets to array indices.

        Fixed: uses floor division so the indices stay integers under
        Python 3 as well (identical result for ints under Python 2).
        NOTE(review): the row index is offset by numcol and vice versa;
        harmless for the default square grid, but confirm the intended
        orientation before using non-square grids.
        """
        return l + self.numcol // 2, k + self.numrow // 2
    def sumE(self, cutoff=None):
        """Total deposited energy; with cutoff, only cells above it count.

        (The original summed twice -- np.sum already reduces over all
        axes, so a single call gives the same scalar.)
        """
        if cutoff is not None:
            return np.sum(self.hmap[self.hmap > cutoff])
        return np.sum(self.hmap)
# <codecell>
# Read the per-hit tree and the event metadata from the ROOT file.
hitmap = root2array('%s.root'%sample,'hitmap')
einfo = root2array('%s.root'%sample,'eventinfo')
# <codecell>
# <codecell>
# Fold the flat hit list into one (angle, total energy) record per
# (runno, eventno) pair. Hits for one event are contiguous, so a change
# of key flushes the accumulated Hitmap.
laster = tuple()
thishit = None
result = np.array([],dtype=[('angle',np.double),('E',np.double)])
for hit in hitmap:
    runno = hit['runno']
    eventno = hit['eventno']
    if (runno,eventno) != laster and laster != tuple():
        # New event started: store the finished one.
        # NOTE(review): run number is used as the angle index (5 deg per
        # run); resize-in-place is py2-era numpy behavior.
        result.resize(len(result)+1)
        result[-1]['angle'] = laster[0]*5.
        result[-1]['E'] = thishit.sumE()
        thishit=None
    laster = (runno,eventno)
    if thishit is None:
        thishit = Hitmap()
    thishit.acc(hit['l'],hit['k'],hit['E'])
if thishit is not None:
    result.resize(len(result)+1)
    result[-1]['angle'] = laster[0]*5.
    result[-1]['E'] = thishit.sumE()
    thishit=None #take care of the last one
# <codecell>
# Cache the accumulated records to HDF5 ...
f = File('%s.hdf5'%sample,'w')
f.create_dataset('result',data=result)
f.close()
# <codecell>
# ... and reload them (so later cells can rerun without the ROOT pass).
f = File('%s.hdf5'%sample,'r')
tmp = f['result']
result = np.array(tmp)
f.close()
# <codecell>
def my_gau(x,g_mu,g_sigma):
    # Thin wrapper whose only purpose is the parameter NAMES: the fitter
    # introspects argument names (see the g_mu/g_sigma keywords passed to
    # fit_binpoisson below), so the smearing Gaussian needs names distinct
    # from the crystal-ball's mean/sigma. Do not rename the parameters.
    return gaussian(x,g_mu,g_sigma)
# <codecell>
def smear(E):
    """Apply photostatistics smearing to an array of energies (MeV).

    Assumes ~1000 detected photons per MeV, so the Gaussian width of an
    entry with energy E is sqrt(1000*E)/1000 = sqrt(E/1000).
    """
    sigma = sqrt(1000. * E) / 1000.
    return E + randn(len(E)) * sigma
def doOneFit(E,range=(95.,110.),mean=104.,sigma=1.,n=20.,alpha=0.5,N=80000,
        limit_N=(1000,100000),limit_n=(0.1,100.), limit_mean=(90,106), limit_sigma=(0.3,5.),limit_alpha=(0.,5.)):
    """Fit a smeared crystal-ball peak to the energy array E.

    Builds an extended crystal-ball-convolved-with-Gaussian pdf, fits it
    with a binned Poisson likelihood (retrying up to 5 times with jittered
    start values on failure), draws the result and returns
    (fit, minuit, fwhm_high, fwhm_low, pdf).
    NOTE(review): the parameter named `range` shadows the builtin.
    """
    #eg = Add2Pdf(my_gau,Normalize(crystalball,range))
    #describe(eg)
    #eg = Normalize(crystalball,range)
    # Detector response: normalized crystal ball convolved with my_gau.
    eg = Convolve(Normalize(crystalball,range),my_gau,(-2,2),nbins=40)
    #eeg = eg
    eeg = Extend(eg)
    print describe(eeg)
    #fit, m = fit_uml(eg,sm,mean=104.5,sigma=1.,n=20.,alpha=0.5, limit_n=(0.1,50.), limit_mean=(90,106), limit_sigma=(0.3,5.),limit_alpha=(0.,2.))
    #try_uml(eg,sm,mean=104.,sigma=1.,n=50.,alpha=0.5)
    fit,m = None,None
    good = False
    itry = 0
    first = True
    # Retry loop: on a failed fit, restart from randomized mean/alpha.
    while not good and itry<5:
        try:
            if not first:
                mean = 104.5+randn(1)*2.
                alpha=0.5+randn(1)*0.2
            first =False
            fit,m = fit_binpoisson(eeg,E,maxcalls=2000000,bins=100,
                mean=mean,sigma=sigma,n=n,alpha=alpha,N=N,g_mu=0.,g_sigma=0.3,
                limit_N=limit_N,limit_n=limit_n, limit_mean=limit_mean, limit_sigma=limit_sigma,limit_alpha=limit_alpha,
                limit_g_mu=(-1,1),limit_g_sigma=(0.001,0.5),
                quiet=False,throw=False)
            good = True
        except Exception as e:
            print e
            #raise e
            itry+=1
    # NOTE(review): if all 5 attempts fail, fit is None and the next line
    # raises AttributeError.
    fit.draw(m)
    l,h = fwhm_f(eeg,range,m.args)
    print m.values
    vertical_highlight(l,h)
    return fit,m,h,l,eeg
# <codecell>
angles = np.linspace(0,90,19)[:-1]
myresult = {}
# <codecell>
arg = {
0 :{'range':(96.,105.5)},
1 :{'range':(96.,105.5)},
2 :{'range':(96.,105.5)},
3 :{'range':(96.,105.5)},
4 :{'range':(96.,105.5)},
5 :{'range':(96.,105.5)},
6 :{'range':(96.,105.5)},
7 :{'range':(96.,105.5)},
8 :{'range':(96.,105.5)},
9 :{'range':(96.,105.5)},
10:{'range':(96.,105.5)},
11:{'range':(96.,105.5)},
12:{'range':(90.,105.5)},
13:{'range':(90.,105.5)},
14:{'range':(90.,105.5)},
15:{'range':(90.,105.5)},
16:{'range':(80.,105.5)},
17:{'range':(80.,105.5)},
}
# Fit each incidence angle (5 deg steps) and store the results in myresult.
for i,angle in enumerate(angles):
    # NOTE(review): resume hack from the interactive session -- skips the
    # already-fitted angles 0..13; remove to reprocess everything.
    if i < 14: continue
    # Select events for this angle (records were tagged angle = runno*5).
    myE = result['E'][(result['angle']>(angle-0.1)) & (result['angle']<(angle+0.1))]
    figure()
    myE = smear(myE)
    # Fit window: default, unless overridden per-angle in `arg`.
    emin,emax = 101.,105.5
    if i in arg:
        emin,emax = arg[i]['range']
    myE = myE[(myE>emin) & (myE<emax)]
    myresult[i] = doOneFit(myE,range=(emin,emax),N=len(myE))
    title(str(angle)+' '+str(i))
# <codecell>
#make and save the plot
def make_nice_plot(r):
    """Draw a 3x3 grid of stored fit results for the angle indices in r.

    Each cell redraws the fit from myresult[i], highlights the FWHM band
    and annotates the resolution FWHM/2.35 (Gaussian-sigma equivalent).
    """
    fig,axs = subplots(3,3,figsize=(20,12))
    for i in r:
        ii = i%9
        # Fixed: floor division -- `/` yields a float under Python 3 and
        # floats are invalid subplot indices (identical under Python 2).
        row = ii//3
        col = ii%3
        fit = myresult[i][0]
        m = myresult[i][1]
        fh,fl = myresult[i][2],myresult[i][3]
        fwhm_res = (fh-fl)/2.35
        ax=axs[row,col]
        sca(ax)
        fit.draw(m)
        vertical_highlight(fl,fh)
        title('%s %d deg'%(sample,5*i))
        text(0.5,0.2,r'fwhm/2.35=%3.2f'%(fwhm_res),transform = ax.transAxes)
make_nice_plot(range(9))
savefig('%s_1.pdf'%sample,bbox_inches='tight')
make_nice_plot(range(9,18))
savefig('%s_2.pdf'%sample,bbox_inches='tight')
# <codecell>
# Collect the resolution (FWHM/2.35) for all 18 angles, cache it to .npy
# and plot resolution vs incidence angle (5 deg per index).
fwhm = np.zeros(18)
for i in range(18): fwhm[i]=(myresult[i][2]-myresult[i][3])/2.35
np.save('fwhm_%s.npy'%sample,fwhm)
x = np.array(range(18))*5.
plot(x,fwhm,'xb')
# <codecell>
hist(result['E'], bins=100, range=(100, 110), histtype='step')  # fixed stray ';,' (SyntaxError)
# <codecell>
a = numpy.array([],dtype=[('a',np.double)])
a
a.resize(len(a)+1)
a.resize(len(a)+1)
a
# <codecell>
gdf = df.groupby(['runno','eventno'])
# <codecell>
for k,v in gdf:
h = Hitmap(10,10)
for i in xrange(len(v)):
h.acc(v.l[i],v.k[i],v.E[i])
print h.hmap
print h.sumE()
break
# <codecell>
h = Hitmap(10,10)
# <codecell>
for x in hmap:
    pass  # NOTE(review): loop body was never written in this notebook cell (was an IndentationError); stubbed so the file parses
# <codecell>
| [
"piti118@gmail.com"
] | piti118@gmail.com |
994488c0995c4cb3859a16fbd3481c780bdb7c61 | c50e7eb190802d7849c0d0cea02fb4d2f0021777 | /src/virtual-wan/azext_vwan/vendored_sdks/v2018_08_01/v2018_08_01/aio/operations/_load_balancer_load_balancing_rules_operations.py | 80034f67d188e49b5f19806c7376dfe4dd5c6385 | [
"LicenseRef-scancode-generic-cla",
"MIT"
] | permissive | Azure/azure-cli-extensions | c1615b19930bba7166c282918f166cd40ff6609c | b8c2cf97e991adf0c0a207d810316b8f4686dc29 | refs/heads/main | 2023-08-24T12:40:15.528432 | 2023-08-24T09:17:25 | 2023-08-24T09:17:25 | 106,580,024 | 336 | 1,226 | MIT | 2023-09-14T10:48:57 | 2017-10-11T16:27:31 | Python | UTF-8 | Python | false | false | 8,796 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class LoadBalancerLoadBalancingRulesOperations:
"""LoadBalancerLoadBalancingRulesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2018_08_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
resource_group_name: str,
load_balancer_name: str,
**kwargs
) -> AsyncIterable["_models.LoadBalancerLoadBalancingRuleListResult"]:
"""Gets all the load balancing rules in a load balancer.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param load_balancer_name: The name of the load balancer.
:type load_balancer_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either LoadBalancerLoadBalancingRuleListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2018_08_01.models.LoadBalancerLoadBalancingRuleListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.LoadBalancerLoadBalancingRuleListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-08-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('LoadBalancerLoadBalancingRuleListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/loadBalancingRules'} # type: ignore
async def get(
self,
resource_group_name: str,
load_balancer_name: str,
load_balancing_rule_name: str,
**kwargs
) -> "_models.LoadBalancingRule":
"""Gets the specified load balancer load balancing rule.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param load_balancer_name: The name of the load balancer.
:type load_balancer_name: str
:param load_balancing_rule_name: The name of the load balancing rule.
:type load_balancing_rule_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: LoadBalancingRule, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2018_08_01.models.LoadBalancingRule
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.LoadBalancingRule"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-08-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
'loadBalancingRuleName': self._serialize.url("load_balancing_rule_name", load_balancing_rule_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('LoadBalancingRule', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/loadBalancingRules/{loadBalancingRuleName}'} # type: ignore
| [
"noreply@github.com"
] | Azure.noreply@github.com |
e913df03c7803afd5541a37caa8edf1ec5ee78b8 | 621b856c4f181128e7f7039a6f5508f3ffddc983 | /Peoggramms/salman_h_changed_loop.py | 0ecf8caeee7434c0260eddd5b3e3497d710b096b | [] | no_license | olesyaogorodnikova/Robot_kinematics | 31853f64266f15cce05b6cfc70427819975e7442 | cbc1284540fd7bf1c01f2ef36f319d1ac898590c | refs/heads/master | 2021-01-07T14:22:15.171320 | 2020-02-19T21:50:15 | 2020-02-19T21:50:15 | 241,723,412 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,959 | py | #!/usr/bin/env python
import rospy
import tf
from kuka_arm.srv import *
from trajectory_msgs.msg import JointTrajectory, JointTrajectoryPoint
from geometry_msgs.msg import Pose
from numpy import array, matrix, cos, sin, pi, arccos, arctan2, sqrt
from numpy.linalg import inv
import time
def get_Table():
    """Build the modified-DH parameter table for the Kuka KR210.

    Link twists/lengths/offsets come from the 'kr210.urdf.xacro' file.
    All six joint angles (theta1..theta6) start at zero and are filled
    in later by the IK solver; theta7 is the fixed gripper frame.
    """
    zero = 0.
    return {'alpha0':     0, 'a0':      0, 'd1':  0.75, 'theta1': zero,
            'alpha1': -pi/2, 'a1':   0.35, 'd2':     0, 'theta2': zero,
            'alpha2':     0, 'a2':   1.25, 'd3':     0, 'theta3': zero,
            'alpha3': -pi/2, 'a3': -0.054, 'd4':  1.50, 'theta4': zero,
            'alpha4':  pi/2, 'a4':      0, 'd5':     0, 'theta5': zero,
            'alpha5': -pi/2, 'a5':      0, 'd6':     0, 'theta6': zero,
            'alpha6':     0, 'a6':      0, 'd7': 0.303, 'theta7': 0}
def H_Transformation(alpha, a, d, theta):
    """Homogeneous transform between adjacent links (modified DH convention).

    alpha/a are the twist/length of the previous link; d/theta the offset
    and angle of the current joint.
    """
    ct, st = cos(theta), sin(theta)
    ca, sa = cos(alpha), sin(alpha)
    return matrix([[     ct,     -st,   0,       a],
                   [st * ca, ct * ca, -sa, -sa * d],
                   [st * sa, ct * sa,  ca,  ca * d],
                   [      0,       0,   0,       1]])
def Rotation_Rx(theta):
    """Elementary rotation (roll) of angle theta about the x axis."""
    c, s = cos(theta), sin(theta)
    return matrix([[1, 0,  0],
                   [0, c, -s],
                   [0, s,  c]])
def Rotation_Ry(theta):
    """Elementary rotation (pitch) of angle theta about the y axis."""
    c, s = cos(theta), sin(theta)
    return matrix([[ c, 0, s],
                   [ 0, 1, 0],
                   [-s, 0, c]])
def Rotation_Rz(theta):
    """Elementary rotation (yaw) of angle theta about the z axis."""
    c, s = cos(theta), sin(theta)
    return matrix([[c, -s, 0],
                   [s,  c, 0],
                   [0,  0, 1]])
def get_Gripper_pose(geometry_msg):
    """
    Extract EE pose from received trajectory pose in an IK request message.
    NOTE: Pose is position (cartesian coords) and orientation (euler angles)
    Docs: https://github.com/ros/geometry/blob/indigo-devel/
          tf/src/tf/transformations.py#L1089

    Returns a pair of tuples: ((px, py, pz), (roll, pitch, yaw)).
    """
    px = geometry_msg.position.x
    py = geometry_msg.position.y
    pz = geometry_msg.position.z
    # Convert the quaternion orientation of the message into fixed-axis
    # (extrinsic) roll/pitch/yaw Euler angles.
    (roll, pitch, yaw) = tf.transformations.euler_from_quaternion(
        [geometry_msg.orientation.x, geometry_msg.orientation.y,
         geometry_msg.orientation.z, geometry_msg.orientation.w])
    gripper_pose = (px, py, pz)
    orient = (roll, pitch, yaw)
    return gripper_pose, orient
def get_R0_EE(gripper_pose):
    """Return the end-effector rotation matrix w.r.t. the base frame.

    The extrinsic roll-pitch-yaw rotation from the pose message is
    composed with a fixed intrinsic correction (180 deg yaw followed by
    -90 deg pitch) that aligns the URDF gripper frame with the DH one.
    """
    roll, pitch, yaw = gripper_pose[1]
    # Extrinsic x-y-z sequence about the fixed base axes.
    r_extrinsic = Rotation_Rz(yaw) * Rotation_Ry(pitch) * Rotation_Rx(roll)
    # Correction for the URDF-vs-DH gripper frame mismatch.
    r_correction = Rotation_Rz(pi) * Rotation_Ry(-pi/2)
    return r_extrinsic * r_correction
def get_Wc(s, R0_EE, gripper_pose):
    """Compute the wrist-center position (cartesian) w.r.t. the base frame.

    Keyword arguments:
    s            -- DH parameter table (uses the gripper offset s['d7'])
    R0_EE        -- EE rotation matrix w.r.t. the base frame
    gripper_pose -- ((px, py, pz), (roll, pitch, yaw)) tuple
    Returns a 3x1 column vector of WC coordinates.
    """
    px, py, pz = gripper_pose[0]
    ee_position = matrix([[px],
                          [py],
                          [pz]])
    # Third column of R0_EE = orientation of the EE z-axis in base coords.
    z_axis = R0_EE[:, 2]
    # WC lies a distance d7 back from the EE along the EE z-axis.
    return ee_position - s['d7'] * z_axis
def get_joints1_2_3(s, Wc):
    """Geometric IK for joints 1-3, which position the wrist center (WC).

    theta1 comes from the top-down view of the arm; theta2/theta3 from
    the cosine law on the side-view triangle spanned by joints 2, 3 and WC.
    """
    wcx, wcy, wcz = Wc[0], Wc[1], Wc[2]
    # Top-down view: joint 1 points the arm plane at the WC.
    theta1 = arctan2(wcy, wcx)
    # Side view: horizontal reach and elevation of WC relative to joint 2.
    horiz = sqrt(wcx**2 + wcy**2)-s['a1']
    elev = arctan2(wcz - s['d1'], horiz)
    # Triangle edges: joint2->WC and joint3->WC (includes the a3 sag).
    edge_b = sqrt((wcz - s['d1'])**2 + horiz**2)
    edge_c = sqrt(s['d4']**2 + s['a3']**2)
    # Interior angles at joints 2 and 3 via the cosine law.
    angle_a = arccos((edge_b**2 + s['a2']**2 - edge_c**2) / (2*edge_b*s['a2']))
    angle_b = arccos((edge_c**2 + s['a2']**2 - edge_b**2) / (2*edge_c*s['a2']))
    theta2 = pi/2 - angle_a - elev
    theta3 = pi/2 - angle_b - arctan2(s['a3'], s['d4'])
    return theta1, theta2, theta3
def get_joints4_5_6(s, R0_EE, theta1, theta2, theta3):
    """Analytical IK for the spherical wrist (Euler angles of joints 4-6).

    Builds R0_3 from the (already updated) theta1..theta3 entries of the
    DH table `s`, then solves R3_6 = inv(R0_3) * R0_EE for the wrist angles.

    Fixes over the previous revision:
    - `np.abs(r23) is not 1` referenced the undefined name `np` (this file
      only does `from numpy import ...`), guaranteeing a NameError, and
      used an identity comparison where a numeric one was meant; replaced
      by a tolerance check with the builtin abs().
    - The gimbal-lock branch used undefined names r12, r32 and q6; r12/r32
      are now read from R3_6 and q6 is theta6.
    - When cos(theta5) == -1 the singular solution is theta5 = pi, not 0.
    """
    # Rotations of links 1-3 (translation parts of the transforms unused).
    T0_1 = H_Transformation(s['alpha0'], s['a0'], s['d1'], s['theta1'])
    T1_2 = H_Transformation(s['alpha1'], s['a1'], s['d2'], s['theta2'])
    T2_3 = H_Transformation(s['alpha2'], s['a2'], s['d3'], s['theta3'])
    R0_3 = T0_1[:3, :3] * T1_2[:3, :3] * T2_3[:3, :3]
    # R0_6 == R0_EE, hence R3_6 = inv(R0_3) * R0_EE.
    R3_6 = inv(array(R0_3, dtype='float')) * R0_EE
    r12 = R3_6[0, 1]  # used only in the singular branch
    r13 = R3_6[0, 2]  # -sin(theta5)*cos(theta4)
    r21 = R3_6[1, 0]  # sin(theta5)*cos(theta6)
    r22 = R3_6[1, 1]  # -sin(theta5)*sin(theta6)
    r23 = R3_6[1, 2]  # cos(theta5)
    r32 = R3_6[2, 1]  # used only in the singular branch
    r33 = R3_6[2, 2]  # sin(theta4)*sin(theta5)
    if abs(r23) < 1.0 - 1e-10:
        # Regular case: sin(theta5) != 0, all three angles are determined.
        theta5 = arctan2(sqrt(r13**2 + r33**2), r23)
        if sin(theta5) < 0:
            theta4 = arctan2(-r33, r13)
            theta6 = arctan2(r22, -r21)
        else:
            theta4 = arctan2(r33, -r13)
            theta6 = arctan2(-r22, r21)
    else:
        # Gimbal lock: axes 4 and 6 align, only theta4 +/- theta6 is fixed.
        # Keep theta6 at zero and fold the whole rotation into theta4.
        theta6 = 0.
        if r23 > 0:  # cos(theta5) == +1
            theta5 = 0.
            theta4 = -theta6 + arctan2(-r12, -r32)
        else:        # cos(theta5) == -1
            theta5 = pi
            theta4 = theta6 - arctan2(r12, -r32)
    return theta4, theta5, theta6
def handle_calculate_IK(req):
    """Handle request from a CalculateIK type service.

    For every end-effector pose in the request, run the full IK chain
    (WC position -> joints 1-3 -> joints 4-6) and return the resulting
    list of joint trajectory points.
    """
    rospy.loginfo("Received %s eef-poses from the plan" % len(req.poses))
    if len(req.poses) < 1:
        print "No valid poses received"
        return -1
    else:
        s = get_Table()
        # Initialize service response consisting of a list of
        # joint trajectory positions (joint angles) corresponding
        # to a given gripper pose
        joint_trajectory_list = []
        # To store coordinates for plotting (in plot_ee() function)
        #received_ee_points = []
        #fk_EE_points = []
        #EE_errors = []
        # For each gripper pose a response of six joint angles is computed
        loop_start_time = time.time()
        len_poses = len(req.poses)
        for x in xrange(0, len_poses):
            # NOTE(review): loop_current_time is never used.
            loop_current_time = time.time()
            joint_trajectory_point = JointTrajectoryPoint()
            # INVERSE KINEMATICS
            gripper_pose = get_Gripper_pose(req.poses[x])
            #received_ee_points.append(ee_pose[0])
            R0_EE = get_R0_EE(gripper_pose)
            Wc = get_Wc(s, R0_EE, gripper_pose)
            # Calculate angles for joints 1,2,3 and update dh table
            theta1, theta2, theta3 = get_joints1_2_3(s, Wc)
            s['theta1'] = theta1
            s['theta2'] = theta2-pi/2 # account for 90 deg constant offset
            s['theta3'] = theta3
            # Calculate angles for joints 4,5,6 and update dh table
            # (get_joints4_5_6 reads the theta1..theta3 just stored in s).
            theta4, theta5, theta6 = get_joints4_5_6(s, R0_EE, theta1, theta2, theta3)
            s['theta4'] = theta4
            s['theta5'] = theta5
            s['theta6'] = theta6
            # Populate response for the IK request
            joint_trajectory_point.positions = [theta1, theta2, theta3,
                                                theta4, theta5, theta6]
            joint_trajectory_list.append(joint_trajectory_point)
            def calculate_FK():
                """Calculate Forward Kinematics for verifying joint angles.

                NOTE(review): dead debug helper -- it appends to
                fk_EE_points/EE_errors (commented out above) and reads
                EE_pose, which does not exist (the local is named
                gripper_pose). Un-commenting the call below will raise
                NameError until those are restored.
                """
                # Compute individual transforms between adjacent links
                # T(i-1)_i = Rx(alpha(i-1)) * Dx(alpha(i-1)) * Rz(theta(i)) * Dz(d(i))
                T0_1 = H_Transformation(s['alpha0'], s['a0'], s['d1'], s['theta1'])
                T1_2 = H_Transformation(s['alpha1'], s['a1'], s['d2'], s['theta2'])
                T2_3 = H_Transformation(s['alpha2'], s['a2'], s['d3'], s['theta3'])
                T3_4 = H_Transformation(s['alpha3'], s['a3'], s['d4'], s['theta4'])
                T4_5 = H_Transformation(s['alpha4'], s['a4'], s['d5'], s['theta5'])
                T5_6 = H_Transformation(s['alpha5'], s['a5'], s['d6'], s['theta6'])
                T6_EE = H_Transformation(s['alpha6'], s['a6'], s['d7'], s['theta7'])
                # Create overall transform between base frame and EE by
                # composing the individual link transforms
                T0_EE = T0_1 * T1_2 * T2_3 * T3_4 * T4_5 * T5_6 * T6_EE
                fk_EE = [T0_EE[0, 3], T0_EE[1, 3], T0_EE[2, 3]]
                fk_EE_points.append([(fk_EE[0].item(0)),
                                     (fk_EE[1].item(0)),
                                     (fk_EE[2].item(0))])
                error_x = abs(fk_EE[0] - EE_pose[0][0])
                error_y = abs(fk_EE[1] - EE_pose[0][1])
                error_z = abs(fk_EE[2] - EE_pose[0][2])
                EE_errors.append([(error_x.item(0)),
                                  (error_y.item(0)),
                                  (error_z.item(0))])
            # NOTE: Uncomment following line to compute FK for plotting EE
            #calculate_FK()
        print "Total time:", round(time.time() - loop_start_time, 4)
        rospy.loginfo("Number of joint trajectory points:" +
                      " %s" % len(joint_trajectory_list))
        return CalculateIKResponse(joint_trajectory_list)
def IK_server():
"""Initialize IK_server ROS node and declare calculate_ik service."""
rospy.init_node('IK_server')
s = rospy.Service('calculate_ik', CalculateIK, handle_calculate_IK)
print "Ready to receive an IK request"
rospy.spin()
if __name__ == "__main__":
IK_server()
| [
"ooleszja@gmail.com"
] | ooleszja@gmail.com |
0a1322cfc3522be936cc35d976692015418c04b7 | 78f69f8c44cdb6387bd5fc82919c3e3a7b19abee | /__init__.py | dd2ade445cf488090daac9395705d5804ce632f7 | [] | no_license | evanwike/grocery-list-skill | 63abe9adfe7f81aa7902e2d969a22fbf9e1b3d1b | 672109a0693f6a92738584a0b59a2110d71b4ce5 | refs/heads/master | 2020-05-05T09:42:18.283319 | 2019-04-08T04:26:42 | 2019-04-08T04:26:42 | 179,913,164 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,448 | py | from mycroft import MycroftSkill, intent_file_handler
from mycroft.util.log import getLogger
import pymongo
LOGGER = getLogger(__name__)
# NOTE(review): database credentials are hard-coded in the connection URI;
# they should be moved to configuration or environment variables.
URI = 'mongodb://root:password1@ds049446.mlab.com:49446/hackathon'
CLIENT = pymongo.MongoClient(URI)
DB = CLIENT.get_database()
# Single hard-coded user; every skill instance reads/writes this document.
USER = 'user'
# Collection holding one grocery-list document per user (see update_db below).
lists = DB['lists']
class GroceryList(MycroftSkill):
    """Mycroft skill that maintains a grocery list backed by MongoDB."""

    def __init__(self):
        MycroftSkill.__init__(self)
        # Load the persisted list for USER.
        # NOTE(review): assumes the document already exists and has an
        # 'items' key — a fresh database would raise here; confirm setup.
        self.grocery_list = lists.find_one({'name': USER})['items']

    # Add item to grocery list
    @intent_file_handler('add_item.intent')
    def handle_add_item_intent(self, message):
        item = message.data.get("item")
        if item not in self.grocery_list:
            self.grocery_list.append(item)
            update_db(self.grocery_list)
            # Pick "have"/"has" from a trailing-'s' plural heuristic.
            # NOTE(review): `message` (the intent object) is shadowed here.
            message = item + (' have' if item[len(item) - 1] == 's' else ' has')
            self.speak_dialog('add_success', data={'message': message})
        else:
            self.speak_dialog('add_error', data={'item': item})

    # Remove item from grocery list
    @intent_file_handler('remove_item.intent')
    def handle_remove_item_intent(self, message):
        item = message.data.get('item')
        if item not in self.grocery_list:
            self.speak_dialog('remove_error', data={'item': item})
        else:
            self.grocery_list.remove(item)
            update_db(self.grocery_list)
            # Same plural heuristic (and shadowing) as the add handler.
            message = item + (' have' if item[len(item) - 1] == 's' else ' has')
            self.speak_dialog('remove_success', data={'message': message})

    # Detect if item is plural for has/have
    # How many items are on my grocery list?
    @intent_file_handler('count_items.intent')
    def handle_count_items(self, message):
        # Choose verb/suffix so the dialog reads naturally for 1 vs many.
        plural = len(self.grocery_list) > 1
        verb = 'are' if plural else 'is'
        s = 's' if plural else ''
        self.speak_dialog('count_items', data={'n': len(self.grocery_list), 'verb': verb, 's': s})

    @intent_file_handler('list_items.intent')
    def handle_list_grocery(self, message):
        # Speak each item in turn, or report an empty list.
        if len(self.grocery_list) > 0:
            self.speak_dialog("list_items")
            for item in self.grocery_list:
                self.speak(item)
        else:
            self.speak_dialog("empty_list")
        # self.speak_dialog('list.grocery')
def create_skill():
    # Mycroft entry point: the framework calls this to instantiate the skill.
    return GroceryList()
def update_db(grocery_list: list):
    # Persist the current list for USER, creating the document if absent
    # (upsert=True).
    lists.update_one({'name': USER}, {'$set': {'items': grocery_list}}, upsert=True)
| [
"sportsdude716@gmail.com"
] | sportsdude716@gmail.com |
cd6a459ece5a08bd23ac75e022e08a981b4e98c4 | 5d09e3b32b0f7dee1147139e5e57822f33dc0f32 | /lib/authorship_simulate_citations.py | dfe00d94e2db5ca746145205494cf1700d1da662 | [] | no_license | scone-snu/pyflib2 | cb797f625100d280f6bd3b757795040ca892b1ed | bb2ad7d9974903ac8c3b01ac48b4d6ab72d2ac80 | refs/heads/master | 2020-03-31T17:37:54.216805 | 2011-05-06T04:43:31 | 2011-05-06T04:43:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,186 | py | import os
import glob
import re
import networkx as nx
import itertools
import matplotlib.pyplot as plt
import pickle
from collections import defaultdict
from PlotFunctions import *
import random
from scipy.stats import gamma
import math
# Variables that can be modified
START_YEAR = 1997 # Year to start simulation from (i.e. start simulation from START_YEAR+1)
NEW_EDGES_PER_YEAR = 1370 # Number of new edges per year
T = 6 # Years to simulate
P = 0.4 # Probability of choosing a neighbor
Q = 0.4 # Probability of choosing at random or closing a triangle, etc.
PREFIX = "ca"
# # Simulate from the single-edge graph
# G = nx.Graph()
# G.add_edge("1","2", weight=1, years=[START_YEAR])
# Simulate from START_YEAR
G = nx.read_edgelist("../data/parsed/authorship_%d.edgelist" % START_YEAR, create_using=nx.Graph(), comments='#', delimiter='|', data=True, encoding='utf-8')
# Load year of first publication for each author
with open("../data/parsed/authorship.year", "r") as f:
first_paper = pickle.load(f)
# Load # of papers each author produces in his/her lifetime
with open("../data/parsed/authorship.count", "r") as f:
num_papers = pickle.load(f)
max_gam = max(gamma.pdf(range(1,12),3,scale=2))
def num_new_nodes(year, author):
    """Number of new edges this author initiates this year (Bernoulli draw).

    `year` and `author` are accepted for interface compatibility with
    alternative activity models but are unused by this constant-rate model.
    """
    # Constant activity level: one new edge with probability 0.648.
    return 1 if random.random() < 0.648 else 0
def num_papers_dist():
    # Constant model: every simulated author writes 4 papers in a lifetime.
    return 4
def num_citations_dist():
    # Constant model: every simulated paper receives 71 citations.
    return 71
# Citation counts for papers created during the simulation (merged into the
# per-year counts loaded below).
new_num_citations = {}
for t in range(START_YEAR+1,START_YEAR+1+T):
    print "Simulating year %d..." % t
    # Load # of citations
    with open("../data/parsed/citations_%d.count" % t) as f:
        num_citations = pickle.load(f)
    num_citations.update(new_num_citations)
    # Create new edges for existing nodes
    print "\t for existing nodes"
    for node in G.nodes_iter():
        for i in range(0, num_new_nodes(t,node)):
            # See if we want to form an edge and set target if we want to
            rand = random.random()
            target = None
            if rand < P:
                # Pick a node proportional to edge weight
                # (here: proportional to the max citation count among the
                # papers shared with each neighbor)
                bins = []
                for nbr in G.neighbors(node):
                    #print node,nbr,G[node][nbr]
                    mult = max([num_citations[p] for p in G[node][nbr]['papers']])
                    #clist = [num_citations[p] for p in G[node][nbr]['papers']]
                    #mult = int(round(float(sum(clist)) / len(clist)))
                    bins += [nbr] * mult
                if len(bins) == 0:
                    bins = G.neighbors(node)
                target = random.choice(bins)
            elif rand < P + Q:
                # Degree-random
                # (choose a neighbor-of-neighbor, weighted by shared paths;
                # may close a triangle or pick the node itself)
                bins = []
                for nbr in G.neighbors(node):
                    for nbr2 in G.neighbors(nbr):
                        bins += [nbr2]
                target = random.choice(bins)
            # Form an edge if target is set, don't form self-loops
            if target:
                #print "Adding edge from %s to %s" % (node,target)
                new_paper = "N"+str(t)+"_"+node+"_"+target
                num_citations[new_paper] = num_citations_dist()
                if G.has_edge(node,target):
                    G[node][target]['weight'] += 1
                    G[node][target]['years'].append(t)
                    G[node][target]['papers'].append(new_paper)
                elif node != target:
                    G.add_edge(node, target, weight=1, years=[t], papers=[new_paper])
    # New node additions
    print "\t for new nodes"
    if len(G.nodes()) > 0:
        # Generate bins for preferential attachment
        bins = []
        for node,degree in G.degree_iter():
            bins += [node] * degree
        # Add new nodes and connect them to existing nodes using preferential attachment
        for i in range(0,NEW_EDGES_PER_YEAR):
            new_node = "N"+str(t)+"_"+str(i)
            new_paper = "N"+str(t)+"_"+new_node
            new_num_citations[new_paper] = num_citations_dist()
            first_paper[new_node] = t
            num_papers[new_node] = num_papers_dist()
            # Pick & connect to a random node
            G.add_edge(random.choice(bins), new_node, weight=1, years=[t], papers=[new_paper])
    # Snapshot the simulated graph after each year.
    nx.write_edgelist(G, "../data/simulations/%ssim_%d_%d_%f_%f.edgelist" % (PREFIX, START_YEAR, t, P, Q), comments='#', delimiter='|', data=True, encoding='utf-8')
#print G.edges()
# # Uncomment the below to visualize the graph. Might take extremely long to render!
# nx.draw_graphviz(G)
# plt.show()
"jccccf@gmail.com"
] | jccccf@gmail.com |
12519564ac2077f1120fb5cbb0e9bfaf0c9762c4 | 0bb991864bb1c68eb41c40229b2a78adcbbf69c9 | /python/model_features/statistics.py | 5f73b2e6b61173784966955ab4a9f0dc70ecff90 | [] | no_license | kristianeschenburg/Parcellating-connectivity | ab78a62a11e549f027a177f57c15924ef6eafb9e | 19edaba4d923b1d283b182f21dca4f46a0fbd2f6 | refs/heads/master | 2020-03-22T13:37:16.801653 | 2018-07-29T18:33:47 | 2018-07-29T18:33:47 | 140,120,191 | 0 | 0 | null | 2018-07-07T22:16:40 | 2018-07-07T22:16:39 | null | UTF-8 | Python | false | false | 1,568 | py | import numpy as np
import time
def UpdateStats(stats, t0, curr_lp, max_lp, K, z, c, steps, gt_z, map_z, verbose):
    """
    Update diagnostic statistics.

    Parameters:
    - - - - -
    t0 : initial start time (must come from the same clock used below,
         i.e. time.perf_counter())
    curr_lp : current log-probability of map
    max_lp : max log-probability
    K : number of clusters
    z : current map
    c : current parent links
    steps : total number of steps taken
    gt_z : ground truth map
    map_z : maximum a-posteriori map
    verbose : flag to print status updates

    Returns the (mutated) `stats` dict.
    """
    stats['lp'].append(curr_lp)
    stats['max_lp'].append(max_lp)
    stats['K'].append(K)
    # np.row_stack was removed in NumPy 2.0; np.vstack is the exact equivalent.
    stats['z'] = np.vstack([stats['z'], z])
    stats['c'] = np.vstack([stats['c'], c])
    # time.clock() was removed in Python 3.8; perf_counter() is the
    # recommended monotonic replacement for measuring elapsed time.
    curr_time = time.perf_counter() - t0
    stats['times'].append(curr_time)
    if verbose:
        print('Step: ' + str(steps) + ' Time: ' + str(curr_time) +
              ' LP: ' + str(curr_lp) + ' K: ' + str(K) + ' MaxLP: ' + str(max_lp))
    # Only score against ground truth when one was supplied (any nonzero label).
    if np.any(gt_z):
        stats['NMI'].append(NMI(gt_z, map_z))
    return stats
def NMI(z1, z2):
    """
    Compute normalized mutual information between two label maps.

    Parameters:
    - - - - -
    z1, z2 : integer label arrays of equal length

    Returns MI / sqrt(H1*H2), or 0 when the mutual information is 0
    (e.g. one map is constant).
    """
    N = len(z1)
    assert N == len(z2)
    # Marginal label distributions; zero-probability entries are set to 1 so
    # that p*log(p) contributes 0 instead of NaN.
    p1 = np.bincount(z1)/N
    p1[p1 == 0] = 1
    H1 = (-p1*np.log(p1)).sum()
    p2 = np.bincount(z2)/N
    p2[p2 == 0] = 1
    H2 = (-p2*np.log(p2)).sum()
    # Joint distribution over label pairs. The `normed` keyword was removed
    # in NumPy 1.24; `density=True` is equivalent here because all bins have
    # unit width, so the density equals the joint probability.
    joint = np.histogram2d(z1, z2, [range(0, z1.max()+2), range(0, z2.max()+2)],
                           density=True)
    joint_p = joint[0]
    # pdiv = p(x,y) / (p(x)p(y)); force empty cells to 1 so log() is 0 there.
    pdiv = joint_p/np.outer(p1, p2)
    pdiv[joint_p == 0] = 1
    MI = (joint_p*np.log(pdiv)).sum()
    if MI == 0:
        # Avoids 0/0 when either map carries no information (H == 0).
        NMI = 0
    else:
        NMI = MI/np.sqrt(H1*H2)
    return NMI
"keschenb@uw.edu"
] | keschenb@uw.edu |
fc4e2f70fdb42770a7c8e6dd0beb93b61e367911 | 1c7ac6a675fa16e7c7d85b90a02eaddbadf80738 | /skulpt/python/binaire_alarme2.py | 6250021f1bdda748c16d8f42c4f4e05dd8be832c | [] | no_license | mistert14/mistert-skulpt | e19c432264fd532e90fdfcc06e6b5d1d9cac7936 | 7a5990e03466a1889922ad3c1e4b0e736cca569f | refs/heads/master | 2020-09-24T13:26:44.632719 | 2014-05-19T19:37:42 | 2014-05-19T19:37:42 | 40,827,397 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,995 | py | import canvas, math, math2
# Stop any timers left over from a previous run of this skulpt sketch.
canvas.clear_timers()
# General state: the three boolean inputs and the computed output bit.
bits = {'A': 0, 'B': 0, 'C': 0 }
out = '0'
# Compute the output bit from the boolean equation in `val`.
def process(val):
    global bits, out
    # math2.RpnMathParser evaluates the expression using the current `bits`.
    parser = math2.RpnMathParser(val, bits);
    out = parser.get_result()
    # Normalize the numeric result to the string '0' or '1'.
    if str(out) == '0.0':
        out = '0'
    else:
        out = '1'
# Checkbox handler: store the new input state and recompute the output.
def chk(id,value):
    global bits
    if value:
        bits[id] = 1
    else:
        bits[id] = 0
    process(inp.get_text())
# Equation-input handler: re-evaluate whenever the equation text changes.
def check(val):
    process(val)
# Build the user interface: three input checkboxes and the equation field.
canvas.add_checkbox("chkA","A",chk,20)
canvas.add_checkbox("chkB","B",chk,20)
canvas.add_checkbox("chkC","C",chk,20)
inp = canvas.add_input("equ:",check,200)
# Default equation; evaluate it once so `out` starts consistent.
inp.set_text("A & (not(B) | not(C))")
process(inp.get_text())
def color(value):
    """Map a bit string to its lamp colour: '1' -> lit, anything else -> off."""
    return 'Yellow' if value == '1' else 'White'
# This function redraws the screen every 17 milliseconds.
def draw():
    global bits, out
    canvas.fill_rect(0,0,500,500)
    left = 80
    top = 30
    canvas.draw_line((0,30+top),(500,30+top),4,'Blue')
    cl2 = 'Yellow'
    # Section headers (French UI strings are part of the displayed output).
    canvas.draw_text("ENTREES",(left-30,25+top),24,cl2)
    canvas.draw_text("SORTIES",(left+250,25+top),24,cl2)
    # One lamp per input, lit (Yellow) when the bit is 1.
    canvas.draw_circle((left, 50+top), 10, 2, 'Blue', color(str(bits['A'])))
    canvas.draw_circle((left, 73+top), 10, 2, 'Blue', color(str(bits['B'])))
    canvas.draw_circle((left, 96+top), 10, 2, 'Blue', color(str(bits['C'])))
    # Output lamp.
    canvas.draw_circle((left+170, 50+top), 10, 2, 'Blue', color(out))
    canvas.draw_text("A: "+str(bits['A']),(left+15,58+top),24,cl2)
    canvas.draw_text("B: "+str(bits['B']),(left+15,80+top),24,cl2)
    canvas.draw_text("C: "+str(bits['C']),(left+15,102+top),24,cl2)
    canvas.draw_text(inp.get_text(),(left+185,58+top),24,cl2)
    # TODO note left by the original author (in French): draw the switches
    # with draw_circle/draw_line and make them toggle with the inputs.
    """
    A FAIRE:
    Dessiner avec draw_circle et draw_line les interrupteurs
    et les faire basculer et eteindre quand les entrees changent
    """
# Start the 17 ms redraw timer.
t = canvas.create_timer(17,draw)
t.start()
| [
"mrtseb@gmail.com"
] | mrtseb@gmail.com |
216cfa0a771df09a4201a43f6c87376c2b2194ba | ac31ab210a9d4688e0ba90872fe48a6c97886b9b | /ReLink.py | f13036d27d75a1e963928ea54802a1835895c68b | [] | no_license | springltd/link2_pi_demo01 | 647120ab2d5cd15189fd8e76f3a699829f4342f2 | 94865945fb0b54a66da3df08f1e604a050f70107 | refs/heads/master | 2022-10-22T12:21:31.301083 | 2020-06-16T09:43:02 | 2020-06-16T09:43:02 | 272,659,612 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,617 | py | #!/usr/bin/env python3
#Library imports
from tkinter import *
from tkinter import StringVar
import time
from functools import partial
class App:
    # Class to manage ReLink PiHat
    # ---------------------------------------
    def __init__(self, master):
        # Init function for class: builds the Tk widgets and wires the
        # four relay-channel buttons to their GPIO toggles.
        # -----------------------
        frame = Frame(master)
        frame.pack()
        #arrays of IO states and GPIO pins (we always use J pin number convention in this program)
        self.IOState=[0,0,0,0]
        self.jPin =[15,22,29,36,]
        self.AllState = 0
        # Create and position each of the buttons
        self.ChannelButton15 = Button(frame, text="15",bg = "red",height=1, width=1)
        self.ChannelButton15.grid(row=7,column=2);
        self.ChannelButton22 = Button(frame, text="22",bg = "red",height=1, width=1)
        self.ChannelButton22.grid(row=10,column=3);
        self.ChannelButton29 = Button(frame, text="29",bg = "red",height=1, width=1)
        self.ChannelButton29.grid(row=14,column=2);
        self.ChannelButton36 = Button(frame, text="36",bg = "red",height=1, width=1)
        self.ChannelButton36.grid(row=17,column=3);
        # create on and off actions for each button
        # (partial binds the channel index and the button widget itself)
        action_toggle15= partial(self.ToggleOnOff, 0, self.ChannelButton15)
        action_toggle22= partial(self.ToggleOnOff, 1, self.ChannelButton22)
        action_toggle29= partial(self.ToggleOnOff, 2, self.ChannelButton29)
        action_toggle36= partial(self.ToggleOnOff, 3, self.ChannelButton36)
        #associate the actions with the button
        self.ChannelButton15.config(command=action_toggle15)
        self.ChannelButton22.config(command=action_toggle22)
        self.ChannelButton29.config(command=action_toggle29)
        self.ChannelButton36.config(command=action_toggle36)
        # Create the GPIO labels alongside the buttons
        l15 = Label(frame, text = "GPIO22", height=1, width=6);
        l15.grid (row=7, column=0)
        l22 = Label(frame, text = "GPIO25", height=1, width=6);
        l22.grid (row=10, column=4)
        l29 = Label(frame, text = "GPIO05", height=1, width=6);
        l29.grid (row=14, column=0)
        l36 = Label(frame, text = "GPIO16", height=1, width=6);
        l36.grid (row=17, column=4)
        # Create the Toggle All button
        ToggleAllButton = Button(frame, text="Toggle All", height=1, width=25, command =self.ToggleAll)
        ToggleAllButton.grid(row=20, column=0,columnspan=5)

    def ToggleAll(self):
        # toggle all i/os on or off
        # -------------------------
        if self.AllState==1:
            self.AllState = 0
            bgclr="red"
            fgclr="black"
        else:
            self.AllState = 1
            bgclr="green"
            fgclr="white"
        # update the button colours according to the IO state
        self.ChannelButton15.config(fg = fgclr , bg = bgclr)
        self.ChannelButton22.config(fg = fgclr , bg = bgclr)
        self.ChannelButton29.config(fg = fgclr , bg = bgclr)
        self.ChannelButton36.config(fg = fgclr , bg = bgclr)
        # put the new i/o states in the array of i/o states
        for idx in range(4):
            GPIO.output(self.jPin[idx] ,self.AllState)
            self.IOState[idx] = self.AllState

    def ToggleOnOff(self, idx, button):
        # Toggle an i/o on or off
        # -----------------------
        if (self.IOState[idx] == 0):
            self.IOState[idx] = 1
            button.config(bg="green", fg="white")
        else:
            self.IOState[idx] = 0
            button.config(bg="red", fg="black")
        GPIO.output(self.jPin[idx] ,self.IOState[idx])

    def SetAllOff(self):
        # Drive all outputs to the 'off' state
        # ------------------------------------
        for idx in range(4):
            GPIO.output(self.jPin[idx] ,0)
            self.IOState[idx] = 0
# Main program
# ------------
import RPi.GPIO as GPIO
#Turn off GPIO warnings
GPIO.setwarnings(False)
#Set the GPIO numbering convention to be header pin numbers
GPIO.setmode(GPIO.BOARD)
#Configure each GPIO pin as an output
GPIO.setup(15,GPIO.OUT)
GPIO.setup(22,GPIO.OUT)
GPIO.setup(29,GPIO.OUT)
GPIO.setup(36,GPIO.OUT)
#Create our window using Tkinter
root = Tk()
root.title('ReLink PiHat')
root.resizable(width=FALSE, height=FALSE)
app = App(root)
#Turn all the GPIO off to start with
app.SetAllOff()
#Main loop - responds to dialog events
root.mainloop()
#we exit the main loop if user has closed the window
#reset the GPIO and end the program
GPIO.cleanup()
| [
"andrew.gatt@springltd.co"
] | andrew.gatt@springltd.co |
9014134d43bb036fbffc60790f20a299ae4710ab | 5605d4637f78f6d41958029e692b3b33818c2081 | /src/database.py | 7d3c0212509dfb34325441a6a957736c69568b5e | [] | no_license | ssynn/C-S_chat_program | 0dcc9f922f6416339b45d3fc5e66fc6a03fad306 | f847e5fe192a96ad3337cf64be34e760409069bd | refs/heads/master | 2020-05-24T21:58:20.778252 | 2019-06-16T09:20:09 | 2019-06-16T09:20:09 | 187,487,245 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,279 | py | import sqlite3
# Login
def login(user_message: dict) -> dict:
    """Validate a user's credentials against the users table.

    Parameters
    ----------
    user_message : dict with keys 'ID' and 'PASSWORD' (both str).

    Returns
    -------
    {'answer': 'success'} on a credential match,
    {'answer': 'fail'} on a mismatch,
    or None if the database could not be queried.
    """
    ans = None
    conn = None
    try:
        conn = sqlite3.connect('./data/data.db')
        cursor = conn.cursor()
        # Parameterized query — never interpolate user input into SQL.
        cursor.execute('''
        SELECT ID
        FROM users
        WHERE ID=? AND PASSWORD=?
        ''', (
            user_message['ID'],
            user_message['PASSWORD']
        ))
        rows = cursor.fetchall()
        # Exactly one matching row means the credentials are valid.
        if len(rows) == 1:
            ans = {'answer': 'success'}
        else:
            ans = {'answer': 'fail'}
    except Exception as e:
        print('Login error!')
        print(e)
    finally:
        # Guarded close: the original closed unconditionally and raised
        # NameError when sqlite3.connect itself failed.
        if conn is not None:
            conn.close()
    return ans
# Sign up
def signup(user_message: dict) -> dict:
    """Create a new user account.

    Parameters
    ----------
    user_message : dict with keys 'ID' and 'PASSWORD' (both str).

    Returns
    -------
    {'answer': 'success'} when the row was inserted;
    {'answer': 'fail', 'reason': ...} when the ID already exists;
    an incomplete dict if the database operation itself failed.
    """
    message = dict()
    conn = None
    try:
        conn = sqlite3.connect('./data/data.db')
        cursor = conn.cursor()
        # Reject duplicate IDs before inserting.
        cursor.execute('''
        SELECT *
        FROM users
        WHERE ID = ?
        ''', [user_message['ID']])
        if len(cursor.fetchall()) != 0:
            message['reason'] = '็จๆทๅทฒๅญๅจ๏ผ'
            message['answer'] = 'fail'
            raise Exception('็จๆทๅทฒๅญๅจ๏ผ')
        cursor.execute('''
        INSERT
        INTO users
        VALUES(?, ?)
        ''', [
            user_message['ID'],
            user_message['PASSWORD']
        ])
        conn.commit()
        message['answer'] = 'success'
    except Exception as e:
        print('Signup error!')
        print(e)
    finally:
        # Guarded close (original raised NameError if connect failed).
        if conn is not None:
            conn.close()
    return message
def makeFriend(user1: str, user2: str) -> dict:
    """Create a friendship row between two distinct, existing users.

    The pair is stored sorted so (a, b) and (b, a) map to the same row.
    NOTE(review): despite the original comment, this does NOT check whether
    the pair is already friends — duplicate rows are possible.

    Returns {'answer': 'success'} or {'answer': 'fail', 'reason': str(e)}.
    """
    newFriends = sorted([user1, user2])
    ans = None
    conn = None
    try:
        # A user cannot befriend themself.
        if user1 == user2:
            raise Exception('็จๆท้ๅค')
        conn = sqlite3.connect('./data/data.db')
        cursor = conn.cursor()
        # Both users must exist.
        cursor.execute('''
        SELECT *
        FROM users
        WHERE ID=? OR ID=?
        ''', newFriends)
        if len(cursor.fetchall()) != 2:
            raise Exception('ๆ ๆ็จๆท๏ผ')
        # Insert the new friendship row.
        cursor.execute('''
        INSERT
        INTO friends
        values(?,?)
        ''', newFriends)
        conn.commit()
        ans = {'answer': 'success'}
    except Exception as e:
        print('Make friends error!')
        print(e)
        ans = {'answer': 'fail', 'reason': str(e)}
    finally:
        # Close in finally so the connection is released even when the
        # existence check raises (the original leaked it on that path).
        if conn is not None:
            conn.close()
    return ans
def get_my_friends(userID) -> list:
    """Return the IDs of all friends of `userID` (empty list on error)."""
    ans = []
    conn = None
    try:
        conn = sqlite3.connect('./data/data.db')
        cursor = conn.cursor()
        # A friendship row stores the pair sorted, so the user may appear in
        # either column.
        cursor.execute('''
        SELECT *
        FROM friends
        WHERE ID1 = ? or ID2 = ?
        ''', [userID, userID])
        rows = cursor.fetchall()
        # For each pair, keep the member that is not the user themself.
        ans = [a if a != userID else b for (a, b) in rows]
    except Exception as e:
        print('Search friends error!')
        print(e)
    finally:
        # Close in finally so the connection is released even on error
        # (the original only closed on the success path).
        if conn is not None:
            conn.close()
    return ans
def get_all_users() -> list:
    """Return every user ID in the users table (empty list on error)."""
    users = []
    conn = None
    try:
        conn = sqlite3.connect('./data/data.db')
        cursor = conn.cursor()
        cursor.execute('''
        SELECT ID
        FROM users
        ''')
        # fetchall yields 1-tuples; unwrap to plain IDs.
        users = [row[0] for row in cursor.fetchall()]
    except Exception as e:
        print(e)
    finally:
        # Guarded close: the original's unconditional conn.close() raised
        # NameError whenever sqlite3.connect itself failed.
        if conn is not None:
            conn.close()
    return users
def delete_friend(user1, user2) -> dict:
    """Delete the friendship between two users.

    The pair is normalized to sorted order, matching how makeFriend stores it.

    Returns {'answer': 'success'} or {'answer': 'fail', 'reason': str(e)}.
    """
    newFriends = sorted([user1, user2])
    ans = None
    conn = None
    try:
        conn = sqlite3.connect('./data/data.db')
        cursor = conn.cursor()
        # The relationship must exist exactly once before deletion.
        cursor.execute('''
        SELECT *
        FROM friends
        WHERE ID1 = ? and ID2 = ?
        ''', newFriends)
        if len(cursor.fetchall()) != 1:
            raise Exception('็จๆท้ดๅนถ้ๅฅฝๅ๏ผ')
        cursor.execute('''
        DELETE
        FROM friends
        WHERE ID1=? and ID2=?
        ''', newFriends)
        conn.commit()
        ans = {'answer': 'success'}
    except Exception as e:
        print('Delete friends error!')
        print(e)
        ans = {'answer': 'fail', 'reason': str(e)}
    finally:
        # Close in finally so the connection is released even when the
        # existence check raises (the original leaked it on that path).
        if conn is not None:
            conn.close()
    return ans
if __name__ == "__main__":
    # Ad-hoc manual tests left by the author (commented out).
    # print(get_all_users())
    # signup({
    #     'ID':'5',
    #     'PASSWORD':'1'
    # })
    # print(get_all_users())
    # make-friend tests
    # print(makeFriend('1', '1'))
    # print(makeFriend('1', '2'))
    # print(makeFriend('1', '3'))
    # print(makeFriend('1', '4'))
    # delete-friend tests
    print(get_my_friends('1'))
    print(delete_friend('1', '2'))
    print(get_my_friends('1'))
    pass
pass
| [
"824063458@qq.com"
] | 824063458@qq.com |
d25e7326a9da02a7ac488bd3ef17368a45448185 | d6d4a1e4a4c33b7410fc63852a17ab2de089ef78 | /test2.py | 495b395916689f9f3f29750bfcfea5a50d9c2ee2 | [] | no_license | ankittiwari101/learning_git | 054ffcbf52f785a506a37d4aa49d3eb25951f8ee | 43a3166c98e46fbac9dd2c8dff7371d2aa2b392e | refs/heads/master | 2021-03-29T07:01:56.915364 | 2020-03-17T10:20:08 | 2020-03-17T10:20:08 | 247,928,915 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 71 | py | print("Tiwari Again!!This is my second commit at a github repository.") | [
"noreply@github.com"
] | ankittiwari101.noreply@github.com |
99a4ce09a2f6f8b5ae00d0f27e9e5310227a043c | 5290b41db07900b9ec0e818976480918031766eb | /kb_site/manage.py | 99088931b2f798a59a85ee09abb9039ff33deba0 | [] | no_license | aleluk/KnowledgeBase | 8db061bf6b0531f2414e9d8dde8c2482d20e799c | e38c82dfa8269443a24d12f31096b82052c9c026 | refs/heads/master | 2023-04-11T07:48:00.266479 | 2021-04-18T12:35:59 | 2021-04-18T12:35:59 | 352,182,320 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 668 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Run administrative tasks."""
    # Point Django at this project's settings module unless already set.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'kb_site_main.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        # Re-raise with a hint about the most common causes.
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    # Dispatch to the management command named on the command line.
    execute_from_command_line(sys.argv)
if __name__ == '__main__':
    main()
| [
"64218199+aleluk@users.noreply.github.com"
] | 64218199+aleluk@users.noreply.github.com |
f6bc950e15c4b64504ccaad6a8a45115c40cb4de | 447101726b535b2a12fb3c0d8336a8dd06f7dea3 | /modex/core.py | 8de5347336123cdb58e3910e439e818b1c38e5af | [] | no_license | weihaigang/CrackDict | 15503911cd73521151d5fc5a7aa2af075dbd3b3f | d752bd54cbc230e8c610a2f95beaff9247f2898a | refs/heads/master | 2023-07-31T19:01:46.000370 | 2021-09-19T00:17:13 | 2021-09-19T00:17:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,814 | py | # @Author: JogFeelingVi
# @Date: 2021-08-30 23:14:22
# @Last Modified by: By JogFeelingVi
# @Last Modified time: 2021-08-30 23:14:22
from . import rplan
from itertools import zip_longest, repeat, product
from functools import reduce
import re
class curls:
    """Driver for building password wordlists from a plan string.

    Command-line arguments are merged into `Archives`; `Action()` then runs
    the handlers in a fixed sequence, building `Synthesis` (a list of
    per-position character lists) and finally writing the wordlist via rplan.
    """
    # Default settings; merged with the parsed CLI arguments in __init__.
    Archives = {
        'var': 1.02,
    }
    Custom = []          # user-supplied custom token list (plan letter 'c')
    fmu = []             # fixed prefix/suffix fragments for rplan.wfileplus
    Synthesis = None     # per-position candidate lists once a plan is parsed
    nName = ''           # sanitized default output-file stem
    Count = 0            # total number of combinations in the plan
    Dataforplan = {'plan': 'TMDsSpPhHf'}
    # Dispatch table mapping an Archives key to its handler. Handlers take
    # (self, value); see Action() for the call convention.
    __act_dict = {
        'cust': lambda s, l: curls.__act_cust__(s, l),
        'plan': lambda s, p: curls.__act_plan__(s, p),
        'out': lambda s, o: curls.__act_out__(s, o),
        'list': lambda s, b: curls.__act_list__(s, b),
        'dual_md': lambda s, m: curls.__act_dual_md__(s, m),
        'dual_m': lambda s, m: curls.__act_dual_m__(s, m),
        'dual_d': lambda s, m: curls.__act_dual_d__(s, m),
        'minpw': lambda s, d: curls.__act_minpw__(s, d),
        'fmu': lambda s, l: curls.__act_fmu__(s, l),
    }

    def __act_fmu__(self, l: list):
        # Store the fmu fragment list, if one was given.
        if l is None:
            return
        self.fmu = l
        print(f'Save fmu {self.fmu} --fmu 442000 027')

    def __act_dual_m__(self, m: bool):
        # Zero-pad every month token to two digits ('1' -> '01').
        if m == False:
            return
        tmp = [f'{int(m):02}' for m in self.Dataforplan['M']]
        self.Dataforplan['M'] = tmp

    def __act_dual_d__(self, m: bool):
        # Zero-pad every day token to two digits.
        if m == False:
            return
        tmp = [f'{int(m):02}' for m in self.Dataforplan['D']]
        self.Dataforplan['D'] = tmp

    def __act_dual_md__(self, m: bool):
        # Convenience switch: pad both months and days.
        if m == False:
            return
        self.__act_dual_m__(True)
        self.__act_dual_d__(True)

    def __act_minpw__(self, d: int):
        # Record the minimum acceptable password length.
        self.Archives['minpw'] = d

    def __act_cust__(self, l: list):
        # Store the custom token list used by plan letter 'c'.
        if l is None:
            return
        self.Custom = l
        print(f'Save Custom list {self.Custom}, Use -p c!')

    def __act_plan__(self, S: str):
        # Parse the plan string into GPS: one candidate list per position.
        # Tokens are either a bracketed character set like [3456789] or a
        # single plan letter from TMDdSspPfhcH.
        if S is None:
            return
        # add [3456789] {1}
        GPS = []
        # NOTE(review): the pattern string should be a raw string (r'...')
        # to avoid relying on '\[' surviving normal-string escaping.
        plan_m = re.finditer('(\[([^\[\]]*)\])|([TMDdSspPfhcH])', S)
        for m in plan_m:
            x, y = m.span()
            if y - x > 1:
                # [] — bracketed literal set: each character is a candidate
                #kh = m.string[x:y].replace('[', '').replace(']','')
                kh = m.string[x:y][1:-1]
                GPS.append([f'{x}' for x in kh])
                # end
            elif y - x == 1:
                # TMDdSspPfhc — single plan letter; 'c' uses the custom list
                cl = m.string[x:y]
                GPS.append([self.rPlan(cl), self.Custom][cl == 'c'])
                # end
            else:
                return
        #('0', '1', '2', '7', '7', '5', '9', '7')
        # Total combination count is the product of per-position list sizes.
        self.Count = reduce(lambda x, y: x * y, [len(x) for x in GPS])
        # First and last candidate of the range, used for reporting and for
        # the minimum-length check below.
        start = rplan.jionStr(*[x[0] for x in GPS])
        ends = rplan.jionStr(*[x[-1] for x in GPS])
        lse = {len(start), len(ends)}
        minpw = self.Archives['minpw']
        # mksnumber
        bijiao = {True if x >= minpw else False for x in lse}
        if True in bijiao:
            self.Synthesis = GPS
            print(f'Number of password digits {lse}')
            print(f'Scope: {start}-{ends}')
            self.nName = self.fname_invalid(f'{ends}')
            # File name invalid
            print(f'Count: {self.Count:,} Done!')
        else:
            print(
                f'minpw seting {minpw}, [ -p {S} ] Not eligible. Refer {lse}')

    def __act_out__(self, o: str):
        # Write the wordlist. Skips entirely when no plan was accepted.
        if self.Synthesis is None:
            return
        else:
            # Default output path derives from the sanitized range end.
            path = f'./{self.nName}.lst' if o is None else o
            outf = rplan.pathx(path)
            print(f'OutFile: {outf}')
            #rplan.wfilefor(outf, curls.Synthesis)
            wplan = rplan.wfileplus(outf, self.Synthesis, self.Count)
            minpw = self.Archives['minpw']
            if self.fmu != None and 'plus_fmu' in self.Archives.keys():
                wplan.fmus(self.fmu, self.Archives['plus_fmu'])
            else:
                print('Plus_fmu Start Error!')
            # wplan.fumc = {'fum': funcobj, 'args': [x, y, z]}
            print(f'Minimum password length {wplan.minpw(minpw)}')
            wplan.writeLc()

    def __act_list__(self, b: bool):
        # Print the available plan letters and their candidate values.
        if b == False:
            return
        plankeys = 'M,D,d,s,S,f,p,P,T'.split(',')
        for key in plankeys:
            value = ','.join(self.rPlan(key))
            print(f'- {key} {value}')
        print('- h ba,pa,ma,fa,da,tu...')
        print('- H Ba,Zhang,Zhao,Yun...')
        print('- c Custom list, -c xxx yyy zzz')

    @staticmethod
    def fname_invalid(fname: str) -> str:
        # Sanitize a string for use as a file name.
        # NOTE(review): raises IndexError on an empty string (fname[0]).
        if len(fname) > 255:
            fname = fname[0:10]
        # NOTE(review): fname[1:-1] also drops the LAST character; stripping
        # only the leading '+/-/.' would be fname[1:]. Confirm intent.
        if fname[0] in ['+', '-', '.']:
            fname = fname[1:-1]
        blacklist = ['/', '\t', '\b', '@', '#', '$', '%', '^', '&', '*', '(', ')', '[', ']', '?']
        intersection = set(blacklist) & set(fname)
        if len(intersection) != 0:
            # NOTE(review): characters like ']', '^' are not escaped before
            # being placed in a regex character class — re.escape() would be
            # safer here.
            regx = '[{}]'.format(''.join(intersection))
            fname = re.sub(regx, '', fname)
        return fname

    def rPlan(self, key: str):
        # Look up the candidate list for a plan letter; None if unknown.
        if key in self.Dataforplan.keys():
            return self.Dataforplan[key]
        else:
            return None

    def InitializationPlan(self):
        # Populate Dataforplan from the rplan.plan enum: comma-separated
        # members (months, days, syllables, surnames) are split into lists,
        # everything else is split into single characters.
        plan_d = rplan.plan.__members__
        for k, v in plan_d.items():
            if k in 'MDhH':
                tmp = v.value.split(',')
            else:
                tmp = list(v.value)
            self.Dataforplan[k] = tmp

    def __init__(self, args: dict) -> None:
        # Merge CLI arguments over the defaults, then load the plan data.
        self.Archives = {**self.Archives, **args}
        self.InitializationPlan()
        print(f'Archives: {self.Archives}')

    def Action(self):
        # Run the handlers in dependency order: options first, then the plan
        # parse, then output and listing.
        Sequence = 'minpw,cust,fmu,dual_m,dual_d,dual_md,plan,out,list'.split(
            ',')
        for Seq in Sequence:
            vals = self.Archives[Seq]
            self.__act_dict[Seq](self, vals)
"lifelse@outlook.com"
] | lifelse@outlook.com |
801a2a01933e03fb0f56781ece4a79654cc8788c | b72d0900bec98fcee6c725cef035c02ca29bbf1b | /Python/100Excersises/1 to 25/25/25.py | 38dc3ba7dc12908e54d10b12f5a442b5a1ccd3cd | [
"MIT"
] | permissive | sugamkarki/NAMI-Year-II-TERM-I-Group_Project | 68b8808c8607858a313e8b4d601d8d12c6edda2b | f0a9a5f219ccbec024eb5316361db3fca46e171c | refs/heads/master | 2023-06-28T19:07:19.330236 | 2021-07-24T03:05:42 | 2021-07-24T03:05:42 | 312,819,148 | 0 | 0 | MIT | 2021-07-24T12:45:06 | 2020-11-14T13:08:08 | Python | UTF-8 | Python | false | false | 163 | py | alphabet=[]
# Build the lowercase alphabet a-z from ASCII codes 97-122,
# then print each letter on its own line.
for letters in range(97,123):
    alphabet.append(chr(letters))
d=dict(a=alphabet)
for item in d.values():
    for alpha in item:
        print(alpha)
| [
"sugamkarki7058@gmail.com"
] | sugamkarki7058@gmail.com |
75ed8c814760c96bc4cb333a81523c02f6fce8d5 | 52a4d282f6ecaf3e68d798798099d2286a9daa4f | /test_sa.py | 81104dd1d3c6c5b477f238e92d7d1b4e9c05347a | [
"MIT"
] | permissive | bkovitz/FARGish | f0d1c05f5caf9901f520c8665d35780502b67dcc | 3dbf99d44a6e43ae4d9bba32272e0d618ee4aa21 | refs/heads/master | 2023-07-10T15:20:57.479172 | 2023-06-25T19:06:33 | 2023-06-25T19:06:33 | 124,162,924 | 5 | 1 | null | null | null | null | UTF-8 | Python | false | false | 6,332 | py | # test_sa.py -- Test of spreading activation
import unittest
from pprint import pprint as pp
import inspect
from time import process_time
from dataclasses import dataclass
import operator
from operator import itemgetter
from heapq import nlargest
from typing import Union, List, Tuple, Dict, Set, FrozenSet, Iterable, Any, \
NewType, Type, ClassVar, Sequence, Callable, Hashable
from itertools import chain
import networkx as nx
from Propagator import Propagator, Delta
NodeId = NewType('NodeId', int)
@dataclass
class MyProp(Propagator):
    """Spreading-activation propagator: each node pushes its activation to
    every neighbor, scaled by the connecting edge's weight."""
    noise: float = 0.0

    def make_deltas(self, g, old_d):
        # One batch of deltas per source node currently holding activation.
        #print() #DEBUG
        return chain.from_iterable(
            self.deltas_from(g, old_d, nodeid)
            for nodeid in old_d
        )

    def deltas_from(self, g, old_d, nodeid) -> List[Delta]:
        '''Deltas from nodeid to its neighbors.'''
        result: List[Delta] = []
        nodeid_a = old_d.get(nodeid, 0.0)
        for neighborid, edge_d in g.adj[nodeid].items():
            # Missing weights default to 1.0.
            weight = edge_d.get('weight', 1.0)
            delta = Delta(
                neighborid,
                weight * nodeid_a,
                nodeid
            )
            result.append(delta)
        return result

    def min_value(self, g, nodeid):
        # Activations are clamped to be non-negative.
        return 0.0
class Node:
    # Base marker class for slipnet nodes; subclasses carry a NodeId.
    nodeid: NodeId
@dataclass(frozen=True)
class Operator:
    """A named arithmetic operation (e.g. addition displayed as '+')."""
    func: Callable
    name: str

    def call(self, *operands: int) -> int:
        """Apply the wrapped function to the given operands."""
        fn = self.func
        return fn(*operands)

    def __str__(self):
        return str(self.name)
# The three arithmetic operations used to generate Equations below.
plus = Operator(operator.add, '+')
times = Operator(operator.mul, 'x')
minus = Operator(operator.sub, '-')
@dataclass(frozen=True)
class Before:
    '''A feature meaning that .obj was present before the action represented
    by the slipnode occurred.'''
    obj: Hashable

    def __str__(self):
        return 'Before({})'.format(self.obj)
@dataclass(frozen=True)
class After:
    '''A feature meaning that .obj was present after the action represented
    by the slipnode occurred.'''
    obj: Hashable

    def __str__(self):
        return 'After({})'.format(self.obj)
@dataclass(frozen=True)
class Equation(Node):
    """An arithmetic fact such as 5 + 4 = 9, linking operands, operator and
    result so each can serve as a slipnet feature."""
    operands: Tuple[int]
    operator: Operator
    result: int

    def features(self) -> Iterable[Hashable]:
        """Yield every feature node this equation should be connected to."""
        yield from chain.from_iterable(
            (operand, Before(operand)) for operand in self.operands)
        yield self.operator
        yield self.result
        yield After(self.result)

    def __str__(self):
        joiner = ' %s ' % self.operator
        return '%s = %s' % (joiner.join(map(str, self.operands)), self.result)
class TestSA(unittest.TestCase):
    """Unit tests for spreading activation over small graphs."""

    def test_sa(self):
        # No positive feedback so the expected activations are exact.
        p = MyProp(positive_feedback_rate=0.0)
        self.assertEqual(p.noise, 0.0)
        g = nx.Graph() # undirected graph
        g.add_edge(1, 2, weight=1.0)
        g.add_edge(1, 3, weight=1.3)
        g.add_node(4)
        #print(g.edges[1, 2]['weight'])
        #for neighbor in g.adj[1].items():
        #print(neighbor)
        # Let's give all nodes activation=1.0.
        initial_a_dict = dict((nodeid, 1.0) for nodeid in g.nodes)
        # Propagate
        got: Dict[NodeId, float] = p.propagate(g, initial_a_dict)
        # Expected values pin down the propagation arithmetic exactly.
        self.assertEqual(got, {1: 1.026, 2: 1.0, 3: 1.006, 4: 0.98})

    def test_eqns(self):
        p = MyProp(positive_feedback_rate=0.0, sigmoid_p=1.5)
        def query(g, features, k=10):
            # Activate the query features, propagate, and return the top-k
            # Equation nodes by resulting activation.
            activations_in = dict((f, 1.0) for f in features)
            activations_out = p.propagate(g, activations_in, num_iterations=10)
            tups = [
                (node, a)
                for (node, a) in activations_out.items()
                if isinstance(node, Equation)
            ]
            return nlargest(k, tups, itemgetter(1))
        def see(activations_d):
            # Debug helper: print activations sorted low-to-high.
            for node, a in sorted(activations_d.items(), key=itemgetter(1)):
                print(f'{node!s:20s} {a:0.3f}')
        g = nx.Graph()
        # Make slipnet: a bipartite graph of Equations and features
        for a in range(1, 11):
            for b in range(1, 11):
                if b >= a:
                    continue
                for operator in [plus, minus, times]:
                    e = Equation((a, b), operator, operator.call(a, b))
                    g.add_node(e)
                    for f in e.features():
                        g.add_edge(f, e, weight=1.0)
        # Querying with 4 and 5 "before" should surface the 5?4 equations.
        tups = query(g, [4, 5, Before(4), Before(5)], k=3)
        self.assertCountEqual(
            ['5 + 4 = 9', '5 x 4 = 20', '5 - 4 = 1'],
            [str(eqn) for (eqn, a) in tups]
        )
if __name__ == '__main__':
    # Interactive exploration mode: build the same slipnet as the tests and
    # print the top equations for a sample query.
    import matplotlib.pyplot as plt
    plt.ion()
    p = MyProp(positive_feedback_rate=0.0, sigmoid_p=1.5)
    def query(g, features, k=4):
        # Activate the features, propagate, return top-k Equation nodes.
        activations_in = dict((f, 1.0) for f in features)
        activations_out = p.propagate(g, activations_in, num_iterations=10)
        tups = [
            (node, a)
            for (node, a) in activations_out.items()
            if isinstance(node, Equation)
        ]
        return nlargest(k, tups, itemgetter(1))
    def see(activations_d):
        # Debug helper: print activations sorted low-to-high.
        for node, a in sorted(activations_d.items(), key=itemgetter(1)):
            print(f'{node!s:20s} {a:0.3f}')
    g = nx.Graph()
    # Bipartite slipnet: every a?b equation linked to its features.
    for a in range(1, 11):
        for b in range(1, 11):
            if b >= a:
                continue
            for operator in [plus, minus, times]:
                e = Equation((a, b), operator, operator.call(a, b))
                g.add_node(e)
                for f in e.features():
                    g.add_edge(f, e, weight=1.0)
    #e1 = Equation((2, 3), plus, plus.call(2, 3))
    #print(e1)
    # g.add_node(e1)
    # for f in e1.features():
    #     g.add_edge(f, e1, weight=1.0)
    # a0 = dict((f, 1.0) for f in [4, 5, Before(4), Before(5)])
    # #a0 = dict((f, 1.0) for f in [7, 6, Before(7), Before(6)])
    # see(a0)
    # print()
    #
    # start = process_time()
    # a1 = p.propagate(g, a0, num_iterations=10)
    # end = process_time()
    # print(end - start)
    # #see(a1)
    # print(sum(a1.values()))
    es = query(g, [4, 5, Before(4), Before(5)])
    pp(es)
    # # Uncomment the below to visualize the graph. Might take extremely long to render!
    #nx.draw_graphviz(g)
    #plt.show()
"bkovitz@indiana.edu"
] | bkovitz@indiana.edu |
507f6b0403a78b43766a63432e623686cc5a0493 | 466ba928ab060cc6e9b84cf4f64f742cc6153eb2 | /checkForUpdates.py | f7c00929fceb0b51fc493ea4a83745b57f8259c2 | [] | no_license | Joshua1337/FreifunkNodeChecker | 16de6a35a4343357d357d193ecc843ba89482571 | 53b9a3ef6890ed58f26a35a6accea03597596124 | refs/heads/master | 2021-11-10T01:25:19.307961 | 2021-10-24T11:59:13 | 2021-10-24T11:59:13 | 80,371,054 | 0 | 0 | null | 2020-09-24T21:39:52 | 2017-01-29T20:38:06 | Python | UTF-8 | Python | false | false | 2,612 | py | # coding: utf-8
import argparse
import logging
import os
import json
import requests
from telegram.ext import Updater
from time import sleep
# Root-logger setup: log everything from DEBUG up, with timestamp and origin.
logging.basicConfig(level=logging.DEBUG,
                    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
class Check():
    """Poll a JSON node list and announce new nodes via a Telegram bot.

    The document at ``url`` is fetched once per minute and compared against
    a cached copy stored next to this script; every node id that is not in
    the cache triggers a Telegram notification to ``chatId``.
    """

    def __init__(self, authToken, url, chatId):
        """Store credentials/targets and compute the cache file location.

        :param authToken: auth token of the Telegram bot used to notify
        :param url: network address of the JSON node list
        :param chatId: Telegram chat id that receives the notifications
        """
        self.lastContent = ""
        # os.path.join is portable; the previous '"\\cache"' relied on the
        # invalid escape sequence '\c' and a Windows-only path separator.
        self.filePath = os.path.join(
            os.path.dirname(os.path.realpath(__file__)), "cache")
        self.authToken = authToken
        self.url = url
        self.chatId = chatId

    def cacheContainsId(self, id, cache):
        """Return True if a node with the given id exists in *cache*.

        :param cache: dict shaped like the remote JSON: {'nodes': [{'id': ...}]}
        """
        for node in cache['nodes']:
            if node['id'] == id:
                return True
        return False

    def run(self):
        """Poll forever: refresh the cache and notify about unknown nodes."""
        while True:
            # (Re)build the cache from the live data when it is missing or empty.
            if not os.path.isfile(self.filePath) or os.path.getsize(self.filePath) == 0:
                with open(self.filePath, "w") as fh:
                    # NOTE(review): requests.get has no timeout; a stalled
                    # server hangs the loop forever — consider timeout=30.
                    self.lastContent = json.loads(requests.get(self.url).text)
                    json.dump(self.lastContent, fh)
            else:
                with open(self.filePath, "r") as fh:
                    self.lastContent = json.load(fh)
            r = requests.get(self.url)
            js = json.loads(r.text)
            if self.lastContent['nodes'] != js['nodes']:
                updater = Updater(self.authToken)
                for node in js['nodes']:
                    # cacheContainsId is True for nodes we already know about.
                    already_known = self.cacheContainsId(node['id'], self.lastContent)
                    if not already_known:
                        # Adjacent literals avoid the stray run of spaces the
                        # old backslash-continued string embedded in the HTML.
                        message = ('Neuer Knoten <a href="https://map.freifunk-hennef.de/'
                                   '#!v:m;n:{}">{}</a>').format(node['id'], node['name'])
                        updater.bot.sendMessage(chat_id=self.chatId, text=message,
                                                parse_mode="html")
                # Persist the new state so restarts do not re-announce nodes.
                self.lastContent = js
                with open(self.filePath, "w") as fh:
                    json.dump(self.lastContent, fh)
            logging.info("Sleeping 60s")
            sleep(60)
if __name__ == "__main__":
    # CLI entry point: all three arguments are mandatory.
    # Help strings restored to proper UTF-8 German (they were mojibake,
    # e.g. "รberprรผft" for "Überprüft").
    parser = argparse.ArgumentParser(description="Überprüft eine JSON-Datei nach Änderungen")
    parser.add_argument("-token", type=str, required=True, help="Authtoken für den Telegram Bot")
    parser.add_argument("-url", type=str, required=True, help="Netzwerkpfad zur JSON-Datei")
    parser.add_argument("-chat", type=int, required=True,
                        help="Telegram Chat-ID an die die Benachrichtigung gesendet werden soll")
    parsed_args = parser.parse_args()
    # Defensive double-check; argparse already enforces required=True.
    if not parsed_args.token:
        parser.print_help()
        exit()
    Check(parsed_args.token, parsed_args.url, parsed_args.chat).run()
| [
"thecop@thecop.us"
] | thecop@thecop.us |
00404615272f8b216b6112c31abf170d3dbb3ac4 | af200bff16d3b176c0cab30d0d71666f9511c3cd | /__init__.py | 2b7440ba08e59b803ecb9580382b5e9f80ef1c0e | [] | no_license | NKLASS/Russian-Grammar-Analyser | af222c307730bed8c97b55dd4672a6443bdead47 | e1d4ae8ceb427df412d93ca09a5e63651e4a72bc | refs/heads/master | 2023-03-17T21:54:38.729881 | 2017-03-29T14:20:59 | 2017-03-29T14:20:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 529 | py | # -*- coding: UTF-8 -*-
import sys
import json
# Python 2 only: reload(sys) restores the setdefaultencoding attribute that
# site.py deletes, so the default string encoding can be forced to UTF-8.
# NOTE(review): this idiom does not exist on Python 3 (no reload builtin,
# no sys.setdefaultencoding) -- this module is Python 2 specific.
reload(sys)
from flask import Flask, jsonify,request,render_template
from translator import analyseGrammar
sys.setdefaultencoding('utf-8')
# Single module-level application object used by the route decorators below.
app = Flask(__name__)
@app.route('/')
def index():
    """Serve the main page (static template with the input form)."""
    return render_template('computer.html')
@app.route('/_add_numbers')
def add_numbers():
    """AJAX endpoint: run grammar analysis on query parameter ``a``.

    Returns JSON shaped like ``{"result": [analysis]}``.

    NOTE(review): the route name "_add_numbers" looks like a leftover from
    the Flask AJAX example; the endpoint actually performs grammar analysis.
    """
    user_input = request.args.get('a')
    # Renamed locals: the originals shadowed the builtins `list` and `dict`.
    analysis = analyseGrammar(user_input.lstrip(' '))
    results = [analysis]
    return jsonify(result=results)
if __name__ == '__main__':
    # Development server only; debug=True enables the reloader and debugger.
    app.run(debug=True)
"bjamurray@gmail.com"
] | bjamurray@gmail.com |
49d98b69895f2db5dd9fa22267d1e67e92e73d52 | 669196cb7444c699b9c477bd36d76082d534e08a | /tests/unit/test_user_email.py | c475eef807feb4dd45015fb7490c85ba2be6c329 | [
"MIT"
] | permissive | tilgovi/pyramid_fullauth | d51ad9fabca0ef380f6981c0f62e5c36d8484cba | 3de2f784e89c2e82104dbe36acbb85597e4fff31 | refs/heads/master | 2021-01-24T15:15:28.691347 | 2014-11-02T18:45:05 | 2014-11-02T18:45:05 | 26,466,736 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 894 | py | """Test email related User methods."""
from pyramid.compat import text_type
from pyramid_fullauth.models import User
NEW_EMAIL = text_type('new@example.com')
def test_set_new_email():
    """
    Test User.set_new_email method.
    setting new email should result in setting new_email field,
    and key used to activate the change.
    """
    user = User()
    # A freshly created user has no pending e-mail change.
    assert user.email_change_key is None
    assert user.new_email is None
    user.set_new_email(NEW_EMAIL)
    assert user.new_email == NEW_EMAIL
    # The key value is generated; only its presence is asserted here.
    assert user.email_change_key
def test_change_email():
    """
    Test User.change_email method.
    Calling it should copy new email set by set_new_email method
    into regular email field.
    """
    user = User()
    assert not user.email
    user.set_new_email(NEW_EMAIL)
    user.change_email()
    # The one-time key is consumed and the pending address becomes current.
    assert not user.email_change_key
    assert user.email == NEW_EMAIL
| [
"fizyk@fizyk.net.pl"
] | fizyk@fizyk.net.pl |
790830077d8069ae93383389d8841eccf07aeda2 | bc0f99dba2233f02e1f1b59711164bc2eb47e072 | /LOBDeepPP/LOBDeepPP_model/__LOB_models_output2D.py | 8679322c5bbb76988d2073f925c8c23fa9be2086 | [
"MIT"
] | permissive | mariussterling/LOBDeepPP_code | 29e483b70ee81f4302ea977c47a25d8ec743b2b9 | 010782f8db9a745940753f49d953361c32ee1190 | refs/heads/master | 2022-10-10T15:45:40.770829 | 2020-06-09T22:48:27 | 2020-06-09T22:48:27 | 255,342,824 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,570 | py | from keras import layers, models
from .__activations import PReLU2
def output_model_askbid(inp, params, output_shape, interpretable, **kwargs):
    """Build the two-headed (ask/bid) output stage of the network.

    :param inp: 4D tensor; axis 1 is cropped down to its last entry below
        (presumably the time axis -- TODO confirm against the caller).
    :param params: hyper-parameter dict; forwarded to output_model_b.
    :param output_shape: sequence whose first entry is the per-head length.
    :param interpretable: if True, wire layers directly onto ``inp``;
        otherwise wrap them in a nested keras Model named 'out'.
    :param kwargs: ``interpretable_nested`` controls the same flag for the
        two per-head sub-models (default True).
    :return: concatenated ask/bid output tensor.
    """
    if interpretable:
        out = inp
    else:
        # Stand-alone input so the layers can live inside a nested Model.
        out_inp = layers.InputLayer(
            input_shape=inp.get_shape().as_list()[1:],
            name='out_inp')
        out = out_inp.output
    # Keep only the final entry along axis 1, then squeeze that axis away.
    out = layers.Cropping2D(
        cropping=((out.shape[1].value - 1, 0), (0, 0)),
        name=f'out_cropping')(out)
    out = layers.Reshape(
        target_shape=[i.value for i in out.shape[2:]],
        name='out_reshape')(out)
    # Two structurally identical heads, one per book side.
    out_ask = output_model_b(
        out, params, output_shape[0],
        interpretable=kwargs.get('interpretable_nested', True),
        name='ask')
    out_bid = output_model_b(
        out, params, output_shape[0],
        interpretable=kwargs.get('interpretable_nested', True),
        name='bid')
    out = layers.concatenate([out_ask, out_bid], name='out_concatenate')
    if interpretable:
        return out
    else:
        return models.Model(inputs=out_inp.input, outputs=out, name='out')(inp)
def output_model_b(inp, params, output_shape, interpretable, name=''):
    """Build one output head: Dense/PReLU/BatchNorm stack plus a signed
    positive/negative output pair.

    :param inp: input tensor for this head.
    :param params: dict; ``params['output']['filters']`` lists the Dense
        widths of the intermediate stack.
    :param output_shape: length of this head's output vector.
    :param interpretable: if True, wire layers directly onto ``inp``;
        otherwise wrap them in a nested keras Model named 'out_<name>'.
    :param name: suffix distinguishing the ask/bid heads in layer names.
    :return: output tensor of shape (..., output_shape, 1).
    """
    # h = params.get('output').get('h', output_shape)
    if interpretable:
        out = inp
    else:
        out_inp = layers.InputLayer(
            input_shape=inp.get_shape().as_list()[1:],
            name=f'out_{name}_inp')
        out = out_inp.output
    filters = params['output'].get('filters', None)
    for i, f in enumerate(filters):
        out = layers.Dense(f, name=f'out_{name}_dense{i}')(out)
        out = PReLU2(name=f'out_{name}_dense{i}_relu')(out)
        out = layers.BatchNormalization(name=f'out_{name}_dense{i}_bn')(out)
    out = layers.Flatten(name=f'out_{name}_flatten')(out)
    # Signed output: one rectified branch on x, one on -x, subtracted below,
    # so the head can emit both positive and negative values.
    out_p = layers.Dense(
        output_shape,
        name=f'out_{name}_out_pos')(out)
    out_p = PReLU2(name=f'out_{name}_out_pos_relu')(out_p)
    out_n = layers.Lambda(
        lambda x: x * -1,
        name=f'out_{name}_out_neg0')(out)
    out_n = layers.Dense(
        output_shape,
        # activation='relu',
        name=f'out_{name}_out_neg')(out_n)
    out_n = PReLU2(name=f'out_{name}_out_neg_relu')(out_n)
    out = layers.Subtract(name=f'out_{name}_out')([out_p, out_n])
    # Trailing singleton channel axis.
    out = layers.Reshape(
        target_shape=out.get_shape().as_list()[1:] + [1],
        name=f'out_{name}_reshape')(out)
    if interpretable:
        return out
    else:
        return models.Model(
            inputs=out_inp.input,
            outputs=out,
            name=f'out_{name}'
        )(inp)
| [
"marius.sterling@hu-berlin.de"
] | marius.sterling@hu-berlin.de |
67143a8e1eb81c79e74cb83a07a1483096d620ba | 4ffb9e383f7c2759bd39a7e1772ecb437e7b4082 | /cursosweb/bin/pip3.7 | 4fefeae8196f856e476355f5b36066cfd9842145 | [] | no_license | meridiaz/x-serv-15.8-cms-users-put | 62fe3a9cd418ced2c67b9b66b1d525107831579e | 67ce09cc2f04b76a5d99149e71f833636c94b6d4 | refs/heads/master | 2022-12-11T18:49:04.635968 | 2020-04-18T17:32:49 | 2020-04-18T17:32:49 | 295,018,373 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 288 | 7 | #!/home/meri/Escritorio/sat/gitlab/try2git/x-serv-15.6-django-cms-put/cursosweb/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal import main
if __name__ == '__main__':
    # Strip setuptools wrapper suffixes ("-script.py", "-script.pyw", ".exe")
    # from argv[0] so pip reports its canonical program name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    # Delegate to pip's entry point and propagate its exit status.
    sys.exit(main())
| [
"mdiaz"
] | mdiaz |
d86ee1b4566a0e1a14a1ae48450497bf4829a0b9 | bc06161fc2a7ac28931042a9e503a276c003870c | /mysite/vacancies/management/commands/_utils.py | f64f9e7411333ccda3e65785f282f3ba08111b11 | [] | no_license | ilineserg/django_indeed | ac324ee886509b10b119f528ab0c1c0ed809ac25 | 911380593b0068bbe6fd7ac33d8086f180557f4d | refs/heads/master | 2021-02-10T03:31:56.939497 | 2020-04-02T08:34:46 | 2020-04-02T08:34:46 | 244,348,167 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 759 | py | import hashlib
import urllib.parse as url_parser
from enum import Enum
class Colors(Enum):
    """ANSI escape sequences available for terminal output."""
    RED = '\x1b[91m'
    GREEN = '\x1b[32m'
    END = '\x1b[0m'
def colorize(text: str, color: Colors) -> str:
    """Wrap *text* in the given color code, resetting the color afterwards."""
    return color.value + text + Colors.END.value
def normalize_url(base, url):
    """Resolve a possibly relative *url* against *base* (RFC 3986 join)."""
    return url_parser.urljoin(base, url)
def url_to_md5(url):
    """Return the hexadecimal MD5 digest of *url* (UTF-8 encoded)."""
    digest = hashlib.md5(url.encode())
    return digest.hexdigest()
def debug_log(message):
    """Print a debug message to stdout (file logging currently disabled)."""
    print(message)
    # with open('debug.log', 'a+') as _log_file:
    #     _log_file.write(f"{message}\n")
def error_log(message):
    """Print an error message to stdout (file logging currently disabled)."""
    print(message)
    # with open('error.log', 'a+') as _log_file:
    #     _log_file.write(f"{message}\n")
| [
"ilineserg@gmail.com"
] | ilineserg@gmail.com |
05743f19fd6a54dc73ab2663b6a43d967e3953e5 | caf39133030e9e9d9240769fbfe72287009c6b51 | /supervised_learning/0x00-binary_classification/11-neural_network.py | 21048d014bf2a955602c5ea89d0fdecd77c0e585 | [] | no_license | sazad44/holbertonschool-machine_learning | d08facbc24582ebcedf9a8607c82b18909fe7867 | b92e89b980a8f1360a24f4ed5654a2ab0dfac679 | refs/heads/master | 2022-11-30T22:32:21.264942 | 2020-08-12T05:25:06 | 2020-08-12T05:25:06 | 280,286,486 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,014 | py | #!/usr/bin/env python3
"""Class definitions for neural network with 1 hidden layer"""
import numpy as np
class NeuralNetwork():
    """Single-hidden-layer neural network for binary classification."""

    def __init__(self, nx, nodes):
        """Validate dimensions and initialize weights, biases and activations.

        nx: number of input features; nodes: hidden-layer width.
        Raises TypeError for non-int arguments, ValueError for values < 1.
        """
        if not isinstance(nx, int):
            raise TypeError("nx must be an integer")
        if nx < 1:
            raise ValueError("nx must be a positive integer")
        if not isinstance(nodes, int):
            raise TypeError("nodes must be an integer")
        if nodes < 1:
            raise ValueError("nodes must be a positive integer")
        # Hidden layer: standard-normal weights, zero bias column.
        self.__W1 = np.random.randn(nodes, nx)
        self.__b1 = np.zeros((nodes, 1))
        self.__A1 = 0
        # Output neuron: standard-normal weights, scalar zero bias.
        self.__W2 = np.random.randn(1, nodes)
        self.__b2 = 0
        self.__A2 = 0

    @property
    def W1(self):
        """Hidden-layer weight matrix, shape (nodes, nx)."""
        return self.__W1

    @property
    def b1(self):
        """Hidden-layer bias column vector, shape (nodes, 1)."""
        return self.__b1

    @property
    def A1(self):
        """Hidden-layer activations (0 until forward_prop runs)."""
        return self.__A1

    @property
    def W2(self):
        """Output-layer weight row vector, shape (1, nodes)."""
        return self.__W2

    @property
    def b2(self):
        """Output-layer bias (scalar)."""
        return self.__b2

    @property
    def A2(self):
        """Network output activations (0 until forward_prop runs)."""
        return self.__A2

    def forward_prop(self, X):
        """Run one sigmoid forward pass; X has shape (nx, m).

        Returns the tuple (A1, A2) and caches both activations.
        """
        z1 = np.matmul(self.__W1, X) + self.__b1
        self.__A1 = 1 / (1 + np.exp(-z1))
        z2 = np.matmul(self.__W2, self.__A1) + self.__b2
        self.__A2 = 1 / (1 + np.exp(-z2))
        return self.__A1, self.__A2

    def cost(self, Y, A):
        """Mean logistic (cross-entropy) loss of predictions A vs labels Y."""
        # 1.0000001 keeps the second log away from log(0) when A == 1.
        per_example = Y * np.log(A) + (1 - Y) * np.log(1.0000001 - A)
        return -np.sum(per_example) / len(per_example[0])
| [
"36613205+sazad44@users.noreply.github.com"
] | 36613205+sazad44@users.noreply.github.com |
476aa2a98daed909cdc94030738f19b05ded46bf | 454fc28a4db23ff10e0642bc9c67e01d95230d42 | /functions.py | 849d5ae0f362aa9dfddb6a2964e55bfc10b6502a | [] | no_license | SarsenovZ2z/fontrec_dataset | 9d739418109b98a348771c9a63dd5343f3ba1493 | 94f948bc3f942de478c1244cd1f963fa1cea66b1 | refs/heads/master | 2020-05-18T20:42:32.492312 | 2019-05-03T00:42:54 | 2019-05-03T00:42:54 | 184,628,389 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 518 | py | import re
import random
import os
def getFonts(path):
    """Recursively collect .ttf files under *path*.

    Returns a list of {'path': full_path, 'name': filename_without_.ttf}.
    """
    fonts = []
    for root, _dirs, filenames in os.walk(path):
        for fname in filenames:
            if '.ttf' not in fname:
                continue
            fonts.append({
                'path': os.path.join(root, fname),
                'name': fname.replace('.ttf', ''),
            })
    return fonts
def getRandomText(file):
    """Return the file's contents as a list of lines, with runs of blank
    lines collapsed into a single line break.

    NOTE(review): despite the name, nothing here is random -- the whole
    file is returned deterministically; confirm the intended behaviour.
    """
    # 'with' guarantees the handle is closed; the original leaked it.
    with open(file, "r") as handle:
        raw = handle.read()
    return re.sub(r'(\n\s*)+\n+', '\n', raw).splitlines()
def rand():
    """Return a pseudo-random float uniformly drawn from [0, 1]."""
    return random.uniform(0, 1)
def randInt(maxNum):
    """Return a pseudo-random integer in [0, maxNum], both ends inclusive."""
    # randint(0, n) is defined as randrange(0, n + 1); identical behaviour.
    return random.randrange(0, maxNum + 1)
| [
"nurik9293709@gmail.com"
] | nurik9293709@gmail.com |
946ffb36b8439369b9bb56e4d75d22cf7dc120d2 | 317f0a8f92043a04a1ec1986603c77c5844d7314 | /Default/install_package_control.py | 2d10cd1b517c4435d3e33e2343bb96ede23c25af | [
"LicenseRef-scancode-boost-original"
] | permissive | sharunkumar/Packages | f658c0a1bbe505a697fc62cbd580950ef388a6bc | bae297c3f03921c8aa2e0adb0ce2a40ee8d33330 | refs/heads/master | 2020-04-06T09:51:16.493005 | 2019-05-09T09:39:20 | 2019-05-09T09:39:20 | 157,359,405 | 0 | 0 | NOASSERTION | 2018-11-13T10:06:30 | 2018-11-13T10:06:30 | null | UTF-8 | Python | false | false | 5,524 | py | import base64
import binascii
import os
import threading
from urllib.error import URLError
from urllib.request import build_opener, install_opener, ProxyHandler, urlopen
import sublime
import sublime_api
import sublime_plugin
class InstallPackageControlCommand(sublime_plugin.ApplicationCommand):
    """Download, verify and install the Package Control package.

    Network work runs on a worker thread; success/failure dialogs are
    marshalled back to the main thread via sublime.set_timeout.
    """
    # Prefix for every error line printed to the Sublime console.
    error_prefix = 'Error installing Package Control: '
    # Package file name written into the "Installed Packages" directory.
    filename = 'Package Control.sublime-package'
    # Base64-encoded public key used to check the package's detached
    # signature in _verify().
    public_key = (
        'MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEkiE2JtDn/IQDfVLso4HRg0BNMHNj'
        '5rpuEIVaX6txyFS0HoBmCgd+9AXKcgKAsBKbEBD6a9nVzLLmJrDVFafepQ==')
    def run(self):
        """Start the install on a background thread (keeps the UI responsive)."""
        threading.Thread(target=self._install).start()
    def is_visible(self):
        """Hide the command when Package Control is already installed,
        either as a .sublime-package file or as an unpacked directory."""
        ipp_path = os.path.join(sublime.installed_packages_path(), self.filename)
        p_path = os.path.join(sublime.packages_path(), self.filename.replace('.sublime-package', ''))
        return not os.path.exists(ipp_path) and not os.path.exists(p_path)
    def _install(self):
        """
        RUNS IN A THREAD
        Downloads and then installs Package Control, alerting the user to
        the result
        """
        try:
            package_data = self._download()
            if package_data is None:
                # Dialogs must be shown from the main thread.
                sublime.set_timeout(self._show_error, 10)
                return
            dest = os.path.join(sublime.installed_packages_path(), self.filename)
            with open(dest, 'wb') as f:
                f.write(package_data)
            sublime.set_timeout(self._show_success, 10)
        except (Exception) as e:
            print(self.error_prefix + str(e))
            sublime.set_timeout(self._show_error, 10)
    def _show_success(self):
        """
        RUNS IN THE MAIN THREAD
        """
        sublime.message_dialog(
            "Package Control was successfully installed\n\n"
            "Use the Command Palette and type \"Install Package\" to get started")
    def _show_error(self):
        """
        RUNS IN THE MAIN THREAD
        """
        sublime.error_message(
            "An error occurred installing Package Control\n\n"
            "Please check the Console for details\n\n"
            "Visit https://packagecontrol.io/installation for manual instructions")
    def _download(self):
        """
        RUNS IN A THREAD
        Attempts to download over TLS first, falling back to HTTP in case a
        user's proxy configuration doesn't work with TLS by default.
        Although a secure connection is made, Python 3.3 does not check the
        connection hostname against the certificate, so a TLS connection
        really only provides privacy. To ensure that the package has not been
        modified, we check a public-key signature of the file.
        :return:
            None or a byte string of the verified package file
        """
        host_path = 'packagecontrol.io/' + self.filename.replace(' ', '%20')
        # Don't be fooled by the TLS URL, Python 3.3 does not verify hostnames
        secure_url = 'https://' + host_path
        insecure_url = 'http://' + host_path
        secure_sig_url = secure_url + '.sig'
        insecure_sig_url = insecure_url + '.sig'
        # Honor the user's proxy environment for all urlopen() calls below.
        install_opener(build_opener(ProxyHandler()))
        try:
            package_data = urlopen(secure_url).read()
            sig_data = urlopen(secure_sig_url).read()
        except (URLError) as e:
            print('%sHTTPS error encountered, falling back to HTTP - %s' % (self.error_prefix, str(e)))
            try:
                package_data = urlopen(insecure_url).read()
                sig_data = urlopen(insecure_sig_url).read()
            except (URLError) as e2:
                print('%sHTTP error encountered, giving up - %s' % (self.error_prefix, str(e2)))
                return None
        return self._verify(package_data, sig_data)
    def _verify(self, package_data, sig_data):
        """
        RUNS IN A THREAD
        Verifies the package is authentic
        :param package_data:
            A byte string of the .sublime-package data
        :param sig_data:
            A byte string of the .sig data
        :return:
            None if invalid, byte string of package file otherwise
        """
        try:
            armored_sig = sig_data.decode('ascii').strip()
        except (UnicodeDecodeError):
            print(self.error_prefix + 'invalid signature ASCII encoding')
            return None
        begin = '-----BEGIN PACKAGE CONTROL SIGNATURE-----'
        end = '-----END PACKAGE CONTROL SIGNATURE-----'
        pem_error = self.error_prefix + 'invalid signature PEM armor'
        b64_sig = ''
        in_body = None
        # Manual PEM-armor parse: require the BEGIN line first, then collect
        # base64 body lines until the matching END line.
        for line in armored_sig.splitlines():
            if not in_body:
                if line != begin:
                    print(pem_error)
                    return None
                in_body = True
            else:
                if line.startswith('-----'):
                    if line != end:
                        print(pem_error)
                        return None
                    break
                b64_sig += line
        try:
            sig = base64.b64decode(b64_sig)
        except (binascii.Error):
            print(self.error_prefix + 'invalid signature base64 decoding')
            return None
        public_key = base64.b64decode(self.public_key)
        # Signature check is delegated to Sublime's built-in verifier.
        if not sublime_api.verify_pc_signature(package_data, sig, public_key):
            print(self.error_prefix + 'signature could not be verified')
            return None
        return package_data
| [
"sharunkumar.ks@gofrugal.com"
] | sharunkumar.ks@gofrugal.com |
e8dd578f213f88343398afdca104aa8d1e0c925b | 7020b4ff30cd1f35146235c1d0e74bb8f9a3c4b9 | /venv/Lib/site-packages/pymysql/connections.py | 7c90ce34e00b2a8abdfea1084d9fcca168f6bf9a | [] | no_license | Brian-Munene/HouseProject | 4f6c500738d733d88dc75b6e2849e80d85024197 | a6a41cfa8d59a9861e0659bce2da191d4eefe1df | refs/heads/master | 2023-02-09T06:47:53.011462 | 2021-09-14T09:48:09 | 2021-09-14T09:48:09 | 169,042,895 | 0 | 1 | null | 2023-02-02T06:14:37 | 2019-02-04T07:38:34 | Python | UTF-8 | Python | false | false | 49,033 | py | # Python implementation of the MySQL client-server protocol
# http://dev.mysql.com/doc/internals/en/client-server-protocol.html
# Error codes:
# http://dev.mysql.com/doc/refman/5.5/en/error-messages-client.html
from __future__ import print_function
from ._compat import PY2, range_type, text_type, str_type, JYTHON, IRONPYTHON
import errno
import io
import os
import socket
import struct
import sys
import traceback
import warnings
from . import _auth
from .charset import charset_by_name, charset_by_id
from .constants import CLIENT, COMMAND, CR, FIELD_TYPE, SERVER_STATUS
from . import converters
from .cursors import Cursor
from .optionfile import Parser
from .protocol import (
dump_packet, MysqlPacket, FieldDescriptorPacket, OKPacketWrapper,
EOFPacketWrapper, LoadLocalPacketWrapper
)
from .util import byte2int, int2byte
from . import err, VERSION_STRING
try:
import ssl
SSL_ENABLED = True
except ImportError:
ssl = None
SSL_ENABLED = False
try:
import getpass
DEFAULT_USER = getpass.getuser()
del getpass
except (ImportError, KeyError):
# KeyError occurs when there's no entry in OS database for a current user.
DEFAULT_USER = None
# Set to True to dump protocol packets while debugging.
DEBUG = False
_py_version = sys.version_info[:2]
# _fast_surrogateescape(bytes) -> str: decode arbitrary bytes, mapping
# non-ASCII bytes into the surrogate range (PEP 383) so they round-trip.
if PY2:
    # Never called on Python 2, so no definition is required there.
    pass
elif _py_version < (3, 6):
    # See http://bugs.python.org/issue24870
    # 'ascii'/'surrogateescape' decoding is slow before 3.6; emulate it with
    # a latin1 decode plus a translation table into the surrogate area.
    _surrogateescape_table = [chr(i) if i < 0x80 else chr(i + 0xdc00) for i in range(256)]
    def _fast_surrogateescape(s):
        return s.decode('latin1').translate(_surrogateescape_table)
else:
    def _fast_surrogateescape(s):
        return s.decode('ascii', 'surrogateescape')
# socket.makefile() in Python 2 is not usable because very inefficient and
# bad behavior about timeout.
# XXX: ._socketio doesn't work under IronPython.
if PY2 and not IRONPYTHON:
    # read method of file-like returned by sock.makefile() is very slow.
    # So we copy io-based one from Python 3.
    from ._socketio import SocketIO
    def _makefile(sock, mode):
        return io.BufferedReader(SocketIO(sock, mode))
else:
    # socket.makefile in Python 3 is nice.
    def _makefile(sock, mode):
        return sock.makefile(mode)
# Column types delivered as text/blob payloads (presumably consumed by the
# result-decoding path elsewhere in this module -- confirm).
TEXT_TYPES = {
    FIELD_TYPE.BIT,
    FIELD_TYPE.BLOB,
    FIELD_TYPE.LONG_BLOB,
    FIELD_TYPE.MEDIUM_BLOB,
    FIELD_TYPE.STRING,
    FIELD_TYPE.TINY_BLOB,
    FIELD_TYPE.VAR_STRING,
    FIELD_TYPE.VARCHAR,
    FIELD_TYPE.GEOMETRY,
}
DEFAULT_CHARSET = 'utf8mb4'
# MySQL packet payload length is a 3-byte integer, hence 2**24-1 maximum.
MAX_PACKET_LEN = 2**24-1
def pack_int24(n):
    """Pack *n* as a 3-byte little-endian integer (MySQL int<3>)."""
    # Pack as a 4-byte little-endian word, then drop the high byte.
    packed = struct.pack('<I', n)
    return packed[:3]
# https://dev.mysql.com/doc/internals/en/integer.html#packet-Protocol::LengthEncodedInteger
def lenenc_int(i):
    """Encode the non-negative integer *i* as a MySQL length-encoded integer."""
    if i < 0:
        raise ValueError("Encoding %d is less than 0 - no representation in LengthEncodedInteger" % i)
    if i < 0xfb:
        # Small values fit in a single byte.
        return int2byte(i)
    if i < (1 << 16):
        return b'\xfc' + struct.pack('<H', i)
    if i < (1 << 24):
        return b'\xfd' + struct.pack('<I', i)[:3]
    if i < (1 << 64):
        return b'\xfe' + struct.pack('<Q', i)
    raise ValueError("Encoding %x is larger than %x - no representation in LengthEncodedInteger" % (i, (1 << 64)))
class Connection(object):
"""
Representation of a socket with a mysql server.
The proper way to get an instance of this class is to call
connect().
Establish a connection to the MySQL database. Accepts several
arguments:
:param host: Host where the database server is located
:param user: Username to log in as
:param password: Password to use.
:param database: Database to use, None to not use a particular one.
:param port: MySQL port to use, default is usually OK. (default: 3306)
:param bind_address: When the client has multiple network interfaces, specify
the interface from which to connect to the host. Argument can be
a hostname or an IP address.
:param unix_socket: Optionally, you can use a unix socket rather than TCP/IP.
:param read_timeout: The timeout for reading from the connection in seconds (default: None - no timeout)
:param write_timeout: The timeout for writing to the connection in seconds (default: None - no timeout)
:param charset: Charset you want to use.
:param sql_mode: Default SQL_MODE to use.
:param read_default_file:
Specifies my.cnf file to read these parameters from under the [client] section.
:param conv:
Conversion dictionary to use instead of the default one.
This is used to provide custom marshalling and unmarshaling of types.
See converters.
:param use_unicode:
Whether or not to default to unicode strings.
This option defaults to true for Py3k.
:param client_flag: Custom flags to send to MySQL. Find potential values in constants.CLIENT.
:param cursorclass: Custom cursor class to use.
:param init_command: Initial SQL statement to run when connection is established.
:param connect_timeout: Timeout before throwing an exception when connecting.
(default: 10, min: 1, max: 31536000)
:param ssl:
A dict of arguments similar to mysql_ssl_set()'s parameters.
:param read_default_group: Group to read from in the configuration file.
:param compress: Not supported
:param named_pipe: Not supported
:param autocommit: Autocommit mode. None means use server default. (default: False)
:param local_infile: Boolean to enable the use of LOAD DATA LOCAL command. (default: False)
:param max_allowed_packet: Max size of packet sent to server in bytes. (default: 16MB)
Only used to limit size of "LOAD LOCAL INFILE" data packet smaller than default (16KB).
:param defer_connect: Don't explicitly connect on contruction - wait for connect call.
(default: False)
:param auth_plugin_map: A dict of plugin names to a class that processes that plugin.
The class will take the Connection object as the argument to the constructor.
The class needs an authenticate method taking an authentication packet as
an argument. For the dialog plugin, a prompt(echo, prompt) method can be used
(if no authenticate method) for returning a string from the user. (experimental)
:param server_public_key: SHA256 authenticaiton plugin public key value. (default: None)
:param db: Alias for database. (for compatibility to MySQLdb)
:param passwd: Alias for password. (for compatibility to MySQLdb)
:param binary_prefix: Add _binary prefix on bytes and bytearray. (default: False)
See `Connection <https://www.python.org/dev/peps/pep-0249/#connection-objects>`_ in the
specification.
"""
_sock = None
_auth_plugin_name = ''
_closed = False
_secure = False
def __init__(self, host=None, user=None, password="",
database=None, port=0, unix_socket=None,
charset='', sql_mode=None,
read_default_file=None, conv=None, use_unicode=None,
client_flag=0, cursorclass=Cursor, init_command=None,
connect_timeout=10, ssl=None, read_default_group=None,
compress=None, named_pipe=None,
autocommit=False, db=None, passwd=None, local_infile=False,
max_allowed_packet=16*1024*1024, defer_connect=False,
auth_plugin_map=None, read_timeout=None, write_timeout=None,
bind_address=None, binary_prefix=False, program_name=None,
server_public_key=None):
if use_unicode is None and sys.version_info[0] > 2:
use_unicode = True
if db is not None and database is None:
database = db
if passwd is not None and not password:
password = passwd
if compress or named_pipe:
raise NotImplementedError("compress and named_pipe arguments are not supported")
self._local_infile = bool(local_infile)
if self._local_infile:
client_flag |= CLIENT.LOCAL_FILES
if read_default_group and not read_default_file:
if sys.platform.startswith("win"):
read_default_file = "c:\\my.ini"
else:
read_default_file = "/etc/my.cnf"
if read_default_file:
if not read_default_group:
read_default_group = "client"
cfg = Parser()
cfg.read(os.path.expanduser(read_default_file))
def _config(key, arg):
if arg:
return arg
try:
return cfg.get(read_default_group, key)
except Exception:
return arg
user = _config("user", user)
password = _config("password", password)
host = _config("host", host)
database = _config("database", database)
unix_socket = _config("socket", unix_socket)
port = int(_config("port", port))
bind_address = _config("bind-address", bind_address)
charset = _config("default-character-set", charset)
if not ssl:
ssl = {}
if isinstance(ssl, dict):
for key in ["ca", "capath", "cert", "key", "cipher"]:
value = _config("ssl-" + key, ssl.get(key))
if value:
ssl[key] = value
self.ssl = False
if ssl:
if not SSL_ENABLED:
raise NotImplementedError("ssl module not found")
self.ssl = True
client_flag |= CLIENT.SSL
self.ctx = self._create_ssl_ctx(ssl)
self.host = host or "localhost"
self.port = port or 3306
self.user = user or DEFAULT_USER
self.password = password or b""
if isinstance(self.password, text_type):
self.password = self.password.encode('latin1')
self.db = database
self.unix_socket = unix_socket
self.bind_address = bind_address
if not (0 < connect_timeout <= 31536000):
raise ValueError("connect_timeout should be >0 and <=31536000")
self.connect_timeout = connect_timeout or None
if read_timeout is not None and read_timeout <= 0:
raise ValueError("read_timeout should be >= 0")
self._read_timeout = read_timeout
if write_timeout is not None and write_timeout <= 0:
raise ValueError("write_timeout should be >= 0")
self._write_timeout = write_timeout
if charset:
self.charset = charset
self.use_unicode = True
else:
self.charset = DEFAULT_CHARSET
self.use_unicode = False
if use_unicode is not None:
self.use_unicode = use_unicode
self.encoding = charset_by_name(self.charset).encoding
client_flag |= CLIENT.CAPABILITIES
if self.db:
client_flag |= CLIENT.CONNECT_WITH_DB
self.client_flag = client_flag
self.cursorclass = cursorclass
self._result = None
self._affected_rows = 0
self.host_info = "Not connected"
# specified autocommit mode. None means use server default.
self.autocommit_mode = autocommit
if conv is None:
conv = converters.conversions
# Need for MySQLdb compatibility.
self.encoders = {k: v for (k, v) in conv.items() if type(k) is not int}
self.decoders = {k: v for (k, v) in conv.items() if type(k) is int}
self.sql_mode = sql_mode
self.init_command = init_command
self.max_allowed_packet = max_allowed_packet
self._auth_plugin_map = auth_plugin_map or {}
self._binary_prefix = binary_prefix
self.server_public_key = server_public_key
self._connect_attrs = {
'_client_name': 'pymysql',
'_pid': str(os.getpid()),
'_client_version': VERSION_STRING,
}
if program_name:
self._connect_attrs["program_name"] = program_name
if defer_connect:
self._sock = None
else:
self.connect()
def _create_ssl_ctx(self, sslp):
if isinstance(sslp, ssl.SSLContext):
return sslp
ca = sslp.get('ca')
capath = sslp.get('capath')
hasnoca = ca is None and capath is None
ctx = ssl.create_default_context(cafile=ca, capath=capath)
ctx.check_hostname = not hasnoca and sslp.get('check_hostname', True)
ctx.verify_mode = ssl.CERT_NONE if hasnoca else ssl.CERT_REQUIRED
if 'cert' in sslp:
ctx.load_cert_chain(sslp['cert'], keyfile=sslp.get('key'))
if 'cipher' in sslp:
ctx.set_ciphers(sslp['cipher'])
ctx.options |= ssl.OP_NO_SSLv2
ctx.options |= ssl.OP_NO_SSLv3
return ctx
def close(self):
"""
Send the quit message and close the socket.
See `Connection.close() <https://www.python.org/dev/peps/pep-0249/#Connection.close>`_
in the specification.
:raise Error: If the connection is already closed.
"""
if self._closed:
raise err.Error("Already closed")
self._closed = True
if self._sock is None:
return
send_data = struct.pack('<iB', 1, COMMAND.COM_QUIT)
try:
self._write_bytes(send_data)
except Exception:
pass
finally:
self._force_close()
@property
def open(self):
"""Return True if the connection is open"""
return self._sock is not None
def _force_close(self):
"""Close connection without QUIT message"""
if self._sock:
try:
self._sock.close()
except: # noqa
pass
self._sock = None
self._rfile = None
__del__ = _force_close
def autocommit(self, value):
self.autocommit_mode = bool(value)
current = self.get_autocommit()
if value != current:
self._send_autocommit_mode()
def get_autocommit(self):
return bool(self.server_status &
SERVER_STATUS.SERVER_STATUS_AUTOCOMMIT)
def _read_ok_packet(self):
pkt = self._read_packet()
if not pkt.is_ok_packet():
raise err.OperationalError(2014, "Command Out of Sync")
ok = OKPacketWrapper(pkt)
self.server_status = ok.server_status
return ok
def _send_autocommit_mode(self):
"""Set whether or not to commit after every execute()"""
self._execute_command(COMMAND.COM_QUERY, "SET AUTOCOMMIT = %s" %
self.escape(self.autocommit_mode))
self._read_ok_packet()
def begin(self):
"""Begin payment."""
self._execute_command(COMMAND.COM_QUERY, "BEGIN")
self._read_ok_packet()
def commit(self):
"""
Commit changes to stable storage.
See `Connection.commit() <https://www.python.org/dev/peps/pep-0249/#commit>`_
in the specification.
"""
self._execute_command(COMMAND.COM_QUERY, "COMMIT")
self._read_ok_packet()
def rollback(self):
"""
Roll back the current payment.
See `Connection.rollback() <https://www.python.org/dev/peps/pep-0249/#rollback>`_
in the specification.
"""
self._execute_command(COMMAND.COM_QUERY, "ROLLBACK")
self._read_ok_packet()
def show_warnings(self):
"""Send the "SHOW WARNINGS" SQL command."""
self._execute_command(COMMAND.COM_QUERY, "SHOW WARNINGS")
result = MySQLResult(self)
result.read()
return result.rows
def select_db(self, db):
"""
Set current db.
:param db: The name of the db.
"""
self._execute_command(COMMAND.COM_INIT_DB, db)
self._read_ok_packet()
def escape(self, obj, mapping=None):
"""Escape whatever value you pass to it.
Non-standard, for internal use; do not use this in your applications.
"""
if isinstance(obj, str_type):
return "'" + self.escape_string(obj) + "'"
if isinstance(obj, (bytes, bytearray)):
ret = self._quote_bytes(obj)
if self._binary_prefix:
ret = "_binary" + ret
return ret
return converters.escape_item(obj, self.charset, mapping=mapping)
def literal(self, obj):
"""Alias for escape()
Non-standard, for internal use; do not use this in your applications.
"""
return self.escape(obj, self.encoders)
def escape_string(self, s):
if (self.server_status &
SERVER_STATUS.SERVER_STATUS_NO_BACKSLASH_ESCAPES):
return s.replace("'", "''")
return converters.escape_string(s)
def _quote_bytes(self, s):
if (self.server_status &
SERVER_STATUS.SERVER_STATUS_NO_BACKSLASH_ESCAPES):
return "'%s'" % (_fast_surrogateescape(s.replace(b"'", b"''")),)
return converters.escape_bytes(s)
def cursor(self, cursor=None):
"""
Create a new cursor to execute queries with.
:param cursor: The type of cursor to create; one of :py:class:`Cursor`,
:py:class:`SSCursor`, :py:class:`DictCursor`, or :py:class:`SSDictCursor`.
None means use Cursor.
"""
if cursor:
return cursor(self)
return self.cursorclass(self)
    def __enter__(self):
        """Context manager that returns a Cursor"""
        # Deprecated: the with-block commits/rolls back the whole *connection*,
        # which surprises users expecting cursor-only scope.
        warnings.warn(
            "Context manager API of Connection object is deprecated; Use conn.begin()",
            DeprecationWarning)
        return self.cursor()
def __exit__(self, exc, value, traceback):
"""On successful exit, commit. On exception, rollback"""
if exc:
self.rollback()
else:
self.commit()
    # The following methods are INTERNAL USE ONLY (called from Cursor)
    def query(self, sql, unbuffered=False):
        """Send a query to the server and read its result.

        :param sql: Query text (str) or pre-encoded bytes.
        :param unbuffered: When True, rows are fetched lazily.
        :return: Number of affected rows.
        """
        # if DEBUG:
        #     print("DEBUG: sending query:", sql)
        if isinstance(sql, text_type) and not (JYTHON or IRONPYTHON):
            if PY2:
                sql = sql.encode(self.encoding)
            else:
                # surrogateescape round-trips bytes that are not valid in the
                # connection encoding (PEP 383).
                sql = sql.encode(self.encoding, 'surrogateescape')
        self._execute_command(COMMAND.COM_QUERY, sql)
        self._affected_rows = self._read_query_result(unbuffered=unbuffered)
        return self._affected_rows
def next_result(self, unbuffered=False):
self._affected_rows = self._read_query_result(unbuffered=unbuffered)
return self._affected_rows
    def affected_rows(self):
        """Return the number of rows affected by the most recent query."""
        return self._affected_rows
def kill(self, thread_id):
arg = struct.pack('<I', thread_id)
self._execute_command(COMMAND.COM_PROCESS_KILL, arg)
return self._read_ok_packet()
def ping(self, reconnect=True):
"""
Check if the server is alive.
:param reconnect: If the connection is closed, reconnect.
:raise Error: If the connection is closed and reconnect=False.
"""
if self._sock is None:
if reconnect:
self.connect()
reconnect = False
else:
raise err.Error("Already closed")
try:
self._execute_command(COMMAND.COM_PING, "")
self._read_ok_packet()
except Exception:
if reconnect:
self.connect()
self.ping(False)
else:
raise
def set_charset(self, charset):
# Make sure charset is supported.
encoding = charset_by_name(charset).encoding
self._execute_command(COMMAND.COM_QUERY, "SET NAMES %s" % self.escape(charset))
self._read_packet()
self.charset = charset
self.encoding = encoding
    def connect(self, sock=None):
        """Establish the connection: open (or adopt) a socket, read the server
        handshake, authenticate, then apply sql_mode/init_command/autocommit.

        :param sock: Optional pre-connected socket to adopt instead of opening one.
        """
        self._closed = False
        try:
            if sock is None:
                if self.unix_socket:
                    sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
                    sock.settimeout(self.connect_timeout)
                    sock.connect(self.unix_socket)
                    self.host_info = "Localhost via UNIX socket"
                    # Local socket: no TLS needed for auth plugins that require
                    # a secure channel.
                    self._secure = True
                    if DEBUG: print('connected using unix_socket')
                else:
                    kwargs = {}
                    if self.bind_address is not None:
                        kwargs['source_address'] = (self.bind_address, 0)
                    # Retry the connect on EINTR only.
                    while True:
                        try:
                            sock = socket.create_connection(
                                (self.host, self.port), self.connect_timeout,
                                **kwargs)
                            break
                        except (OSError, IOError) as e:
                            if e.errno == errno.EINTR:
                                continue
                            raise
                    self.host_info = "socket %s:%d" % (self.host, self.port)
                    if DEBUG: print('connected using socket')
                    sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
                sock.settimeout(None)
                sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
            self._sock = sock
            self._rfile = _makefile(sock, 'rb')
            self._next_seq_id = 0

            self._get_server_information()
            self._request_authentication()

            if self.sql_mode is not None:
                c = self.cursor()
                c.execute("SET sql_mode=%s", (self.sql_mode,))

            if self.init_command is not None:
                c = self.cursor()
                c.execute(self.init_command)
                c.close()
                self.commit()

            if self.autocommit_mode is not None:
                self.autocommit(self.autocommit_mode)
        except BaseException as e:
            self._rfile = None
            if sock is not None:
                try:
                    sock.close()
                except:  # noqa
                    pass

            if isinstance(e, (OSError, IOError, socket.error)):
                exc = err.OperationalError(
                    2003,
                    "Can't connect to MySQL server on %r (%s)" % (
                        self.host, e))
                # Keep original exception and traceback to investigate error.
                exc.original_exception = e
                exc.traceback = traceback.format_exc()
                if DEBUG: print(exc.traceback)
                raise exc

            # If e is neither DatabaseError or IOError, It's a bug.
            # But raising AssertionError hides original error.
            # So just reraise it.
            raise
def write_packet(self, payload):
"""Writes an entire "mysql packet" in its entirety to the network
addings its length and sequence number.
"""
# Internal note: when you build packet manualy and calls _write_bytes()
# directly, you should set self._next_seq_id properly.
data = pack_int24(len(payload)) + int2byte(self._next_seq_id) + payload
if DEBUG: dump_packet(data)
self._write_bytes(data)
self._next_seq_id = (self._next_seq_id + 1) % 256
    def _read_packet(self, packet_type=MysqlPacket):
        """Read an entire "mysql packet" in its entirety from the network
        and return a MysqlPacket type that represents the results.

        :raise OperationalError: If the connection to the MySQL server is lost.
        :raise InternalError: If the packet sequence number is wrong.
        """
        buff = b''
        while True:
            packet_header = self._read_bytes(4)
            #if DEBUG: dump_packet(packet_header)
            # Header layout: 3-byte little-endian payload length + 1-byte seq no.
            btrl, btrh, packet_number = struct.unpack('<HBB', packet_header)
            bytes_to_read = btrl + (btrh << 16)
            if packet_number != self._next_seq_id:
                self._force_close()
                if packet_number == 0:
                    # MariaDB sends error packet with seqno==0 when shutdown
                    raise err.OperationalError(
                        CR.CR_SERVER_LOST,
                        "Lost connection to MySQL server during query")
                raise err.InternalError(
                    "Packet sequence number wrong - got %d expected %d"
                    % (packet_number, self._next_seq_id))
            self._next_seq_id = (self._next_seq_id + 1) % 256

            recv_data = self._read_bytes(bytes_to_read)
            if DEBUG: dump_packet(recv_data)
            buff += recv_data
            # A max-length payload means the packet is continued in the next one.
            # https://dev.mysql.com/doc/internals/en/sending-more-than-16mbyte.html
            if bytes_to_read == 0xffffff:
                continue
            if bytes_to_read < MAX_PACKET_LEN:
                break

        packet = packet_type(buff, self.encoding)
        packet.check_error()
        return packet
    def _read_bytes(self, num_bytes):
        """Read exactly *num_bytes* from the buffered socket file.

        Retries reads interrupted by EINTR; any other I/O failure or a short
        read force-closes the connection and raises OperationalError.
        """
        self._sock.settimeout(self._read_timeout)
        while True:
            try:
                data = self._rfile.read(num_bytes)
                break
            except (IOError, OSError) as e:
                if e.errno == errno.EINTR:
                    continue
                self._force_close()
                raise err.OperationalError(
                    CR.CR_SERVER_LOST,
                    "Lost connection to MySQL server during query (%s)" % (e,))
            except BaseException:
                # Don't convert unknown exception to MySQLError.
                self._force_close()
                raise
        if len(data) < num_bytes:
            # Short read means the peer closed mid-packet.
            self._force_close()
            raise err.OperationalError(
                CR.CR_SERVER_LOST, "Lost connection to MySQL server during query")
        return data
    def _write_bytes(self, data):
        """Send raw bytes on the socket, mapping IOError to OperationalError."""
        self._sock.settimeout(self._write_timeout)
        try:
            self._sock.sendall(data)
        except IOError as e:
            self._force_close()
            raise err.OperationalError(
                CR.CR_SERVER_GONE_ERROR,
                "MySQL server has gone away (%r)" % (e,))
def _read_query_result(self, unbuffered=False):
self._result = None
if unbuffered:
try:
result = MySQLResult(self)
result.init_unbuffered_query()
except:
result.unbuffered_active = False
result.connection = None
raise
else:
result = MySQLResult(self)
result.read()
self._result = result
if result.server_status is not None:
self.server_status = result.server_status
return result.affected_rows
def insert_id(self):
if self._result:
return self._result.insert_id
else:
return 0
    def _execute_command(self, command, sql):
        """Send *command* (a COM_* code) with *sql* as its payload, splitting
        the payload across packets when it exceeds MAX_PACKET_LEN.

        :raise InterfaceError: If the connection is closed.
        :raise ValueError: If no username was specified.
        """
        if not self._sock:
            raise err.InterfaceError("(0, '')")

        # If the last query was unbuffered, make sure it finishes before
        # sending new commands
        if self._result is not None:
            if self._result.unbuffered_active:
                warnings.warn("Previous unbuffered result was left incomplete")
                self._result._finish_unbuffered_query()

            while self._result.has_next:
                self.next_result()
            self._result = None

        if isinstance(sql, text_type):
            sql = sql.encode(self.encoding)

        packet_size = min(MAX_PACKET_LEN, len(sql) + 1)  # +1 is for command

        # tiny optimization: build first packet manually instead of
        # calling self.write_packet()
        prelude = struct.pack('<iB', packet_size, command)
        packet = prelude + sql[:packet_size-1]
        self._write_bytes(packet)
        if DEBUG: dump_packet(packet)
        # The command packet is seq 0, so the next one is 1.
        self._next_seq_id = 1

        if packet_size < MAX_PACKET_LEN:
            return

        # Remaining payload goes out as continuation packets.
        sql = sql[packet_size-1:]
        while True:
            packet_size = min(MAX_PACKET_LEN, len(sql))
            self.write_packet(sql[:packet_size])
            sql = sql[packet_size:]
            if not sql and packet_size < MAX_PACKET_LEN:
                break
    def _request_authentication(self):
        """Send the HandshakeResponse packet and complete the auth handshake,
        including TLS upgrade, auth-switch and extra-data rounds."""
        # https://dev.mysql.com/doc/internals/en/connection-phase-packets.html#packet-Protocol::HandshakeResponse
        if int(self.server_version.split('.', 1)[0]) >= 5:
            self.client_flag |= CLIENT.MULTI_RESULTS

        if self.user is None:
            raise ValueError("Did not specify a username")

        charset_id = charset_by_name(self.charset).id
        if isinstance(self.user, text_type):
            self.user = self.user.encode(self.encoding)

        data_init = struct.pack('<iIB23s', self.client_flag, MAX_PACKET_LEN, charset_id, b'')

        if self.ssl and self.server_capabilities & CLIENT.SSL:
            # SSLRequest packet, then upgrade the raw socket to TLS.
            self.write_packet(data_init)

            self._sock = self.ctx.wrap_socket(self._sock, server_hostname=self.host)
            self._rfile = _makefile(self._sock, 'rb')
            self._secure = True

        data = data_init + self.user + b'\0'

        authresp = b''
        plugin_name = None

        # Build the initial auth response for the configured plugin.
        if self._auth_plugin_name == '':
            plugin_name = b''
            authresp = _auth.scramble_native_password(self.password, self.salt)
        elif self._auth_plugin_name == 'mysql_native_password':
            plugin_name = b'mysql_native_password'
            authresp = _auth.scramble_native_password(self.password, self.salt)
        elif self._auth_plugin_name == 'caching_sha2_password':
            plugin_name = b'caching_sha2_password'
            if self.password:
                if DEBUG:
                    print("caching_sha2: trying fast path")
                authresp = _auth.scramble_caching_sha2(self.password, self.salt)
            else:
                if DEBUG:
                    print("caching_sha2: empty password")
        elif self._auth_plugin_name == 'sha256_password':
            plugin_name = b'sha256_password'
            if self.ssl and self.server_capabilities & CLIENT.SSL:
                authresp = self.password + b'\0'
            elif self.password:
                authresp = b'\1'  # request public key
            else:
                authresp = b'\0'  # empty password

        if self.server_capabilities & CLIENT.PLUGIN_AUTH_LENENC_CLIENT_DATA:
            data += lenenc_int(len(authresp)) + authresp
        elif self.server_capabilities & CLIENT.SECURE_CONNECTION:
            data += struct.pack('B', len(authresp)) + authresp
        else:  # pragma: no cover - not testing against servers without secure auth (>=5.0)
            data += authresp + b'\0'

        if self.db and self.server_capabilities & CLIENT.CONNECT_WITH_DB:
            if isinstance(self.db, text_type):
                self.db = self.db.encode(self.encoding)
            data += self.db + b'\0'

        if self.server_capabilities & CLIENT.PLUGIN_AUTH:
            data += (plugin_name or b'') + b'\0'

        if self.server_capabilities & CLIENT.CONNECT_ATTRS:
            # Each attribute key/value is length-prefixed with one byte.
            connect_attrs = b''
            for k, v in self._connect_attrs.items():
                k = k.encode('utf-8')
                connect_attrs += struct.pack('B', len(k)) + k
                v = v.encode('utf-8')
                connect_attrs += struct.pack('B', len(v)) + v
            data += struct.pack('B', len(connect_attrs)) + connect_attrs

        self.write_packet(data)
        auth_packet = self._read_packet()

        # if authentication method isn't accepted the first byte
        # will have the octet 254
        if auth_packet.is_auth_switch_request():
            if DEBUG: print("received auth switch")
            # https://dev.mysql.com/doc/internals/en/connection-phase-packets.html#packet-Protocol::AuthSwitchRequest
            auth_packet.read_uint8()  # 0xfe packet identifier
            plugin_name = auth_packet.read_string()
            if self.server_capabilities & CLIENT.PLUGIN_AUTH and plugin_name is not None:
                auth_packet = self._process_auth(plugin_name, auth_packet)
            else:
                # send legacy handshake
                data = _auth.scramble_old_password(self.password, self.salt) + b'\0'
                self.write_packet(data)
                auth_packet = self._read_packet()
        elif auth_packet.is_extra_auth_data():
            if DEBUG:
                print("received extra data")
            # https://dev.mysql.com/doc/internals/en/successful-authentication.html
            if self._auth_plugin_name == "caching_sha2_password":
                auth_packet = _auth.caching_sha2_password_auth(self, auth_packet)
            elif self._auth_plugin_name == "sha256_password":
                auth_packet = _auth.sha256_password_auth(self, auth_packet)
            else:
                raise err.OperationalError("Received extra packet for auth method %r", self._auth_plugin_name)

        if DEBUG: print("Succeed to auth")
    def _process_auth(self, plugin_name, auth_packet):
        """Run one auth-switch round for *plugin_name* and return the server's
        reply packet. User-registered plugin handlers take precedence over the
        built-in plugin implementations."""
        handler = self._get_auth_plugin_handler(plugin_name)
        if handler:
            try:
                return handler.authenticate(auth_packet)
            except AttributeError:
                # Only the "dialog" plugin may fall through to the built-in flow.
                if plugin_name != b'dialog':
                    raise err.OperationalError(2059, "Authentication plugin '%s'"
                        " not loaded: - %r missing authenticate method" % (plugin_name, type(handler)))
        if plugin_name == b"caching_sha2_password":
            return _auth.caching_sha2_password_auth(self, auth_packet)
        elif plugin_name == b"sha256_password":
            return _auth.sha256_password_auth(self, auth_packet)
        elif plugin_name == b"mysql_native_password":
            data = _auth.scramble_native_password(self.password, auth_packet.read_all())
        elif plugin_name == b"mysql_old_password":
            data = _auth.scramble_old_password(self.password, auth_packet.read_all()) + b'\0'
        elif plugin_name == b"mysql_clear_password":
            # https://dev.mysql.com/doc/internals/en/clear-text-authentication.html
            data = self.password + b'\0'
        elif plugin_name == b"dialog":
            # Prompt/response loop; flag bits select echo mode and last round.
            pkt = auth_packet
            while True:
                flag = pkt.read_uint8()
                echo = (flag & 0x06) == 0x02
                last = (flag & 0x01) == 0x01
                prompt = pkt.read_all()

                if prompt == b"Password: ":
                    self.write_packet(self.password + b'\0')
                elif handler:
                    resp = 'no response - TypeError within plugin.prompt method'
                    try:
                        resp = handler.prompt(echo, prompt)
                        self.write_packet(resp + b'\0')
                    except AttributeError:
                        raise err.OperationalError(2059, "Authentication plugin '%s'" \
                            " not loaded: - %r missing prompt method" % (plugin_name, handler))
                    except TypeError:
                        raise err.OperationalError(2061, "Authentication plugin '%s'" \
                            " %r didn't respond with string. Returned '%r' to prompt %r" % (plugin_name, handler, resp, prompt))
                else:
                    raise err.OperationalError(2059, "Authentication plugin '%s' (%r) not configured" % (plugin_name, handler))
                pkt = self._read_packet()
                pkt.check_error()
                if pkt.is_ok_packet() or last:
                    break
            return pkt
        else:
            raise err.OperationalError(2059, "Authentication plugin '%s' not configured" % plugin_name)

        # Simple one-shot plugins: send the response, return the reply.
        self.write_packet(data)
        pkt = self._read_packet()
        pkt.check_error()
        return pkt
def _get_auth_plugin_handler(self, plugin_name):
plugin_class = self._auth_plugin_map.get(plugin_name)
if not plugin_class and isinstance(plugin_name, bytes):
plugin_class = self._auth_plugin_map.get(plugin_name.decode('ascii'))
if plugin_class:
try:
handler = plugin_class(self)
except TypeError:
raise err.OperationalError(2059, "Authentication plugin '%s'"
" not loaded: - %r cannot be constructed with connection object" % (plugin_name, plugin_class))
else:
handler = None
return handler
    # _mysql support
    def thread_id(self):
        """Return the server-side thread id of this connection."""
        return self.server_thread_id[0]
    def character_set_name(self):
        """Return the connection's current character set name."""
        return self.charset
    def get_host_info(self):
        """Return a human-readable description of the connection transport."""
        return self.host_info
    def get_proto_info(self):
        """Return the protocol version reported in the server handshake."""
        return self.protocol_version
    def _get_server_information(self):
        """Parse the server's initial handshake packet: protocol version,
        server version string, thread id, salt, capability flags, status and
        the auth plugin name. Advances *i* as a byte cursor over the payload."""
        i = 0
        packet = self._read_packet()
        data = packet.get_all_data()

        self.protocol_version = byte2int(data[i:i+1])
        i += 1

        # NUL-terminated human-readable server version.
        server_end = data.find(b'\0', i)
        self.server_version = data[i:server_end].decode('latin1')
        i = server_end + 1

        self.server_thread_id = struct.unpack('<I', data[i:i+4])
        i += 4

        # auth-plugin-data-part-1 (first 8 bytes of the scramble).
        self.salt = data[i:i+8]
        i += 9  # 8 + 1(filler)

        self.server_capabilities = struct.unpack('<H', data[i:i+2])[0]
        i += 2

        if len(data) >= i + 6:
            lang, stat, cap_h, salt_len = struct.unpack('<BHHB', data[i:i+6])
            i += 6
            # TODO: deprecate server_language and server_charset.
            # mysqlclient-python doesn't provide it.
            self.server_language = lang
            try:
                self.server_charset = charset_by_id(lang).name
            except KeyError:
                # unknown collation
                self.server_charset = None

            self.server_status = stat
            if DEBUG: print("server_status: %x" % stat)

            # Upper 16 bits of the capability flags.
            self.server_capabilities |= cap_h << 16
            if DEBUG: print("salt_len:", salt_len)
            salt_len = max(12, salt_len - 9)

        # reserved
        i += 10

        if len(data) >= i + salt_len:
            # salt_len includes auth_plugin_data_part_1 and filler
            self.salt += data[i:i+salt_len]
            i += salt_len

        i+=1
        # AUTH PLUGIN NAME may appear here.
        if self.server_capabilities & CLIENT.PLUGIN_AUTH and len(data) >= i:
            # Due to Bug#59453 the auth-plugin-name is missing the terminating
            # NUL-char in versions prior to 5.5.10 and 5.6.2.
            # ref: https://dev.mysql.com/doc/internals/en/connection-phase-packets.html#packet-Protocol::Handshake
            # didn't use version checks as mariadb is corrected and reports
            # earlier than those two.
            server_end = data.find(b'\0', i)
            if server_end < 0:  # pragma: no cover - very specific upstream bug
                # not found \0 and last field so take it all
                self._auth_plugin_name = data[i:].decode('utf-8')
            else:
                self._auth_plugin_name = data[i:server_end].decode('utf-8')
    def get_server_info(self):
        """Return the server version string from the handshake."""
        return self.server_version
Warning = err.Warning
Error = err.Error
InterfaceError = err.InterfaceError
DatabaseError = err.DatabaseError
DataError = err.DataError
OperationalError = err.OperationalError
IntegrityError = err.IntegrityError
InternalError = err.InternalError
ProgrammingError = err.ProgrammingError
NotSupportedError = err.NotSupportedError
class MySQLResult(object):
    """Holds one result set read from the server, either fully buffered
    (read()) or streamed row-by-row (init_unbuffered_query())."""

    def __init__(self, connection):
        """
        :type connection: Connection
        """
        self.connection = connection
        self.affected_rows = None
        self.insert_id = None
        self.server_status = None
        self.warning_count = 0
        self.message = None
        self.field_count = 0
        self.description = None
        self.rows = None
        self.has_next = None
        self.unbuffered_active = False

    def __del__(self):
        # Drain an abandoned unbuffered result so the connection stays usable.
        if self.unbuffered_active:
            self._finish_unbuffered_query()

    def read(self):
        """Read the whole result set into memory (buffered mode)."""
        try:
            first_packet = self.connection._read_packet()

            if first_packet.is_ok_packet():
                self._read_ok_packet(first_packet)
            elif first_packet.is_load_local_packet():
                self._read_load_local_packet(first_packet)
            else:
                self._read_result_packet(first_packet)
        finally:
            # Drop the back-reference to avoid a cyclic reference.
            self.connection = None

    def init_unbuffered_query(self):
        """
        :raise OperationalError: If the connection to the MySQL server is lost.
        :raise InternalError:
        """
        self.unbuffered_active = True
        first_packet = self.connection._read_packet()

        if first_packet.is_ok_packet():
            self._read_ok_packet(first_packet)
            self.unbuffered_active = False
            self.connection = None
        elif first_packet.is_load_local_packet():
            self._read_load_local_packet(first_packet)
            self.unbuffered_active = False
            self.connection = None
        else:
            self.field_count = first_packet.read_length_encoded_integer()
            self._get_descriptions()

            # Apparently, MySQLdb picks this number because it's the maximum
            # value of a 64bit unsigned integer. Since we're emulating MySQLdb,
            # we set it to this instead of None, which would be preferred.
            self.affected_rows = 18446744073709551615

    def _read_ok_packet(self, first_packet):
        # Copy the OK-packet fields into this result's attributes.
        ok_packet = OKPacketWrapper(first_packet)
        self.affected_rows = ok_packet.affected_rows
        self.insert_id = ok_packet.insert_id
        self.server_status = ok_packet.server_status
        self.warning_count = ok_packet.warning_count
        self.message = ok_packet.message
        self.has_next = ok_packet.has_next

    def _read_load_local_packet(self, first_packet):
        # Server asked us to upload a local file (LOAD DATA LOCAL INFILE).
        if not self.connection._local_infile:
            raise RuntimeError(
                "**WARN**: Received LOAD_LOCAL packet but local_infile option is false.")
        load_packet = LoadLocalPacketWrapper(first_packet)
        sender = LoadLocalFile(load_packet.filename, self.connection)
        try:
            sender.send_data()
        except:
            self.connection._read_packet()  # skip ok packet
            raise

        ok_packet = self.connection._read_packet()
        if not ok_packet.is_ok_packet():  # pragma: no cover - upstream induced protocol error
            raise err.OperationalError(2014, "Commands Out of Sync")
        self._read_ok_packet(ok_packet)

    def _check_packet_is_eof(self, packet):
        # Returns True (and records warnings/has_next) when *packet* ends the set.
        if not packet.is_eof_packet():
            return False
        #TODO: Support CLIENT.DEPRECATE_EOF
        # 1) Add DEPRECATE_EOF to CAPABILITIES
        # 2) Mask CAPABILITIES with server_capabilities
        # 3) if server_capabilities & CLIENT.DEPRECATE_EOF: use OKPacketWrapper instead of EOFPacketWrapper
        wp = EOFPacketWrapper(packet)
        self.warning_count = wp.warning_count
        self.has_next = wp.has_next
        return True

    def _read_result_packet(self, first_packet):
        self.field_count = first_packet.read_length_encoded_integer()
        self._get_descriptions()
        self._read_rowdata_packet()

    def _read_rowdata_packet_unbuffered(self):
        # Check if in an active query
        if not self.unbuffered_active:
            return

        # EOF
        packet = self.connection._read_packet()
        if self._check_packet_is_eof(packet):
            self.unbuffered_active = False
            self.connection = None
            self.rows = None
            return

        row = self._read_row_from_packet(packet)
        self.affected_rows = 1
        self.rows = (row,)  # rows should tuple of row for MySQL-python compatibility.
        return row

    def _finish_unbuffered_query(self):
        # After much reading on the MySQL protocol, it appears that there is,
        # in fact, no way to stop MySQL from sending all the data after
        # executing a query, so we just spin, and wait for an EOF packet.
        while self.unbuffered_active:
            packet = self.connection._read_packet()
            if self._check_packet_is_eof(packet):
                self.unbuffered_active = False
                self.connection = None  # release reference to kill cyclic reference.

    def _read_rowdata_packet(self):
        """Read a rowdata packet for each data row in the result set."""
        rows = []
        while True:
            packet = self.connection._read_packet()
            if self._check_packet_is_eof(packet):
                self.connection = None  # release reference to kill cyclic reference.
                break
            rows.append(self._read_row_from_packet(packet))

        self.affected_rows = len(rows)
        self.rows = tuple(rows)

    def _read_row_from_packet(self, packet):
        # Decode and convert each column using the per-column (encoding,
        # converter) pairs prepared by _get_descriptions().
        row = []
        for encoding, converter in self.converters:
            try:
                data = packet.read_length_coded_string()
            except IndexError:
                # No more columns in this row
                # See https://github.com/PyMySQL/PyMySQL/pull/434
                break
            if data is not None:
                if encoding is not None:
                    data = data.decode(encoding)
                if DEBUG: print("DEBUG: DATA = ", data)
                if converter is not None:
                    data = converter(data)
            row.append(data)
        return tuple(row)

    def _get_descriptions(self):
        """Read a column descriptor packet for each column in the result."""
        self.fields = []
        self.converters = []
        use_unicode = self.connection.use_unicode
        conn_encoding = self.connection.encoding
        description = []

        for i in range_type(self.field_count):
            field = self.connection._read_packet(FieldDescriptorPacket)
            self.fields.append(field)
            description.append(field.description())
            field_type = field.type_code
            if use_unicode:
                if field_type == FIELD_TYPE.JSON:
                    # When SELECT from JSON column: charset = binary
                    # When SELECT CAST(... AS JSON): charset = connection encoding
                    # This behavior is different from TEXT / BLOB.
                    # We should decode result by connection encoding regardless charsetnr.
                    # See https://github.com/PyMySQL/PyMySQL/issues/488
                    encoding = conn_encoding  # SELECT CAST(... AS JSON)
                elif field_type in TEXT_TYPES:
                    if field.charsetnr == 63:  # binary
                        # TEXTs with charset=binary means BINARY types.
                        encoding = None
                    else:
                        encoding = conn_encoding
                else:
                    # Integers, Dates and Times, and other basic data is encoded in ascii
                    encoding = 'ascii'
            else:
                encoding = None
            converter = self.connection.decoders.get(field_type)
            if converter is converters.through:
                converter = None
            if DEBUG: print("DEBUG: field={}, converter={}".format(field, converter))
            self.converters.append((encoding, converter))

        eof_packet = self.connection._read_packet()
        assert eof_packet.is_eof_packet(), 'Protocol error, expecting EOF'
        self.description = tuple(description)
class LoadLocalFile(object):
    """Streams a local file to the server for LOAD DATA LOCAL INFILE."""

    def __init__(self, filename, connection):
        self.filename = filename
        self.connection = connection

    def send_data(self):
        """Send data packets from the local file to the server"""
        conn = self.connection
        if not conn._sock:
            raise err.InterfaceError("(0, '')")
        try:
            with open(self.filename, 'rb') as open_file:
                chunk_size = min(conn.max_allowed_packet, 16 * 1024)  # 16KB is efficient enough
                while True:
                    chunk = open_file.read(chunk_size)
                    if not chunk:
                        break
                    conn.write_packet(chunk)
        except IOError:
            raise err.OperationalError(1017, "Can't find file '{0}'".format(self.filename))
        finally:
            # send the empty packet to signify we are done sending data
            conn.write_packet(b'')
| [
"brianmunene69@gmail.com"
] | brianmunene69@gmail.com |
775bc8ad2440dec3fa0750bcca10332e6a975a4f | f07a42f652f46106dee4749277d41c302e2b7406 | /Data Set/bug-fixing-4/16a4c177de3f63055c5f0252c3f8ba202175fb41-<start_merge>-bug.py | 488cafe673b3ea8201fc11c222ab29d021e87ebf | [] | no_license | wsgan001/PyFPattern | e0fe06341cc5d51b3ad0fe29b84098d140ed54d1 | cc347e32745f99c0cd95e79a18ddacc4574d7faa | refs/heads/main | 2023-08-25T23:48:26.112133 | 2021-10-23T14:11:22 | 2021-10-23T14:11:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 421 | py | def start_merge(self, project_id, previous_group_ids, new_group_id):
        # NOTE(review): the enclosing ``def start_merge(...)`` header is fused
        # into the dataset metadata line above; only the method body is visible.
        # Nothing to merge: skip emitting a state record entirely.
        if (not previous_group_ids):
            return
        # Snapshot of the merge request; transaction_id makes it identifiable.
        state = {
            'transaction_id': uuid4().hex,
            'project_id': project_id,
            'previous_group_ids': previous_group_ids,
            'new_group_id': new_group_id,
            # presumably the UTC event time -- TODO confirm tz expectations downstream
            'datetime': datetime.now(tz=pytz.utc),
        }
        # Synchronous send so callers observe failures immediately.
        self._send(project_id, 'merge', extra_data=(state,), asynchronous=False)
"dg1732004@smail.nju.edu.cn"
] | dg1732004@smail.nju.edu.cn |
c1e9f92e53090868a41830a7785c711adfab01bc | d9f63d87a9f7b19d5ee60c5f38e9007687df4078 | /้ขๅๅฏน่ฑก-็ฑปๅๅฏน่ฑก4.py | 6b8af3e544ed5021e3843f440b94064de10669be | [] | no_license | zhouf1234/untitled3 | 4b156046f0fea2c773785cba0486621625004786 | 238c5aaef121f3d716c96290e7e417a9a4a03b4e | refs/heads/master | 2020-05-05T02:36:07.396459 | 2019-04-05T08:27:31 | 2019-04-05T08:27:31 | 179,643,758 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 419 | py | class Person:
school='้ณๅ
ๅนผๅฟๅญ'
def __init__(self):
self.name='ไธนไธน'
p1=Person()
# ไฝฟ็จๅฏน่ฑก็ๆนๆณ๏ผๅฑๆง๏ผๆถ๏ผๅ
็ๆๆ ๆญคๅฑๆง๏ผๅฆๆๆฒกๆๅ็็ฑปๆๆ ๆญคๅฑๆง
print(p1.school) #้ณๅ
ๅนผๅฟๅญ
# ็ปๅฏน่ฑกๆนschoolๅฑๆงๅ
Person.school='ๅคๅคฉๅฐๅญฆ'
print(p1.school) #ๅคๅคฉๅฐๅญฆ
print()
p2=Person()
print(p2.school) #ๅคๅคฉๅฐๅญฆ | [
"="
] | = |
8440e8250bda5ae92abd0501c1219d37a8251790 | d713770971a0d9e4a77921fa85fd03daf339dd84 | /business_hardcode/build_project/build_project.py | b34832268d919212f956754af2974f20ed2d4dea | [
"Apache-2.0"
] | permissive | laashub/laas-soa | cf9c0403cb25eedc74326752aaa776f501fac9d0 | 63a5e84b646bf1d857e97ddbbc7c1c487a9dc9e4 | refs/heads/master | 2023-01-07T17:44:24.431030 | 2020-11-12T13:35:31 | 2020-11-12T13:35:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,528 | py | """
ๆๅปบ้กน็ฎ
้่ฆไพ่ตไธไบๆฐๆฎ
ๆๅปบๆๅกๅจ
ๆบ็ ไปๅบไฟกๆฏ
้กน็ฎ้
็ฝฎไฟกๆฏ
"""
import datetime
import json
import os
import time
import traceback
from rest.operate.executor import context
local_executor_root_path = os.path.join(os.getcwd(), "business_hardcode/build_project")
remote_executor_root_path = "/data/tristan/1"  # root directory on the remote executor host
# Prepare the local data directory used to stage files before syncing.
local_executor_data_data_path = os.path.join(local_executor_root_path, "data_data")
context.prepare_local_dirs([local_executor_data_data_path])
# File recording the update-datetime fingerprint of the locally cached data.
local_update_datetime_record_path = local_executor_root_path + "/" + "local_update_datetime_record"
def build_project(executor_data_id, data_data_data):
    """Build a project on the remote build server.

    Syncs data files and build scripts to the remote executor host when the
    cached data-version fingerprint is stale, writes this run's parameters to
    the remote run directory, then launches the remote build via SSH.

    Fixes vs. the original: several mojibake-corrupted comments had been split
    across lines (breaking the syntax), and the final elapsed-time log used an
    unformatted "%s" literal and divided a seconds delta by 1000.

    :param executor_data_id: id of this executor run; also names the remote
        run directory.
    :param data_data_data: project record (git server id, docker registry id,
        repo path, ...).
    """
    # Record the run id in the executor framework's global state.
    context.global_data.executor_data_id = executor_data_id
    startup_timestamp = int(time.time())
    context.log("ๅฏๅจๆถ้ด: " + str(datetime.datetime.now()))
    try:
        # Build-server connection info.
        host_build = context.select_data_by_data_id__data_data_id(15, 1)[0]
        # Concatenated update-datetimes of the dependent records; used as a
        # cheap version fingerprint for the "sync needed?" check below.
        latest_update_datetime_record = ""
        # Git server record.
        data_data_git_server = context.select_data_by_data_id__data_data_id('5', data_data_data['git_server'])[0]
        latest_update_datetime_record += str(data_data_git_server["update_datetime"]) + ";"
        # Docker registry record.
        data_data_docker_registry = \
            context.select_data_by_data_id__data_data_id('4', data_data_data['docker_registry_id'])[0]
        latest_update_datetime_record += str(data_data_docker_registry["update_datetime"]) + ";"
        # Previously synced fingerprint, if any.
        local_update_datetime_record = None
        if os.path.exists(local_update_datetime_record_path):
            with open(local_update_datetime_record_path) as f:
                local_update_datetime_record = f.read()
        if not local_update_datetime_record or local_update_datetime_record != latest_update_datetime_record:
            # Fingerprint changed: stage the data files locally, then re-sync
            # data and business-script directories to the remote host.
            context.log(context.declare_remote_dirs(host_build, [remote_executor_root_path]))
            context.write_data_data_2_file(data_data_git_server, local_executor_data_data_path + '/git_server.json')
            context.write_data_data_2_file(data_data_docker_registry,
                                           local_executor_data_data_path + '/docker_registry.json')
            context.sync_dirs_2_remote(host_build, local_executor_root_path, remote_executor_root_path,
                                       ["data_data", "business_hyper_fusion"])
            # Sync the startup entry point as well.
            context.sync_files_2_remote(host_build, local_executor_root_path, remote_executor_root_path, ["startup.py"])
            with open(local_update_datetime_record_path, 'w') as f:
                f.write(latest_update_datetime_record)
        # Every run gets its own remote run directory holding data_data.json
        # with this run's launch parameters.
        remote_executor_run_n_path = remote_executor_root_path + "/run/" + str(executor_data_id)
        context.declare_remote_dirs(host_build, [remote_executor_run_n_path])
        context.execute_remote_command(host_build, """
sudo cat >> %s<<EOF
%s
EOF
""" % (remote_executor_run_n_path + "/data_data.json", json.dumps(data_data_data, ensure_ascii=False)))
        # Launch the remote build.
        command = "cd %s && python startup.py -ei %s" % (remote_executor_root_path, executor_data_id)
        context.RemoteShell(host_build["ip"], host_build["port"], host_build["username"],
                            host_build["password"]).execute(command)
        print("=" * 200)
    except Exception as e:
        # Best-effort: log the failure but always emit the timing lines below.
        traceback.print_exc()
        context.log(str(e))
    context.log("็ปๆๆถ้ด: " + str(datetime.datetime.now()))
    # BUGFIX: time.time() deltas are already seconds; the original divided by
    # 1000 and concatenated an unformatted "%s" literal instead of formatting.
    context.log("ๆป่ๆถ: %s ็ง้" % (int(time.time()) - startup_timestamp))
| [
"tanshilinmail@gmail.com"
] | tanshilinmail@gmail.com |
940976f32b9a4bc97574ca4635af3b4154fe20cd | 2e8f6b40cdd1c8d89b5345ab00ea467310eeb90b | /generate/select_tables.py | 12f4600132a6e33c9b1bd0cc843c417db2b24a78 | [
"MIT"
] | permissive | samirgadkari/companies | d028deb88ee6ab46391d5c6d52c455a2846e87cd | f683a3d077ec3d9b7241e9c91e6393b290f80b2e | refs/heads/master | 2021-06-25T01:59:22.878337 | 2021-03-10T02:32:32 | 2021-03-10T02:32:32 | 212,151,854 | 0 | 0 | MIT | 2021-01-14T20:47:04 | 2019-10-01T17:00:49 | Jupyter Notebook | UTF-8 | Python | false | false | 16,986 | py | import os
from utils.file import copy_file
from utils.environ import html_samples_dir
selected_tables = ['file:///Volumes/datadrive/tables-extracted_split-tables/0000036146_TRUSTMARK_CORP/10-k/2018-01-01_2018-12-31_10-K/107.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000036146_TRUSTMARK_CORP/10-k/2018-01-01_2018-12-31_10-K/137.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000036146_TRUSTMARK_CORP/10-k/2013-01-01_2013-12-31_10-K/111.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000036146_TRUSTMARK_CORP/10-k/2013-01-01_2013-12-31_10-K/14.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000064782_MELLON_FINANCIAL_CORP/10-k/2006-01-01_2006-12-31_10-K/23.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000064782_MELLON_FINANCIAL_CORP/10-k/2006-01-01_2006-12-31_10-K/83.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000064782_MELLON_FINANCIAL_CORP/10-k/2006-01-01_2006-12-31_10-K/24.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000090498_SIMMONS_FIRST_NATIONAL_CORP/10-k/2011-01-01_2011-12-31_10-K/109.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000090498_SIMMONS_FIRST_NATIONAL_CORP/10-k/2011-01-01_2011-12-31_10-K/33.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000090498_SIMMONS_FIRST_NATIONAL_CORP/10-k/2016-01-01_2016-12-31_10-K/12.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000090498_SIMMONS_FIRST_NATIONAL_CORP/10-k/2016-01-01_2016-12-31_10-K/152.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000039263_CULLEN_FROST_BANKERS,_INC./10-k/2016-01-01_2016-12-31_10-K/152.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000039263_CULLEN_FROST_BANKERS,_INC./10-k/2016-01-01_2016-12-31_10-K/147.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001475841_National_Bank_Holdings_Corp/10-k/2015-01-01_2015-12-31_10-K/157.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001475841_National_Bank_Holdings_Corp/10-k/2015-01-01_2015-12-31_10-K/15.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001606363_Green_Bancorp,_Inc./10-k/2016-01-01_2016-12-31_10-K/256.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001606363_Green_Bancorp,_Inc./10-k/2016-01-01_2016-12-31_10-K/247.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001606363_Green_Bancorp,_Inc./10-k/2016-01-01_2016-12-31_10-K/99.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001473844_CBTX,_Inc./10-k/2018-01-01_2018-12-31_10-K/111.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001473844_CBTX,_Inc./10-k/2018-01-01_2018-12-31_10-K/110.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001319327_Encore_Bancshares_Inc/10-k/2010-01-01_2010-12-31_10-K/48.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001319327_Encore_Bancshares_Inc/10-k/2010-01-01_2010-12-31_10-K/39.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001393534_Belvedere_SoCal/10-k/2008-01-01_2008-12-31_10-K/50.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001393534_Belvedere_SoCal/10-k/2008-01-01_2008-12-31_10-K/44.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001172102_FIRST_RELIANCE_BANCSHARES_INC/10-k/2012-01-01_2012-12-31_10-K/22.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001172102_FIRST_RELIANCE_BANCSHARES_INC/10-k/2012-01-01_2012-12-31_10-K/100.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001172102_FIRST_RELIANCE_BANCSHARES_INC/10-k/2012-01-01_2012-12-31_10-K/74.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001172102_FIRST_RELIANCE_BANCSHARES_INC/10-k/2012-01-01_2012-12-31_10-K/99.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001279756_SOUTHCREST_FINANCIAL_GROUP_INC/10-k/2007-01-01_2007-12-31_10-K/54.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001279756_SOUTHCREST_FINANCIAL_GROUP_INC/10-k/2007-01-01_2007-12-31_10-K/46.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001279756_SOUTHCREST_FINANCIAL_GROUP_INC/10-k/2007-01-01_2007-12-31_10-K/67.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001145547_GREER_BANCSHARES_INC/10-k/2009-01-01_2009-12-31_10-K/20.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001145547_GREER_BANCSHARES_INC/10-k/2009-01-01_2009-12-31_10-K/14.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001145547_GREER_BANCSHARES_INC/10-k/2009-01-01_2009-12-31_10-K/8.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001145547_GREER_BANCSHARES_INC/10-k/2003-01-01_2003-12-31_10-K/18.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001118237_MBT_FINANCIAL_CORP/10-k/2008-01-01_2008-12-31_10-K/4.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001118237_MBT_FINANCIAL_CORP/10-k/2013-01-01_2013-12-31_10-K/107.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001118237_MBT_FINANCIAL_CORP/10-k/2018-01-01_2018-12-31_10-K/110.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001118237_MBT_FINANCIAL_CORP/10-k/2018-01-01_2018-12-31_10-K-A/5.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001098146_PATRIOT_NATIONAL_BANCORP_INC/10-k/2010-01-01_2010-12-31_10-K-A/2.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001098146_PATRIOT_NATIONAL_BANCORP_INC/10-k/2012-01-01_2012-12-31_10-K/114.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001098146_PATRIOT_NATIONAL_BANCORP_INC/10-k/2016-01-01_2016-12-31_10-K/128.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001102266_CenterState_Bank_Corp/10-k/2009-01-01_2009-12-31_10-K/10.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001102266_CenterState_Bank_Corp/10-k/2009-01-01_2009-12-31_10-K/23.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001102266_CenterState_Bank_Corp/10-k/2017-01-01_2017-12-31_10-K/176.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001102266_CenterState_Bank_Corp/10-k/2017-01-01_2017-12-31_10-K/186.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001099932_CENTRA_FINANCIAL_HOLDINGS_INC/10-k/2008-01-01_2008-12-31_10-K/19.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001099932_CENTRA_FINANCIAL_HOLDINGS_INC/10-k/2008-01-01_2008-12-31_10-K/26.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001094742_MAINSTREET_BANKSHARES_INC/10-k/2013-01-01_2013-12-31_10-K/109.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001094742_MAINSTREET_BANKSHARES_INC/10-k/2013-01-01_2013-12-31_10-K/136.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001098151_FIDELITY_D_&_D_BANCORP_INC/10-k/2018-01-01_2018-12-31_10-K/104.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001098151_FIDELITY_D_&_D_BANCORP_INC/10-k/2018-01-01_2018-12-31_10-K/159.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001098151_FIDELITY_D_&_D_BANCORP_INC/10-k/2018-01-01_2018-12-31_10-K/180.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001084717_PACIFIC_CONTINENTAL_CORP/10-k/2008-01-01_2008-12-31_10-K/20.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001084717_PACIFIC_CONTINENTAL_CORP/10-k/2008-01-01_2008-12-31_10-K/51.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001084717_PACIFIC_CONTINENTAL_CORP/10-k/2008-01-01_2008-12-31_10-K/38.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001060455_KILLBUCK_BANCSHARES_INC/10-k/2007-01-01_2007-12-31_10-K/20.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001060455_KILLBUCK_BANCSHARES_INC/10-k/2011-01-01_2011-12-31_10-K/52.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001058867_GUARANTY_BANCSHARES_INC__TX_/10-k/2003-01-01_2003-12-31_10-K/21.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001058867_GUARANTY_BANCSHARES_INC__TX_/10-k/2003-01-01_2003-12-31_10-K/42.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001058867_GUARANTY_BANCSHARES_INC__TX_/10-k/2003-01-01_2003-12-31_10-K/9.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001038773_SMARTFINANCIAL_INC./10-k/2010-01-01_2010-12-31_10-K/16.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001038773_SMARTFINANCIAL_INC./10-k/2018-01-01_2018-12-31_10-K/101.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001037652_PELICAN_FINANCIAL_INC/10-k/2004-01-01_2004-12-31_10-K/45.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001035713_PROVIDIAN_FINANCIAL_CORP/10-k/2004-01-01_2004-12-31_10-K/24.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0001035713_PROVIDIAN_FINANCIAL_CORP/10-k/2004-01-01_2004-12-31_10-K/10.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000947559_FIRST_BANCSHARES_INC__MS_/10-k/2018-01-01_2018-12-31_10-K/10.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000947559_FIRST_BANCSHARES_INC__MS_/10-k/2018-01-01_2018-12-31_10-K/219.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000947559_FIRST_BANCSHARES_INC__MS_/10-k/2018-01-01_2018-12-31_10-K/215.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000947559_FIRST_BANCSHARES_INC__MS_/10-k/2013-01-01_2013-12-31_10-K/108.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000893467_NORTHWEST_BANCORPORATION_INC/10-k/2012-01-01_2012-12-31_10-K/100.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000875357_BOK_FINANCIAL_CORP_ET_AL/10-k/2014-01-01_2014-12-31_10-K/107.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000875357_BOK_FINANCIAL_CORP_ET_AL/10-k/2014-01-01_2014-12-31_10-K/125.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000875357_BOK_FINANCIAL_CORP_ET_AL/10-k/2018-01-01_2018-12-31_10-K/109.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000914138_MIDDLEBURG_FINANCIAL_CORP/10-k/2010-01-01_2010-12-31_10-K/20.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000914138_MIDDLEBURG_FINANCIAL_CORP/10-k/2016-01-01_2016-12-31_10-K-A/9.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000914138_MIDDLEBURG_FINANCIAL_CORP/10-k/2003-01-01_2003-12-31_10-K/20.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000914138_MIDDLEBURG_FINANCIAL_CORP/10-k/2003-01-01_2003-12-31_10-K/26.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000914138_MIDDLEBURG_FINANCIAL_CORP/10-k/2006-01-01_2006-12-31_10-K/20.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000914138_MIDDLEBURG_FINANCIAL_CORP/10-k/2006-01-01_2006-12-31_10-K/47.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000856223_SUMMIT_FINANCIAL_CORP/10-k/2004-01-01_2004-12-31_10-K/17.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000856223_SUMMIT_FINANCIAL_CORP/10-k/2004-01-01_2004-12-31_10-K/51.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000856223_SUMMIT_FINANCIAL_CORP/10-k/2004-01-01_2004-12-31_10-K/41.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000835012_COMMONWEALTH_BANKSHARES_INC/10-k/2010-01-01_2010-12-31_10-K/19.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000835012_COMMONWEALTH_BANKSHARES_INC/10-k/2010-01-01_2010-12-31_10-K/24.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000835012_COMMONWEALTH_BANKSHARES_INC/10-k/2005-01-01_2005-12-31_10-K/28.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000846617_BRIDGE_BANCORP,_INC./10-k/2018-01-01_2018-12-31_10-K/109.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000846617_BRIDGE_BANCORP,_INC./10-k/2018-01-01_2018-12-31_10-K/107.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000846617_BRIDGE_BANCORP,_INC./10-k/2004-01-01_2004-12-31_10-K/17.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000840256_VINEYARD_NATIONAL_BANCORP/10-k/2005-01-01_2005-12-31_10-K/61.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000803112_SECOND_BANCORP_INC/10-k/2002-01-01_2002-12-31_10-K/23.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000797838_NEFFS_BANCORP_INC/10-k/2010-01-01_2010-12-31_10-K-A/4.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000797838_NEFFS_BANCORP_INC/10-k/2008-01-01_2008-12-31_10-K/13.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000750686_CAMDEN_NATIONAL_CORP/10-k/2010-01-01_2010-12-31_10-K/17.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000750686_CAMDEN_NATIONAL_CORP/10-k/2010-01-01_2010-12-31_10-K/33.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000750686_CAMDEN_NATIONAL_CORP/10-k/2017-01-01_2017-12-31_10-K/106.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000759458_CANANDAIGUA_NATIONAL_CORP/10-k/2003-01-01_2003-12-31_10-K/13.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000759458_CANANDAIGUA_NATIONAL_CORP/10-k/2012-01-01_2012-12-31_10-K/59.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000759458_CANANDAIGUA_NATIONAL_CORP/10-k/2012-01-01_2012-12-31_10-K/71.table-extracted',
'file:///Volumes/datadrive/tables-extracted_split-tables/0000740971_OLD_POINT_FINANCIAL_CORP/10-k/2010-01-01_2010-12-31_10-K/10.table-extracted']
def select_tables():
    """Copy every selected table file into the HTML-input sample directory.

    Strips the ``file://`` prefix from each URL in ``selected_tables``, then
    copies the file to ``<html_samples_dir()>/html_input/`` under a flattened
    name built by joining the path components after the dataset root with
    ``__`` (parts[:4] are ['', 'Volumes', 'datadrive', 'tables-...']).
    """
    prefix = 'file://'
    for url in selected_tables:
        # Work with locals only. The original code rebound the module-level
        # `selected_tables` to a map() iterator, which the loop exhausted,
        # so any second call became a silent no-op.
        filename = url[len(prefix):] if url.startswith(prefix) else url
        parts = filename.split(os.sep)
        dest_filename = os.path.join(html_samples_dir(),
                                     'html_input',
                                     '__'.join(parts[4:]))
        copy_file(filename, dest_filename)
| [
"samir.gadkari@gmail.com"
] | samir.gadkari@gmail.com |
b7b344929e53f398c8535a2cacd7a38cd91a01ce | 4df1b2f1b5085e73223800ada334c8603b8be87b | /getdata.py | c0bc31df5a7f8447c7897fb0169999f1d245441a | [] | no_license | srejun/Project_Roboduct | 1c34355951cf0670ce513b332a70c8785f6ed154 | b656797d86773e84c0791a57751f8db973ffa5d8 | refs/heads/master | 2022-06-28T10:36:12.636777 | 2020-05-06T11:49:38 | 2020-05-06T11:49:38 | 261,741,215 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,183 | py | #Libraries
from __future__ import print_function
import RPi.GPIO as GPIO
import time
import xbox
file = open("cleaningdata.txt","w")
GPIO.setmode(GPIO.BOARD)
GPIO.setwarnings(False)
#set GPIO Pins
GPIO_TRIGGERleft = 12
GPIO_ECHOleft = 16
GPIO_TRIGGERrigth = 24
GPIO_ECHOrigth = 26
#set GPIO direction (IN / OUT)
GPIO.setup(GPIO_TRIGGERleft, GPIO.OUT)
GPIO.setup(GPIO_ECHOleft, GPIO.IN)
GPIO.setup(GPIO_TRIGGERrigth, GPIO.OUT)
GPIO.setup(GPIO_ECHOrigth, GPIO.IN)
w1 = 31
w2 = 33
w3 = 35
w4 = 37
w5 = 5
w6 = 7
servoPINleft = 38
servoPINright = 40
turn = 27 #power of left-right (0-100)
run = 34 #power of forward-backward (0-100)
works = 100
#t=time.sleep(0.2)
GPIO.setup(w1,GPIO.OUT)
GPIO.setup(w2,GPIO.OUT)
GPIO.setup(w3,GPIO.OUT)
GPIO.setup(w4,GPIO.OUT)
GPIO.setup(w5,GPIO.OUT)
GPIO.setup(w6,GPIO.OUT)
GPIO.setup(servoPINleft, GPIO.OUT)
GPIO.setup(servoPINright, GPIO.OUT)
pX = GPIO.PWM(servoPINleft, 50) # GPIO 17 for PWM with 50Hz
pY = GPIO.PWM(servoPINright, 50)
pX.start(8) # Initialization
pY.start(4)
pwm1 = GPIO.PWM(w1,120) # FL
pwm2 = GPIO.PWM(w2,120) # BL
pwm3 = GPIO.PWM(w3,120) # BR
pwm4 = GPIO.PWM(w4,120) # FR
pwm5 = GPIO.PWM(w5,120)
pwm6 = GPIO.PWM(w6,120)
pwm1.start(0)
pwm2.start(0)
pwm3.start(0)
pwm4.start(0)
pwm5.start(0)
pwm6.start(0)
checkword = 'none'
action='none'
def work():
    """Switch on the cleaning attachment: one bridge side off, the other at `works` duty."""
    for channel, duty in ((pwm5, 0), (pwm6, works)):
        channel.ChangeDutyCycle(duty)
def forward():
    """Drive forward: energise the pwm2 (BL) / pwm3 (BR) channels, others off.

    pwm2 runs 2 duty points hotter than pwm3 — presumably a motor-imbalance
    trim; confirm against the chassis before changing.
    """
    for channel, duty in ((pwm1, 0), (pwm2, run + 2), (pwm3, run), (pwm4, 0)):
        channel.ChangeDutyCycle(duty)
def backward():
    """Drive backward: energise the pwm1 (FL) / pwm4 (FR) channels at `run` duty."""
    for channel, duty in ((pwm1, run), (pwm2, 0), (pwm3, 0), (pwm4, run)):
        channel.ChangeDutyCycle(duty)
def left():
    """Turn left: energise the pwm2 (BL) / pwm4 (FR) channels at `turn` duty."""
    for channel, duty in ((pwm1, 0), (pwm2, turn), (pwm3, 0), (pwm4, turn)):
        channel.ChangeDutyCycle(duty)
def right():
    """Turn right: energise the pwm1 (FL) / pwm3 (BR) channels at `turn` duty."""
    for channel, duty in ((pwm1, turn), (pwm2, 0), (pwm3, turn), (pwm4, 0)):
        channel.ChangeDutyCycle(duty)
def lefthight():
    """Sharper left turn: same channels as left() but at 1.5x the `turn` duty."""
    for channel, duty in ((pwm1, 0), (pwm2, turn * 1.5), (pwm3, 0), (pwm4, turn * 1.5)):
        channel.ChangeDutyCycle(duty)
def righthight():
    """Sharper right turn: same channels as right() but at 1.5x the `turn` duty."""
    for channel, duty in ((pwm1, turn * 1.5), (pwm2, 0), (pwm3, turn * 1.5), (pwm4, 0)):
        channel.ChangeDutyCycle(duty)
def stop():
    """Halt all four drive channels (the cleaning motor is left untouched)."""
    for channel in (pwm1, pwm2, pwm3, pwm4):
        channel.ChangeDutyCycle(0)
# Format floating point number to string format -x.xxx
def fmtFloat(n):
    """Render `n` as a fixed-width string: 6 characters, 3 decimal places."""
    return format(n, '6.3f')
# Print one or more values without a line feed
def show(*args):
    """Print every argument back-to-back, with no separator and no newline."""
    print(*args, sep="", end="")
# Print true or false value based on a boolean, without linefeed
def showIf(boolean, ifTrue, ifFalse=" "):
    """Print `ifTrue` when `boolean` is truthy, else `ifFalse` (default one space).

    No trailing newline, so successive calls build up a single status line.
    """
    print(ifTrue if boolean else ifFalse, end="")
# Instantiate the controller
joy = xbox.Joystick()
# Show various axis and button states until Back button is pressed
print("Xbox controller sample: Press Back button to exit")
def distanceleft():
    """Return the distance in cm reported by the left HC-SR04 ultrasonic sensor.

    NOTE(review): both polling loops are unbounded — a disconnected or faulty
    echo line makes this function block forever.
    """
    # ~10 microsecond pulse on the trigger pin starts one measurement.
    GPIO.output(GPIO_TRIGGERleft, True)
    time.sleep(0.00001)
    GPIO.output(GPIO_TRIGGERleft, False)
    StartTime = time.time()
    StopTime = time.time()
    # Echo rises when the ultrasonic burst leaves: keep the last low timestamp.
    while GPIO.input(GPIO_ECHOleft) == 0:
        StartTime = time.time()
    # Echo falls when the reflection arrives: keep the last high timestamp.
    while GPIO.input(GPIO_ECHOleft) == 1:
        StopTime = time.time()
    TimeElapsed = StopTime - StartTime
    # Speed of sound 34300 cm/s, halved because the pulse travels out and back.
    distance = (TimeElapsed * 34300) / 2
    return distance
def distancerigth():
    """Return the distance in cm reported by the right HC-SR04 ultrasonic sensor.

    Mirror of distanceleft() on the right-hand trigger/echo pins.
    NOTE(review): the polling loops have no timeout and can block forever.
    """
    # ~10 microsecond pulse on the trigger pin starts one measurement.
    GPIO.output(GPIO_TRIGGERrigth, True)
    time.sleep(0.00001)
    GPIO.output(GPIO_TRIGGERrigth, False)
    StartTime = time.time()
    StopTime = time.time()
    # Echo rises when the ultrasonic burst leaves: keep the last low timestamp.
    while GPIO.input(GPIO_ECHOrigth) == 0:
        StartTime = time.time()
    # Echo falls when the reflection arrives: keep the last high timestamp.
    while GPIO.input(GPIO_ECHOrigth) == 1:
        StopTime = time.time()
    TimeElapsed = StopTime - StartTime
    # Speed of sound 34300 cm/s, halved because the pulse travels out and back.
    distance = (TimeElapsed * 34300) / 2
    return distance
if __name__ == '__main__':
    # Data-collection loop: drive the robot with an Xbox pad, sample both
    # ultrasonic sensors each pass, and log (distances, action) records to
    # cleaningdata.txt for later training.
    #print("Action")
    try:
        work()
        while not joy.Back():
            #work()
            action='none'
            # Debug readouts kept for reference:
            #    show("Connected:")
            #    showIf(joy.connected(), "Y", "N")
            #    show(" Left X/Y:", fmtFloat(joy.leftX()), "/", fmtFloat(joy.leftY()))
            #    show(" RightTrg:", fmtFloat(joy.rightTrigger()))
            # A/B/X/Y buttons: drive a 1-second movement burst, then label the sample.
            show(" Buttons:")
            showIf(joy.A(), "A")
            #    if(joy.A()==1):
            showIf(joy.B(), "B")
            showIf(joy.X(), "X")
            showIf(joy.Y(), "Y")
            if(joy.A()==1):
                #    pX.ChangeDutyCycle(7.2)
                #    pY.ChangeDutyCycle(4.8)
                #    show("down")
                backward()
                time.sleep(1)
                stop()
                action='backward'
                #    time.sleep(1)
            elif(joy.Y()==1):
                #    pX.ChangeDutyCycle(9)
                #    pY.ChangeDutyCycle(3)
                #    show("up")
                forward()
                time.sleep(1)
                stop()
                action='forward'
                #    time.sleep(1)
            elif(joy.X()==1):
                left()
                time.sleep(1)
                stop()
                action='left'
            elif(joy.B()==1):
                right()
                time.sleep(1)
                stop()
                action='right'
            # D-pad up/down tilts the servo pair; left/right are placeholders.
            show(" Dpad:")
            showIf(joy.dpadUp(), "U")
            showIf(joy.dpadDown(), "D")
            showIf(joy.dpadLeft(), "L")
            showIf(joy.dpadRight(), "R")
            if(joy.dpadUp()==1):
                #    forward()
                #    time.sleep(1)
                #    stop()
                #    action='forward'
                pX.ChangeDutyCycle(9)
                pY.ChangeDutyCycle(3)
                show("up")
            elif(joy.dpadDown()==1):
                pX.ChangeDutyCycle(8.0)
                pY.ChangeDutyCycle(4.0)
                show("down")
                #    backward()
                #    time.sleep(1)
                #    stop()
                #    action='backward'
            elif(joy.dpadLeft()==1):
                show("")
                #    left()
                #    time.sleep(1)
                #    stop()
                #    action='left'
            elif(joy.dpadRight()==1):
                show("")
                #    right()
                #    time.sleep(1)
                #    stop()
                #    action='right'
            # Carriage return keeps the status line in place on the console.
            show(chr(13))
            distleft = distanceleft()
            print ("Distanceleft = %.1f cm" % distleft)
            distrigth = distancerigth()
            print ("Distancerigth = %.1f cm" % distrigth)
            print ("action = %s" %action)
            time.sleep(2)
            # `file` is the module-level log handle opened at the top of the
            # script (it shadows the Python 2 builtin of the same name).
            file.write("distLeft %.2f\r\n" %distleft)
            file.write("distRigth %.2f\r\n" %distrigth)
            file.write("action %s\r\n" %action)
        # Back pressed: close the controller and the log file.
        joy.close()
        file.close()
        # Earlier sensor-only sampling loop, kept for reference:
        #    while True:
        #
        #        distleft = distanceleft()
        #        print ("Measured Distanceleft = %.1f cm" % distleft)
        #        distrigth = distancerigth()
        #        print ("Measured Distancerigth = %.1f cm" % distrigth)
        #        time.sleep(2)
        #        file.write("distLeft %.2f\r\n" %distleft)
        #        file.write("distRigth %.2f\r\n" %distrigth)
        #        time.sleep(2)
        #        print("Action")
        #    file.close()
    # Reset by pressing CTRL + C
    except KeyboardInterrupt:
        print("Measurement stopped by User")
        GPIO.cleanup()
| [
"noreply@github.com"
] | srejun.noreply@github.com |
bf0840495fc063b35d948fe9b69befd937bd7de7 | d60acaac9e460c5693efe61449667b3c399c53c8 | /algebra/linear/fishercriterion.py | 1c1c14ab2e5666bf05a05221df9b5c7bd15195f6 | [] | no_license | HussainAther/mathematics | 53ea7fb2470c88d674faa924405786ba3b860705 | 6849cc891bbb9ac69cb20dfb13fe6bb5bd77d8c5 | refs/heads/master | 2021-07-22T00:07:53.940786 | 2020-05-07T03:11:17 | 2020-05-07T03:11:17 | 157,749,226 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 900 | py | import numpy as np
"""
We can use dimensionality reduction for linear classification models.
One-dimensional input vector x projected down to one dimension using
y = w^T x
We consider a two-class problem with N1 points of class C1 and N2 points of class C2
so the mean vectors of the two classes are given by:
m1 = (1/N1) * summation of x_n over class C1 and m2 = (1/N2) times summation of x_n over class C2
Separation of the projected class means lets us choose w (the plane onto which we project)
m2 - m1 = w^T (m2-m1)
such that mk = w^T mk .
Fisher criterion is defined as the ratio of the between-class variance to the
within-class variance given by:
J(w) = (m2-m1)^2 / (s1^2 + s2^2)
in which sk^2 for some k is given by the summation of (yn - mk)^2
for one-dimensional space y
"""
def fisher_criterion(v1, v2):
    """Fisher-style separability score of two samples.

    Returns |mean(v1) - mean(v2)| / (var(v1) + var(v2)): the absolute-mean
    variant of the criterion J(w) described above (the classical form squares
    the numerator instead of taking its absolute value).
    """
    mean_gap = np.mean(v1) - np.mean(v2)
    within_class_spread = np.var(v1) + np.var(v2)
    return abs(mean_gap) / within_class_spread
| [
"shussainather@gmail.com"
] | shussainather@gmail.com |
0d6f563bf487e50143491c9294e56c9e298e24ec | a7596165a29e5186bc6c4718e3b6e835939b105d | /apps/pig/src/pig/views.py | 47823c4bb576f890292573687f7d79887416ac0b | [
"Apache-2.0"
] | permissive | lockhart39/HueQualityAndIngestionApp | f0c778665f0fbe699ec30e0df5e9f3ed8a9c3384 | c75e55a43a8bdeb7aa0f5bf2101ec72b01dcac1c | refs/heads/master | 2021-08-20T00:31:29.481333 | 2017-11-27T19:22:16 | 2017-11-27T19:22:16 | 112,237,923 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,542 | py | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from django.views.decorators.csrf import ensure_csrf_cookie
from desktop.lib.django_util import JsonResponse, render
from desktop.lib.exceptions_renderable import PopupException
from desktop.lib.rest.http_client import RestException
from desktop.models import Document
from oozie.views.dashboard import show_oozie_error, check_job_access_permission,\
check_job_edition_permission
from pig import api
from pig.management.commands import pig_setup
from pig.models import get_workflow_output, hdfs_link, PigScript,\
create_or_update_script, get_scripts
LOG = logging.getLogger(__name__)
@ensure_csrf_cookie
def app(request):
  """Render the Pig editor page, passing the Beeswax autocomplete base URL.

  The URL is best-effort: when the beeswax app is not deployed the reverse()
  lookup fails and an empty string is sent to the template instead.
  """
  autocomplete_base_url = ''
  try:
    autocomplete_base_url = reverse('beeswax:api_autocomplete_databases', kwargs={}) + '/'
  except Exception:
    # Narrowed from a bare `except:` (which also caught SystemExit and
    # KeyboardInterrupt); a missing beeswax app is expected and only logged.
    LOG.exception('failed to find autocomplete base url')

  return render('app.mako', request, {
    'autocomplete_base_url': autocomplete_base_url,
  })
def scripts(request):
  """Return the requesting user's saved Pig script designs as JSON."""
  user_scripts = get_scripts(request.user, is_design=True)
  return JsonResponse(user_scripts, safe=False)
@show_oozie_error
def dashboard(request):
  """Return the combined Oozie + Hue-history Pig job list as JSON."""
  pig_api = api.get(request.fs, request.jt, request.user)
  oozie_jobs = pig_api.get_jobs()
  history_docs = Document.objects.available(PigScript, request.user, with_history=True)
  return JsonResponse(pig_api.massaged_jobs_for_json(request, oozie_jobs, history_docs), safe=False)
def save(request):
  """Create or update a Pig script *design* from POSTed form data.

  Expects POST fields: id, name, script, plus JSON-encoded parameters,
  resources and hadoopProperties. Returns the script id and its Document id
  as JSON. Raises PopupException on non-POST requests.
  """
  if request.method != 'POST':
    raise PopupException(_('POST request required.'))

  attrs = {
    'id': request.POST.get('id'),
    'name': request.POST.get('name'),
    'script': request.POST.get('script'),
    'user': request.user,
    'parameters': json.loads(request.POST.get('parameters')),
    'resources': json.loads(request.POST.get('resources')),
    'hadoopProperties': json.loads(request.POST.get('hadoopProperties')),
  }

  pig_script = create_or_update_script(**attrs)
  # Saving from the editor always marks the script as a design (not history).
  pig_script.is_design = True
  pig_script.save()

  response = {
    'id': pig_script.id,
    'docId': pig_script.doc.get().id
  }
  return JsonResponse(response, content_type="text/plain")
@show_oozie_error
def stop(request):
  """Kill the Oozie job backing a Pig script run, then return its watch payload.

  Expects POST with 'id' (the PigScript pk). Raises PopupException on
  non-POST requests or when the Oozie stop call fails.
  """
  if request.method != 'POST':
    raise PopupException(_('POST request required.'))

  pig_script = PigScript.objects.get(id=request.POST.get('id'))
  job_id = pig_script.dict['job_id']

  # The caller must be allowed to both see and modify the job.
  job = check_job_access_permission(request, job_id)
  check_job_edition_permission(job, request.user)

  try:
    api.get(request.fs, request.jt, request.user).stop(job_id)
  except RestException as e:
    # The original applied '%' to a format string with no placeholder,
    # which raised TypeError instead of reporting the real error.
    raise PopupException(_("Error stopping Pig script: %s") % e.message)

  return watch(request, job_id)
@show_oozie_error
def run(request):
  """Submit a Pig script to Oozie and record the resulting job id.

  Like save() but persists the snapshot as history (is_design=False),
  submits it with the POSTed submissionVariables, and returns a watch URL
  for polling the run. Raises PopupException on non-POST requests.
  """
  if request.method != 'POST':
    raise PopupException(_('POST request required.'))

  attrs = {
    'id': request.POST.get('id'),
    'name': request.POST.get('name'),
    'script': request.POST.get('script'),
    'user': request.user,
    'parameters': json.loads(request.POST.get('parameters')),
    'resources': json.loads(request.POST.get('resources')),
    'hadoopProperties': json.loads(request.POST.get('hadoopProperties')),
    'is_design': False
  }

  pig_script = create_or_update_script(**attrs)

  params = request.POST.get('submissionVariables')
  oozie_id = api.get(request.fs, request.jt, request.user).submit(pig_script, params)

  # Remember which Oozie job backs this run so stop()/watch() can find it.
  pig_script.update_from_dict({'job_id': oozie_id})
  pig_script.save()

  response = {
    'id': pig_script.id,
    'watchUrl': reverse('pig:watch', kwargs={'job_id': oozie_id}) + '?format=python'
  }
  return JsonResponse(response, content_type="text/plain")
def copy(request):
  """Duplicate a Pig script (and its Document) for the requesting user.

  Expects POST with 'id'. The copy is renamed "<name> (Copy)" and owned by
  request.user. Returns the new script's fields as JSON. Raises
  PopupException on non-POST requests or when the source is not readable.
  """
  if request.method != 'POST':
    raise PopupException(_('POST request required.'))

  pig_script = PigScript.objects.get(id=request.POST.get('id'))
  doc = pig_script.doc.get()

  try:
    doc.can_read_or_exception(request.user)
  except Exception as e:  # py3-compatible form of the old `except Exception, e`
    raise PopupException(e)

  existing_script_data = pig_script.dict

  owner = request.user
  name = existing_script_data["name"] + _(' (Copy)')
  script = existing_script_data["script"]
  parameters = existing_script_data["parameters"]
  resources = existing_script_data["resources"]
  hadoopProperties = existing_script_data["hadoopProperties"]

  script_copy = PigScript.objects.create(owner=owner)
  script_copy.update_from_dict({
      'name': name,
      'script': script,
      'parameters': parameters,
      'resources': resources,
      'hadoopProperties': hadoopProperties
  })
  script_copy.save()

  # Mirror the model copy at the Document level so sharing/trash work.
  copy_doc = doc.copy(content_object=script_copy, name=name, owner=owner)

  response = {
    'id': script_copy.id,
    'docId': copy_doc.id,
    'name': name,
    'script': script,
    'parameters': parameters,
    'resources': resources,
    'hadoopProperties': hadoopProperties
  }
  return JsonResponse(response, content_type="text/plain")
def delete(request):
  """Best-effort bulk delete of Pig scripts.

  Expects POST with 'ids', a comma-separated list of PigScript pks. Scripts
  that are missing or not editable by the caller are logged and skipped.
  Returns the submitted ids as JSON.
  """
  if request.method != 'POST':
    raise PopupException(_('POST request required.'))

  ids = request.POST.get('ids').split(",")

  for script_id in ids:
    try:
      pig_script = PigScript.objects.get(id=script_id)
      pig_script.can_edit_or_exception(request.user)
      pig_script.doc.all().delete()
      pig_script.delete()
    except Exception:
      # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt are
      # no longer swallowed; the dead `None` statement was dropped.
      LOG.exception('failed to delete pig script')

  response = {
    'ids': ids,
  }
  return JsonResponse(response, content_type="text/plain")
@show_oozie_error
def watch(request, job_id):
  """Return JSON progress, logs and output for the Oozie workflow of a run.

  Permission-checked via check_job_access_permission; used by the dashboard
  to poll a running Pig job until `isReallyDone` becomes true.
  """
  oozie_workflow = check_job_access_permission(request, job_id)
  logs, workflow_actions, is_really_done = api.get(request.fs, request.jt, request.user).get_log(request, oozie_workflow)
  output = get_workflow_output(oozie_workflow, request.fs)

  workflow = {
    'job_id': oozie_workflow.id,
    'status': oozie_workflow.status,
    'progress': oozie_workflow.get_progress(),
    'isRunning': oozie_workflow.is_running(),
    'killUrl': reverse('oozie:manage_oozie_jobs', kwargs={'job_id': oozie_workflow.id, 'action': 'kill'}),
    'rerunUrl': reverse('oozie:rerun_oozie_job', kwargs={'job_id': oozie_workflow.id, 'app_path': oozie_workflow.appPath}),
    'actions': workflow_actions
  }

  response = {
    'workflow': workflow,
    'logs': logs,
    'isReallyDone': is_really_done,
    'output': hdfs_link(output)
  }
  return JsonResponse(response, content_type="text/plain")
def install_examples(request):
  """Install the bundled Pig examples via the pig_setup management command.

  POST only. Returns JSON {'status': 0} on success, or {'status': -1,
  'message': ...} on a non-POST request or a setup failure.
  """
  result = {'status': -1, 'message': ''}

  if request.method != 'POST':
    result['message'] = _('A POST request is required.')
  else:
    try:
      pig_setup.Command().handle_noargs()
      result['status'] = 0
    except Exception as e:  # py3-compatible form of the old `except Exception, e`
      LOG.exception(e)
      result['message'] = str(e)

  return JsonResponse(result)
| [
"cloudera@quickstart.cloudera"
] | cloudera@quickstart.cloudera |
8af064ef0d7490610f6c59dfd4002054ce1eda91 | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_27094.py | ca9e4b98345e5ed3db4156dcb2812fcc628ce499 | [] | no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 57 | py | # Save full text of a tweet with tweepy
retweeted_status
| [
"ubuntu@ip-172-31-7-228.us-west-2.compute.internal"
] | ubuntu@ip-172-31-7-228.us-west-2.compute.internal |
5148b36fb7b804d585edaef072685b6c32aa6ce1 | 63e8a1c42aad04fa471d5dc92ee2308b511bf33c | /hotel/hotel/settings.py | a1964374a2b5d3a9b27a53f7058b5804bdf5645e | [] | no_license | sampathkumar0511/new_proj | 7a5dfcaf0aaa7f27b50a4d1d11012587d2818221 | 67944367d35da3639e35786d3d9842d97af2c3dd | refs/heads/main | 2023-01-14T12:37:15.549711 | 2020-11-21T16:47:10 | 2020-11-21T16:47:10 | 312,472,762 | 0 | 0 | null | 2020-11-21T16:47:11 | 2020-11-13T04:26:55 | Python | UTF-8 | Python | false | false | 3,073 | py | """
Django settings for hotel project.
Generated by 'django-admin startproject' using Django 3.1.3.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'zlwnxe%ypi2m0z-5=(l4zj^v)-l1o%5h1*5kwa7ogs_+a4t&8v'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'recipe',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'hotel.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'hotel.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
| [
"sampath@sampaths-MacBook-Pro.local"
] | sampath@sampaths-MacBook-Pro.local |
187bd2a6ff0bfea7ed5629278eea007adedb4d97 | 54d3a1558a4bd38888d4d51f1ae2d2699965087c | /exa.py | 59f998f63b4e4f8e21e59e08b9035fd514853656 | [] | no_license | A8IK/Python-2 | a86843c6ccfe23d42faebb020307351a108075bd | 538aee64bac73110cd0a8ac74747c9d2fa485149 | refs/heads/main | 2023-01-21T12:42:51.226144 | 2020-12-04T18:14:32 | 2020-12-04T18:14:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 43 | py | def div(a,b):
print(a/b)
div(4,2)
| [
"noreply@github.com"
] | A8IK.noreply@github.com |
c1b71ce4bf116be38058532866d68049bfa605b1 | 88ea6ae5a8f97e3771490583d8acecdbe2877fd8 | /zips/plugin.video.vistatv-ini-maker/main.py | 773a4185cc39459dd2f2a721e93b53361a46dfec | [] | no_license | staycanuca/PersonalDataVistaTV | 26497a29e6f8b86592609e7e950d6156aadf881c | 4844edbfd4ecfc1d48e31432c39b9ab1b3b1a222 | refs/heads/master | 2021-01-25T14:46:25.763952 | 2018-03-03T10:48:06 | 2018-03-03T10:48:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 20,546 | py | from xbmcswift2 import Plugin
from xbmcswift2 import actions
import xbmc,xbmcaddon,xbmcvfs,xbmcgui
import re
from rpc import RPC
import requests
import random
import sqlite3
from datetime import datetime,timedelta
import time
#import urllib
import HTMLParser
import xbmcplugin
#import xml.etree.ElementTree as ET
#import sqlite3
import os
#import shutil
#from rpc import RPC
from types import *
plugin = Plugin()
big_list_view = False
def log2(v):
xbmc.log(repr(v))
def log(v):
xbmc.log(re.sub(',',',\n',repr(v)))
def get_icon_path(icon_name):
addon_path = xbmcaddon.Addon().getAddonInfo("path")
return os.path.join(addon_path, 'resources', 'img', icon_name+".png")
def remove_formatting(label):
label = re.sub(r"\[/?[BI]\]",'',label)
label = re.sub(r"\[/?COLOR.*?\]",'',label)
return label
@plugin.route('/addon/<id>')
def addon(id):
addon = plugin.get_storage(id)
items = []
for name in sorted(addon):
url = addon[name]
items.append(
{
'label': name,
'path': url,
'thumbnail':get_icon_path('tv'),
'is_playable':True,
})
return items
@plugin.route('/player')
def player():
if not plugin.get_setting('addons.folder'):
dialog = xbmcgui.Dialog()
dialog.notification("Echo INI Creator", "Set Folder",xbmcgui.NOTIFICATION_ERROR )
xbmcaddon.Addon ('plugin.video.vistatv-ini-maker').openSettings()
addons = plugin.get_storage("addons")
for a in addons.keys():
add = plugin.get_storage(a)
add.clear()
addons.clear()
folder = plugin.get_setting("addons.folder")
file = plugin.get_setting("addons.file")
filename = os.path.join(folder,file)
f = xbmcvfs.File(filename,"rb")
lines = f.read().splitlines()
addon = None
for line in lines:
if line.startswith('['):
a = line.strip('[]')
addons[a] = a
addon = plugin.get_storage(a)
addon.clear()
elif "=" in line:
(name,url) = line.split('=',1)
if url and addon is not None:
addon[name] = url
items = []
for id in sorted(addons):
items.append(
{
'label': id,
'path': plugin.url_for('addon',id=id),
'thumbnail':get_icon_path('tv'),
})
return items
@plugin.route('/play/<url>')
def play(url):
xbmc.executebuiltin('PlayMedia(%s)' % url)
@plugin.route('/pvr_subscribe')
def pvr_subscribe():
plugin.set_setting("pvr.subscribe","true")
xbmc.executebuiltin('Container.Refresh')
@plugin.route('/pvr_unsubscribe')
def pvr_unsubscribe():
plugin.set_setting("pvr.subscribe","false")
xbmc.executebuiltin('Container.Refresh')
@plugin.route('/add_folder/<id>/<path>')
def add_folder(id,path):
folders = plugin.get_storage('folders')
#ids = plugin.get_storage('ids')
folders[path] = id
#ids[id] = id
xbmc.executebuiltin('Container.Refresh')
@plugin.route('/remove_folder/<id>/<path>')
def remove_folder(id,path):
folders = plugin.get_storage('folders')
del folders[path]
xbmc.executebuiltin('Container.Refresh')
@plugin.route('/clear')
def clear():
folders = plugin.get_storage('folders')
folders.clear()
@plugin.route('/folder/<id>/<path>')
def folder(id,path):
folders = plugin.get_storage('folders')
response = RPC.files.get_directory(media="files", directory=path, properties=["thumbnail"])
files = response["files"]
dirs = dict([[remove_formatting(f["label"]), f["file"]] for f in files if f["filetype"] == "directory"])
links = {}
thumbnails = {}
for f in files:
if f["filetype"] == "file":
label = remove_formatting(f["label"])
file = f["file"]
while (label in links):
label = "%s." % label
links[label] = file
thumbnails[label] = f["thumbnail"]
items = []
for label in sorted(dirs):
path = dirs[label]
context_items = []
if path in folders:
fancy_label = "[COLOR red][B]%s[/B][/COLOR] " % label
context_items.append(("[COLOR red][B]%s[/B][/COLOR] " % 'Unsubscribe', 'XBMC.RunPlugin(%s)' % (plugin.url_for(remove_folder, id=id, path=path))))
else:
fancy_label = "[B]%s[/B]" % label
context_items.append(("[COLOR red][B]%s[/B][/COLOR] " % 'Subscribe', 'XBMC.RunPlugin(%s)' % (plugin.url_for(add_folder, id=id, path=path))))
items.append(
{
'label': fancy_label,
'path': plugin.url_for('folder',id=id, path=path),
'thumbnail': get_icon_path('tv'),
'context_menu': context_items,
})
for label in sorted(links):
items.append(
{
'label': label,
'path': plugin.url_for('play',url=links[label]),
'thumbnail': thumbnails[label],
})
return items
@plugin.route('/pvr')
def pvr():
index = 0
urls = []
channels = {}
for group in ["radio","tv"]:
urls = urls + xbmcvfs.listdir("pvr://channels/%s/All channels/" % group)[1]
for group in ["radio","tv"]:
groupid = "all%s" % group
json_query = RPC.PVR.get_channels(channelgroupid=groupid, properties=[ "thumbnail", "channeltype", "hidden", "locked", "channel", "lastplayed", "broadcastnow" ] )
if "channels" in json_query:
for channel in json_query["channels"]:
channelname = channel["label"]
channelid = channel["channelid"]-1
channellogo = channel['thumbnail']
streamUrl = urls[index]
index = index + 1
url = "pvr://channels/%s/All channels/%s" % (group,streamUrl)
channels[url] = channelname
items = []
for url in sorted(channels, key=lambda x: channels[x]):
name = channels[url]
items.append(
{
'label': name,
'path': url,
'is_playable': True,
})
return items
@plugin.route('/subscribe')
def subscribe():
folders = plugin.get_storage('folders')
ids = {}
for folder in folders:
id = folders[folder]
ids[id] = id
all_addons = []
for type in ["xbmc.addon.video", "xbmc.addon.audio"]:
response = RPC.addons.get_addons(type=type,properties=["name", "thumbnail"])
if "addons" in response:
found_addons = response["addons"]
all_addons = all_addons + found_addons
seen = set()
addons = []
for addon in all_addons:
if addon['addonid'] not in seen:
addons.append(addon)
seen.add(addon['addonid'])
items = []
pvr = plugin.get_setting('pvr.subscribe')
context_items = []
label = "PVR"
if pvr == "true":
fancy_label = "[COLOR red][B]%s[/B][/COLOR] " % label
context_items.append(("[COLOR red][B]%s[/B][/COLOR] " % 'Unsubscribe', 'XBMC.RunPlugin(%s)' % (plugin.url_for(pvr_unsubscribe))))
else:
fancy_label = "[B]%s[/B]" % label
context_items.append(("[COLOR red][B]%s[/B][/COLOR] " % 'Subscribe', 'XBMC.RunPlugin(%s)' % (plugin.url_for(pvr_subscribe))))
items.append(
{
'label': fancy_label,
'path': plugin.url_for('pvr'),
'thumbnail':get_icon_path('tv'),
'context_menu': context_items,
})
addons = sorted(addons, key=lambda addon: remove_formatting(addon['name']).lower())
for addon in addons:
label = remove_formatting(addon['name'])
id = addon['addonid']
path = "plugin://%s" % id
context_items = []
if id in ids:
fancy_label = "[COLOR red][B]%s[/B][/COLOR] " % label
context_items.append(("[COLOR red][B]%s[/B][/COLOR] " % 'Unsubscribe', 'XBMC.RunPlugin(%s)' % (plugin.url_for(remove_folder, id=id, path=path))))
else:
fancy_label = "[B]%s[/B]" % label
context_items.append(("[COLOR red][B]%s[/B][/COLOR] " % 'Subscribe', 'XBMC.RunPlugin(%s)' % (plugin.url_for(add_folder, id=id, path=path))))
items.append(
{
'label': fancy_label,
'path': plugin.url_for('folder',id=id, path=path),
'thumbnail': get_icon_path('tv'),
'context_menu': context_items,
})
return items
@plugin.route('/update')
def update():
if not plugin.get_setting('addons.folder'):
dialog = xbmcgui.Dialog()
dialog.notification("Echo INI Creator", "Set Folder",xbmcgui.NOTIFICATION_ERROR )
xbmcaddon.Addon ('plugin.video.vistatv-ini-maker').openSettings()
folders = plugin.get_storage('folders')
streams = {}
for folder in folders:
log("[plugin.video.vistatv-ini-maker] " + folder)
path = folder
id = folders[folder]
if not id in streams:
streams[id] = {}
response = RPC.files.get_directory(media="files", directory=path, properties=["thumbnail"])
if not 'error' in response:
files = response["files"]
links = {}
thumbnails = {}
for f in files:
if f["filetype"] == "file":
label = remove_formatting(f["label"])
file = f["file"]
while (label in links):
label = "%s." % label
links[label] = file
thumbnails[label] = f["thumbnail"]
streams[id][label] = file
if plugin.get_setting("pvr.subscribe") == "true":
streams["plugin.video.vistatv-ini-maker"] = {}
items = pvr()
for item in items:
name = item["label"]
url = item["path"]
streams["plugin.video.vistatv-ini-maker"][name] = url
folder = plugin.get_setting("addons.folder")
file = plugin.get_setting("addons.file")
filename = os.path.join(folder,file)
f = xbmcvfs.File(filename,"wb")
# steams contains all the addon ids of the addons you are generating i.e plugin.video.sportie
for id in sorted(streams):
# make a line that contains the plugin to a line before all the channels i.e [plugin.video.sportie]
line = "[%s]\n" % id
# write that line to the ini file.
f.write(line.encode("utf8"))
# make the word channels contain all the streams from said addon.
channels = streams[id]
# for each channel in the addon. i.e bbc one
for channel in sorted(channels):
# Grab the URL to the channel from the list
url = channels[channel]
# make a list called naughty that contains all the funny characters, all within "" and seperated by a comma.
naughty = [":","!",'"',"$","%","^","&","*","(",")","-","_","=","+","[","]","{","}","#","~","@",";",":","/","?",".",">",",","<","|",","]
# go through every item in the list. So in the first instance item would become :
for item in naughty:
# Check if that character exists in the channel name, if so replace it with a space.
channel = channel.replace(item,' ')
# Strip all whitespace from the beggining of the channel name (AKA Remove all spaces)
channel=channel.lstrip()
# Strip all whitespace from the end of the channel name (AKA Remove all spaces)
channel=channel.rstrip()
# Check if there are any double spaces in the channel name and replace them with a single space.
while " " in channel:
# Replace double spaces with single spaces.
channel = channel.replace(" "," ")
#Check if the length of the channel name is one or more characters.
if len(channel) >= 1:
# If so make the line to conatin the channel anme and url, you can see the = below, channel before the = url after.
line = "%s=%s\n" % (channel,url)
#write the line to the ini file.
f.write(line.encode("utf8"))
#Close the file.
f.close()
xbmcgui.Dialog().notification("Echo INI Creator", "Finished Update")
@plugin.route('/search/<what>')
def search(what):
if not what:
return
addons = plugin.get_storage("addons")
folder = plugin.get_setting("addons.folder")
file = plugin.get_setting("addons.file")
filename = os.path.join(folder,file)
f = xbmcvfs.File(filename,"rb")
lines = f.read().splitlines()
addon = None
for line in lines:
if line.startswith('['):
a = line.strip('[]')
addons[a] = a
addon = plugin.get_storage(a)
addon.clear()
elif "=" in line:
(name,url) = line.split('=',1)
if url and addon is not None:
addon[name] = url
items = []
for a in addons.keys():
add = plugin.get_storage(a)
log2(add.keys())
exact = [x for x in add.keys() if x.lower() == what.lower()]
log2(exact)
partial = [x for x in add.keys() if what.lower() in x.lower()]
ignore_space = [x for x in add.keys() if re.sub(' ','',what).lower() in re.sub(' ','',x).lower()]
found = exact + partial
for f in sorted(set(exact)):
items.append({
"label": "[COLOR green]%s [%s][/COLOR]" % (f,a),
"path" : add[f],
"is_playable" : True,
})
for f in sorted(set(partial)-set(exact)):
items.append({
"label": "[COLOR orange]%s [%s][/COLOR]" % (f,a),
"path" : add[f],
"is_playable" : True,
})
for f in sorted(set(ignore_space)-set(partial)-set(exact)):
items.append({
"label": "[COLOR red]%s [%s][/COLOR]" % (f,a),
"path" : add[f],
"is_playable" : True,
})
return items
@plugin.route('/search_dialog')
def search_dialog():
dialog = xbmcgui.Dialog()
what = dialog.input("Search")
if what:
return search(what)
@plugin.route('/add_channel')
def add_channel():
channels = plugin.get_storage('channels')
d = xbmcgui.Dialog()
channel = d.input("Add Channel")
if channel:
channels[channel] = ""
xbmc.executebuiltin('Container.Refresh')
@plugin.route('/remove_channel')
def remove_channel():
channels = plugin.get_storage('channels')
channel_list = sorted(channels)
d = xbmcgui.Dialog()
which = d.select("Remove Channel",channel_list)
if which == -1:
return
channel = channel_list[which]
del channels[channel]
xbmc.executebuiltin('Container.Refresh')
@plugin.route('/remove_this_channel/<channel>')
def remove_this_channel(channel):
channels = plugin.get_storage('channels')
del channels[channel]
xbmc.executebuiltin('Container.Refresh')
@plugin.route('/clear_channels')
def clear_channels():
channels = plugin.get_storage('channels')
channels.clear()
xbmc.executebuiltin('Container.Refresh')
@plugin.route('/import_channels')
def import_channels():
channels = plugin.get_storage('channels')
d = xbmcgui.Dialog()
filename = d.browse(1, 'Import Channels', 'files', '', False, False, 'special://home/')
if not filename:
return
if filename.endswith('.ini'):
lines = xbmcvfs.File(filename,'rb').read().splitlines()
for line in lines:
if not line.startswith('[') and not line.startswith('#') and "=" in line:
channel_url = line.split('=',1)
if len(channel_url) == 2:
name = channel_url[0]
channels[name] = ""
xbmc.executebuiltin('Container.Refresh')
@plugin.route('/stream_search/<channel>')
def stream_search(channel):
#folders = plugin.get_storage('folders')
streams = {}
folder = plugin.get_setting("addons.folder")
file = plugin.get_setting("addons.file")
filename = os.path.join(folder,file)
f = xbmcvfs.File(filename,"rb")
lines = f.read().splitlines()
for line in lines:
if line.startswith('['):
addon = line.strip('[]')
if addon not in streams:
streams[addon] = {}
elif "=" in line:
(name,url) = line.split('=',1)
if url and addon is not None:
streams[addon][url] = name
channel_search = channel.lower().replace(' ','')
stream_list = []
for id in sorted(streams):
files = streams[id]
for f in sorted(files, key=lambda k: files[k]):
label = files[f]
label_search = label.lower().replace(' ','')
if label_search in channel_search or channel_search in label_search:
stream_list.append((id,f,label))
labels = ["[%s] %s" % (x[0],x[2]) for x in stream_list]
d = xbmcgui.Dialog()
which = d.select(channel, labels)
if which == -1:
return
stream_name = stream_list[which][2]
stream_link = stream_list[which][1]
plugin.set_resolved_url(stream_link)
@plugin.route('/export_channels')
def export_channels():
channels = plugin.get_storage('channels')
f = xbmcvfs.File('special://profile/addon_data/plugin.video.vistatv-ini-maker/export.ini','wb')
for channel in sorted(channels):
url = plugin.url_for('stream_search',channel=channel)
channel = channel.replace(':','')
s = "%s=%s\n" % (channel,url)
f.write(s)
f.close()
@plugin.route('/channel_player')
def channel_player():
channels = plugin.get_storage("channels")
items = []
for channel in sorted(channels):
context_items = []
context_items.append(("[COLOR red][B]%s[/B][/COLOR] " % 'Add Channel', 'XBMC.RunPlugin(%s)' % (plugin.url_for(add_channel))))
context_items.append(("[COLOR red][B]%s[/B][/COLOR] " % 'Remove Channel', 'XBMC.RunPlugin(%s)' % (plugin.url_for(remove_this_channel, channel=channel))))
context_items.append(("[COLOR red][B]%s[/B][/COLOR] " % 'Import Channels', 'XBMC.RunPlugin(%s)' % (plugin.url_for(import_channels))))
context_items.append(("[COLOR red][B]%s[/B][/COLOR] " % 'Export Channels', 'XBMC.RunPlugin(%s)' % (plugin.url_for(export_channels))))
context_items.append(("[COLOR red][B]%s[/B][/COLOR] " % 'Clear Channels', 'XBMC.RunPlugin(%s)' % (plugin.url_for(clear_channels))))
items.append(
{
'label': channel,
'path': plugin.url_for('stream_search',channel=channel),
'thumbnail':get_icon_path('tv'),
'is_playable': True,
'context_menu': context_items,
})
return items
@plugin.route('/')
def index():
items = []
context_items = []
context_items.append(("[COLOR red][B]%s[/B][/COLOR] " % 'Clear Subscriptions', 'XBMC.RunPlugin(%s)' % (plugin.url_for(clear))))
items.append(
{
'label': "[COLOR red]Subscribe[/COLOR]",
'path': plugin.url_for('subscribe'),
'thumbnail':get_icon_path('tv'),
'context_menu': context_items,
})
items.append(
{
'label': "[COLOR green]Create[/COLOR]",
'path': plugin.url_for('update'),
'thumbnail':get_icon_path('tv'),
})
items.append(
{
'label': "Play",
'path': plugin.url_for('player'),
'thumbnail':get_icon_path('tv'),
})
context_items = []
context_items.append(("[COLOR red][B]%s[/B][/COLOR] " % 'Add Channel', 'XBMC.RunPlugin(%s)' % (plugin.url_for(add_channel))))
context_items.append(("[COLOR red][B]%s[/B][/COLOR] " % 'Remove Channel', 'XBMC.RunPlugin(%s)' % (plugin.url_for(remove_channel))))
context_items.append(("[COLOR red][B]%s[/B][/COLOR] " % 'Import Channels', 'XBMC.RunPlugin(%s)' % (plugin.url_for(import_channels))))
context_items.append(("[COLOR red][B]%s[/B][/COLOR] " % 'Export Channels', 'XBMC.RunPlugin(%s)' % (plugin.url_for(export_channels))))
context_items.append(("[COLOR red][B]%s[/B][/COLOR] " % 'Clear Channels', 'XBMC.RunPlugin(%s)' % (plugin.url_for(clear_channels))))
items.append(
{
'label': "Channels",
'path': plugin.url_for('channel_player'),
'thumbnail':get_icon_path('tv'),
'context_menu': context_items,
})
return items
if __name__ == '__main__':
plugin.run()
if big_list_view == True:
view_mode = int(plugin.get_setting('view_mode'))
plugin.set_view_mode(view_mode) | [
"biglad@mgawow.co.uk"
] | biglad@mgawow.co.uk |
5df953e7136216e7adfa597079d091686b4fa538 | deb97b21457bc360563e09c7bbba235cdd915548 | /gitkit/commands/del_merged.py | de55050ed183a4ab19f91ae4bcc81325227a18e2 | [
"MIT"
] | permissive | akx/git-kit | e381ae5516a6f36f39d72af00e93aa5d4f0e985f | 8084d99c6a113aad56764b0907d157c6957a3977 | refs/heads/master | 2023-07-19T20:16:27.358018 | 2023-07-18T07:49:41 | 2023-07-18T07:49:41 | 22,340,212 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 556 | py | import click
from gitkit.conf import sacred_branches
from gitkit.util.refs import get_main_branch
from gitkit.util.shell import get_lines, run
@click.command()
@click.argument("ref", required=False, default=None)
def del_merged(ref):
"""
Delete merged branches.
"""
if not ref:
ref = get_main_branch()
for branch in set(get_lines(["git", "branch", "-l", "--merged", ref])):
branch = branch.strip("* ")
if branch != ref and branch not in sacred_branches:
run(["git", "branch", "-v", "-d", branch])
| [
"akx@iki.fi"
] | akx@iki.fi |
47ecc56f687300df5c2ad1bba94ec239449193de | d81c04e592aec9d9bb8ad48a7fe3c8d446852f17 | /StateMachine.py | 84fb8efa974ac75cbd20244b2befa59a77073f14 | [] | no_license | Swia/movingrepo | e5e141d49dbde243572d0fe0211c131d18812e6d | 18aeee5b3adc64ad151c70bb40512c96e2f1316f | refs/heads/main | 2023-05-06T12:59:06.456699 | 2021-05-25T14:30:13 | 2021-05-25T14:30:13 | 370,630,313 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 359 | py | # -*- coding: utf-8 -*-
from enum import Enum
class States(Enum):
"""
A simple state machine to easier navigation through @decorator functions
in alarms_bot.py
"""
STATE_START = 0
STATE_NEWALARM = 1
STATE_SETTING_TIMEZONE_SEPARATE = 2
STATE_SETTING_TIMEZONE_FOR_ALARM = 3
STATE_SETTING_TIME = 4
STATE_SETTING_TEXT = 5 | [
"noreply@github.com"
] | Swia.noreply@github.com |
c703a262839b247143130d0cf69dd4626cb5d5ff | a63590f247d914b6993f4e72a5c27a439344d12a | /env/lib/python3.7/io.py | 062f32ae1bab0a72f1d55ace8c1184b6d81bdb8e | [] | no_license | wgcv/Social-Media-Analyze-Election-Guayaquil | e6c65e68e6f54a11aadad9d1765568521df9a20e | 784e6e4c94552307fefdf85367bb6a793ae878c3 | refs/heads/master | 2020-05-09T20:33:58.585077 | 2019-05-11T16:46:43 | 2019-05-11T16:46:43 | 181,410,783 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 41 | py | /Users/wgcv/anaconda3/lib/python3.7/io.py | [
"gstavocevallos@gmail.com"
] | gstavocevallos@gmail.com |
562d6b667658cc8ca7127a940db800debd92f225 | eccc9f30b406903761c85fa8edf239b809805cf0 | /listings/migrations/0001_initial.py | 5c2e992fd3779b68d2c904bb4d8a262cd3107f4f | [] | no_license | InnaAndreeva/real_estate_django | 40f9510155476f7e4ea135f520112539f2845f89 | 90f9414d76c901c73c412335ebca39610040466a | refs/heads/main | 2023-01-14T05:51:47.931878 | 2020-11-24T10:32:05 | 2020-11-24T10:32:05 | 315,594,612 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,161 | py | # Generated by Django 2.1.7 on 2019-03-25 18:38
import datetime
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('realtors', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Listing',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=200)),
('address', models.CharField(max_length=200)),
('city', models.CharField(max_length=100)),
('state', models.CharField(max_length=100)),
('zipcode', models.CharField(max_length=20)),
('description', models.TextField(blank=True)),
('price', models.IntegerField()),
('badrooms', models.IntegerField()),
('bathrooms', models.DecimalField(decimal_places=1, max_digits=2)),
('garage', models.IntegerField(default=0)),
('sqft', models.IntegerField()),
('lot_size', models.DecimalField(decimal_places=1, max_digits=5)),
('photo_main', models.ImageField(upload_to='photos/%Y/%m/%d')),
('photo_1', models.ImageField(blank=True, upload_to='photos/%Y/%m/%d')),
('photo_2', models.ImageField(blank=True, upload_to='photos/%Y/%m/%d')),
('photo_3', models.ImageField(blank=True, upload_to='photos/%Y/%m/%d')),
('photo_4', models.ImageField(blank=True, upload_to='photos/%Y/%m/%d')),
('photo_5', models.ImageField(blank=True, upload_to='photos/%Y/%m/%d')),
('photo_6', models.ImageField(blank=True, upload_to='photos/%Y/%m/%d')),
('is_published', models.BooleanField(default=True)),
('list_date', models.DateTimeField(blank=True, default=datetime.datetime.now)),
('realtor', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='realtors.Realtor')),
],
),
]
| [
"innaandreeva17yo@gmail.com"
] | innaandreeva17yo@gmail.com |
1b29e17738ce8017bc364637474cffdf18602a34 | 2ad64e7398057a09c2a5b8543199f3781c515e45 | /Pilha.py | 1e39a66b1b69ab75517b19e2d8a114afc785b3a4 | [] | no_license | edinhograno/provadepython | e56281657d0b27d0ecf327ab5befde12323a9075 | 55ab15c451a760dbfef0afa561b1bdea5c66186d | refs/heads/master | 2023-06-25T20:08:19.564672 | 2021-07-07T13:43:11 | 2021-07-07T13:43:11 | 383,813,269 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 820 | py | from Livros import Livro
class Pilha:
def __init__(self):
self.top = None
self._size = 0
def adiciona(self, id, nome, autor):
node = Livro(id, nome, autor)
node.next = self.top
self.top = node
self._size = self._size + 1
def remove(self):
if self._size > 0:
node = self.top
self.top = self.top.next
self._size = self._size - 1
return node.titulo
raise IndexError("A pilha estรก vazia")
def imprimir(self):
linha = "========="
hook = ""
pointer = self.top
while(pointer):
hook = hook + str(f" \n Id: {pointer.id} \n Titulo: {pointer.titulo} \n Autor: {pointer.autor.nome}\n {linha}") + "\n"
pointer = pointer.next
return hook | [
"granomotorista@gmail.com"
] | granomotorista@gmail.com |
ca295de07a553fcbc33476193a590c0edf04cefc | 64310ffff77de9878f4a51e8e1c74ae6e796a79c | /external/gnuradio/gfsk_rx.py | af6cdc083decc97bba9b1aee10931101a178d2e3 | [] | no_license | learning-lte/gnuradio-modem-gmsk | f9849f35dadc95f145d92a67a28d42fd6939093d | dab60f749f39466ca8708a693b41fdbee4603d7b | refs/heads/master | 2022-01-19T21:02:59.480389 | 2019-04-29T18:08:20 | 2019-04-29T18:08:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 54,621 | py | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
##################################################
# GNU Radio Python Flow Graph
# Title: GFSK Receiver
# GNU Radio version: 3.7.13.5
##################################################
if __name__ == '__main__':
import ctypes
import sys
if sys.platform.startswith('linux'):
try:
x11 = ctypes.cdll.LoadLibrary('libX11.so')
x11.XInitThreads()
except:
print "Warning: failed to XInitThreads()"
from PyQt4 import Qt
from gnuradio import analog
from gnuradio import blocks
from gnuradio import digital
from gnuradio import eng_notation
from gnuradio import filter
from gnuradio import gr
from gnuradio import iio
from gnuradio import qtgui
from gnuradio.eng_option import eng_option
from gnuradio.filter import firdes
from gnuradio.qtgui import Range, RangeWidget
from grc_gnuradio import blks2 as grc_blks2
from optparse import OptionParser
import correctiq
import math
import satellites
import sip
import sys
from gnuradio import qtgui
class gfsk_rx(gr.top_block, Qt.QWidget):
def __init__(self, default_bandwidth=20e3, default_baud=9600, default_bin_file_sink="/tmp/rx_data.bin", default_dev=4950/2, default_freq=436750000, default_gain=16, default_ip='127.0.0.1', default_port=7000, default_samp=1920000, sdr_dev="rtl=0"):
gr.top_block.__init__(self, "GFSK Receiver")
Qt.QWidget.__init__(self)
self.setWindowTitle("GFSK Receiver")
qtgui.util.check_set_qss()
try:
self.setWindowIcon(Qt.QIcon.fromTheme('gnuradio-grc'))
except:
pass
self.top_scroll_layout = Qt.QVBoxLayout()
self.setLayout(self.top_scroll_layout)
self.top_scroll = Qt.QScrollArea()
self.top_scroll.setFrameStyle(Qt.QFrame.NoFrame)
self.top_scroll_layout.addWidget(self.top_scroll)
self.top_scroll.setWidgetResizable(True)
self.top_widget = Qt.QWidget()
self.top_scroll.setWidget(self.top_widget)
self.top_layout = Qt.QVBoxLayout(self.top_widget)
self.top_grid_layout = Qt.QGridLayout()
self.top_layout.addLayout(self.top_grid_layout)
self.settings = Qt.QSettings("GNU Radio", "gfsk_rx")
self.restoreGeometry(self.settings.value("geometry").toByteArray())
##################################################
# Parameters
##################################################
self.default_bandwidth = default_bandwidth
self.default_baud = default_baud
self.default_bin_file_sink = default_bin_file_sink
self.default_dev = default_dev
self.default_freq = default_freq
self.default_gain = default_gain
self.default_ip = default_ip
self.default_port = default_port
self.default_samp = default_samp
self.sdr_dev = sdr_dev
##################################################
# Variables
##################################################
self.samp_rate_dec = samp_rate_dec = default_baud*8
self.interp_tx = interp_tx = default_samp/default_baud
self.dec_rx = dec_rx = default_samp/samp_rate_dec
self.sps_rx = sps_rx = interp_tx/dec_rx
self.t_points = t_points = 5000
self.rx_gain = rx_gain = 64
self.rrc_taps = rrc_taps = firdes.root_raised_cosine(1, samp_rate_dec, sps_rx, 0.3, 88)
self.low_pass_taps_2 = low_pass_taps_2 = firdes.low_pass(1.0, samp_rate_dec, 9600, 1200, firdes.WIN_HAMMING, 6.76)
self.low_pass_taps = low_pass_taps = firdes.low_pass(1.0, default_samp, 150000, 20000, firdes.WIN_HAMMING, 6.76)
self.freq_xlating = freq_xlating = 000000
self.freq_offset = freq_offset = 2200
self.filter_offset = filter_offset = 0
self.demod_gain = demod_gain = (samp_rate_dec)/(2*math.pi*default_dev)
self.cc_omega_lim = cc_omega_lim = 0.002
self.cc_mu_gain = cc_mu_gain = 0.175
self.cc_mu = cc_mu = 0.5
self.cc_gain = cc_gain = 0.25*0.175*0.175
##################################################
# Blocks
##################################################
self.controls = Qt.QTabWidget()
self.controls_widget_0 = Qt.QWidget()
self.controls_layout_0 = Qt.QBoxLayout(Qt.QBoxLayout.TopToBottom, self.controls_widget_0)
self.controls_grid_layout_0 = Qt.QGridLayout()
self.controls_layout_0.addLayout(self.controls_grid_layout_0)
self.controls.addTab(self.controls_widget_0, 'RF')
self.controls_widget_1 = Qt.QWidget()
self.controls_layout_1 = Qt.QBoxLayout(Qt.QBoxLayout.TopToBottom, self.controls_widget_1)
self.controls_grid_layout_1 = Qt.QGridLayout()
self.controls_layout_1.addLayout(self.controls_grid_layout_1)
self.controls.addTab(self.controls_widget_1, 'Filter/Demod')
self.controls_widget_2 = Qt.QWidget()
self.controls_layout_2 = Qt.QBoxLayout(Qt.QBoxLayout.TopToBottom, self.controls_widget_2)
self.controls_grid_layout_2 = Qt.QGridLayout()
self.controls_layout_2.addLayout(self.controls_grid_layout_2)
self.controls.addTab(self.controls_widget_2, 'Receiver DSP')
self.top_grid_layout.addWidget(self.controls, 0, 0, 1, 4)
for r in range(0, 1):
self.top_grid_layout.setRowStretch(r, 1)
for c in range(0, 4):
self.top_grid_layout.setColumnStretch(c, 1)
self._demod_gain_range = Range(1, 100, 1, (samp_rate_dec)/(2*math.pi*default_dev), 200)
self._demod_gain_win = RangeWidget(self._demod_gain_range, self.set_demod_gain, 'Demodulator Gain', "counter_slider", float)
self.controls_grid_layout_1.addWidget(self._demod_gain_win, 0, 0, 1, 1)
for r in range(0, 1):
self.controls_grid_layout_1.setRowStretch(r, 1)
for c in range(0, 1):
self.controls_grid_layout_1.setColumnStretch(c, 1)
self.signals = Qt.QTabWidget()
self.signals_widget_0 = Qt.QWidget()
self.signals_layout_0 = Qt.QBoxLayout(Qt.QBoxLayout.TopToBottom, self.signals_widget_0)
self.signals_grid_layout_0 = Qt.QGridLayout()
self.signals_layout_0.addLayout(self.signals_grid_layout_0)
self.signals.addTab(self.signals_widget_0, 'Receiver')
self.signals_widget_1 = Qt.QWidget()
self.signals_layout_1 = Qt.QBoxLayout(Qt.QBoxLayout.TopToBottom, self.signals_widget_1)
self.signals_grid_layout_1 = Qt.QGridLayout()
self.signals_layout_1.addLayout(self.signals_grid_layout_1)
self.signals.addTab(self.signals_widget_1, 'Filter RX')
self.signals_widget_2 = Qt.QWidget()
self.signals_layout_2 = Qt.QBoxLayout(Qt.QBoxLayout.TopToBottom, self.signals_widget_2)
self.signals_grid_layout_2 = Qt.QGridLayout()
self.signals_layout_2.addLayout(self.signals_grid_layout_2)
self.signals.addTab(self.signals_widget_2, 'Modulator')
self.signals_widget_3 = Qt.QWidget()
self.signals_layout_3 = Qt.QBoxLayout(Qt.QBoxLayout.TopToBottom, self.signals_widget_3)
self.signals_grid_layout_3 = Qt.QGridLayout()
self.signals_layout_3.addLayout(self.signals_grid_layout_3)
self.signals.addTab(self.signals_widget_3, 'Dec Filter')
self.signals_widget_4 = Qt.QWidget()
self.signals_layout_4 = Qt.QBoxLayout(Qt.QBoxLayout.TopToBottom, self.signals_widget_4)
self.signals_grid_layout_4 = Qt.QGridLayout()
self.signals_layout_4.addLayout(self.signals_grid_layout_4)
self.signals.addTab(self.signals_widget_4, 'Clock Recovery/Bitstream')
self.top_grid_layout.addWidget(self.signals, 1, 0, 2, 4)
for r in range(1, 3):
self.top_grid_layout.setRowStretch(r, 1)
for c in range(0, 4):
self.top_grid_layout.setColumnStretch(c, 1)
self._rx_gain_range = Range(0, 100, 1, 64, 200)
self._rx_gain_win = RangeWidget(self._rx_gain_range, self.set_rx_gain, 'RX Power Gain', "counter_slider", float)
self.controls_grid_layout_0.addWidget(self._rx_gain_win, 0, 0, 1, 1)
for r in range(0, 1):
self.controls_grid_layout_0.setRowStretch(r, 1)
for c in range(0, 1):
self.controls_grid_layout_0.setColumnStretch(c, 1)
self._freq_offset_range = Range(-20000, 20000, 100, 2200, 200)
self._freq_offset_win = RangeWidget(self._freq_offset_range, self.set_freq_offset, 'Signal Frequency Offset', "counter_slider", int)
self.controls_grid_layout_0.addWidget(self._freq_offset_win, 0, 1, 1, 1)
for r in range(0, 1):
self.controls_grid_layout_0.setRowStretch(r, 1)
for c in range(1, 2):
self.controls_grid_layout_0.setColumnStretch(c, 1)
self._filter_offset_range = Range(-1*demod_gain, 1*demod_gain, 0.01, 0, 200)
self._filter_offset_win = RangeWidget(self._filter_offset_range, self.set_filter_offset, 'Signal Offset', "counter_slider", float)
self.controls_grid_layout_1.addWidget(self._filter_offset_win, 0, 1, 1, 1)
for r in range(0, 1):
self.controls_grid_layout_1.setRowStretch(r, 1)
for c in range(1, 2):
self.controls_grid_layout_1.setColumnStretch(c, 1)
self._cc_omega_lim_range = Range(0.0005, 0.02, 0.0001, 0.002, 200)
self._cc_omega_lim_win = RangeWidget(self._cc_omega_lim_range, self.set_cc_omega_lim, 'CC Omega Lim', "counter_slider", float)
self.controls_grid_layout_2.addWidget(self._cc_omega_lim_win, 0, 3, 1, 1)
for r in range(0, 1):
self.controls_grid_layout_2.setRowStretch(r, 1)
for c in range(3, 4):
self.controls_grid_layout_2.setColumnStretch(c, 1)
self._cc_mu_gain_range = Range(0.01, 0.5, 0.05, 0.175, 200)
self._cc_mu_gain_win = RangeWidget(self._cc_mu_gain_range, self.set_cc_mu_gain, 'CC MU gain', "counter_slider", float)
self.controls_grid_layout_2.addWidget(self._cc_mu_gain_win, 0, 2, 1, 1)
for r in range(0, 1):
self.controls_grid_layout_2.setRowStretch(r, 1)
for c in range(2, 3):
self.controls_grid_layout_2.setColumnStretch(c, 1)
self._cc_mu_range = Range(0.1, 2, 0.1, 0.5, 200)
self._cc_mu_win = RangeWidget(self._cc_mu_range, self.set_cc_mu, 'CC MU', "counter_slider", float)
self.controls_grid_layout_2.addWidget(self._cc_mu_win, 0, 1, 1, 1)
for r in range(0, 1):
self.controls_grid_layout_2.setRowStretch(r, 1)
for c in range(1, 2):
self.controls_grid_layout_2.setColumnStretch(c, 1)
self.satellites_nrzi_decode_0_0 = satellites.nrzi_decode()
self.satellites_nrzi_decode_0 = satellites.nrzi_decode()
self.qtgui_waterfall_sink_x_0_0_0_0_0 = qtgui.waterfall_sink_f(
1024, #size
firdes.WIN_BLACKMAN_hARRIS, #wintype
0, #fc
samp_rate_dec, #bw
"", #name
1 #number of inputs
)
self.qtgui_waterfall_sink_x_0_0_0_0_0.set_update_time(0.10)
self.qtgui_waterfall_sink_x_0_0_0_0_0.enable_grid(False)
self.qtgui_waterfall_sink_x_0_0_0_0_0.enable_axis_labels(True)
if not True:
self.qtgui_waterfall_sink_x_0_0_0_0_0.disable_legend()
if "float" == "float" or "float" == "msg_float":
self.qtgui_waterfall_sink_x_0_0_0_0_0.set_plot_pos_half(not True)
labels = ['', '', '', '', '',
'', '', '', '', '']
colors = [0, 0, 0, 0, 0,
0, 0, 0, 0, 0]
alphas = [1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0]
for i in xrange(1):
if len(labels[i]) == 0:
self.qtgui_waterfall_sink_x_0_0_0_0_0.set_line_label(i, "Data {0}".format(i))
else:
self.qtgui_waterfall_sink_x_0_0_0_0_0.set_line_label(i, labels[i])
self.qtgui_waterfall_sink_x_0_0_0_0_0.set_color_map(i, colors[i])
self.qtgui_waterfall_sink_x_0_0_0_0_0.set_line_alpha(i, alphas[i])
self.qtgui_waterfall_sink_x_0_0_0_0_0.set_intensity_range(-140, 10)
self._qtgui_waterfall_sink_x_0_0_0_0_0_win = sip.wrapinstance(self.qtgui_waterfall_sink_x_0_0_0_0_0.pyqwidget(), Qt.QWidget)
self.signals_grid_layout_3.addWidget(self._qtgui_waterfall_sink_x_0_0_0_0_0_win, 1, 3, 1, 3)
for r in range(1, 2):
self.signals_grid_layout_3.setRowStretch(r, 1)
for c in range(3, 6):
self.signals_grid_layout_3.setColumnStretch(c, 1)
self.qtgui_waterfall_sink_x_0_0_0_0 = qtgui.waterfall_sink_f(
1024, #size
firdes.WIN_BLACKMAN_hARRIS, #wintype
0, #fc
samp_rate_dec, #bw
"", #name
1 #number of inputs
)
self.qtgui_waterfall_sink_x_0_0_0_0.set_update_time(0.10)
self.qtgui_waterfall_sink_x_0_0_0_0.enable_grid(False)
self.qtgui_waterfall_sink_x_0_0_0_0.enable_axis_labels(True)
if not True:
self.qtgui_waterfall_sink_x_0_0_0_0.disable_legend()
if "float" == "float" or "float" == "msg_float":
self.qtgui_waterfall_sink_x_0_0_0_0.set_plot_pos_half(not True)
labels = ['', '', '', '', '',
'', '', '', '', '']
colors = [0, 0, 0, 0, 0,
0, 0, 0, 0, 0]
alphas = [1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0]
for i in xrange(1):
if len(labels[i]) == 0:
self.qtgui_waterfall_sink_x_0_0_0_0.set_line_label(i, "Data {0}".format(i))
else:
self.qtgui_waterfall_sink_x_0_0_0_0.set_line_label(i, labels[i])
self.qtgui_waterfall_sink_x_0_0_0_0.set_color_map(i, colors[i])
self.qtgui_waterfall_sink_x_0_0_0_0.set_line_alpha(i, alphas[i])
self.qtgui_waterfall_sink_x_0_0_0_0.set_intensity_range(-140, 10)
self._qtgui_waterfall_sink_x_0_0_0_0_win = sip.wrapinstance(self.qtgui_waterfall_sink_x_0_0_0_0.pyqwidget(), Qt.QWidget)
self.signals_grid_layout_2.addWidget(self._qtgui_waterfall_sink_x_0_0_0_0_win, 1, 3, 1, 3)
for r in range(1, 2):
self.signals_grid_layout_2.setRowStretch(r, 1)
for c in range(3, 6):
self.signals_grid_layout_2.setColumnStretch(c, 1)
self.qtgui_waterfall_sink_x_0_0_0 = qtgui.waterfall_sink_c(
1024, #size
firdes.WIN_BLACKMAN_hARRIS, #wintype
0, #fc
samp_rate_dec, #bw
"", #name
1 #number of inputs
)
self.qtgui_waterfall_sink_x_0_0_0.set_update_time(0.10)
self.qtgui_waterfall_sink_x_0_0_0.enable_grid(False)
self.qtgui_waterfall_sink_x_0_0_0.enable_axis_labels(True)
if not True:
self.qtgui_waterfall_sink_x_0_0_0.disable_legend()
if "complex" == "float" or "complex" == "msg_float":
self.qtgui_waterfall_sink_x_0_0_0.set_plot_pos_half(not True)
labels = ['', '', '', '', '',
'', '', '', '', '']
colors = [0, 0, 0, 0, 0,
0, 0, 0, 0, 0]
alphas = [1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0]
for i in xrange(1):
if len(labels[i]) == 0:
self.qtgui_waterfall_sink_x_0_0_0.set_line_label(i, "Data {0}".format(i))
else:
self.qtgui_waterfall_sink_x_0_0_0.set_line_label(i, labels[i])
self.qtgui_waterfall_sink_x_0_0_0.set_color_map(i, colors[i])
self.qtgui_waterfall_sink_x_0_0_0.set_line_alpha(i, alphas[i])
self.qtgui_waterfall_sink_x_0_0_0.set_intensity_range(-140, 10)
self._qtgui_waterfall_sink_x_0_0_0_win = sip.wrapinstance(self.qtgui_waterfall_sink_x_0_0_0.pyqwidget(), Qt.QWidget)
self.signals_grid_layout_1.addWidget(self._qtgui_waterfall_sink_x_0_0_0_win, 1, 3, 1, 3)
for r in range(1, 2):
self.signals_grid_layout_1.setRowStretch(r, 1)
for c in range(3, 6):
self.signals_grid_layout_1.setColumnStretch(c, 1)
self.qtgui_waterfall_sink_x_0_0 = qtgui.waterfall_sink_c(
2048, #size
firdes.WIN_BLACKMAN_hARRIS, #wintype
0, #fc
default_samp, #bw
"", #name
1 #number of inputs
)
self.qtgui_waterfall_sink_x_0_0.set_update_time(0.0000010)
self.qtgui_waterfall_sink_x_0_0.enable_grid(False)
self.qtgui_waterfall_sink_x_0_0.enable_axis_labels(True)
if not True:
self.qtgui_waterfall_sink_x_0_0.disable_legend()
if "complex" == "float" or "complex" == "msg_float":
self.qtgui_waterfall_sink_x_0_0.set_plot_pos_half(not True)
labels = ['', '', '', '', '',
'', '', '', '', '']
colors = [0, 0, 0, 0, 0,
0, 0, 0, 0, 0]
alphas = [1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0]
for i in xrange(1):
if len(labels[i]) == 0:
self.qtgui_waterfall_sink_x_0_0.set_line_label(i, "Data {0}".format(i))
else:
self.qtgui_waterfall_sink_x_0_0.set_line_label(i, labels[i])
self.qtgui_waterfall_sink_x_0_0.set_color_map(i, colors[i])
self.qtgui_waterfall_sink_x_0_0.set_line_alpha(i, alphas[i])
self.qtgui_waterfall_sink_x_0_0.set_intensity_range(-140, 10)
self._qtgui_waterfall_sink_x_0_0_win = sip.wrapinstance(self.qtgui_waterfall_sink_x_0_0.pyqwidget(), Qt.QWidget)
self.signals_grid_layout_0.addWidget(self._qtgui_waterfall_sink_x_0_0_win, 2, 0, 1, 6)
for r in range(2, 3):
self.signals_grid_layout_0.setRowStretch(r, 1)
for c in range(0, 6):
self.signals_grid_layout_0.setColumnStretch(c, 1)
self.qtgui_time_sink_x_0_0_0_0_0_1 = qtgui.time_sink_f(
t_points, #size
samp_rate_dec, #samp_rate
'', #name
1 #number of inputs
)
self.qtgui_time_sink_x_0_0_0_0_0_1.set_update_time(0.10)
self.qtgui_time_sink_x_0_0_0_0_0_1.set_y_axis(-2, 2)
self.qtgui_time_sink_x_0_0_0_0_0_1.set_y_label('Amplitude', "")
self.qtgui_time_sink_x_0_0_0_0_0_1.enable_tags(-1, True)
self.qtgui_time_sink_x_0_0_0_0_0_1.set_trigger_mode(qtgui.TRIG_MODE_FREE, qtgui.TRIG_SLOPE_POS, 0.0, 0, 0, "")
self.qtgui_time_sink_x_0_0_0_0_0_1.enable_autoscale(False)
self.qtgui_time_sink_x_0_0_0_0_0_1.enable_grid(False)
self.qtgui_time_sink_x_0_0_0_0_0_1.enable_axis_labels(True)
self.qtgui_time_sink_x_0_0_0_0_0_1.enable_control_panel(False)
self.qtgui_time_sink_x_0_0_0_0_0_1.enable_stem_plot(False)
if not True:
self.qtgui_time_sink_x_0_0_0_0_0_1.disable_legend()
labels = ['', '', '', '', '',
'', '', '', '', '']
widths = [1, 1, 1, 1, 1,
1, 1, 1, 1, 1]
colors = ["blue", "red", "green", "black", "cyan",
"magenta", "yellow", "dark red", "dark green", "blue"]
styles = [1, 1, 1, 1, 1,
1, 1, 1, 1, 1]
markers = [-1, -1, -1, -1, -1,
-1, -1, -1, -1, -1]
alphas = [1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0]
for i in xrange(1):
if len(labels[i]) == 0:
self.qtgui_time_sink_x_0_0_0_0_0_1.set_line_label(i, "Data {0}".format(i))
else:
self.qtgui_time_sink_x_0_0_0_0_0_1.set_line_label(i, labels[i])
self.qtgui_time_sink_x_0_0_0_0_0_1.set_line_width(i, widths[i])
self.qtgui_time_sink_x_0_0_0_0_0_1.set_line_color(i, colors[i])
self.qtgui_time_sink_x_0_0_0_0_0_1.set_line_style(i, styles[i])
self.qtgui_time_sink_x_0_0_0_0_0_1.set_line_marker(i, markers[i])
self.qtgui_time_sink_x_0_0_0_0_0_1.set_line_alpha(i, alphas[i])
self._qtgui_time_sink_x_0_0_0_0_0_1_win = sip.wrapinstance(self.qtgui_time_sink_x_0_0_0_0_0_1.pyqwidget(), Qt.QWidget)
self.signals_grid_layout_3.addWidget(self._qtgui_time_sink_x_0_0_0_0_0_1_win, 0, 0, 1, 6)
for r in range(0, 1):
self.signals_grid_layout_3.setRowStretch(r, 1)
for c in range(0, 6):
self.signals_grid_layout_3.setColumnStretch(c, 1)
self.qtgui_time_sink_x_0_0_0_0_0_0_0 = qtgui.time_sink_f(
t_points, #size
samp_rate_dec/8, #samp_rate
'Time RX In', #name
2 #number of inputs
)
self.qtgui_time_sink_x_0_0_0_0_0_0_0.set_update_time(0.10)
self.qtgui_time_sink_x_0_0_0_0_0_0_0.set_y_axis(-2, 2)
self.qtgui_time_sink_x_0_0_0_0_0_0_0.set_y_label('Amplitude', "")
self.qtgui_time_sink_x_0_0_0_0_0_0_0.enable_tags(-1, True)
self.qtgui_time_sink_x_0_0_0_0_0_0_0.set_trigger_mode(qtgui.TRIG_MODE_FREE, qtgui.TRIG_SLOPE_POS, 0.0, 0, 0, "")
self.qtgui_time_sink_x_0_0_0_0_0_0_0.enable_autoscale(False)
self.qtgui_time_sink_x_0_0_0_0_0_0_0.enable_grid(False)
self.qtgui_time_sink_x_0_0_0_0_0_0_0.enable_axis_labels(True)
self.qtgui_time_sink_x_0_0_0_0_0_0_0.enable_control_panel(False)
self.qtgui_time_sink_x_0_0_0_0_0_0_0.enable_stem_plot(False)
if not True:
self.qtgui_time_sink_x_0_0_0_0_0_0_0.disable_legend()
labels = ['Clock Recovery', 'Bitstream', '', '', '',
'', '', '', '', '']
widths = [1, 1, 1, 1, 1,
1, 1, 1, 1, 1]
colors = ["blue", "red", "green", "black", "cyan",
"magenta", "yellow", "dark red", "dark green", "blue"]
styles = [1, 1, 1, 1, 1,
1, 1, 1, 1, 1]
markers = [-1, 0, -1, -1, -1,
-1, -1, -1, -1, -1]
alphas = [1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0]
for i in xrange(2):
if len(labels[i]) == 0:
self.qtgui_time_sink_x_0_0_0_0_0_0_0.set_line_label(i, "Data {0}".format(i))
else:
self.qtgui_time_sink_x_0_0_0_0_0_0_0.set_line_label(i, labels[i])
self.qtgui_time_sink_x_0_0_0_0_0_0_0.set_line_width(i, widths[i])
self.qtgui_time_sink_x_0_0_0_0_0_0_0.set_line_color(i, colors[i])
self.qtgui_time_sink_x_0_0_0_0_0_0_0.set_line_style(i, styles[i])
self.qtgui_time_sink_x_0_0_0_0_0_0_0.set_line_marker(i, markers[i])
self.qtgui_time_sink_x_0_0_0_0_0_0_0.set_line_alpha(i, alphas[i])
self._qtgui_time_sink_x_0_0_0_0_0_0_0_win = sip.wrapinstance(self.qtgui_time_sink_x_0_0_0_0_0_0_0.pyqwidget(), Qt.QWidget)
self.signals_grid_layout_4.addWidget(self._qtgui_time_sink_x_0_0_0_0_0_0_0_win, 0, 0, 2, 2)
for r in range(0, 2):
self.signals_grid_layout_4.setRowStretch(r, 1)
for c in range(0, 2):
self.signals_grid_layout_4.setColumnStretch(c, 1)
self.qtgui_time_sink_x_0_0_0_0_0 = qtgui.time_sink_f(
t_points*2, #size
samp_rate_dec, #samp_rate
'', #name
1 #number of inputs
)
self.qtgui_time_sink_x_0_0_0_0_0.set_update_time(0.10)
self.qtgui_time_sink_x_0_0_0_0_0.set_y_axis(-2, 2)
self.qtgui_time_sink_x_0_0_0_0_0.set_y_label('Amplitude', "")
self.qtgui_time_sink_x_0_0_0_0_0.enable_tags(-1, True)
self.qtgui_time_sink_x_0_0_0_0_0.set_trigger_mode(qtgui.TRIG_MODE_FREE, qtgui.TRIG_SLOPE_POS, 0.0, 0, 0, "")
self.qtgui_time_sink_x_0_0_0_0_0.enable_autoscale(True)
self.qtgui_time_sink_x_0_0_0_0_0.enable_grid(False)
self.qtgui_time_sink_x_0_0_0_0_0.enable_axis_labels(True)
self.qtgui_time_sink_x_0_0_0_0_0.enable_control_panel(False)
self.qtgui_time_sink_x_0_0_0_0_0.enable_stem_plot(False)
if not True:
self.qtgui_time_sink_x_0_0_0_0_0.disable_legend()
labels = ['', '', '', '', '',
'', '', '', '', '']
widths = [1, 1, 1, 1, 1,
1, 1, 1, 1, 1]
colors = ["blue", "red", "green", "black", "cyan",
"magenta", "yellow", "dark red", "dark green", "blue"]
styles = [1, 1, 1, 1, 1,
1, 1, 1, 1, 1]
markers = [-1, -1, -1, -1, -1,
-1, -1, -1, -1, -1]
alphas = [1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0]
for i in xrange(1):
if len(labels[i]) == 0:
self.qtgui_time_sink_x_0_0_0_0_0.set_line_label(i, "Data {0}".format(i))
else:
self.qtgui_time_sink_x_0_0_0_0_0.set_line_label(i, labels[i])
self.qtgui_time_sink_x_0_0_0_0_0.set_line_width(i, widths[i])
self.qtgui_time_sink_x_0_0_0_0_0.set_line_color(i, colors[i])
self.qtgui_time_sink_x_0_0_0_0_0.set_line_style(i, styles[i])
self.qtgui_time_sink_x_0_0_0_0_0.set_line_marker(i, markers[i])
self.qtgui_time_sink_x_0_0_0_0_0.set_line_alpha(i, alphas[i])
self._qtgui_time_sink_x_0_0_0_0_0_win = sip.wrapinstance(self.qtgui_time_sink_x_0_0_0_0_0.pyqwidget(), Qt.QWidget)
self.signals_grid_layout_2.addWidget(self._qtgui_time_sink_x_0_0_0_0_0_win, 0, 0, 1, 6)
for r in range(0, 1):
self.signals_grid_layout_2.setRowStretch(r, 1)
for c in range(0, 6):
self.signals_grid_layout_2.setColumnStretch(c, 1)
self.qtgui_time_sink_x_0_0_0_0 = qtgui.time_sink_c(
200, #size
samp_rate_dec, #samp_rate
'Time RX In', #name
1 #number of inputs
)
self.qtgui_time_sink_x_0_0_0_0.set_update_time(0.10)
self.qtgui_time_sink_x_0_0_0_0.set_y_axis(-2, 2)
self.qtgui_time_sink_x_0_0_0_0.set_y_label('Amplitude', "")
self.qtgui_time_sink_x_0_0_0_0.enable_tags(-1, True)
self.qtgui_time_sink_x_0_0_0_0.set_trigger_mode(qtgui.TRIG_MODE_FREE, qtgui.TRIG_SLOPE_POS, 0.0, 0, 0, "")
self.qtgui_time_sink_x_0_0_0_0.enable_autoscale(False)
self.qtgui_time_sink_x_0_0_0_0.enable_grid(False)
self.qtgui_time_sink_x_0_0_0_0.enable_axis_labels(True)
self.qtgui_time_sink_x_0_0_0_0.enable_control_panel(False)
self.qtgui_time_sink_x_0_0_0_0.enable_stem_plot(False)
if not True:
self.qtgui_time_sink_x_0_0_0_0.disable_legend()
labels = ['', '', '', '', '',
'', '', '', '', '']
widths = [1, 1, 1, 1, 1,
1, 1, 1, 1, 1]
colors = ["blue", "red", "green", "black", "cyan",
"magenta", "yellow", "dark red", "dark green", "blue"]
styles = [1, 1, 1, 1, 1,
1, 1, 1, 1, 1]
markers = [-1, -1, -1, -1, -1,
-1, -1, -1, -1, -1]
alphas = [1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0]
for i in xrange(2):
if len(labels[i]) == 0:
if(i % 2 == 0):
self.qtgui_time_sink_x_0_0_0_0.set_line_label(i, "Re{{Data {0}}}".format(i/2))
else:
self.qtgui_time_sink_x_0_0_0_0.set_line_label(i, "Im{{Data {0}}}".format(i/2))
else:
self.qtgui_time_sink_x_0_0_0_0.set_line_label(i, labels[i])
self.qtgui_time_sink_x_0_0_0_0.set_line_width(i, widths[i])
self.qtgui_time_sink_x_0_0_0_0.set_line_color(i, colors[i])
self.qtgui_time_sink_x_0_0_0_0.set_line_style(i, styles[i])
self.qtgui_time_sink_x_0_0_0_0.set_line_marker(i, markers[i])
self.qtgui_time_sink_x_0_0_0_0.set_line_alpha(i, alphas[i])
self._qtgui_time_sink_x_0_0_0_0_win = sip.wrapinstance(self.qtgui_time_sink_x_0_0_0_0.pyqwidget(), Qt.QWidget)
self.signals_grid_layout_1.addWidget(self._qtgui_time_sink_x_0_0_0_0_win, 0, 0, 1, 6)
for r in range(0, 1):
self.signals_grid_layout_1.setRowStretch(r, 1)
for c in range(0, 6):
self.signals_grid_layout_1.setColumnStretch(c, 1)
self.qtgui_time_sink_x_0_0_0 = qtgui.time_sink_c(
t_points+1000, #size
default_samp, #samp_rate
'Time RX In', #name
1 #number of inputs
)
self.qtgui_time_sink_x_0_0_0.set_update_time(0.10)
self.qtgui_time_sink_x_0_0_0.set_y_axis(-2, 2)
self.qtgui_time_sink_x_0_0_0.set_y_label('Amplitude', "")
self.qtgui_time_sink_x_0_0_0.enable_tags(-1, True)
self.qtgui_time_sink_x_0_0_0.set_trigger_mode(qtgui.TRIG_MODE_FREE, qtgui.TRIG_SLOPE_POS, 0.0, 0, 0, "")
self.qtgui_time_sink_x_0_0_0.enable_autoscale(False)
self.qtgui_time_sink_x_0_0_0.enable_grid(False)
self.qtgui_time_sink_x_0_0_0.enable_axis_labels(True)
self.qtgui_time_sink_x_0_0_0.enable_control_panel(False)
self.qtgui_time_sink_x_0_0_0.enable_stem_plot(False)
if not True:
self.qtgui_time_sink_x_0_0_0.disable_legend()
labels = ['', '', '', '', '',
'', '', '', '', '']
widths = [1, 1, 1, 1, 1,
1, 1, 1, 1, 1]
colors = ["blue", "red", "green", "black", "cyan",
"magenta", "yellow", "dark red", "dark green", "blue"]
styles = [1, 1, 1, 1, 1,
1, 1, 1, 1, 1]
markers = [-1, -1, -1, -1, -1,
-1, -1, -1, -1, -1]
alphas = [1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0]
for i in xrange(2):
if len(labels[i]) == 0:
if(i % 2 == 0):
self.qtgui_time_sink_x_0_0_0.set_line_label(i, "Re{{Data {0}}}".format(i/2))
else:
self.qtgui_time_sink_x_0_0_0.set_line_label(i, "Im{{Data {0}}}".format(i/2))
else:
self.qtgui_time_sink_x_0_0_0.set_line_label(i, labels[i])
self.qtgui_time_sink_x_0_0_0.set_line_width(i, widths[i])
self.qtgui_time_sink_x_0_0_0.set_line_color(i, colors[i])
self.qtgui_time_sink_x_0_0_0.set_line_style(i, styles[i])
self.qtgui_time_sink_x_0_0_0.set_line_marker(i, markers[i])
self.qtgui_time_sink_x_0_0_0.set_line_alpha(i, alphas[i])
self._qtgui_time_sink_x_0_0_0_win = sip.wrapinstance(self.qtgui_time_sink_x_0_0_0.pyqwidget(), Qt.QWidget)
self.signals_grid_layout_0.addWidget(self._qtgui_time_sink_x_0_0_0_win, 0, 0, 1, 3)
for r in range(0, 1):
self.signals_grid_layout_0.setRowStretch(r, 1)
for c in range(0, 3):
self.signals_grid_layout_0.setColumnStretch(c, 1)
self.qtgui_freq_sink_x_0_0_1_0_0_0 = qtgui.freq_sink_f(
1024, #size
firdes.WIN_BLACKMAN_hARRIS, #wintype
0, #fc
samp_rate_dec, #bw
'FFT RX in', #name
1 #number of inputs
)
self.qtgui_freq_sink_x_0_0_1_0_0_0.set_update_time(0.10)
self.qtgui_freq_sink_x_0_0_1_0_0_0.set_y_axis(-140, 10)
self.qtgui_freq_sink_x_0_0_1_0_0_0.set_y_label('Relative Gain', 'dB')
self.qtgui_freq_sink_x_0_0_1_0_0_0.set_trigger_mode(qtgui.TRIG_MODE_FREE, 0.0, 0, "")
self.qtgui_freq_sink_x_0_0_1_0_0_0.enable_autoscale(False)
self.qtgui_freq_sink_x_0_0_1_0_0_0.enable_grid(False)
self.qtgui_freq_sink_x_0_0_1_0_0_0.set_fft_average(0.05)
self.qtgui_freq_sink_x_0_0_1_0_0_0.enable_axis_labels(True)
self.qtgui_freq_sink_x_0_0_1_0_0_0.enable_control_panel(False)
if not True:
self.qtgui_freq_sink_x_0_0_1_0_0_0.disable_legend()
if "float" == "float" or "float" == "msg_float":
self.qtgui_freq_sink_x_0_0_1_0_0_0.set_plot_pos_half(not True)
labels = ['', '', '', '', '',
'', '', '', '', '']
widths = [2, 1, 1, 1, 1,
1, 1, 1, 1, 1]
colors = ["blue", "red", "green", "black", "cyan",
"magenta", "yellow", "dark red", "dark green", "dark blue"]
alphas = [1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0]
for i in xrange(1):
if len(labels[i]) == 0:
self.qtgui_freq_sink_x_0_0_1_0_0_0.set_line_label(i, "Data {0}".format(i))
else:
self.qtgui_freq_sink_x_0_0_1_0_0_0.set_line_label(i, labels[i])
self.qtgui_freq_sink_x_0_0_1_0_0_0.set_line_width(i, widths[i])
self.qtgui_freq_sink_x_0_0_1_0_0_0.set_line_color(i, colors[i])
self.qtgui_freq_sink_x_0_0_1_0_0_0.set_line_alpha(i, alphas[i])
self._qtgui_freq_sink_x_0_0_1_0_0_0_win = sip.wrapinstance(self.qtgui_freq_sink_x_0_0_1_0_0_0.pyqwidget(), Qt.QWidget)
self.signals_grid_layout_3.addWidget(self._qtgui_freq_sink_x_0_0_1_0_0_0_win, 1, 0, 1, 3)
for r in range(1, 2):
self.signals_grid_layout_3.setRowStretch(r, 1)
for c in range(0, 3):
self.signals_grid_layout_3.setColumnStretch(c, 1)
self.qtgui_freq_sink_x_0_0_1_0_0 = qtgui.freq_sink_f(
1024, #size
firdes.WIN_BLACKMAN_hARRIS, #wintype
0, #fc
samp_rate_dec, #bw
'FFT RX in', #name
1 #number of inputs
)
self.qtgui_freq_sink_x_0_0_1_0_0.set_update_time(0.10)
self.qtgui_freq_sink_x_0_0_1_0_0.set_y_axis(-140, 10)
self.qtgui_freq_sink_x_0_0_1_0_0.set_y_label('Relative Gain', 'dB')
self.qtgui_freq_sink_x_0_0_1_0_0.set_trigger_mode(qtgui.TRIG_MODE_FREE, 0.0, 0, "")
self.qtgui_freq_sink_x_0_0_1_0_0.enable_autoscale(False)
self.qtgui_freq_sink_x_0_0_1_0_0.enable_grid(False)
self.qtgui_freq_sink_x_0_0_1_0_0.set_fft_average(0.05)
self.qtgui_freq_sink_x_0_0_1_0_0.enable_axis_labels(True)
self.qtgui_freq_sink_x_0_0_1_0_0.enable_control_panel(False)
if not True:
self.qtgui_freq_sink_x_0_0_1_0_0.disable_legend()
if "float" == "float" or "float" == "msg_float":
self.qtgui_freq_sink_x_0_0_1_0_0.set_plot_pos_half(not True)
labels = ['', '', '', '', '',
'', '', '', '', '']
widths = [2, 1, 1, 1, 1,
1, 1, 1, 1, 1]
colors = ["blue", "red", "green", "black", "cyan",
"magenta", "yellow", "dark red", "dark green", "dark blue"]
alphas = [1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0]
for i in xrange(1):
if len(labels[i]) == 0:
self.qtgui_freq_sink_x_0_0_1_0_0.set_line_label(i, "Data {0}".format(i))
else:
self.qtgui_freq_sink_x_0_0_1_0_0.set_line_label(i, labels[i])
self.qtgui_freq_sink_x_0_0_1_0_0.set_line_width(i, widths[i])
self.qtgui_freq_sink_x_0_0_1_0_0.set_line_color(i, colors[i])
self.qtgui_freq_sink_x_0_0_1_0_0.set_line_alpha(i, alphas[i])
self._qtgui_freq_sink_x_0_0_1_0_0_win = sip.wrapinstance(self.qtgui_freq_sink_x_0_0_1_0_0.pyqwidget(), Qt.QWidget)
self.signals_grid_layout_2.addWidget(self._qtgui_freq_sink_x_0_0_1_0_0_win, 1, 0, 1, 3)
for r in range(1, 2):
self.signals_grid_layout_2.setRowStretch(r, 1)
for c in range(0, 3):
self.signals_grid_layout_2.setColumnStretch(c, 1)
self.qtgui_freq_sink_x_0_0_1_0 = qtgui.freq_sink_c(
1024, #size
firdes.WIN_BLACKMAN_hARRIS, #wintype
0, #fc
samp_rate_dec, #bw
'FFT RX in', #name
1 #number of inputs
)
self.qtgui_freq_sink_x_0_0_1_0.set_update_time(0.10)
self.qtgui_freq_sink_x_0_0_1_0.set_y_axis(-140, 10)
self.qtgui_freq_sink_x_0_0_1_0.set_y_label('Relative Gain', 'dB')
self.qtgui_freq_sink_x_0_0_1_0.set_trigger_mode(qtgui.TRIG_MODE_FREE, 0.0, 0, "")
self.qtgui_freq_sink_x_0_0_1_0.enable_autoscale(False)
self.qtgui_freq_sink_x_0_0_1_0.enable_grid(False)
self.qtgui_freq_sink_x_0_0_1_0.set_fft_average(1.0)
self.qtgui_freq_sink_x_0_0_1_0.enable_axis_labels(True)
self.qtgui_freq_sink_x_0_0_1_0.enable_control_panel(False)
if not True:
self.qtgui_freq_sink_x_0_0_1_0.disable_legend()
if "complex" == "float" or "complex" == "msg_float":
self.qtgui_freq_sink_x_0_0_1_0.set_plot_pos_half(not True)
labels = ['', '', '', '', '',
'', '', '', '', '']
widths = [2, 1, 1, 1, 1,
1, 1, 1, 1, 1]
colors = ["blue", "red", "green", "black", "cyan",
"magenta", "yellow", "dark red", "dark green", "dark blue"]
alphas = [1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0]
for i in xrange(1):
if len(labels[i]) == 0:
self.qtgui_freq_sink_x_0_0_1_0.set_line_label(i, "Data {0}".format(i))
else:
self.qtgui_freq_sink_x_0_0_1_0.set_line_label(i, labels[i])
self.qtgui_freq_sink_x_0_0_1_0.set_line_width(i, widths[i])
self.qtgui_freq_sink_x_0_0_1_0.set_line_color(i, colors[i])
self.qtgui_freq_sink_x_0_0_1_0.set_line_alpha(i, alphas[i])
self._qtgui_freq_sink_x_0_0_1_0_win = sip.wrapinstance(self.qtgui_freq_sink_x_0_0_1_0.pyqwidget(), Qt.QWidget)
self.signals_grid_layout_1.addWidget(self._qtgui_freq_sink_x_0_0_1_0_win, 1, 0, 1, 3)
for r in range(1, 2):
self.signals_grid_layout_1.setRowStretch(r, 1)
for c in range(0, 3):
self.signals_grid_layout_1.setColumnStretch(c, 1)
self.qtgui_freq_sink_x_0_0_1 = qtgui.freq_sink_c(
2048, #size
firdes.WIN_BLACKMAN_hARRIS, #wintype
0, #fc
default_samp, #bw
'FFT RX in', #name
1 #number of inputs
)
self.qtgui_freq_sink_x_0_0_1.set_update_time(0.0000010)
self.qtgui_freq_sink_x_0_0_1.set_y_axis(-140, 10)
self.qtgui_freq_sink_x_0_0_1.set_y_label('Relative Gain', 'dB')
self.qtgui_freq_sink_x_0_0_1.set_trigger_mode(qtgui.TRIG_MODE_FREE, 0.0, 0, "")
self.qtgui_freq_sink_x_0_0_1.enable_autoscale(False)
self.qtgui_freq_sink_x_0_0_1.enable_grid(False)
self.qtgui_freq_sink_x_0_0_1.set_fft_average(1.0)
self.qtgui_freq_sink_x_0_0_1.enable_axis_labels(True)
self.qtgui_freq_sink_x_0_0_1.enable_control_panel(False)
if not True:
self.qtgui_freq_sink_x_0_0_1.disable_legend()
if "complex" == "float" or "complex" == "msg_float":
self.qtgui_freq_sink_x_0_0_1.set_plot_pos_half(not True)
labels = ['', '', '', '', '',
'', '', '', '', '']
widths = [2, 1, 1, 1, 1,
1, 1, 1, 1, 1]
colors = ["blue", "red", "green", "black", "cyan",
"magenta", "yellow", "dark red", "dark green", "dark blue"]
alphas = [1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0]
for i in xrange(1):
if len(labels[i]) == 0:
self.qtgui_freq_sink_x_0_0_1.set_line_label(i, "Data {0}".format(i))
else:
self.qtgui_freq_sink_x_0_0_1.set_line_label(i, labels[i])
self.qtgui_freq_sink_x_0_0_1.set_line_width(i, widths[i])
self.qtgui_freq_sink_x_0_0_1.set_line_color(i, colors[i])
self.qtgui_freq_sink_x_0_0_1.set_line_alpha(i, alphas[i])
self._qtgui_freq_sink_x_0_0_1_win = sip.wrapinstance(self.qtgui_freq_sink_x_0_0_1.pyqwidget(), Qt.QWidget)
self.signals_grid_layout_0.addWidget(self._qtgui_freq_sink_x_0_0_1_win, 0, 3, 1, 3)
for r in range(0, 1):
self.signals_grid_layout_0.setRowStretch(r, 1)
for c in range(3, 6):
self.signals_grid_layout_0.setColumnStretch(c, 1)
self.iio_fmcomms2_source_0 = iio.fmcomms2_source_f32c('ip:pluto.local', default_freq-freq_xlating+freq_offset, default_samp, 20000000, True, False, 0x8000, True, True, True, "fast_attack", rx_gain, "fast_attack", 64.0, "A_BALANCED", '', True)
self.freq_xlating_fir_filter_xxx_0 = filter.freq_xlating_fir_filter_ccc(dec_rx, (low_pass_taps), freq_xlating, default_samp)
self.fir_filter_xxx_0_0 = filter.fir_filter_fff(1, (low_pass_taps_2))
self.fir_filter_xxx_0_0.declare_sample_delay(0)
self.digital_clock_recovery_mm_xx_0 = digital.clock_recovery_mm_ff(sps_rx, 0.25*0.175*0.175, cc_mu, cc_mu_gain, cc_omega_lim)
self.digital_binary_slicer_fb_0 = digital.binary_slicer_fb()
self.correctiq_correctiq_0 = correctiq.correctiq()
self._cc_gain_range = Range(1e-3, 50e-3, 1e-3, 0.25*0.175*0.175, 200)
self._cc_gain_win = RangeWidget(self._cc_gain_range, self.set_cc_gain, 'CC Omega Gain', "counter_slider", float)
self.controls_grid_layout_2.addWidget(self._cc_gain_win, 0, 0, 1, 1)
for r in range(0, 1):
self.controls_grid_layout_2.setRowStretch(r, 1)
for c in range(0, 1):
self.controls_grid_layout_2.setColumnStretch(c, 1)
self.blocks_pack_k_bits_bb_0_0 = blocks.pack_k_bits_bb(8)
self.blocks_char_to_float_0 = blocks.char_to_float(1, 1)
self.blocks_add_const_vxx_0 = blocks.add_const_vff((filter_offset*demod_gain, ))
self.blks2_tcp_sink_0 = grc_blks2.tcp_sink(
itemsize=gr.sizeof_char*1,
addr=default_ip,
port=default_port,
server=True,
)
self.analog_quadrature_demod_cf_0 = analog.quadrature_demod_cf(demod_gain)
##################################################
# Connections
##################################################
self.connect((self.analog_quadrature_demod_cf_0, 0), (self.fir_filter_xxx_0_0, 0))
self.connect((self.analog_quadrature_demod_cf_0, 0), (self.qtgui_freq_sink_x_0_0_1_0_0, 0))
self.connect((self.analog_quadrature_demod_cf_0, 0), (self.qtgui_time_sink_x_0_0_0_0_0, 0))
self.connect((self.analog_quadrature_demod_cf_0, 0), (self.qtgui_waterfall_sink_x_0_0_0_0, 0))
self.connect((self.blocks_add_const_vxx_0, 0), (self.digital_clock_recovery_mm_xx_0, 0))
self.connect((self.blocks_add_const_vxx_0, 0), (self.qtgui_freq_sink_x_0_0_1_0_0_0, 0))
self.connect((self.blocks_add_const_vxx_0, 0), (self.qtgui_time_sink_x_0_0_0_0_0_1, 0))
self.connect((self.blocks_add_const_vxx_0, 0), (self.qtgui_waterfall_sink_x_0_0_0_0_0, 0))
self.connect((self.blocks_char_to_float_0, 0), (self.qtgui_time_sink_x_0_0_0_0_0_0_0, 1))
self.connect((self.blocks_pack_k_bits_bb_0_0, 0), (self.blks2_tcp_sink_0, 0))
self.connect((self.correctiq_correctiq_0, 0), (self.freq_xlating_fir_filter_xxx_0, 0))
self.connect((self.digital_binary_slicer_fb_0, 0), (self.satellites_nrzi_decode_0, 0))
self.connect((self.digital_binary_slicer_fb_0, 0), (self.satellites_nrzi_decode_0_0, 0))
self.connect((self.digital_clock_recovery_mm_xx_0, 0), (self.digital_binary_slicer_fb_0, 0))
self.connect((self.digital_clock_recovery_mm_xx_0, 0), (self.qtgui_time_sink_x_0_0_0_0_0_0_0, 0))
self.connect((self.fir_filter_xxx_0_0, 0), (self.blocks_add_const_vxx_0, 0))
self.connect((self.freq_xlating_fir_filter_xxx_0, 0), (self.analog_quadrature_demod_cf_0, 0))
self.connect((self.freq_xlating_fir_filter_xxx_0, 0), (self.qtgui_freq_sink_x_0_0_1_0, 0))
self.connect((self.freq_xlating_fir_filter_xxx_0, 0), (self.qtgui_time_sink_x_0_0_0_0, 0))
self.connect((self.freq_xlating_fir_filter_xxx_0, 0), (self.qtgui_waterfall_sink_x_0_0_0, 0))
self.connect((self.iio_fmcomms2_source_0, 0), (self.correctiq_correctiq_0, 0))
self.connect((self.iio_fmcomms2_source_0, 0), (self.qtgui_freq_sink_x_0_0_1, 0))
self.connect((self.iio_fmcomms2_source_0, 0), (self.qtgui_time_sink_x_0_0_0, 0))
self.connect((self.iio_fmcomms2_source_0, 0), (self.qtgui_waterfall_sink_x_0_0, 0))
self.connect((self.satellites_nrzi_decode_0, 0), (self.blocks_char_to_float_0, 0))
self.connect((self.satellites_nrzi_decode_0_0, 0), (self.blocks_pack_k_bits_bb_0_0, 0))
    def closeEvent(self, event):
        # Persist the window geometry under the "GNU Radio/gfsk_rx"
        # settings key so the flowgraph GUI reopens at the same
        # size/position next run, then allow the close to proceed.
        self.settings = Qt.QSettings("GNU Radio", "gfsk_rx")
        self.settings.setValue("geometry", self.saveGeometry())
        event.accept()
def get_default_bandwidth(self):
return self.default_bandwidth
def set_default_bandwidth(self, default_bandwidth):
self.default_bandwidth = default_bandwidth
def get_default_baud(self):
return self.default_baud
def set_default_baud(self, default_baud):
self.default_baud = default_baud
self.set_samp_rate_dec(self.default_baud*8)
self.set_interp_tx(self.default_samp/self.default_baud)
def get_default_bin_file_sink(self):
return self.default_bin_file_sink
def set_default_bin_file_sink(self, default_bin_file_sink):
self.default_bin_file_sink = default_bin_file_sink
def get_default_dev(self):
return self.default_dev
def set_default_dev(self, default_dev):
self.default_dev = default_dev
self.set_demod_gain((self.samp_rate_dec)/(2*math.pi*self.default_dev))
def get_default_freq(self):
return self.default_freq
def set_default_freq(self, default_freq):
self.default_freq = default_freq
self.iio_fmcomms2_source_0.set_params(self.default_freq-self.freq_xlating+self.freq_offset, self.default_samp, 20000000, True, True, True, "fast_attack", self.rx_gain, "fast_attack", 64.0, "A_BALANCED", '', True)
def get_default_gain(self):
return self.default_gain
def set_default_gain(self, default_gain):
self.default_gain = default_gain
def get_default_ip(self):
return self.default_ip
def set_default_ip(self, default_ip):
self.default_ip = default_ip
def get_default_port(self):
return self.default_port
def set_default_port(self, default_port):
self.default_port = default_port
def get_default_samp(self):
return self.default_samp
def set_default_samp(self, default_samp):
self.default_samp = default_samp
self.set_dec_rx(self.default_samp/self.samp_rate_dec)
self.qtgui_waterfall_sink_x_0_0.set_frequency_range(0, self.default_samp)
self.qtgui_time_sink_x_0_0_0.set_samp_rate(self.default_samp)
self.qtgui_freq_sink_x_0_0_1.set_frequency_range(0, self.default_samp)
self.set_interp_tx(self.default_samp/self.default_baud)
self.iio_fmcomms2_source_0.set_params(self.default_freq-self.freq_xlating+self.freq_offset, self.default_samp, 20000000, True, True, True, "fast_attack", self.rx_gain, "fast_attack", 64.0, "A_BALANCED", '', True)
def get_sdr_dev(self):
return self.sdr_dev
def set_sdr_dev(self, sdr_dev):
self.sdr_dev = sdr_dev
def get_samp_rate_dec(self):
return self.samp_rate_dec
    def set_samp_rate_dec(self, samp_rate_dec):
        """Update the decimated sample rate and propagate it to every
        dependent value: demod gain, RX decimation factor, and the
        frequency/time range of all GUI sinks fed by the decimated path."""
        self.samp_rate_dec = samp_rate_dec
        self.set_demod_gain((self.samp_rate_dec)/(2*math.pi*self.default_dev))
        self.set_dec_rx(self.default_samp/self.samp_rate_dec)
        self.qtgui_waterfall_sink_x_0_0_0_0_0.set_frequency_range(0, self.samp_rate_dec)
        self.qtgui_waterfall_sink_x_0_0_0_0.set_frequency_range(0, self.samp_rate_dec)
        self.qtgui_waterfall_sink_x_0_0_0.set_frequency_range(0, self.samp_rate_dec)
        self.qtgui_time_sink_x_0_0_0_0_0_1.set_samp_rate(self.samp_rate_dec)
        # The post-clock-recovery sink runs at one sample per symbol
        # (rate / 8 samples-per-symbol).
        self.qtgui_time_sink_x_0_0_0_0_0_0_0.set_samp_rate(self.samp_rate_dec/8)
        self.qtgui_time_sink_x_0_0_0_0_0.set_samp_rate(self.samp_rate_dec)
        self.qtgui_time_sink_x_0_0_0_0.set_samp_rate(self.samp_rate_dec)
        self.qtgui_freq_sink_x_0_0_1_0_0_0.set_frequency_range(0, self.samp_rate_dec)
        self.qtgui_freq_sink_x_0_0_1_0_0.set_frequency_range(0, self.samp_rate_dec)
        self.qtgui_freq_sink_x_0_0_1_0.set_frequency_range(0, self.samp_rate_dec)
def get_interp_tx(self):
return self.interp_tx
def set_interp_tx(self, interp_tx):
self.interp_tx = interp_tx
self.set_sps_rx(self.interp_tx/self.dec_rx)
def get_dec_rx(self):
return self.dec_rx
def set_dec_rx(self, dec_rx):
self.dec_rx = dec_rx
self.set_sps_rx(self.interp_tx/self.dec_rx)
def get_sps_rx(self):
return self.sps_rx
def set_sps_rx(self, sps_rx):
self.sps_rx = sps_rx
self.digital_clock_recovery_mm_xx_0.set_omega(self.sps_rx)
def get_t_points(self):
return self.t_points
def set_t_points(self, t_points):
self.t_points = t_points
def get_rx_gain(self):
return self.rx_gain
def set_rx_gain(self, rx_gain):
self.rx_gain = rx_gain
self.iio_fmcomms2_source_0.set_params(self.default_freq-self.freq_xlating+self.freq_offset, self.default_samp, 20000000, True, True, True, "fast_attack", self.rx_gain, "fast_attack", 64.0, "A_BALANCED", '', True)
def get_rrc_taps(self):
return self.rrc_taps
def set_rrc_taps(self, rrc_taps):
self.rrc_taps = rrc_taps
def get_low_pass_taps_2(self):
return self.low_pass_taps_2
def set_low_pass_taps_2(self, low_pass_taps_2):
self.low_pass_taps_2 = low_pass_taps_2
self.fir_filter_xxx_0_0.set_taps((self.low_pass_taps_2))
def get_low_pass_taps(self):
return self.low_pass_taps
def set_low_pass_taps(self, low_pass_taps):
self.low_pass_taps = low_pass_taps
self.freq_xlating_fir_filter_xxx_0.set_taps((self.low_pass_taps))
def get_freq_xlating(self):
return self.freq_xlating
def set_freq_xlating(self, freq_xlating):
self.freq_xlating = freq_xlating
self.iio_fmcomms2_source_0.set_params(self.default_freq-self.freq_xlating+self.freq_offset, self.default_samp, 20000000, True, True, True, "fast_attack", self.rx_gain, "fast_attack", 64.0, "A_BALANCED", '', True)
self.freq_xlating_fir_filter_xxx_0.set_center_freq(self.freq_xlating)
def get_freq_offset(self):
return self.freq_offset
def set_freq_offset(self, freq_offset):
self.freq_offset = freq_offset
self.iio_fmcomms2_source_0.set_params(self.default_freq-self.freq_xlating+self.freq_offset, self.default_samp, 20000000, True, True, True, "fast_attack", self.rx_gain, "fast_attack", 64.0, "A_BALANCED", '', True)
def get_filter_offset(self):
return self.filter_offset
def set_filter_offset(self, filter_offset):
self.filter_offset = filter_offset
self.blocks_add_const_vxx_0.set_k((self.filter_offset*self.demod_gain, ))
def get_demod_gain(self):
return self.demod_gain
def set_demod_gain(self, demod_gain):
self.demod_gain = demod_gain
self.blocks_add_const_vxx_0.set_k((self.filter_offset*self.demod_gain, ))
self.analog_quadrature_demod_cf_0.set_gain(self.demod_gain)
def get_cc_omega_lim(self):
return self.cc_omega_lim
def set_cc_omega_lim(self, cc_omega_lim):
self.cc_omega_lim = cc_omega_lim
def get_cc_mu_gain(self):
return self.cc_mu_gain
def set_cc_mu_gain(self, cc_mu_gain):
self.cc_mu_gain = cc_mu_gain
self.digital_clock_recovery_mm_xx_0.set_gain_mu(self.cc_mu_gain)
def get_cc_mu(self):
return self.cc_mu
def set_cc_mu(self, cc_mu):
self.cc_mu = cc_mu
self.digital_clock_recovery_mm_xx_0.set_mu(self.cc_mu)
def get_cc_gain(self):
return self.cc_gain
def set_cc_gain(self, cc_gain):
self.cc_gain = cc_gain
def argument_parser():
    """Build the OptionParser exposing this GRC flowgraph's parameters
    (bandwidth, baud rate, device/frequency settings, TCP sink address)
    as command-line options.  Generated by GRC: each option maps to a
    same-named parameter of the top block."""
    parser = OptionParser(usage="%prog: [options]", option_class=eng_option)
    parser.add_option(
        "-w", "--default-bandwidth", dest="default_bandwidth", type="eng_float", default=eng_notation.num_to_str(20e3),
        help="Set default_bandwidth [default=%default]")
    parser.add_option(
        "-b", "--default-baud", dest="default_baud", type="intx", default=9600,
        help="Set default_baud [default=%default]")
    parser.add_option(
        "-o", "--default-bin-file-sink", dest="default_bin_file_sink", type="string", default="/tmp/rx_data.bin",
        help="Set default_bin_file_sink [default=%default]")
    parser.add_option(
        "-j", "--default-dev", dest="default_dev", type="eng_float", default=eng_notation.num_to_str(4950/2),
        help="Set Input [default=%default]")
    parser.add_option(
        "-f", "--default-freq", dest="default_freq", type="intx", default=436750000,
        help="Set default_freq [default=%default]")
    parser.add_option(
        "-g", "--default-gain", dest="default_gain", type="eng_float", default=eng_notation.num_to_str(16),
        help="Set default_gain [default=%default]")
    parser.add_option(
        "-i", "--default-ip", dest="default_ip", type="string", default='127.0.0.1',
        help="Set default_ip [default=%default]")
    parser.add_option(
        "-p", "--default-port", dest="default_port", type="intx", default=7000,
        help="Set default_port [default=%default]")
    parser.add_option(
        "-s", "--default-samp", dest="default_samp", type="intx", default=1920000,
        help="Set default_samp [default=%default]")
    parser.add_option(
        "-d", "--sdr-dev", dest="sdr_dev", type="string", default="rtl=0",
        help="Set sdr_dev [default=%default]")
    return parser
def main(top_block_cls=gfsk_rx, options=None):
    """Parse CLI options, instantiate the flowgraph and run the Qt
    event loop until the window is closed.  (Python 2 code: note the
    `print` statement below.)"""
    if options is None:
        options, _ = argument_parser().parse_args()
    # Real-time scheduling is best-effort: warn and keep running.
    if gr.enable_realtime_scheduling() != gr.RT_OK:
        print "Error: failed to enable real-time scheduling."
    from distutils.version import StrictVersion
    if StrictVersion(Qt.qVersion()) >= StrictVersion("4.5.0"):
        style = gr.prefs().get_string('qtgui', 'style', 'raster')
        Qt.QApplication.setGraphicsSystem(style)
    qapp = Qt.QApplication(sys.argv)
    tb = top_block_cls(default_bandwidth=options.default_bandwidth, default_baud=options.default_baud, default_bin_file_sink=options.default_bin_file_sink, default_dev=options.default_dev, default_freq=options.default_freq, default_gain=options.default_gain, default_ip=options.default_ip, default_port=options.default_port, default_samp=options.default_samp, sdr_dev=options.sdr_dev)
    tb.start()
    tb.show()

    def quitting():
        # Stop the flowgraph cleanly before the Qt application exits.
        tb.stop()
        tb.wait()
    qapp.connect(qapp, Qt.SIGNAL("aboutToQuit()"), quitting)
    qapp.exec_()
if __name__ == '__main__':
main()
| [
"mario.baldini@gmail.com"
] | mario.baldini@gmail.com |
e1e86389d66b93dd4822b7ba5af7fe578432b75a | 1662e063d62bddb3f3e63204f66f8d7685d59d9a | /blog/views.py | 49d6f7ad68dd59e72576de8a887ecc140b0bea0c | [] | no_license | danielmjales/my-first-blog | b2f9ae9bca676d367cc015765e6653ee0a64eabf | 66bd359926b0a38dcc06e4b4c4f1caf815382b3f | refs/heads/master | 2020-04-17T11:40:04.905284 | 2019-01-20T12:37:02 | 2019-01-20T12:37:02 | 166,549,786 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,632 | py | from django.shortcuts import render, get_object_or_404, redirect
from django.utils import timezone
from .models import Post
from .forms import PostForm
from rest_framework import viewsets
from .serializers import PostModelSerializer
def post_list(request):
    """Render every blog post, sorted alphabetically by title."""
    ordered_posts = Post.objects.all().order_by('title')
    context = {'posts': ordered_posts}
    return render(request, 'blog/post_list.html', context)
def post_detail(request, pk):
    """Render a single post, returning HTTP 404 for an unknown pk."""
    post = get_object_or_404(Post, pk=pk)
    context = {'post': post}
    return render(request, 'blog/post_detail.html', context)
def post_new(request):
    """Create a new post.

    GET shows a blank form; a valid POST saves the post (stamping the
    current user and publish time) and redirects to its detail page;
    an invalid POST re-renders the bound form with errors.
    """
    if request.method != "POST":
        # Initial visit: present an empty form.
        return render(request, 'blog/post_edit.html', {'form': PostForm()})
    form = PostForm(request.POST)
    if not form.is_valid():
        # Re-display with validation errors.
        return render(request, 'blog/post_edit.html', {'form': form})
    post = form.save(commit=False)
    post.author = request.user
    post.published_date = timezone.now()
    post.save()
    return redirect('post_detail', pk=post.pk)
def post_edit(request, pk):
    """Edit an existing post (404 on unknown pk).

    GET shows the form pre-filled from the post; a valid POST saves the
    changes (re-stamping author and publish time) and redirects to the
    detail page; an invalid POST re-renders the bound form.
    """
    post = get_object_or_404(Post, pk=pk)
    if request.method != "POST":
        return render(request, 'blog/post_edit.html',
                      {'form': PostForm(instance=post)})
    form = PostForm(request.POST, instance=post)
    if not form.is_valid():
        return render(request, 'blog/post_edit.html', {'form': form})
    updated = form.save(commit=False)
    updated.author = request.user
    updated.published_date = timezone.now()
    updated.save()
    return redirect('post_detail', pk=updated.pk)
class PostModelViewSet(viewsets.ModelViewSet):
    # DRF endpoint exposing full CRUD for posts, ordered by title
    # descending (note: opposite order to the post_list view above).
    serializer_class = PostModelSerializer
    queryset = Post.objects.all().order_by('-title')
"danielmjales@bct.ect.ufrn.br"
] | danielmjales@bct.ect.ufrn.br |
af54235f1808ded628afe0b1d54a6be553ceaa24 | 91e57f5ef0a4477e12a946dc7c9c66ad2ced0778 | /baekjoon_py/14889.py | c6a840877bf2e2890dc01538a4278279e1abb982 | [] | no_license | popcon9424/algorithm | 85b50de51cf6d61dfa8edfcc508e5c7c333eb82b | 5f9c685f4f684ea398758ab1f71f02e0bf1bac03 | refs/heads/master | 2020-05-28T03:08:24.286215 | 2019-11-28T08:01:26 | 2019-11-28T08:01:26 | 188,863,078 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 781 | py | import sys
from itertools import combinations
N = int(sys.stdin.readline())
board = [ list(map(int, sys.stdin.readline().split())) for _ in range(N) ]
minimum = 0
for bd in board:
minimum += sum(bd)
combs = combinations(list(range(N)), N//2)
for com in combs:
firstSum, secondSum = 0, 0
smallcombs = combinations(com, 2)
for smallcomb in smallcombs:
firstSum += board[smallcomb[0]][smallcomb[1]] + board[smallcomb[1]][smallcomb[0]]
notcombs = combinations(list(set(range(N)) - set(com)), 2)
for notcomb in notcombs:
secondSum += board[notcomb[0]][notcomb[1]] + board[notcomb[1]][notcomb[0]]
diff = abs(firstSum - secondSum)
if diff == 0:
minimum = 0
break
if diff < minimum:
minimum = diff
print(minimum) | [
"gusgh9424@naver.com"
] | gusgh9424@naver.com |
bb9a53589955ef9aa479dbd294e34706c2932991 | 1ff34305a38b92eb33983ec90f29c67eac731f31 | /next level platform.py | 63a34dbb60b17f2a65176d70adbc3e5e620c2c64 | [] | no_license | jerhieb/pygame | 3e4e78fd3a9f1bfdda52404c4994025249e0c0b0 | 81d737d82d0562389cab87eb80f5913806082080 | refs/heads/master | 2020-08-04T16:56:53.348022 | 2019-10-02T02:06:38 | 2019-10-02T02:06:38 | 212,210,833 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,741 | py | import pygame
pygame.init()
display_width = 800
display_height = 600
surface = pygame.display.set_mode((display_width, display_height))
running1 = pygame.image.load('Running1.png')
running2 = pygame.image.load('Running2.png')
standing = pygame.image.load('playerstanding.png')
door = pygame.image.load('Door.png')
key = pygame.image.load('key.png')
running_left1 = pygame.transform.flip(running1, True, False)
running_left2 = pygame.transform.flip(running2, True, False)
running_list = [running1, running2]
counter = 0
clock = pygame.time.Clock()
player_x = 100
player_y = 300
player_xvel = 0
player_yvel = 2
run_direction = 'standing'
jumping = False
gravity = 1.2
key_found = False
level =1
trip = 0
while True:
clock.tick(40)
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_RIGHT:
run_direction = 'right'
player_xvel = 5
if event.key == pygame.K_LEFT:
run_direction = 'left'
player_xvel = -5
if event.key ==pygame.K_SPACE:
if jumping ==False:
jumping = True
player_yvel = -15
if event.type == pygame.KEYUP:
if event.key == pygame.K_RIGHT:
run_direction='standing'
player_xvel = 0
if event.key == pygame.K_LEFT:
run_direction='standing'
player_xvel = 0
if jumping ==True:
if player_yvel < 8:
player_yvel = player_yvel + gravity
player_x = player_x + player_xvel
player_y = player_y + player_yvel
surface.fill((255, 255, 255))
if level ==1:
pygame.draw.rect(surface, (100, 100, 100), (30, 500, 100, 10))
pygame.draw.rect(surface, (100, 100, 100), (200, 450, 100, 10))
pygame.draw.rect(surface, (100, 100, 100), (380, 400, 100, 10))
pygame.draw.rect(surface, (100, 100, 100), (680, 500, 100, 10))
pygame.draw.rect(surface, (100, 100, 100), (550, 450, 100, 10))
pygame.draw.rect(surface, (100, 100, 100), (200, 350, 100, 10))
pygame.draw.rect(surface, (100, 100, 100), (550, 350, 100, 10))
pygame.draw.rect(surface, (100, 100, 100), (680, 300, 100, 10))
pygame.draw.rect(surface, (100, 100, 100), (550, 250, 100, 10))
pygame.draw.rect(surface, (100, 100, 100), (380, 300, 100, 10))
if level ==2:
if trip == 0:
player_x = 720
player_y = 50
trip = trip + 1
pygame.draw.rect(surface, (100, 100, 100), (700, 100, 100, 10))
pygame.draw.rect(surface, (100, 100, 100), (100, 280, 100, 10))
pygame.draw.rect(surface, (100, 100, 100), (0, 200, 100, 10))
pygame.draw.rect(surface, (100, 100, 100), (100, 120, 100, 10))
pygame.draw.rect(surface, (100, 100, 100), (300, 100, 100, 10))
pygame.draw.rect(surface, (100, 100, 100), (200, 350, 100, 10))
pygame.draw.rect(surface, (100, 100, 100), (680, 500, 100, 10))
pygame.draw.rect(surface, (100, 100, 100), (380, 300, 100, 10))
pygame.draw.rect(surface, (255, 0, 0), (0, display_height-40, display_width, display_height))
if level ==1:
door_rect = surface.blit(door, (425, 270))
if key_found ==False:
key_rect = surface.blit(key, (720, 475))
if level ==2:
door_rect = surface.blit(door, (720, 470))
if key_found ==False:
key_rect = surface.blit(key, (320, 75))
if run_direction== 'right':
if counter%2==0:
character = surface.blit(running1, (player_x, player_y))
else:
character =surface.blit(running2, (player_x, player_y))
if run_direction == 'left':
if counter%2==0:
character =surface.blit(running_left1, (player_x, player_y))
else:
character =surface.blit(running_left2, (player_x, player_y))
if run_direction=='standing':
character = surface.blit(standing, (player_x, player_y))
if character.colliderect(key_rect):
key_found = True
if character.colliderect(door_rect):
if key_found == True:
print('you win')
key_found = False
level =2
if surface.get_at((character.left, character.bottom))==(100, 100, 100) or surface.get_at((character.right, character.bottom))==(100, 100, 100):
player_yvel = 0
jumping = False
else:
if jumping == False:
player_yvel = 2
if player_y>display_height-40:
pygame.quit()
counter = counter + 1
pygame.display.update()
| [
"noreply@github.com"
] | jerhieb.noreply@github.com |
0d307cf1b2d2df910db56e7f5bfb1b8f8f5ab2a4 | 288ccb79d6b73572d6d82366031813a3a7976eb5 | /venv/lib/python3.6/site-packages/secretstorage/util.py | 60bafca0f46b04c7a1d59de295c9b98aaa186a15 | [
"MIT"
] | permissive | abualrubbaraa/Baraa-Validator | d9182767b696270dbcc6f071c12574e470ed0f5d | bff356f4e35ea7de66de799e7f063c383e298d1f | refs/heads/master | 2022-11-29T11:19:30.624281 | 2020-08-18T20:37:22 | 2020-08-18T20:37:22 | 288,558,878 | 0 | 1 | MIT | 2022-11-28T19:57:51 | 2020-08-18T20:34:28 | Python | UTF-8 | Python | false | false | 6,213 | py | # SecretStorage module for Python
# Access passwords using the SecretService DBus API
# Author: Dmitry Shachnev, 2013-2018
# License: 3-clause BSD, see LICENSE file
"""This module provides some utility functions, but these shouldn't
normally be used by external applications."""
import os
from typing import Any, List, Tuple
from jeepney import DBusAddress
from jeepney.bus_messages import MatchRule
from jeepney.integrate.blocking import DBusConnection
from jeepney.low_level import Message
from jeepney.wrappers import new_method_call, Properties, DBusErrorResponse
from secretstorage.defines import DBUS_UNKNOWN_METHOD, DBUS_NO_SUCH_OBJECT, \
DBUS_SERVICE_UNKNOWN, DBUS_NO_REPLY, DBUS_NOT_SUPPORTED, DBUS_EXEC_FAILED, \
SS_PATH, SS_PREFIX, ALGORITHM_DH, ALGORITHM_PLAIN
from secretstorage.dhcrypto import Session, int_to_bytes
from secretstorage.exceptions import ItemNotFoundException, \
SecretServiceNotAvailableException
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
from cryptography.hazmat.backends import default_backend
from cryptography.utils import int_from_bytes
BUS_NAME = 'org.freedesktop.secrets'
SERVICE_IFACE = SS_PREFIX + 'Service'
PROMPT_IFACE = SS_PREFIX + 'Prompt'
class DBusAddressWrapper(DBusAddress): # type: ignore
"""A wrapper class around :class:`jeepney.wrappers.DBusAddress`
that adds some additional methods for calling and working with
properties, and converts error responses to SecretStorage
exceptions.
.. versionadded:: 3.0
"""
def __init__(self, path: str, interface: str,
connection: DBusConnection) -> None:
DBusAddress.__init__(self, path, BUS_NAME, interface)
self._connection = connection
def send_and_get_reply(self, msg: Message) -> Any:
try:
return self._connection.send_and_get_reply(msg)
except DBusErrorResponse as resp:
if resp.name in (DBUS_UNKNOWN_METHOD, DBUS_NO_SUCH_OBJECT):
raise ItemNotFoundException('Item does not exist!') from resp
elif resp.name in (DBUS_SERVICE_UNKNOWN, DBUS_EXEC_FAILED,
DBUS_NO_REPLY):
data = resp.data
if isinstance(data, tuple):
data = data[0]
raise SecretServiceNotAvailableException(data) from resp
raise
def call(self, method: str, signature: str, *body: Any) -> Any:
msg = new_method_call(self, method, signature, body)
return self.send_and_get_reply(msg)
def get_property(self, name: str) -> Any:
msg = Properties(self).get(name)
(signature, value), = self.send_and_get_reply(msg)
return value
def set_property(self, name: str, signature: str, value: Any) -> None:
msg = Properties(self).set(name, signature, value)
self.send_and_get_reply(msg)
def open_session(connection: DBusConnection) -> Session:
"""Returns a new Secret Service session."""
service = DBusAddressWrapper(SS_PATH, SERVICE_IFACE, connection)
session = Session()
try:
output, result = service.call('OpenSession', 'sv',
ALGORITHM_DH,
('ay', int_to_bytes(session.my_public_key)))
except DBusErrorResponse as resp:
if resp.name != DBUS_NOT_SUPPORTED:
raise
output, result = service.call('OpenSession', 'sv',
ALGORITHM_PLAIN,
('s', ''))
session.encrypted = False
else:
signature, value = output
assert signature == 'ay'
key = int_from_bytes(value, 'big')
session.set_server_public_key(key)
session.object_path = result
return session
def format_secret(session: Session, secret: bytes,
                  content_type: str) -> Tuple[str, bytes, bytes, str]:
    """Formats `secret` to make possible to pass it to the
    Secret Service API.

    Returns the (session_path, parameters, value, content_type) tuple
    matching the Secret Service "Secret" struct; for an encrypted
    session the `parameters` field carries the AES IV.
    """
    if isinstance(secret, str):
        secret = secret.encode('utf-8')
    elif not isinstance(secret, bytes):
        raise TypeError('secret must be bytes')
    assert session.object_path is not None
    if not session.encrypted:
        # Plain-text session: empty parameters, secret passed as-is.
        return (session.object_path, b'', secret, content_type)
    assert session.aes_key is not None
    # PKCS-7 style padding: pad up to the next 16-byte block boundary
    # (a full extra block when the length is already a multiple of 16).
    padding = 0x10 - (len(secret) & 0xf)
    secret += bytes((padding,) * padding)
    # Fresh random IV per secret; sent alongside the ciphertext.
    aes_iv = os.urandom(0x10)
    aes = algorithms.AES(session.aes_key)
    encryptor = Cipher(aes, modes.CBC(aes_iv), default_backend()).encryptor()
    encrypted_secret = encryptor.update(secret) + encryptor.finalize()
    return (
        session.object_path,
        aes_iv,
        encrypted_secret,
        content_type
    )
def exec_prompt(connection: DBusConnection,
prompt_path: str) -> Tuple[bool, List[str]]:
"""Executes the prompt in a blocking mode.
:returns: a tuple; the first element is a boolean value showing
whether the operation was dismissed, the second element
is a list of unlocked object paths
"""
prompt = DBusAddressWrapper(prompt_path, PROMPT_IFACE, connection)
dismissed = result = None
def callback(msg_body: Tuple[bool, List[str]]) -> None:
_dismissed, _result = msg_body
nonlocal dismissed, result
dismissed, result = bool(_dismissed), _result
connection.router.subscribe_signal(callback, prompt_path, PROMPT_IFACE, 'Completed')
prompt.call('Prompt', 's', '')
if result is None:
connection.recv_messages()
assert dismissed is not None
assert result is not None
return dismissed, result
def unlock_objects(connection: DBusConnection, paths: List[str]) -> bool:
"""Requests unlocking objects specified in `paths`.
Returns a boolean representing whether the operation was dismissed.
.. versionadded:: 2.1.2"""
service = DBusAddressWrapper(SS_PATH, SERVICE_IFACE, connection)
unlocked_paths, prompt = service.call('Unlock', 'ao', paths)
if len(prompt) > 1:
dismissed, (signature, unlocked) = exec_prompt(connection, prompt)
assert signature == 'ao'
return dismissed
return False
def add_match_rules(connection: DBusConnection) -> None:
"""Adds match rules for the given connection.
Currently it matches all messages from the Prompt interface, as the
mock service (unlike GNOME Keyring) does not specify the signal
destination.
.. versionadded:: 3.1
"""
rule = MatchRule(sender=BUS_NAME, interface=PROMPT_IFACE)
dbus = DBusAddressWrapper(path='/org/freedesktop/DBus',
interface='org.freedesktop.DBus',
connection=connection)
dbus.bus_name = 'org.freedesktop.DBus'
dbus.call('AddMatch', 's', rule.serialise())
| [
"baraaabualrub1998@gmail.com"
] | baraaabualrub1998@gmail.com |
e19d83d920cbf214a0559c2f0bb610c90b9d69ee | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_116/1914.py | 20d7b72d1b8a35128812032e9655e83a53e17756 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,493 | py | f = open("A-large.in","r")
t = int (f.readline())
ent = []
def check(ent):
    """Classify a 4x4 Tic-Tac-Toe-Tomek board.

    `ent` holds four row strings over the alphabet X/O/T/'.'.
    Returns 0 if X has won, 1 if O has won, 2 for a completed draw,
    and 3 when the game has not completed.  A line containing neither
    '.' nor 'O' is an X win ('T' acts as a wildcard), and symmetrically
    for O.
    """
    rows = [ent[r] for r in range(4)]
    cols = [[ent[r][c] for r in range(4)] for c in range(4)]
    diagonals = [[ent[k][k] for k in range(4)],
                 [ent[k][3 - k] for k in range(4)]]
    # Scan in the same order as the original: rows, columns, main
    # diagonal, anti-diagonal; test X before O on each line.
    for line in rows + cols + diagonals:
        if '.' not in line and 'O' not in line:
            return 0
        if '.' not in line and 'X' not in line:
            return 1
    # No winner: a full board is a draw, otherwise play continues.
    if all('.' not in ent[r] for r in range(4)):
        return 2
    return 3
# For each case: read the 4 board rows, classify, and write the verdict.
s = open("output.out","w")
for i in range(1,t+1):
    for j in range(0,4):
        ent.append(f.readline())
    x = check(ent)
    if x == 0:
        s.write("Case #%d: X won" % i)
    if x == 1:
        s.write("Case #%d: O won" % i)
    if x == 2:
        s.write("Case #%d: Draw" % i)
    if x == 3:
        s.write("Case #%d: Game has not completed" % i)
    if i<t:
        # Consume the blank separator line between cases; the stray
        # append is harmless because `ent` is reset below.
        ent.append(f.readline())
    s.write("\n")
    ent = []
f.close()
s.close()
| [
"miliar1732@gmail.com"
] | miliar1732@gmail.com |
e1a8258c295bb435d8d056b45f9adbadb1d5fb35 | 6d2307761303169e6527e10d4ee3b8692c84e9b7 | /Code/Algorithm Testing/NB.py | 82463836cc9517bc6e6411e58f5807ed2a177bf8 | [] | no_license | bpblakely/Email-Spam-Classification-Project | e783bed5a6b5b4bb6ccf8c9be092cdb9e163207f | d657bfbf475edb5731e78a4122fb7aaeee9e6ab5 | refs/heads/master | 2020-10-02T04:39:27.867598 | 2019-12-12T22:42:19 | 2019-12-12T22:42:19 | 227,703,943 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,664 | py | # -*- coding: utf-8 -*-
"""
Created on Thu Oct 31 10:29:48 2019
@author: Brian
"""
import numpy as np
from sklearn.naive_bayes import MultinomialNB,GaussianNB,ComplementNB
from sklearn.feature_selection import SelectKBest
from sklearn.feature_selection import chi2
from sklearn.svm import SVC
from sklearn.neural_network import MLPClassifier
from sklearn.pipeline import Pipeline
from sklearn.linear_model import LassoCV
from sklearn.feature_selection import SelectFromModel
from sklearn.ensemble import RandomForestClassifier
from sklearn.svm import LinearSVC
from kerasClassifier import predictTest as pt
#Main code for testing different algorithms. Commented code are algorithms testing prior. Uncomment in order to test again
def predictTest(trainFeatures,trainLabels,testFeatures,i):
    """Reduce both feature matrices to the `i` best features by
    chi-squared score (selector fitted on the training data only),
    then delegate prediction to the Keras classifier's predictTest.
    Returns that classifier's predictions for testFeatures.
    """
    # Previously evaluated models, kept for re-testing:
    #model=MultinomialNB(alpha=.01)
    #model= MLPClassifier(random_state=2,hidden_layer_sizes=[100, 100],max_iter=1000)
    #model = SVC(kernel="rbf",probability=True,gamma='scale')
    #model=LinearSVC(penalty="l2")
    #model=RandomForestClassifier(n_estimators=100, max_depth=12,random_state=2)
    ch2=SelectKBest(chi2,k=i)
    train=ch2.fit_transform(trainFeatures, trainLabels)
    # Apply the already-fitted selection (no refit) to the test set.
    test= ch2.transform(testFeatures)
    #lsvc = LinearSVC(C=20, penalty="l1", dual=False)
    #clf = Pipeline([('feature_selection', SelectFromModel(lsvc)), ('classification', model)])
    #clf.fit(trainFeatures, trainLabels)
    #trainFeaturesN = select.fit_transform(trainFeatures,trainLabels)
    return pt(train,trainLabels,test,i)
    # model.fit(train,trainLabels)
    # predicted = model.predict_proba(test)[:,1]
    # return predicted
| [
"noreply@github.com"
] | bpblakely.noreply@github.com |
9c7c5a026f89ccbfb7ccfb066f21b2da5e6310a4 | 03644227f51ff3ebfd0b5321d40c7d392dfcd315 | /exchange_plane/venv/Scripts/easy_install-script.py | 272aecf33bbda4ac76aa86b4c487a6da0a400aa8 | [] | no_license | wufeipku/python | 32fc26b85bafad15fe8f873d9806b6ab8d699310 | 1eb16a01d11aecd23097172a45caf79a4042f3bf | refs/heads/master | 2020-04-29T15:11:11.065925 | 2019-03-18T07:28:45 | 2019-03-18T07:28:45 | 176,220,111 | 1 | 3 | null | null | null | null | UTF-8 | Python | false | false | 453 | py | #!E:\python36\Pycharmproject\exchange_plane\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==39.1.0','console_scripts','easy_install'
__requires__ = 'setuptools==39.1.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('setuptools==39.1.0', 'console_scripts', 'easy_install')()
)
| [
"wufei.pku@163.com"
] | wufei.pku@163.com |
4fd29bedd1166b6c40908bcbd80ecf7dd8f39329 | 8a67943c7dfdf9c89a140d1a4c5aabc63d1b7263 | /train/train_transfer.py | 300ccb9c01c4c5b326954a52bac63326c93ccd29 | [
"MIT"
] | permissive | huynhtuan17ti/UnsupervisedLearning-JigsawPuzzle | c37782050b61a60695681a195dd5f38b36803976 | 1aafade4b6f169cef8815f90c27ec485bf64ca7d | refs/heads/main | 2023-04-30T07:46:43.300200 | 2021-05-16T13:08:52 | 2021-05-16T13:08:52 | 366,249,587 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,749 | py | import sys
sys.path.insert(1, '../UnsupervisedLearning-JigsawPuzzle')
import torch
import torch.nn as nn
import torchvision
import os
import cv2
from torch.autograd import Variable
from dataset_factory.data_loader import AnimalDataset
from dataset_factory.data_utils import get_all_imgs
from models.AlexNet import AlexNet
from config import Config
import math
from metric import accuracy as acc_metric
from tqdm import tqdm
from train.train_utils import prepare_dataloader
from torchvision import models
import argparse
parser = argparse.ArgumentParser(description='Train JigsawPuzzle Classifer')
parser.add_argument('--lr', default=0.001, type=float, help='learning rate for SGD optimizer')
parser.add_argument('--gamma', default=0.3, type=float, help='gamma for StepLR')
parser.add_argument('--period', default=30, type=int, help='period range for StepLR')
parser.add_argument('--pretrained', default=None, type=str, help='Path to pretrained model')
parser.add_argument('--checkpoint', default=None, type=str, help='Path to save checkpoint model')
parser.add_argument('--train_csv', default='../UnsupervisedLearning-JigsawPuzzle/dataset/csv/train.csv', type=str, help='Path to train.csv')
parser.add_argument('--valid_csv', default='../UnsupervisedLearning-JigsawPuzzle/dataset/csv/valid.csv', type=str, help='Path to valid.csv')
parser.add_argument('--epochs', default=200, type=int, help='number of total epochs for training')
parser.add_argument('--train_batch', default=16, type=int, help='train batch size')
parser.add_argument('--valid_batch', default=16, type=int, help='valid batch size')
parser.add_argument('--init_acc', default=0, type=float, help='initial accuracy for training')
parser.add_argument('--result', default=None, type=str, help='Path to save result log')
args = parser.parse_args()
def train_one_epoch(epoch, net, train_loader, loss_fc, optimizer):
    """Run one training epoch on the GPU, updating `net` in place and
    showing running mean loss/accuracy in a tqdm progress bar."""
    net.train()
    total_loss = 0
    total_acc = 0
    pbar = tqdm(enumerate(train_loader), total = len(train_loader))
    for step, (images, labels) in pbar:
        images = Variable(images).cuda()
        labels = Variable(labels).cuda()
        optimizer.zero_grad()
        outputs = net(images)
        # Batch accuracy in percent (argmax over class logits).
        preds = torch.argmax(outputs, 1).detach().cpu().numpy()
        targets = labels.detach().cpu().numpy()
        acc = (preds == targets).mean()*100
        loss = loss_fc(outputs, labels)
        loss.backward()
        optimizer.step()
        total_loss += loss.item()
        total_acc += acc
        # Running means over the batches seen so far this epoch.
        description = f'epoch {epoch} || Loss: {total_loss/(step+1):.6f} | Acc: {total_acc/(step+1):.6}'
        pbar.set_description(description)
def valid_one_epoch(epoch, net, valid_loader, loss_fc):
    """Evaluate `net` over the validation loader and return the mean
    accuracy (percent) across batches.

    Gradients are disabled by the caller, which wraps this call in
    ``torch.no_grad()`` (see the training loop below).
    NOTE(review): raises NameError on an empty valid_loader (`step`
    unbound in the return) — confirm loaders are always non-empty.
    """
    net.eval()
    total_loss = 0
    total_acc = 0
    pbar = tqdm(enumerate(valid_loader), total = len(valid_loader))
    for step, (images, labels) in pbar:
        images = Variable(images).cuda()
        labels = Variable(labels).cuda()
        outputs = net(images)
        preds = torch.argmax(outputs, 1).detach().cpu().numpy()
        targets = labels.detach().cpu().numpy()
        acc = (preds == targets).mean()*100
        loss = loss_fc(outputs, labels)
        total_loss += loss.item()
        total_acc += acc
        description = f'epoch {epoch} || Loss: {total_loss/(step+1):.6f} | Acc: {total_acc/(step+1):.6}'
        pbar.set_description(description)
    return total_acc/(step+1)
if __name__ == '__main__':
    train_loader, valid_loader = prepare_dataloader(AnimalDataset, args.train_csv, args.valid_csv, args.train_batch, args.valid_batch)
    # Transfer learning: ImageNet-pretrained AlexNet with its final
    # classifier layer replaced for 10 animal classes.
    net = models.alexnet(pretrained = True)
    num_ftrs = net.classifier[6].in_features
    net.classifier[6] = nn.Linear(num_ftrs, 10)
    net.cuda()
    loss = nn.CrossEntropyLoss()
    optimizer = torch.optim.SGD(filter(lambda p: p.requires_grad, net.parameters()), lr = args.lr, momentum=0.9, weight_decay=5e-4)
    scheduler = torch.optim.lr_scheduler.StepLR(optimizer, args.period, args.gamma, verbose = True)
    if args.result:
        f = open(args.result, "w")
    # training
    print('='*30)
    print('Start training ...')
    best_acc = args.init_acc
    for epoch in range(args.epochs):
        train_one_epoch(epoch, net, train_loader, loss, optimizer)
        with torch.no_grad():
            acc = valid_one_epoch(epoch, net, valid_loader, loss)
        # Checkpoint whenever validation accuracy improves.
        if acc > best_acc:
            best_acc = acc
            torch.save(net.state_dict(), args.checkpoint)
            print('Save checkpoint ... Best accuracy {:.3f}'.format(best_acc))
            if args.result:
                f.write("Epoch: " + str(epoch) + ', best acc save: ' + str(best_acc) + '\n')
        scheduler.step()
    if args.result:
        f.close()
"huynhminhtuan6429@gmail.com"
] | huynhminhtuan6429@gmail.com |
c34cf6fe9e2f299d9e4c23ed305ee16c98656660 | 20257efe43389be6da440cf0ae1d28d203cc49c0 | /label/label_V1/srcs/photos.py | 514f826c196a235e8fd79e8737b31042153db91e | [] | no_license | Sebds/Patate | 26a7b8590edffe7a08dae8696d949ab8c5cb36da | 7676189d6051a4530bd47392639e4169217b61c5 | refs/heads/master | 2020-03-26T21:33:29.513474 | 2018-08-19T17:33:00 | 2018-08-19T17:33:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,561 | py | import os
import time
import srcs.color as c
from tkinter import *
from PIL import ImageTk, Image
class Photos(object):
def __init__(self, photos, lab_photos, trash='trash/', auto_next=False, width=800, height=600):
self.photos_dir = photos
self.lab_photos_dir = lab_photos
self.trash_dir = trash
self.photo_act = 0
self.fen = {
'fen' : None,
'lab_photo' : None,
'photo' : None,
'lab_info' : None
}
self.width = width
self.height = height
self.width_img = self.width
self.height_img = self.height - 50
self.auto_next = auto_next
def load(self):
self.photos = os.listdir(self.photos_dir)
self.photos_inf = [{'del' : False, 'label' : ''} for i in range(len(self.photos))]
self.photo_act = 0
def init_win(self, width=0, height=0):
self.width = width if width != 0 else self.width
self.width_img = width if width != 0 else self.width_img
self.height_img = height - 50 if height != 0 else self.height_img
self.fen['fen'] = Tk()
self.fen['fen'].title('lab_photo')
self.print_win()
self.init_key()
self.fen['fen'].mainloop()
def init_key(self):
self.fen['fen'].bind("<Escape>", self.quit_win)
self.fen['fen'].bind("<BackSpace>", self.del_label)
self.fen['fen'].bind("<Control-Key-s>", self.save)
self.fen['fen'].bind("<Right>", self.next_photo)
self.fen['fen'].bind("<Left>", self.last_photo)
self.fen['fen'].bind("<Up>", self.del_photo)
self.fen['fen'].bind("<KeyPress>", self.event_win)
def save(self, event):
for i in range(len(self.photos)):
if self.photos_inf[i]['del'] == True:
print(c.RED + 'DELETE -> ' + c.EOC + self.photos[i])
os.rename(self.photos_dir + self.photos[i], self.trash_dir + self.photos[i])
if self.photos_inf[i]['label'] != '':
print(c.GREEN + 'LABEL -> ' + c.EOC + self.photos_inf[i]['label'] + '_' + self.photos[i])
os.rename(self.photos_dir + self.photos[i], self.lab_photos_dir + self.photos_inf[i]['label'] + '_' + self.photos[i])
self.load()
def quit_win(self, event):
self.save(None)
self.fen['fen'].destroy()
self.fen['fen'].quit()
def del_label(self, event):
self.photos_inf[self.photo_act]['label'] = ''
if self.auto_next == True:
self.next_photo(None)
else:
self.print_win()
def event_win(self, event):
if event.char in ('0', '1', '2', '3', '4', '5', '6', '7', '8', '9'):
self.photos_inf[self.photo_act]['label'] = event.char
if self.auto_next == True:
self.next_photo(None)
else:
self.print_win()
def del_photo(self, event):
if self.photos_inf[self.photo_act]['del'] == True:
self.photos_inf[self.photo_act]['del'] = False
else:
self.photos_inf[self.photo_act]['del'] = True
if self.auto_next == True:
self.next_photo(None)
else:
self.print_win()
def last_photo(self, event):
self.photo_act -= 1
if self.photo_act < 0:
self.photo_act = len(self.photos) - 1
self.print_win()
def next_photo(self, event):
self.photo_act += 1
if self.photo_act >= len(self.photos):
self.photo_act = 0
self.print_win()
def print_win(self):
if self.fen['lab_photo'] != None:
self.fen['lab_photo'].destroy()
if self.fen['lab_info'] != None:
self.fen['lab_info'].destroy()
image = Image.open(self.photos_dir + self.photos[self.photo_act])
image = image.resize((self.width_img, self.height_img), Image.ANTIALIAS)
self.fen['photo'] = ImageTk.PhotoImage(image)
self.fen['lab_photo'] = Label(self.fen['fen'], image=self.fen['photo'])
self.fen['lab_photo'].pack(side=TOP)
self.fen['lab_info'] = Label(self.fen['fen'], width=32, height=2, font=("Courier", 40))
if self.photos_inf[self.photo_act]['del'] == True:
self.fen['lab_info'].configure(bg='red')
else:
self.fen['lab_info'].configure(bg='white')
self.fen['lab_info']['text'] = self.photos_inf[self.photo_act]['label'] + '\t\t' + str(self.photo_act) + '/' + str(len(self.photos))
self.fen['lab_info'].pack(side=BOTTOM)
| [
"tnicolas@student.42.fr"
] | tnicolas@student.42.fr |
360fbd0df75ba142aadd5589508fdb2d95ba7602 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_074/ch117_2020_04_01_19_24_01_200930.py | 446a96f7337eaf516aa30fe9c7ef40edbc6f0571 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 225 | py | import math
def snell_descartes (n1,n2,c):
o1>=o2
c=math.degrees(o1)
d=math.degrees(o2)
a=math.sin(c)
b=math.sin(d)
b=a*n1/n2
o1<=90 and o1>=0
if o1==0:
o2==0 and a==b==1
return(o1) | [
"you@example.com"
] | you@example.com |
b87d3f64e713ba53fb5b94de3507f74d8a97ea0b | 5c533e2cf1f2fa87e55253cdbfc6cc63fb2d1982 | /python/quantumhall/cyclotron.py | 108c267d7ee00673328a312228abdcb7f535d40f | [] | no_license | philzook58/python | 940c24088968f0d5c655e2344dfa084deaefe7c6 | 6d43db5165c9bcb17e8348a650710c5f603e6a96 | refs/heads/master | 2020-05-25T15:42:55.428149 | 2018-05-14T03:33:29 | 2018-05-14T03:33:29 | 69,040,196 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 993 | py |
#A suggesiton for the classical fractional hall effect
#Is a mean field organiztion of the cycltron phases, such that they synchronize.
#Leading to an effective time and angle dependant
# self consistantly dz/dt2 = i w dz/dt + P
# where E is a vortex configuration by conjecture. P = f(|z|)z^n
# and also has angular time dependance z/|z|
import numpy as np
from scipy.integrate import odeint
import matplotlib.pyplot as plt
omega =1.
g = -.5
def pack(z,zdot):
return np.array([np.real(z),np.imag(z),np.real(zdot),np.imag(zdot)])
def unpack(x):
return x[0]+1.j * x[1], x[2]+1.j * x[3],
def accel(z,zdot):
return 1.j * omega * zdot + g * np.conj(z)**3
def diffeq(x,t):
z, zdot = unpack(x)
return pack(zdot, accel(z,zdot))
from scipy import signal
T = 1000.
N = 1000
initcond = pack(1. + 0.j ,0. + 1.j)
t = np.linspace(0,T, N)
sol = odeint(diffeq, initcond, t)
f , P = signal.periodogram(sol[:,1],N/T)
plt.plot(t,sol[:,1])
plt.figure()
plt.plot(f,P)
plt.show()
| [
"philip@FartMachine7.local"
] | philip@FartMachine7.local |
60d4e232d5fa663fa88d5d6da7e0953144542f33 | 9ef0f266173887eafd5c797d13a6538733b39002 | /trimesh/path/entities.py | de2166781a4699322e91ad3e70b13e8fccd4f1c4 | [
"MIT"
] | permissive | MiaoLi/trimesh | a850e3a922e43ce6500085eeaf16df8404ad0f17 | 8f6e537151d914d23180a1c1152d849c41d2c1fa | refs/heads/master | 2021-01-14T12:36:02.831270 | 2015-10-17T01:36:33 | 2015-10-17T01:36:33 | 44,636,986 | 2 | 0 | null | 2015-10-20T21:52:11 | 2015-10-20T21:52:10 | null | UTF-8 | Python | false | false | 5,072 | py | '''
entities.py: basic geometric primitives
Design intent: only store references to vertex indices and pass the vertex
array back to functions that require it.
This keeps all vertices in one external list.
'''
import numpy as np
from .arc import discretize_arc, arc_center
from .curve import discretize_bezier, discretize_bspline
from ..points import unitize
from ..util import replace_references
_HASH_LENGTH = 5
class Entity(object):
def __init__(self,
points,
closed = False):
self.points = np.array(points)
self.closed = closed
@property
def _class_id(self):
'''
Return an integer that is unique to the class type.
Note that this implementation will fail if a class is defined
that starts with the same letter as an existing class.
Since this function is called a lot, it is a tradeoff between
speed and robustness where speed won.
'''
return ord(self.__class__.__name__[0])
@property
def hash(self):
'''
Returns a string unique to the entity.
If two identical entities exist, they can be removed
by comparing the string returned by this function.
'''
hash = np.zeros(_HASH_LENGTH, dtype=np.int)
hash[-2:] = self._class_id, int(self.closed)
points_count = np.min([3, len(self.points)])
hash[0:points_count] = np.sort(self.points)[-points_count:]
return hash
def to_dict(self):
'''
Returns a dictionary with all of the information about the entity.
'''
return {'type' : self.__class__.__name__,
'points': self.points.tolist(),
'closed': self.closed}
def rereference(self, replacement):
'''
Given a replacement dictionary, change points to reflect the dictionary.
eg, if replacement = {0:107}, self.points = [0,1902] becomes [107, 1902]
'''
self.points = replace_references(self.points, replacement)
@property
def nodes(self):
'''
Returns an (n,2) list of nodes, or vertices on the path.
Note that this generic class function assumes that all of the reference
points are on the path, which is true for lines and three point arcs.
If you were to define another class where that wasn't the case
(for example, the control points of a bezier curve),
you would need to implement an entity- specific version of this function.
The purpose of having a list of nodes is so that they can then be added
as edges to a graph, so we can use functions to check connectivity,
extract paths, etc.
The slicing on this function is essentially just tiling points
so the first and last vertices aren't repeated. Example:
self.points = [0,1,2]
returns: [[0,1], [1,2]]
'''
return np.column_stack((self.points,
self.points)).reshape(-1)[1:-1].reshape((-1,2))
@property
def end_points(self):
'''
Returns the first and last points. Also note that if you
define a new entity class where the first and last vertices
in self.points aren't the endpoints of the curve you need to
implement this function for your class.
self.points = [0,1,2]
returns: [0,2]
'''
return self.points[[0,-1]]
class Arc(Entity):
def discrete(self, vertices, scale=1.0):
return discretize_arc(vertices[self.points],
close = self.closed,
scale = scale)
def center(self, vertices):
return arc_center(vertices[self.points])
class Line(Entity):
def discrete(self, vertices, scale=1.0):
return vertices[self.points]
class Curve(Entity):
@property
def _class_id(self):
return sum([ord(i) for i in self.__class__.__name__])
@property
def nodes(self):
return [[self.points[0],
self.points[1]],
[self.points[1],
self.points[-1]]]
class Bezier(Curve):
def discrete(self, vertices, scale=1.0):
return discretize_bezier(vertices[self.points], scale=scale)
class BSpline(Curve):
def __init__(self, points, knots, closed=False):
self.points = points
self.knots = knots
self.closed = closed
def discrete(self, vertices, count=None, scale=1.0):
result = discretize_bspline(control = vertices[self.points],
knots = self.knots,
count = count,
scale = scale)
return result
| [
"mik3dh@gmail.com"
] | mik3dh@gmail.com |
64dcffc9f3b11462172adb89a4680a202824afe1 | 669b9fd39398de1fc55ad7da8e7f3182c3d25ade | /sonar.py | aa1179e0e5de9bb69342c6ea8306a32da0a67705 | [
"Apache-2.0"
] | permissive | milos85vasic/Website-Sonar | 3fa811082221f90225d174b17a8dce31c69d05f3 | 406fa3f1baa82d1a7279c0d50dada9b141dec506 | refs/heads/master | 2020-04-15T16:11:10.500880 | 2019-01-31T13:59:25 | 2019-01-31T13:59:25 | 164,824,149 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,171 | py | import os
import sys
import time
import json
import urllib2
import requests
import logging
import os.path
from requests import ConnectionError
from logging.handlers import RotatingFileHandler
param_configuration_name = '--configuration'
configuration_default_file = 'configuration.json'
configuration = {}
elapsed_times = {}
unreachable_websites = []
key_websites = 'websites'
key_overrides = 'overrides'
key_notification = 'notification'
key_connectivity_verification_website = 'connectivity_verification_website'
def load_configuration():
configuration_file = configuration_default_file
for arg in sys.argv:
if sys.argv.index(arg) > 0:
if param_configuration_name in arg:
configuration_name = arg.replace(param_configuration_name, "")
configuration_name = configuration_name.replace(".json", "")
configuration_name = configuration_name.replace("=", "")
configuration_name = configuration_name.replace("'", "")
configuration_name = configuration_name.replace("\"", "")
configuration_name = configuration_name.replace(" ", "")
configuration_file = configuration_name + ".json"
log("Starting Website Sonar (version: " + version + "). Configuration file: " + configuration_file + ".")
if os.path.isfile(configuration_file):
try:
json_file = open(configuration_file)
json_str = json_file.read()
loaded = json.loads(json_str)
configuration.update(loaded)
if key_websites in configuration and key_overrides in configuration and \
key_connectivity_verification_website in configuration[key_overrides]:
for item in configuration[key_websites]:
elapsed_times[item] = 0
return True
except Exception as e:
log("Error: " + str(e))
return False
return False
app_log = logging.getLogger('root')
debug = False
verbose = True
do_logging = True
version = "1.2.0"
working_frequency = 1
key_frequency = 'frequency'
key_verification = 'verification'
key_working_frequency = 'working_frequency'
default_frequency = 10 * 60 if not debug else 10
key_notification_mechanism_println = "Println"
key_notification_mechanism_slack = "Slack-Notifier"
key_notification_mechanism_email = "Email-Notifier"
headers = {'user-agent': 'Website Sonar, version: ' + version}
log_filename = 'website-sonar.log'
log_files_count = 10 if not debug else 5
log_max_file_size = 5 * 1024 * 1024 if not debug else 1024
def log(what):
if verbose:
print what
if do_logging:
app_log.info(what)
def internet_on():
try:
urllib2.urlopen(configuration[key_overrides][key_connectivity_verification_website], timeout=1)
return True
except urllib2.URLError:
return False
def check(website, website_configuration):
log("Checking: " + website)
if "http" not in website:
log("No schema defined for: " + website + ", falling back to default: http:// schema.")
website = "http://" + website
try:
response = requests.get(website, headers=headers)
if response.status_code != 200 and response.status_code != 201:
return False
body = response.text
if key_verification in website_configuration:
for criteria in website_configuration[key_verification]:
if criteria not in body:
return False
except ConnectionError:
return False
return True
def perform_check(website):
if check(website, configuration[key_websites][website]):
message = "Website " + website + " is ok."
if website in unreachable_websites:
message = "Website " + website + " is reachable again."
unreachable_websites.remove(website)
notify(message)
log(message)
else:
if website not in unreachable_websites:
fail(website)
else:
log("Website is still not reachable: " + website)
def run(what):
for cmd in what:
os.system(cmd)
def fail(website):
unreachable_websites.append(website)
message = "Website is not reachable: " + website
log(message)
notify(message)
return
def notify(message):
if key_notification in configuration:
for mechanism in configuration[key_notification]:
if mechanism == key_notification_mechanism_slack:
slack(message)
continue
if mechanism == key_notification_mechanism_email:
email(message)
continue
if mechanism == key_notification_mechanism_println:
print ("MSG :: " + message)
continue
def slack(message):
command = [
"python Slack/notify.py \"" + message + "\""
]
if internet_on():
run(command)
def email(message):
command = [
"python Email/notify.py \"" + message + "\""
]
if internet_on():
run(command)
def run_sonar():
if do_logging:
logging.basicConfig(
filename="website-sonar.log",
filemode='a',
format='%(asctime)s,%(msecs)d %(name)s %(levelname)s %(message)s',
datefmt='%H:%M:%S',
level=logging.DEBUG
)
formatter = logging.Formatter('%(asctime)s,%(msecs)d %(name)s %(levelname)s %(message)s')
handler = RotatingFileHandler(
log_filename, mode='a', maxBytes=log_max_file_size, backupCount=log_files_count, encoding=None, delay=0
)
handler.setFormatter(formatter)
handler.setLevel(logging.DEBUG)
app_log.setLevel(logging.DEBUG)
app_log.addHandler(handler)
if not load_configuration():
log("Website Sonar (version: " + version + ") could not be started. Could not load configuration JSON.")
sys.exit(1)
start_message = "Website Sonar (version: " + version + ") is STARTED."
if key_notification in configuration and key_notification_mechanism_email in configuration[key_notification]:
email(start_message)
log(start_message)
frequency = working_frequency
if key_working_frequency in configuration[key_overrides]:
frequency = configuration[key_overrides][key_working_frequency]
while True:
time.sleep(frequency)
for website in elapsed_times:
elapsed_times[website] = elapsed_times[website] + frequency
if debug:
log("Tick. " + str(elapsed_times[website]))
expected_frequency = default_frequency
if key_frequency in configuration[key_websites][website]:
expected_frequency = configuration[key_websites][website][key_frequency]
if elapsed_times[website] >= expected_frequency:
elapsed_times[website] = 0
if not internet_on():
log("No internet connection available.")
continue
perform_check(website)
if __name__ == '__main__':
run_sonar()
| [
"milos85vasic@gmail.com"
] | milos85vasic@gmail.com |
ebbb121bfb497e7f272ba80d191c5af2c3a9b31d | 04adc1a7ae0f9577076321a5931b7816cacc980b | /Exercicios/media-de-lista-com-input-e-1-while.py | 54a19fcbeed414daebbc49a459bf230dbdd8f22b | [] | no_license | jacquesfelipe/python-learning | 455db8ab474edf3e230c20667aa54d194381b7dd | 668a0c6393655e18841c5ca76bfed9de54d13f32 | refs/heads/main | 2023-06-30T11:11:05.384635 | 2021-08-08T21:34:59 | 2021-08-08T21:34:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 278 | py | """Mรฉdia de lista com input"""
notas = []
x = 0
soma = 0
while x <= 3:
notas.append(float(input("Digite qual nota gostaria de adicionar: ")))
soma = soma + notas[x]
x = x + 1
media = soma / (len(notas))
print(f"A mรฉdia das notas: {[notas]} รฉ: {media}")
| [
"jacquesfelipej@gmail.com"
] | jacquesfelipej@gmail.com |
dee362941322f9741b27f098fc60916cc88f260a | b5e3b4b8e2c70e06e3b19bcd86789b83028da78f | /django_project/blog/migrations/0009_auto_20190620_2016.py | fd3d4764fd5b6fae3dd1dfcc853bd20a330c5b05 | [] | no_license | feridbedelov/Django_Project_Blog_Aurora | 334593d2d523f38e7c472b6e8439cd19f777ec6a | 130f3db455590333c45d40c042722f5908e7bb32 | refs/heads/master | 2020-07-31T15:47:39.431799 | 2019-09-24T17:41:33 | 2019-09-24T17:41:33 | 210,662,296 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 388 | py | # Generated by Django 2.2.1 on 2019-06-20 16:16
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('blog', '0008_post_rating'),
]
operations = [
migrations.AlterField(
model_name='post',
name='rating',
field=models.FloatField(default=5.5, max_length=20),
),
]
| [
"User@DESKTOP-K24KG53"
] | User@DESKTOP-K24KG53 |
611e6bea09e4fc1314eb651ee69043dad69aec8d | 0af76aee48453b64d2f09dfadeb79f4a4ac6fef3 | /solution/practice/data-structures/multiple-choice/how-well-do-you-know-trees/solution.py | fc9723e63948c87114dd0dc2b22b2b8d0c594e5f | [
"MIT"
] | permissive | Abhishek2019/HackerRank | 99ee5d437eb3abe8f041a04bea3968848605a811 | d8a297e2707545957452d07ca564086e3e34a527 | refs/heads/master | 2021-05-03T12:06:18.488528 | 2019-10-30T17:19:59 | 2019-10-30T17:19:59 | 120,493,921 | 0 | 1 | MIT | 2019-10-30T17:20:05 | 2018-02-06T17:08:34 | Python | UTF-8 | Python | false | false | 13 | py | print("n-1")
| [
"root@localhost.localdomain"
] | root@localhost.localdomain |
9ef08444444fb5f68dc415a3902027135ded3355 | 7fa478e503293dad2b12ffc5a7648e5ead2cf3df | /outliers/enron_outliers.py | ffd44aaaccb50fdc57f9afed72de4250f8092578 | [] | no_license | bluewaitor/ud120 | e81457fec36b8d1841bbecb91fde4e893d4df37b | b120ca580443d92721f9a46955b0f42a01b15e66 | refs/heads/master | 2021-08-28T23:33:16.267964 | 2017-12-13T08:37:13 | 2017-12-13T08:37:13 | 114,091,057 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 886 | py | #!/usr/bin/python
import pickle
import sys
import matplotlib.pyplot
sys.path.append("../tools/")
from feature_format import featureFormat, targetFeatureSplit
### read in data dictionary, convert to numpy array
data_dict = pickle.load( open("../final_project/final_project_dataset.pkl", "r") )
features = ["salary", "bonus"]
data_dict.pop('TOTAL',0)
data = featureFormat(data_dict, features)
### your code below
for point in data:
salary = point[0]
bonus = point[1]
matplotlib.pyplot.scatter(salary, bonus)
matplotlib.pyplot.xlabel("salary")
matplotlib.pyplot.ylabel("bonus")
matplotlib.pyplot.show()
for key, value in data_dict.items():
if value['bonus'] == data.max():
print '==' + key
biggest = 0
for key, value in data_dict.items():
if value['bonus'] > biggest:
biggest = value['bonus']
print key, biggest, value['salary'] | [
"405622394@qq.com"
] | 405622394@qq.com |
e08349ddfbec65c303385ec355d6356a79d8056f | f9ca6da37554c16211eae83d280765205d98a72d | /gesture_recognizer/picture_cropper.py | e8a989109264ea1d288cc96b69918eec6dbe2f3e | [] | no_license | kgalloway2/VSC-Code | 03f8955f0c6b630ad97dd1d42ca83af64317e6f8 | 7c3d321de7a4880a9c92f57c153cd23a154390f6 | refs/heads/master | 2023-07-09T12:20:02.147818 | 2021-08-09T13:50:06 | 2021-08-09T13:50:06 | 291,090,362 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 364 | py | from PIL import Image
# 640x480
i = 0
while i <= 105:
img=Image.open("C:/Users/kgtrm/Documents/VSC Code/gesture_recognizer/screenshots/test_hands/test_hand" + str(i) + ".jpg")
c_i = img.crop(box=(20,20,550,400))
c_i.save("C:/Users/kgtrm/Documents/VSC Code/gesture_recognizer/screenshots/test_hands/cropped_test_hand" + str(i) + ".jpg")
i += 1
| [
"keatongalloway@yahoo.com"
] | keatongalloway@yahoo.com |
76732c90be1e6c89d923ed2aabebc32359ae7817 | b73b77dbbd6b4b2c216c1c1e08e5d92c734e545c | /hotel/migrations/0102_auto_20200414_1402.py | 4c95c54d31333b48f288d476d6df915d58142931 | [] | no_license | aadarshachapagain/hotel_booking | 0cf248b78a03277a5208aecb1a72aa1282319ead | 58503c57d2fd6d07fdbe6b7eb113954a0282dc3d | refs/heads/main | 2023-08-27T01:53:21.176194 | 2021-10-01T03:13:42 | 2021-10-01T03:13:42 | 412,294,747 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 410 | py | # Generated by Django 2.1.5 on 2020-04-14 08:17
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('hotel', '0101_bedtype_status'),
]
operations = [
migrations.AlterField(
model_name='bedtype',
name='description',
field=models.TextField(blank=True, max_length=500, null=True),
),
]
| [
"aadarshachapagain@gmail.com"
] | aadarshachapagain@gmail.com |
900753b09ad104145d9f0ffbfa579ec628962275 | f62cf89d4e87a053c442e24f50cef4eb0ada2263 | /01-Python3ๅบ็ก่ฏญๆณ.py | ed343984a27df1a3348f6ad4e7e5b2fdf7dd6e7e | [] | no_license | Yushallchao/PythonPractise | e9024a45b01658805ad39c47c86574d241b11f5e | 4d5fe0a7870af9b38569d715d73f8f057ce9b37d | refs/heads/main | 2023-01-31T20:54:54.449565 | 2020-12-10T06:10:57 | 2020-12-10T06:10:57 | 316,146,580 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,602 | py | #้ป่ฎคๆ
ๅตไธ๏ผPython 3 ๆบ็ ๆไปถไปฅ UTF-8 ็ผ็ ๏ผๆๆๅญ็ฌฆไธฒ้ฝๆฏ unicode ๅญ็ฌฆไธฒใ ๅฝ็ถไฝ ไนๅฏไปฅไธบๆบ็ ๆไปถๆๅฎไธๅ็็ผ็ ๏ผ
# -*- coding: utf-8 -*-
#!/usr/bin/python3
# coding=utf-8
# ็ฌฌไธไธชๆณจ้
print ("Hello, Python3!") # ็ฌฌไบไธชๆณจ้
'''
็ฌฌไธๆณจ้
'''
"""
็ฌฌๅๆณจ้
"""
if True:
print ("True")
print("Hello, Python3! again")
else:
print ("False")
str = 'Runoob'#pythonไธญๅๅผๅทๅๅๅผๅทไฝฟ็จๅฎๅ
จ็ธๅ (''="")
print(str)
print(str[0:-1]) # ่พๅบ็ฌฌไธไธชๅฐๅๆฐ็ฌฌไบไธช็ๆๆๅญ็ฌฆ,ไปๅณๅพๅทฆไปฅ-1ๅผๅง
print(str[0]) # ่พๅบๅญ็ฌฆไธฒ็ฌฌไธไธชๅญ็ฌฆ
print(str[2:5]) # ่พๅบไป็ฌฌไธไธชๅผๅงๅฐ็ฌฌไบไธช็ๅญ็ฌฆ
print(str[2:]) # ่พๅบไป็ฌฌไธไธชๅผๅงๅ็ๆๆๅญ็ฌฆ
print(str * 2) # ่พๅบๅญ็ฌฆไธฒไธคๆฌก
print(str + 'ไฝ ๅฅฝ') # ่ฟๆฅๅญ็ฌฆไธฒ
print('------------------------------')
print('hello\nrunoob') # ไฝฟ็จๅๆๆ (\)+n่ฝฌไน็นๆฎๅญ็ฌฆ
print(r'hello\nrunoob') # ๅจๅญ็ฌฆไธฒๅ้ขๆทปๅ ไธไธช r๏ผ่กจ็คบๅๅงๅญ็ฌฆไธฒ๏ผไธไผๅ็่ฝฌไน
p = input()
print(p)
#Pythonๅฏไปฅๅจๅไธ่กไธญไฝฟ็จๅคๆก่ฏญๅฅ๏ผ่ฏญๅฅไน้ดไฝฟ็จๅๅท(;)ๅๅฒ
import sys;x = 'runoob'; sys.stdout.write(x + '\n')
## ไธๆข่ก่พๅบๅจๅ้ๆซๅฐพๅ ไธ end=""
print(str, end="")
print(str, end="")
from sys import argv,path # ๅฏผๅ
ฅ็นๅฎ็ๆๅ
print('================python from import===================================')
print('path:',path) # ๅ ไธบๅทฒ็ปๅฏผๅ
ฅpathๆๅ๏ผๆไปฅๆญคๅคๅผ็จๆถไธ้่ฆๅ sys.path | [
"yushallchao@163.com"
] | yushallchao@163.com |
4e6028989cc9120f76b6ac9bca31ed716230e93f | 001b95da5e994198a53c21f39f0e5b2c88fcd885 | /apiloaderv2.py | 7ad66da9d150f4797a02b8be4897272e256e9e83 | [
"Apache-2.0"
] | permissive | vunetsys/conf-analysis | 43ed78a4e5859ece4b94067156825c0478a6833e | 20945710646ac346caff5d2d7b45a44402721426 | refs/heads/master | 2022-12-05T15:11:04.506836 | 2020-08-28T11:06:48 | 2020-08-28T11:06:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,582 | py | import csv
import json
import requests
from papertracker.models import ConfPaper, ConfAuthor, Conference
csvfile = open('C:/Users/Mattia/Pictures/csrankings.csv', encoding='utf-8')
a = requests.get('https://dblp.org/search/publ/api/?q=conf/fast/2011$&format=json&h=1000')
c = a.json()
conf = Conference.objects.get(id=715)
spamreader = csv.reader(csvfile)
inst = 'None'
for items in c['result']['hits']['hit']:
if 'venue' in items['info']:
if items['info']['venue'] == 'FAST':
#if 'FSE' in items['info']['venue']:
cc = ConfPaper.objects.create(conf=conf, title=items['info']['title'])
cc.save()
for it in items['info']:
if 'authors' in it:
for i in items['info']['authors']['author']:
check = False
if isinstance(i, dict):
for row in spamreader:
if row[0] == i['text']:
check = True
inst = row[1]
csvfile.seek(0)
if check == True:
au = ConfAuthor.objects.create(paper=cc, name=i['text'], institution=inst)
au.save()
else:
au = ConfAuthor.objects.create(paper=cc, name=i['text'], institution='None')
au.save()
inst = 'None'
elif isinstance(i, str):
if i == 'text':
for row in spamreader:
if row[0] == items['info']['authors']['author']['text']:
check = True
inst = row[1]
csvfile.seek(0)
if check == True:
au = ConfAuthor.objects.create(paper=cc, name=items['info']['authors']['author']['text'], institution=inst)
au.save()
else:
au = ConfAuthor.objects.create(paper=cc, name=items['info']['authors']['author']['text'], institution='None')
au.save()
inst = 'None'
| [
"mtt.manzaroli@gmail.com"
] | mtt.manzaroli@gmail.com |
d772e8a81b0341e954f8e91fbfad37c97cf003c4 | b7fa6ec316abd8b0df7a873f2a0f82ed55e13c0e | /Datos/Operator_In-Contando_Vocales.py | dfd28a47d4377f0b3ad975603c990ad5a5576c59 | [
"MIT"
] | permissive | CaosMx/Code-Exercises-in-Python-Language | 5230ec32f9606563bc92d77415f11b12946803f4 | 0693e445a48cf8b06432affbf72c9182ce9cfb20 | refs/heads/main | 2023-02-01T16:03:05.763750 | 2020-12-14T02:41:29 | 2020-12-14T02:41:29 | 321,198,830 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 949 | py | # CaosMX
# Dic 2020
# Ex Python Practica
'''
Comprobar dada una palabra, la cantidad de vocales que contiene:
Usando el operador in -> Nos dice en determinada posiciรณn de un string si el caracter existe en
otro string predeterminado:
Siguiendo Curso de Python de Manuel Gonzalez:
https://www.youtube.com/channel/UCQLWbxZbgftDIhw21i6q_OA/featured
https://programarescomounjuego.blogspot.com
'''
#Input
palabra = input ("Dame un una palabra: ")
# String para verificar las vocales
vocales = "aeiouรกรฉรญรณรบ"
# Para verificar el caracter en el รญndice del string:
indice = 0
# Para contar las vocales:
num_vocales = 0
# Recorremos la palabra
while indice <= len(palabra)-1:
# Si el caracter es una vocal
if palabra[indice] in vocales:
# Incremento de contador:
num_vocales += 1
# Aumentamos el indice para recorrer el siguiente caracter:
indice += 1
print ("La cantidad de vocales es: ", num_vocales) | [
"ing.manuel.arreola@gmail.com"
] | ing.manuel.arreola@gmail.com |
5352686f4e473327fc059b46ee1eb30a3308f534 | 8efd2eccd36946f430f1243e13070685d4695bfe | /satfire/tests/test_utils.py | bc3d156a7a881af9851ed304324705627a97bc8a | [] | no_license | pytroll/satfire | 5ca99ccb2b346692eb5fd136c917fd74e55d36d5 | f8bc309ed84aa92673cc02c61eeef0cc997b662b | refs/heads/master | 2020-05-05T13:36:52.702079 | 2020-02-04T13:29:32 | 2020-02-04T13:29:32 | 180,085,989 | 4 | 2 | null | 2020-02-04T13:29:33 | 2019-04-08T06:40:30 | Python | UTF-8 | Python | false | false | 8,356 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017, Panu Lahtinen / FMI
#
# Author(s):
#
# Panu Lahtinen <panu.lahtinen@fmi.fi>
"""Unit testing for utils
"""
import sys
import os.path
from collections import OrderedDict
import numpy as np
from satfire import utils
from posttroll.message import Message
if sys.version_info < (2, 7):
import unittest2 as unittest
else:
import unittest
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
class TestUtils(unittest.TestCase):
    """Unit tests for the helper functions in :mod:`satfire.utils`."""
yaml_config = """config:
item_1: 1
item_2: 2
"""
def test_ordered_load(self):
fid = StringIO(self.yaml_config)
res = utils.ordered_load(fid)
fid.close()
self.assertTrue(list(res.keys())[0] == "config")
keys = list(res["config"].keys())
self.assertTrue(keys[0] == "item_1")
self.assertTrue(res["config"][keys[0]] == 1)
self.assertTrue(keys[1] == "item_2")
self.assertTrue(res["config"][keys[1]] == 2)
def test_read_config(self):
config = utils.read_config(os.path.join(os.path.dirname(__file__),
"test_data", "config.yaml"))
self.assertTrue(len(config) > 0)
keys = list(config.keys())
self.assertTrue(isinstance(config, OrderedDict))
self.assertEqual(keys[0], 'item_1')
self.assertTrue(isinstance(config['item_1'], str))
self.assertTrue(isinstance(config['item_2'], list))
self.assertTrue(isinstance(config['item_3'], OrderedDict))
self.assertTrue(isinstance(config['item_4'], int))
def test_get_filenames_from_msg(self):
config = {"cma_message_tag": "pps",
"sat_message_tag": "hrpt"}
cma_fname = "/tmp/foo.nc"
sat_fname = "/tmp/bar.l1b"
# Both files present
data = {"collection":
{"pps":
{"dataset":
[{"uri": cma_fname}]},
"hrpt":
{"dataset":
[{"uri": sat_fname}]}}}
msg = Message("/topic", "collection", data)
sat, cma = utils.get_filenames_from_msg(msg, config)
self.assertEqual(sat, sat_fname)
self.assertEqual(cma, cma_fname)
# Only satellite file
data = {"collection":
{"hrpt":
{"dataset":
[{"uri": sat_fname}]}}}
msg = Message("/topic", "collection", data)
sat, cma = utils.get_filenames_from_msg(msg, config)
self.assertEqual(sat, sat_fname)
self.assertIsNone(cma)
# Only cloud mask file
data = {"collection":
{"pps":
{"dataset":
[{"uri": cma_fname}]}}}
msg = Message("/topic", "collection", data)
sat, cma = utils.get_filenames_from_msg(msg, config)
self.assertEqual(cma, cma_fname)
self.assertIsNone(sat)
# No files
data = {"collection": {}}
msg = Message("/topic", "dataset", data)
sat, cma = utils.get_filenames_from_msg(msg, config)
self.assertIsNone(cma)
self.assertIsNone(sat)
def test_get_idxs_around_location(self):
side = 5
# Note that the centre pixel is always masked out
y_cor = np.array([0, 1, 2, 3, 4,
0, 1, 2, 3, 4,
0, 1, 3, 4,
0, 1, 2, 3, 4,
0, 1, 2, 3, 4])
x_cor = np.array([0, 0, 0, 0, 0,
1, 1, 1, 1, 1,
2, 2, 2, 2,
3, 3, 3, 3, 3,
4, 4, 4, 4, 4])
y_res, x_res = utils.get_idxs_around_location(2, 2, side,
remove_neighbours=False)
self.assertTrue(y_res.size == 24)
self.assertTrue(x_res.size == 24)
self.assertTrue((y_cor == y_res).all())
self.assertTrue((x_cor == x_res).all())
side = 5
y_cor = np.array([0, 1, 2, 3, 4,
0, 4,
0, 4,
0, 4,
0, 1, 2, 3, 4])
x_cor = np.array([0, 0, 0, 0, 0,
1, 1,
2, 2,
3, 3,
4, 4, 4, 4, 4])
y_res, x_res = utils.get_idxs_around_location(2, 2, side,
remove_neighbours=True)
self.assertTrue(y_res.size == side * side - 9)
self.assertTrue(x_res.size == side * side - 9)
self.assertTrue((y_cor == y_res).all())
self.assertTrue((x_cor == x_res).all())
def test_calc_footprint_size(self):
sat_zens = np.array([0, 68.5])
ifov = 1.4e-3
sat_alt = 830.
max_swath_width = 1446.58
along, across = utils.calc_footprint_size(sat_zens, ifov, sat_alt,
max_swath_width)
self.assertAlmostEqual(along[0], 1.16, 2)
self.assertAlmostEqual(along[1], 2.46, 2)
self.assertAlmostEqual(across[0], 1.16, 2)
self.assertAlmostEqual(across[1], 6.70, 2)
def test_haversine(self):
lon1, lat1 = 25., 60.
lon2, lat2 = 21.3, 68.3
dists, bearings = utils.haversine(lon1, lat1, lon2, lat2,
calc_bearings=True)
self.assertAlmostEqual(dists[0], 939.8, 1)
self.assertAlmostEqual(bearings[0], 350.66, 2)
lon1, lat1 = 0, 0
lon2, lat2 = 0, 90
dists, bearings = utils.haversine(lon1, lat1, lon2, lat2,
calc_bearings=True)
self.assertAlmostEqual(dists[0], 10007.9, 1)
self.assertAlmostEqual(bearings[0], 0.0, 1)
lon1, lat1 = 0, 0
lon2, lat2 = 90, 0
dists, bearings = utils.haversine(lon1, lat1, lon2, lat2,
calc_bearings=True)
self.assertAlmostEqual(dists[0], 10007.9, 1)
self.assertAlmostEqual(bearings[0], 90.0, 1)
lon1, lat1 = 0, 0
lon2, lat2 = -90, 0
dists, bearings = utils.haversine(lon1, lat1, lon2, lat2,
calc_bearings=True)
self.assertAlmostEqual(dists[0], 10007.9, 1)
self.assertAlmostEqual(bearings[0], 270.0, 1)
lon1, lat1 = 0, 0
lon2, lat2 = 0, -90
dists, bearings = utils.haversine(lon1, lat1, lon2, lat2,
calc_bearings=True)
self.assertAlmostEqual(dists[0], 10007.9, 1)
self.assertAlmostEqual(bearings[0], 180.0, 1)
lon1, lat1 = 0, 0
lon2, lat2 = 0, -90
dists, bearings = utils.haversine(lon1, lat1, lon2, lat2,
calc_bearings=False)
self.assertAlmostEqual(dists[0], 10007.9, 1)
self.assertIsNone(bearings)
def test_ensure_numpy(self):
res = utils.ensure_numpy(1, dtype=None)
self.assertTrue(isinstance(res, np.ndarray))
self.assertTrue(res.dtype == np.int64)
self.assertEqual(res[0], 1)
res = utils.ensure_numpy(1, dtype=np.float32)
self.assertTrue(isinstance(res, np.ndarray))
self.assertTrue(res.dtype == np.float32)
self.assertEqual(res[0], 1.0)
res = utils.ensure_numpy([1], dtype=np.float32)
self.assertTrue(isinstance(res, np.ndarray))
self.assertTrue(res.dtype == np.float32)
self.assertEqual(res[0], 1.0)
res = utils.ensure_numpy(np.array([1]), dtype=np.float32)
self.assertTrue(isinstance(res, np.ndarray))
self.assertTrue(res.dtype == np.float32)
self.assertEqual(res[0], 1.0)
res = utils.ensure_numpy(np.array(1), dtype=np.float32)
self.assertTrue(isinstance(res, np.ndarray))
self.assertTrue(res.dtype == np.float32)
self.assertEqual(res[0], 1.0)
def suite():
"""The suite for test_utils
"""
loader = unittest.TestLoader()
mysuite = unittest.TestSuite()
mysuite.addTest(loader.loadTestsFromTestCase(TestUtils))
return mysuite
if __name__ == "__main__":
unittest.TextTestRunner(verbosity=2).run(suite())
| [
"pnuu+git@iki.fi"
] | pnuu+git@iki.fi |
3d614af7145d14a806ab6a25d4ba583b74ca5e28 | b7fd24b41333575264a18f4631a0054b8eecea40 | /schema/zst_alarm.py | 7208762ef2097495930ac7fb6ba35582680e3203 | [] | no_license | zhouwanchun/zst_online_server | 5d430d4fe928cb30fed0060f12839eb9519890ca | 955f7ad62f020128c40a3f0ca14848034a3e7bbd | refs/heads/master | 2023-03-17T21:50:35.792788 | 2021-08-18T14:06:54 | 2021-08-18T14:06:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,055 | py | import json
import logging
import time
import requests
# Get an instance of a logger
logger = logging.getLogger(__name__)
class WexinAlarm:
def __init__(self):
self.token = ""
self.expired = int(time.time())
def refresh_token(self):
now = int(time.time())
if now < self.expired and len(self.token) > 0:
return
# TODO ้
็ฝฎๅๅฐ้
็ฝฎๆไปถไธญ
url = "https://qyapi.weixin.qq.com/cgi-bin/gettoken?corpid=ww2ef294fd1f043429&corpsecret=deLb5gd4hiP-l5ekwbEZ6h1WZbGz43VPOWgqwRrfqIM"
response = requests.request("GET", url, headers={}, data={})
if response.status_code > 300:
logger.error("error status code for weixin token: %d", response.status_code)
return
resp_obj = json.loads(response.text)
if resp_obj['errcode'] != 0:
logger.error("failed to get token: %s", resp_obj['errmsg'])
return
self.token = resp_obj['access_token']
self.expired = int(time.time()) + resp_obj['expires_in']
def send_msg(self, users, msg):
self.refresh_token()
url = "https://qyapi.weixin.qq.com/cgi-bin/message/send?access_token=" + self.token
payload = {
"touser": users,
"toparty": "1",
"msgtype": "text",
"agentid": 1000002,
"text": {
"content": msg
},
"safe": 0,
"enable_id_trans": 0,
"enable_duplicate_check": 0,
"duplicate_check_interval": 1800
}
# ๅ้jsonๆฐๆฎ็ๆถๅ๏ผ่ฆๅ ไธ'Content-Type': 'application/json'
# ๅฆๆไธๅ ๏ผๆๅฏ่ฝไผๅบ็ฐไปฅไธ้่ฏฏ
# 400 bad request
# 415 unsupported media type
headers = {
'Content-Type': 'application/json'
}
resp = requests.request("POST", url, headers=headers, data=json.dumps(payload))
if resp.status_code >= 300:
logger.error('failed to send message to wechat: %s', resp.text)
| [
"text.zwb@gmail.com"
] | text.zwb@gmail.com |
75b886785f83e8dc3312498f8d4259af161c02b6 | 337976db44254cb997c721139298328416af4086 | /study/part1/dump_db_classes.py | f53647f2e7bbacd3e73ac19d7d18643ad1fcd836 | [] | no_license | vzhukov85/python-study | 3f5d00aa2f84a9b01432d0c0fb378a4b79f46442 | 28b84be1ce50247b8f0b89a8a4b285029c924cde | refs/heads/master | 2020-09-25T23:12:00.044547 | 2020-01-15T06:46:13 | 2020-01-15T06:46:13 | 226,102,904 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 180 | py | import shelve
db = shelve.open('class-shelve')
for key in db:
print(key, '=>\n', db[key].name, db[key].pay)
bob = db['bob']
print(bob.lastName())
print(db['tom'].lastName())
| [
"erzhukov@i-teco.ru"
] | erzhukov@i-teco.ru |
2b44e8f347c687e6cfc33ec6220eb5f18acfb6ef | e287d17181ca901f52f81662dddcb6e6e34af9d0 | /Fallout's Hacking Game.py | 1361bf710d7eaca2e9b01500a45c519e57fff22b | [] | no_license | MarkMillerKeene/DailyProgrammer | 92819771dce19e7e4671f34198f57127bed9d39e | f03b63051c84827e386c08f96b3f95df393317c3 | refs/heads/master | 2021-01-17T11:54:53.355033 | 2014-11-05T20:48:17 | 2014-11-05T20:48:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 22 | py | __author__ = 'peiggs'
| [
"mark.miller@ksc.keene.edu"
] | mark.miller@ksc.keene.edu |
acbeb910b65258b18b71182806b2cc75e84ffa03 | 3b1efdd0aacc98738f3b8b9ee09c6ff59cccc14e | /ietf/person/factories.py | e076b4ef72e4bec53e2bc6a55c5798054d06ced0 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | unofficial-mirror/ietfdb | 15beb6bf17b1d4abb257ee656ac6b7488339d331 | ce54adb30dc7299c6eb4d42b9aa9d2c2929c1a81 | refs/heads/master | 2020-08-06T17:24:13.966746 | 2019-10-04T20:54:05 | 2019-10-04T20:54:05 | 213,088,920 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,456 | py | # Copyright The IETF Trust 2015-2019, All Rights Reserved
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals
import factory
import faker
import faker.config
import os
import random
import shutil
from unidecode import unidecode
from django.conf import settings
from django.contrib.auth.models import User
from django.utils.text import slugify
from django.utils.encoding import force_text
import debug # pyflakes:ignore
from ietf.person.models import Person, Alias, Email
from ietf.person.name import normalize_name, unidecode_name
fake = faker.Factory.create()
def random_faker():
# The transliteration of some arabic and devanagari names introduces
# non-alphabetic characgters that don't work with the draft author
# extraction code, and also don't seem to match the way people with arabic
# names romanize arabic names. Exlude those locales from name generation
# in order to avoid test failures.
locales = set( [ l for l in faker.config.AVAILABLE_LOCALES if not (l.startswith('ar_') or l.startswith('sg_')) ] )
return faker.Faker(random.sample(locales, 1)[0])
class UserFactory(factory.DjangoModelFactory):
class Meta:
model = User
django_get_or_create = ('username',)
exclude = ['faker', ]
faker = factory.LazyFunction(random_faker)
first_name = factory.LazyAttribute(lambda o: o.faker.first_name())
last_name = factory.LazyAttribute(lambda o: o.faker.last_name())
email = factory.LazyAttributeSequence(lambda u, n: '%s.%s_%d@%s'%( slugify(unidecode(u.first_name)),
slugify(unidecode(u.last_name)), n, fake.domain_name()))
username = factory.LazyAttribute(lambda u: u.email)
@factory.post_generation
def set_password(obj, create, extracted, **kwargs): # pylint: disable=no-self-argument
obj.set_password( '%s+password' % obj.username ) # pylint: disable=no-value-for-parameter
class PersonFactory(factory.DjangoModelFactory):
class Meta:
model = Person
user = factory.SubFactory(UserFactory)
name = factory.LazyAttribute(lambda p: normalize_name('%s %s'%(p.user.first_name, p.user.last_name)))
ascii = factory.LazyAttribute(lambda p: force_text(unidecode_name(p.name)))
class Params:
with_bio = factory.Trait(biography = "\n\n".join(fake.paragraphs()))
@factory.post_generation
def default_aliases(obj, create, extracted, **kwargs): # pylint: disable=no-self-argument
make_alias = getattr(AliasFactory, 'create' if create else 'build')
make_alias(person=obj,name=obj.name)
make_alias(person=obj,name=obj.ascii)
if obj.name != obj.plain_name():
make_alias(person=obj,name=obj.plain_name())
if obj.ascii != obj.plain_ascii():
make_alias(person=obj,name=obj.plain_ascii())
@factory.post_generation
def default_emails(obj, create, extracted, **kwargs): # pylint: disable=no-self-argument
if extracted is None:
extracted = True
if create and extracted:
make_email = getattr(EmailFactory, 'create' if create else 'build')
make_email(person=obj, address=obj.user.email)
@factory.post_generation
def default_photo(obj, create, extracted, **kwargs): # pylint: disable=no-self-argument
import atexit
if obj.biography:
photo_name = obj.photo_name()
media_name = "%s/%s.jpg" % (settings.PHOTOS_DIRNAME, photo_name)
obj.photo = media_name
obj.photo_thumb = media_name
photosrc = os.path.join(settings.TEST_DATA_DIR, "profile-default.jpg")
photodst = os.path.join(settings.PHOTOS_DIR, photo_name + '.jpg')
if not os.path.exists(photodst):
shutil.copy(photosrc, photodst)
def delete_file(file):
os.unlink(file)
atexit.register(delete_file, photodst)
class AliasFactory(factory.DjangoModelFactory):
class Meta:
model = Alias
@classmethod
def _create(cls, model_class, *args, **kwargs):
person = kwargs['person']
name = kwargs['name']
existing_aliases = set(model_class.objects.filter(person=person).values_list('name', flat=True))
if not name in existing_aliases:
obj = model_class(*args, **kwargs)
obj.save()
return obj
name = factory.Faker('name')
def fake_email_address(n):
address_field = [ f for f in Email._meta.fields if f.name == 'address'][0]
count = 0
while True:
address = '%s.%s_%d@%s' % (
slugify(unidecode(fake.first_name())),
slugify(unidecode(fake.last_name())),
n, fake.domain_name()
)
count += 1
if len(address) <= address_field.max_length:
break
if count >= 10:
raise RuntimeError("Failed generating a fake email address to fit in Email.address(max_length=%s)"%address_field.max_lenth)
return address
class EmailFactory(factory.DjangoModelFactory):
class Meta:
model = Email
django_get_or_create = ('address',)
address = factory.Sequence(fake_email_address)
person = factory.SubFactory(PersonFactory)
active = True
primary = False
origin = factory.LazyAttribute(lambda obj: obj.person.user.username if obj.person.user else '')
| [
"henrik@levkowetz.com"
] | henrik@levkowetz.com |
16a90710f419b70d6f28a6bc8e178229f4dd5d27 | aeae1f547225452774a109f2e9a5a2c55f4d866b | /tvm_cudnn/lstm.py | 391ebf12c076b61203b4fde8f61cb7e7788ae2b7 | [] | no_license | ybai62868/MixPrecisionTensorCore | afb73883593f5c93618d1a626eebb9837e630e2d | 9466d378186adb21156b7e50636f74e5144539e4 | refs/heads/main | 2023-08-14T11:12:11.634726 | 2021-10-18T15:02:27 | 2021-10-18T15:02:27 | 401,226,540 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,989 | py | from __future__ import print_function
import argparse
import time
import torch
from torch.autograd import Variable
torch.backends.cudnn.benchmark = True
def update_progress(progress):
print("\rProgress: [{0:50s}] {1:.1f}%".format('#' * int(progress * 50),
progress * 100), end="")
parser = argparse.ArgumentParser()
parser.add_argument('--batch_size', type=int, default=32)
parser.add_argument('--seconds', type=int, default=15)
parser.add_argument('--dry_runs', type=int, default=50)
parser.add_argument('--runs', type=int, default=50)
parser.add_argument('--num_layers', type=int, default=1)
parser.add_argument('--hidden_size', default=640, type=int)
parser.add_argument('--half', action='store_true', dest='half')
args = parser.parse_args()
hidden_size = args.hidden_size
input = Variable(torch.randn(750, args.batch_size,
hidden_size).cuda()) # seq_length based on max deepspeech length 15 seconds
model = torch.nn.LSTM(hidden_size, hidden_size, num_layers=args.num_layers).cuda()
if args.half:
input = input.half()
model = model.half()
model.eval()
def run_benchmark():
for n in range(args.dry_runs):
output, (hx, cx) = model(input)
# grad = output.data.clone().normal_()
# output.backward(grad)
update_progress(n / (float(args.dry_runs) - 1))
print('\nDry runs finished, running benchmark')
avg_fwd_time = 0
torch.cuda.synchronize()
for n in range(args.runs):
torch.cuda.synchronize()
start = time.time()
output, (hx, cx) = model(input)
torch.cuda.synchronize()
end = time.time()
fwd_time = end - start
avg_fwd_time += fwd_time
return avg_fwd_time * 1000 / float(args.runs)
if args.half:
print("Running half precision benchmark")
else:
print("Running standard benchmark")
avg_fwd_time = run_benchmark()
print('\n')
print("Avg Forward time: %.2fms " % avg_fwd_time) | [
"ybai62868@gmail.com"
] | ybai62868@gmail.com |
d872427d19cab578ba3812d427c71e3f1ce07cee | efea54ec2c6b63ca8904fb3fcbee94102aa256ed | /AprilCookpff/1.py | 3cad934b5a7b7e8eff188c6aa3f4ffa12dc55f2b | [] | no_license | ArefinMizan/Codechef-Solutions | 427198e736da8089001818b96109ab7a2e637497 | 01dd0caab636c3c9d39be87ee57ba867f3ea4c87 | refs/heads/master | 2023-03-15T23:00:13.347656 | 2020-01-20T09:59:17 | 2020-01-20T09:59:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 154 | py | def main():
for _ in range(int(input())):
(n,m) = map(int, input().split())
print((n-1)*(m-1))
if __name__ == '__main__':
main() | [
"dillu9878@gmail.com"
] | dillu9878@gmail.com |
45b19b1cab318c1d3e4c9a7783e0ebccc5e46929 | ff5d86192ad048737716528d4d59e0bc506e0bfd | /76.py | d4a3a4d3ce102fc5e8151c223ffe9deb4eddfdb7 | [] | no_license | ggddessgxh/aron | 4c7d0b42ee8a8ef9c8edf5e2528beb36cf5b632f | 69c42a0269d46d88287bc753a4e860d3ea5311f7 | refs/heads/master | 2020-04-17T11:46:54.915655 | 2019-01-31T13:36:27 | 2019-01-31T13:36:27 | 166,555,227 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 167 | py | a = 809
for i in range(10, 100):
b = i * a
if b >= 1000 and b <= 10000 and 8 * i < 100 and 9 * i >= 100:
print(i)
print(i*809)
| [
"1249440711@qq.com"
] | 1249440711@qq.com |
c2329e1d0a37e88a0fcbfb5d6a743b80e8753c28 | df3853b41ed05d86f5bcd992fcc265f637c67784 | /big_deal/test2/14.py | d79e788612e926b9cf62a3a53eddc0a537b10ca5 | [] | no_license | KseniaMIPT/Adamasta | 6ab0121519581dbbbf6ae788d1da85f545f718d1 | e91c34c80834c3f4bf176bc4bf6bf790f9f72ca3 | refs/heads/master | 2021-01-10T16:48:31.141709 | 2016-11-23T21:02:25 | 2016-11-23T21:02:25 | 43,350,507 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,143 | py | def digraph_from_input():
N = int(input())
digraph = {}
for i in range(N-1):
line = input().split()
if line[1] not in digraph:
digraph[line[1]] = {line[0]}
else:
digraph[line[1]].add(line[0])
if line[0] not in digraph:
digraph[line[0]] = set()
return digraph
digraph = digraph_from_input()
start_node = str(input())
def bfs_fire(g, start, fired=set(), tree =[]):
"""ะคัะฝะบัะธั ะฒัะดะตะปัะตั ะพััะพะฒะพะต ะดะตัะตะฒะพ ะผะตัะพะดะพะผ ะพะฑั
ะพะดะฐ ะฒ ัะธัะธะฝั.
:param g: ะพัะฝะพะฒะฝะพะน ะณัะฐั
:param start: ะฝะฐัะฐะปัะฝะฐั ะฒะตััะธะฝะฐ
:param fired: ะผะฝะพะถะตััะฒะพ ัะถะต ะธะผะตััะธั
ัั ะฒ ะณัะฐัะต ะฒะตััะธะฝ
:return tree: ะพััะพะฒะพะต ะดะตัะตะฒะพ
"""
fired.add(start)
queue = [start]
while queue:
current = queue.pop(0)
for neighbour in g[current]:
if neighbour not in fired:
fired.add(neighbour)
queue.append(neighbour)
tree.append([current, neighbour])
return tree
tree = bfs_fire(digraph, start_node)
| [
"ksenia22.11@yandex.ru"
] | ksenia22.11@yandex.ru |
344bf52717197625def8b9d5930b228ba011b004 | 27db9295cde6fe07ae4888e3a9c151864642c673 | /blobs.py | f3c880b683470662d5e89f90f7171551e532be87 | [] | no_license | dariodotti/tracking_traj_experiment_indoor_outdoor_dataset | 2ec9d558c8276b9327505c27c9ab0b04d27bb9ad | 0acd8c3f8f138844ee93c4291111dd6fa9f31666 | refs/heads/master | 2021-01-12T10:25:59.095397 | 2016-12-14T11:20:14 | 2016-12-14T11:20:14 | 76,453,081 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,894 | py | import cv2
import pickle
import numpy as np
from multiprocessing.dummy import Pool as ThreadPool
from os import listdir
from os.path import isfile, join
import img_proc as my_img_proc
import main as main_camera017
def my_function(k):
print k
temp_track = []
temp_track_append= temp_track.append
map(lambda i: temp_track_append(file_content[i]) if ids[i] == k else False,xrange(len(file_content)))
return temp_track
def read_data_tracklets(file,multiThread):
global file_content
with open(file,'r')as f:
file_content = f.read().split('\n')
global ids
ids =map(lambda line: int(line.split(' ')[0]),file_content)
keys = list(set(ids))
keys = sorted(keys,key=lambda x: x)
print len(keys)
####MULTI-THREAD VERSION######
if multiThread:
cores = 6
pool = ThreadPool(cores)
print 'n cores: '+str(cores)
tracklets = pool.map(lambda k: my_function(k) ,keys)
#close the pool and wait for the work to finish
pool.close()
pool.join()
###########################
else:
# keys= keys[:500]
tracklets = []
tracklets_append = tracklets.append
for k in keys:
print k
temp_track = []
temp_track_append= temp_track.append
map(lambda i: temp_track_append(file_content[i]) if ids[i] == k else False,xrange(len(file_content)))
tracklets_append(temp_track)
return tracklets
def get_coordinate_points(occurance):
frames =map(lambda line: str(line.split(' ')[0]),occurance)
center_xs = map(lambda line: int(float(line.split(' ')[1])),occurance)
center_ys = map(lambda line: int(float(line.split(' ')[2])),occurance)
bb_width = map(lambda line: int(float(line.split(' ')[3])),occurance)
bb_height = map(lambda line: int(float(line.split(' ')[4])),occurance)
#list_points = []
#list_points_append = list_points.append
#map(lambda c: list_points_append((xs[c],ys[c])),xrange(0,len(xs)))
#apply filter to cancel noise
#x_f,y_f =my_img_proc.median_filter(list_points)
return frames,center_xs,center_ys,bb_width,bb_height
def main():
##divide image into patches(polygons) and get the positions of each one
scene = cv2.imread('C:/Users/dario.dotti/Documents/LOST_dataset/camera017.jpg')
list_poly = my_img_proc.divide_image(scene)
mypath= 'C:/Users/dario.dotti/Documents/LOST_dataset/8_2013-12_2012_camera001/pedestrian_cars/training/blobs/'
only_files=[f for f in listdir(mypath) if isfile(join(mypath, f))]
for f in only_files:
day = f.split('_')[0]
month = f.split('_')[1]
my_file= ''.join([mypath,f])
slices=read_data_tracklets(my_file,0)
with open('C:/Users/dario.dotti/Documents/LOST_dataset/8_2013-12_2012_camera001/pedestrian_cars/training/blobs_forTraining/blobs_org_by_frames_'+day+'_'+month+'.txt', 'wb') as handle:
pickle.dump(slices,handle)
return False
with open('C:/Users/dario.dotti/Documents/LOST_dataset/22_9_2014-1_10_2013_camera017/pedestrian_cars/classification/blobs_org_by_frames_7_2.txt', 'rb') as handle:
slices = pickle.load(handle)
for n,slice in enumerate(slices):
temp_img = scene.copy()
frames,center_xs,center_ys,bb_width,bb_height = get_coordinate_points(slice)
for i in range(0,len(center_ys)):
if bb_width[i] > 15 or bb_height[i] >15:
vertex_1 = (center_xs[i]-(bb_width[i]/2)),(center_ys[i]-(bb_height[i]/2))
vertex_2 = (center_xs[i]+(bb_width[i]/2)),(center_ys[i]+(bb_height[i]/2))
cv2.rectangle(temp_img,vertex_1,vertex_2,0,1)
cv2.putText(temp_img,frames[0],(30,30),cv2.FONT_HERSHEY_SIMPLEX, 1, 0)
cv2.imshow('ciao',temp_img)
cv2.waitKey(0)
if __name__ == '__main__':
main() | [
"dario.dotti@maastrichtuniversity.nl"
] | dario.dotti@maastrichtuniversity.nl |
6343e86e13ef3b29cc0d65db953cb6ba85f7283a | d8f9b8131cfac411bf551a20e9a5b863160ffb79 | /PreProcessing.py | 76985ef3acbd2cd67be4f32d2d4c04f3a3344065 | [] | no_license | artificial-inteligence/AnacondaTest | dbc59b923a7de843ae3adb81b354c73da5a12e4c | eebde6a95d6f6f65593c5a1e4e50f9296a917dbc | refs/heads/master | 2020-04-21T07:41:30.847875 | 2019-02-11T16:52:32 | 2019-02-11T16:52:32 | 169,398,594 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 663 | py | # openCV
import cv2
import numpy as np
from matplotlib import pyplot as plt
from skimage.color import rgb2gray
class PreProcessor:
def __init__(self):
pass
def applybilateralfilter(self, img):
# apply bilateral filter on image
blur = cv2.bilateralFilter(img, 9, 75, 75)
return blur
def applygreyscale(self, img):
greyscale = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
return greyscale
#
# # display image
# plt.subplot(121), plt.imshow(img), plt.title('Original')
# plt.xticks([]), plt.yticks([])
# plt.subplot(122), plt.imshow(blur), plt.title('Blurred')
# plt.xticks([]), plt.yticks([])
# plt.show()
| [
"stbbap@gmail.com"
] | stbbap@gmail.com |
7cfa51f0591a736e57700f3cb0a8d61f4217297e | f3af403b0f17ba952bdca1554d5d7bcba0b95c05 | /virtual/bin/flask | 4272a265fad124463ed730b2b231cdae1d819d57 | [
"MIT"
] | permissive | Daniel-darnell/Pitchworld | 3157256470b49a24e770718a3cc2dbe713ac475a | 7b9b53a0bbf0a6c191189c5780fdcaabcf89b398 | refs/heads/master | 2023-01-06T02:52:44.270973 | 2020-11-02T09:14:31 | 2020-11-02T09:14:31 | 309,213,724 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 257 | #!/home/moringa/Desktop/FullStack/Projects/Pitch/virtual/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from flask.cli import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"darnelldanny1997@gmail.com"
] | darnelldanny1997@gmail.com | |
26464ce47e44f9c4fe06a0efc68c7c7a9c866238 | f3d3ba2921e65a352e6f78fe02f04ddb8a55a8cd | /data/presets.py | 3f876e79971ed19e57a852fb53b9cf01c931a3aa | [] | no_license | ejtalbot/piscripts | ac9f68abce1c2c0711cfb7187cae42fa396feee8 | b5f86edaa8d748108b8316c2b21c79bc2d029071 | refs/heads/main | 2023-07-02T22:23:02.573466 | 2021-08-07T02:16:08 | 2021-08-07T02:16:08 | 341,776,852 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 304 | py | snake_templates = {
"rainbow": ["red", "orange_red", "yellow", "electric_green", "blue", "violet"],
"purple_pink": ["pink_orange", "magenta", "purple_pizzazz", "violet"],
"hot": ["red", "persian_red", "orange_red", "selective_yellow"],
"cool": ["aquamarine", "teal", "blue", "violet"],
}
| [
"erikjamestalbot@gmail.com"
] | erikjamestalbot@gmail.com |
abf58fb31e51c78bb90abe08fcf94e44fc5f36c0 | 1985d1a7462d537e1f43055e3c75d91145407ff9 | /Next_Permutation.py | fcc699c978f678ede7468f2b601e8c68627e87c9 | [] | no_license | yeonnseok/algorithm_practice | d95425e59b7b579a70dbbd932e4fb691c57f4534 | c1468f23b2c077ecadac1fa843180674b6ea3295 | refs/heads/master | 2020-04-28T08:51:32.728010 | 2019-04-05T03:20:44 | 2019-04-05T03:20:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 498 | py | def swap(list, a, b):
temp = list[b]
list[b] = list[a]
list[a] = temp
def next_permutation(c_list, n):
i = n - 1
while c_list[i - 1] >= c_list[i]:
i -= 1
if i <= 0: return False
j = n - 1
while c_list[j] <= c_list[i - 1]:
j -= 1
swap(c_list, j, i - 1)
j = n - 1
while i < j:
swap(c_list, j, i)
i += 1
j -= 1
return c_list
c_list = [7, 2, 3, 6, 5, 4, 1]
n = len(c_list)
print(next_permutation(c_list, n))
| [
"smr603@snu.ac.kr"
] | smr603@snu.ac.kr |
efb691981ff05fe7bcb03faa225d88b4bee1bde0 | 084d1b9cb341a1b943f95e98ee3cf680df502ba9 | /Products/mediaPage/tests/base.py | b0e818b0d28196ee7fc5c4b6020c8236190fd002 | [] | no_license | intk/Products.mediaPage | 629aa7c8f98e308b536f997cafbab177ba6ae1a5 | a3f4b0c900565b438593888a3009f8e7e4867792 | refs/heads/master | 2016-09-06T13:57:17.209247 | 2014-09-18T08:56:37 | 2014-09-18T08:56:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,990 | py | """Test setup for integration and functional tests.
When we import PloneTestCase and then call setupPloneSite(), all of
Plone's products are loaded, and a Plone site will be created. This
happens at module level, which makes it faster to run each test, but
slows down test runner startup.
"""
from Products.Five import zcml
from Products.Five import fiveconfigure
from Testing import ZopeTestCase as ztc
from Products.PloneTestCase import PloneTestCase as ptc
from Products.PloneTestCase.layer import onsetup
# When ZopeTestCase configures Zope, it will *not* auto-load products
# in Products/. Instead, we have to use a statement such as:
# ztc.installProduct('SimpleAttachment')
# This does *not* apply to products in eggs and Python packages (i.e.
# not in the Products.*) namespace. For that, see below.
# All of Plone's products are already set up by PloneTestCase.
@onsetup
def setup_product():
"""Set up the package and its dependencies.
The @onsetup decorator causes the execution of this body to be
deferred until the setup of the Plone site testing layer. We could
have created our own layer, but this is the easiest way for Plone
integration tests.
"""
# Load the ZCML configuration for the example.tests package.
# This can of course use <include /> to include other packages.
fiveconfigure.debug_mode = True
import Products.mediaPage
zcml.load_config('configure.zcml', Products.mediaPage)
fiveconfigure.debug_mode = False
# We need to tell the testing framework that these products
# should be available. This can't happen until after we have loaded
# the ZCML. Thus, we do it here. Note the use of installPackage()
# instead of installProduct().
# This is *only* necessary for packages outside the Products.*
# namespace which are also declared as Zope 2 products, using
# <five:registerPackage /> in ZCML.
# We may also need to load dependencies, e.g.:
# ztc.installPackage('borg.localrole')
ztc.installPackage('Products.mediaPage')
# The order here is important: We first call the (deferred) function
# which installs the products we need for this product. Then, we let
# PloneTestCase set up this product on installation.
setup_product()
ptc.setupPloneSite(products=['Products.mediaPage'])
class TestCase(ptc.PloneTestCase):
"""We use this base class for all the tests in this package. If
necessary, we can put common utility or setup code in here. This
applies to unit test cases.
"""
class FunctionalTestCase(ptc.FunctionalTestCase):
"""We use this class for functional integration tests that use
doctest syntax. Again, we can put basic common utility or setup
code in here.
"""
def afterSetUp(self):
roles = ('Member', 'Contributor')
self.portal.portal_membership.addMember('contributor',
'secret',
roles, [])
| [
"andreslb1@gmail.com"
] | andreslb1@gmail.com |
f92c4c2d30adeab12a1909fe55ab12ef7f60d039 | 0bff1f5481f5f83d2053a165839489a1f787e433 | /tweets/models.py | 0f0457fe9ac4c66c0ead9896570035fa931205ed | [] | no_license | tong1yi/my-django-twitter | d92b81a5f74f1251fae273ee11cb07851bd97565 | c1909b7f541fe8062bed6e5add068b0e855cfec9 | refs/heads/main | 2023-06-06T06:03:39.629167 | 2021-06-24T03:14:45 | 2021-06-24T03:14:45 | 380,594,421 | 0 | 0 | null | 2021-06-26T20:54:36 | 2021-06-26T20:54:35 | null | UTF-8 | Python | false | false | 3,888 | py | from django.db import models
from django.contrib.auth.models import User
from utils.time_helpers import utc_now
from django.contrib.contenttypes.models import ContentType
from likes.models import Like
from tweets.constants import TweetPhotoStatus, TWEET_PHOTO_STATUS_CHOICES
# https://stackoverflow.com/questions/35129697/difference-between-model-fieldsin-django-and-serializer-fieldsin-django-rest
# Create your models here.
class Tweet(models.Model):
user = models.ForeignKey(
User,
on_delete=models.SET_NULL,
null=True,
help_text="This user refers to the user who posts this tweet.",
verbose_name=u"่ฐๅไบ่ฟไธชๅธๅญ",
)
content = models.CharField(max_length=255)
created_at = models.DateTimeField(auto_now_add=True) # ๆๆถๅบ๏ผvagrant/serverๆๅจ็ๆถๅบ๏ผ
# Metaๆฏไธไธช้
็ฝฎไฟกๆฏใ
# ๅจๅๅปบTweets่ฟไธชmodel็ๆถๅไผๆ นๆฎ้
็ฝฎไฟกๆฏๅปๅๅปบใ
class Meta:
# ่ๅ็ดขๅผ compound index/composite index
# ็ธๅฝไบๅจๆฐๆฎๅบไธญๅปบ็ซไบไธไธชๆ็ไธๅฐ็่กจๅ๏ผ่ฟไธช่กจๅไธญไธๅ
ฑๆ3ๅใ
# [
# ('user', 'created_at', 'id'),
# ...
# ]
# ๅปบ็ซไบ็ดขๅผไน่ฆ่ฟ่กmakemigrationๅmigrate
index_together = (
('user', 'created_at'),
)
# ๅจTweet็ธๅ
ณ็ๆๆquerysetไธญๅฆๆๆฒกๆๆๅฎorderby็ๆถๅ๏ผ้ป่ฎค็ๆฏไธ้ข่ฟไธชorderingใ
# ๅณ๏ผๅชไผๅฝฑๅorderby็้ป่ฎคๆๅบ่กไธบใ
# ordering ไธไผๅฏนๆฐๆฎๅบไบง็ๅฝฑๅใ
ordering = ('user', '-created_at')
@property
def hours_to_now(self):
# datetime.now()ไธๅธฆๆถๅบไฟกๆฏ๏ผ้่ฆๅขๅ ไธutc็ๆถๅบไฟกๆฏใ
return (utc_now()-self.created_at).seconds // 3600
def __str__(self):
# ๅฝๆง่ก print(tweet instance) ็ๆถๅไผๆพ็คบ็ๅ
ๅฎน
return f'{self.created_at} {self.user}: {self.content}'
@property
def like_set(self):
# ๆพๅฐtweetไธๆๆ็็น่ตใ
return Like.objects.filter(
content_type=ContentType.objects.get_for_model(Tweet),
object_id=self.id,
).order_by('-created_at')
class TweetPhoto(models.Model):
# ๅพ็ๅจๅชไธช Tweet ไธ้ข
tweet = models.ForeignKey(Tweet, on_delete=models.SET_NULL, null=True)
# ่ฐไธไผ ไบ่ฟๅผ ๅพ็๏ผ่ฟไธชไฟกๆฏ่ฝ็ถๅฏไปฅไป tweet ไธญ่ทๅๅฐ๏ผไฝๆฏ้ๅค็่ฎฐๅฝๅจ Image ้ๅฏไปฅๅจ
# ไฝฟ็จไธๅธฆๆฅๅพๅค้ๅ๏ผๆฏๅฆๆไธชไบบ็ปๅธธไธไผ ไธไบไธๅๆณ็็
ง็๏ผ้ฃไน่ฟไธชไบบๆฐไธไผ ็็
ง็ๅฏไปฅ่ขซๆ ่ฎฐ
# ไธบ้็นๅฎกๆฅๅฏน่ฑกใๆ่
ๆไปฌ้่ฆๅฐ็ฆๆไธช็จๆทไธไผ ็ๆๆ็
ง็็ๆถๅ๏ผๅฐฑๅฏไปฅ้่ฟ่ฟไธช model ๅฟซ้
# ่ฟ่ก็ญ้
user = models.ForeignKey(User, null=True, on_delete=models.SET_NULL)
# ๅพ็ๆไปถ
file = models.FileField()
order = models.IntegerField(default=0)
# ๅพ็็ถๆ๏ผ็จไบๅฎกๆ ธ็ญๆ
ๅต
status = models.IntegerField(
default=TweetPhotoStatus.PENDING,
choices=TWEET_PHOTO_STATUS_CHOICES,
)
# ่ฝฏๅ ้ค(soft delete)ๆ ่ฎฐ๏ผๅฝไธไธช็
ง็่ขซๅ ้ค็ๆถๅ๏ผ้ฆๅ
ไผ่ขซๆ ่ฎฐไธบๅทฒ็ป่ขซๅ ้ค๏ผๅจไธๅฎๆถ้ดไนๅ
# ๆไผ่ขซ็ๆญฃ็ๅ ้คใ่ฟๆ ทๅ็็ฎ็ๆฏ๏ผๅฆๆๅจ tweet ่ขซๅ ้ค็ๆถๅ้ฉฌไธๆง่ก็ๅ ้ค็้ๅธธไผ่ฑ่ดนไธๅฎ็
# ๆถ้ด๏ผๅฝฑๅๆ็ใๅฏไปฅ็จๅผๆญฅไปปๅกๅจๅๅฐๆ
ขๆ
ขๅ็ๅ ้คใ
has_deleted = models.BooleanField(default=False)
deleted_at = models.DateTimeField(null=True)
created_at = models.DateTimeField(auto_now_add=True)
class Meta:
index_together = (
('user', 'created_at'),
('has_deleted', 'created_at'),
('status', 'created_at'),
('tweet', 'order'),
)
def __str__(self):
return f'{self.tweet_id}: {self.file}'
| [
"wilburzjh@gmail.com"
] | wilburzjh@gmail.com |
bbadb5f2b9ced965d16217dd49cadebc56d5713c | 2a720f618609e6e28a01cba5f915c5b52285db53 | /donghyeon's python/python.review(1).py | 04d0623a491368075bdddff1b1625e2910522e80 | [] | no_license | kai3n/fastcampus | 87f923dda0712a42644f67581650ccd99a1cd2e8 | 9363c948b50e1789a64f58ce206d49d71d93c801 | refs/heads/master | 2021-01-10T09:34:56.222510 | 2016-02-16T11:22:02 | 2016-02-16T11:22:02 | 49,542,285 | 11 | 4 | null | 2016-01-13T13:45:25 | 2016-01-13T02:01:23 | Python | UTF-8 | Python | false | false | 23,920 | py | '''ํ์ด์ฌ ์ ๋ฆฌ ํ์ผ
1 . ํ์ด์ฌ์์ ๋ณ์ ํน์ ๋ฆฌํฐ๋ด๊ฐ์ ํ์
์ ์๊ณ ์ถ๋ค๋ฉด type()๋ฅผ ์ฌ์ฉํ๋ฉด๋๋ค''' #tpye()
a= 7
print('----1๋ฒ----')
print(type(a))
print(type(58))
print(type(99.9))
print(type('abc'))
"""2 . ํ์ด์ฌ์ ์ฐ์ฐ์ ํน์ง ์์๋ณด๊ธฐ""" #์ฐ์ฐ์ํน์ง
print('----2๋ฒ----')
print(5 + 8) #๋ง์
์ฐ์ฐ์
print(90 - 10) #๋บ์
์ฐ์ฐ์
print(4 * 7) #๊ณฑ์
์ฐ์ฐ์
print(7 / 2) #๋๋์
์ฐ์ฐ์
print(7 // 2) # ๋๋์
์ ํํ ๋๋จธ์ง๊ฐ์ ๋ฒ๋ฆฐ์ฒด ๊ฒฐ๊ณผ ๋์ด
print(7 % 3) # ๋๋์
์ ํ์ ๋๋จธ์ง์ ๊ฒฐ๊ณผ๊ฐ๋ง ๋์ด
print(3 ** 4) #3์ 4์ ๊ณฑ์ ์๋ฏธํจ
""" 3. ํ๋ณํ (๋ค๋ฅธ๋ฐ์ดํฐํ์
์ ์ ์ํ์ผ๋ก ๋ณํ ์ํฌ๋ ค๋ฉด int()ํจ์๋ฅผ ์ฌ์ฉํ๋ค ์ดํจ์๋ ์์์ ์ ๋ฒ๋ฆฌ๊ณ ์ ์๋ฅผ ๋ฐํํ๋ค""" #ํ๋ณํ
print("----3๋ฒ------")
print(int(True))
print(int(False))
print(int(98.6))
print(int(1.0e4))
print(int(4+7.0)) #<< ์ซ์ ํ์
์ ์ ์ด๋ ์๋์ผ๋ก ํ๋ณํ!(ํ์ด์ฌ์ ์๋ํจ)
#๋ถ๋์์์ ์๋ก ํ๋ณํ
print(float(True))
print(float(98))
print(float('1.0e4'))
#str() ๋ฅผ ์ด์ฉํ์ฌ ๋ฐ์ดํฐํ์
์ ๋ฌธ์์ด๋ก ๋ณํ
print(str(102))
print(str(True)) # ์ฌ๊ธฐ์ boolean ๊ฐ์ด๋ ํ๋ฆฐํธ๋ True ๊ฐ์ ๋ค๋ฅด๋ค
""" 4. ๋ฌธ์์ด """ #๋ฌธ์
#๋ฌธ์ ์์ ๋ฐฑ์ฌ๋ ์(\)๊ธฐํธ๋ฅผ ๋ถ์์ผ๋ก์จ ํน๋ณํ ์๋ฏธ๋ฅผ ์ค์์๋ค.
palindrome = 'A man,\nA plan,\nA canal:\nPanama.' #\n ์ new line ์ด๋์๋ฆฌ๋ก ํ๋ฆฐํธ๋ ๋ ๋ค์์ค๋ก ์ฎ๊ฒจ์ง๋ค.
print('---4๋ฒ----')
print(palindrome)
# 4-1. \t = tab
print('------4-1---')
print('\tabc')
print('a\tbc')
print('ab\tc')
print('abc\t')
#4-2. ๊ฒฐํฉํ๊ณ ๋ณต์ ํ๊ธฐ
print('Release the Kraken!' + 'At once@') # ๊ฒฐํฉ
start = "Na" * 4 + '\n'
middle = "Hey" * 3 + '\n'
end = 'Goodbye'
print('-------4-2-------')
print(start+start+middle+end)
#4-3. ๋ฌธ์ ์ถ์ถ
letters = 'abcdefghijklnmopqrstuvwxyz'
print('------4-3------')
print(letters[0]) # [] ์์ ๋ณด๊ณ ์ถ์ ์ธ๋ดํธ๋ฅผ ๋ฃ๋๋ค
print(letters[-2])
print(letters[25])
#4-4 ์ฌ๋ผ์ด์ค (๋ฌธ์์ด์์ ๋ฌธ์์ด ์ผ๋ถ๋ฅผ ์ถ์ถํ๋ค)
# [:] - ์ฒ์๋ถํฐ ๋๊น์ง ์ ์ฒด ์ํ์ค ์ถ์ถ
# [start:] - start ์คํ์
(์๋ฆฌ)๋ถํฐ ๋๊น์ง ์ํ์ค๋ฅผ ์ถ์ถ
# [:end] - ์ฒ์๋ถํฐ end-1 ์คํ์
(์๋ฆฌ) ๊น์ง ์ํ์ค ์ถ์ถ
# [start:end] - start ์คํ์
๋ถํฐ end-1 ์คํ์
๊น์ง ์ํ์ค๋ฅผ ์ถ์ถ
# [start:end:step] - step ๋งํผ ๋ฌธ์๋ฅผ ๊ฑด๋ ๋ฐ๋ฉด์ start ์คํ์
๋ถํฐ end-1 ์คํ์
๊น์ง ์ํ์ค ์ถ์ถ
print('--------4-4-------')
print(letters[:])
print(letters[20:])
print(letters[:20])
print(letters[-3:])
print(letters[::7])
print(letters[4:20:3])
print(letters[19::4])
#4-5 len() << ๊ธธ์ด์๋ ค์ฃผ๋
print('------4-5-----')
print(len(letters),':'," ์ํ๋ฒณ a~z ๋ช๊ฐ์ธ์ง")
#4-6 ๋ฌธ์์ด ๊ฒฐํฉํ๊ณ ๋ถ๋ฆฌํ๋ join() , split() ํจ์
print('-------4-6---------')
crypto_list = ['Yetl', 'Bigfoot', 'Loch Ness Monster']
crypto_string = ','.join(crypto_list)
print("Found and signing book deals:", crypto_string)
todos = 'get gloves, get mask, give cat vitamins,call ambulance'
print('------------------')
print(todos.split(',')), print('์ผํ๋ฅผ ๊ธฐ์ค์ผ๋ก ๋ถ๋ฅํจ ')
print(todos.split()), print('๊ณต๋ฐฑ์ ๊ธฐ์ค์ผ๋ก ๋ถ๋ฅํจ')
#4-7 ๋ฌธ์์ด์ ์์ ๋กญ๊ฒ ๋ค๋ค๋ณด์
# .startswith(' ') = ' '๋ก ์์ํ๋๊ฐ
# .endswith(' ') = ' ' ๋ก ๋๋๋๊ฐ
# .find(' ') = ' ' ๊ฐ์ด ์ฒซ๋ฒ์งธ๋ก ๋์ค๋๊ฐ ์ฐพ๊ธฐ
# .rfind(' ') = ' ' ๋ง์ง๋ง์ผ๋ก ๋์ค๋๊ฐ ์ฐพ๊ธฐ
# .count(' ') = ' ' ๋ช๋ฒ๋์ค๋๊ฐ
# .isalbum() = ' ' ์ํ๋ฑ๊ณผ ์ซ์๋ก๋ง ์ด๋ฃจ์ด์ ธ์๋๊ฐ
poem = "All that doth flow we cannot liquid nameor else would fire " \
"and water be the same; But that is liquid Which is moist and " \
"wetFire that property can never get.Then 'tis not cold tha doth " \
"the fire put outBut 'tis the wet that makes it die, no doubt.'"
print('-----------4-7------------')
#์ฒ์ 13์ ์ถ๋ ฅ
print(poem[:13])
# ์ด์๋ ๋ช๊ธ์๋ก ๋์๋๊ฐ?
print(len(poem))
#์ด์๋ All ๋ก ์์ํ๋๊ฐ ?
print(poem.startswith('All'))
#์ด์๋ That's all,folks! ๋ก ๋๋๋๊ฐ?
print(poem.endswith("That's all,folks!"))
#์ด์์์ ์ฒซ๋ฒ์งธ๋ก the ๊ฐ ๋์ค๋ ์คํ์
์?
print(poem.find("the"))
#์ด์์์ ๋ง์ง๋ง์ผ๋ก the ๊ฐ ๋์ค๋ ์คํ์
์?
print(poem.rfind("the"))
#์ธ๊ธ์ the ๊ฐ ๋ช๋ฒ ๋์ค๋๊ฐ ?
print(poem.count("the"))
# ์ด์๋ ๊ธ์์ ์ซ์๋ก๋ง ์ด๋ฃจ์ด์ ธ ์๋๊ฐ?
print(poem.isalnum())
#4-8
# .strip('.') ๋ง์นจํ ์ ๊ฑฐ ์๊ณผ ๋์์๋ถํฐ ์ฐพ๋๋ค ๋ฐ๋ก ๋ชป์ฐพ์ผ๋ฉด ์คํ์๋จ
# .capitalize() ์์์๋๊ฑฐ ๋๋ฌธ์ ๋ฐ๊พธ๊ธฐ
# .title() ๋ชจ๋ ๋จ์ด ์ฒซ๊ธ์ ๋๋ฌธ์๋ก๋ฐ๊พธ๊ธฐ
# .upper() ์ ์ฒด ๋๋ฌธ์
# .lower() ์ ์ฒด ์๋ฌธ์
# .replace('a','b') ๊ธฐ์กด์ ์๋ a๋ฅผ b๋ก ๋ฐ๊ฟ
setup = 'a duck goes into a bar...'
print('------4-8------')
#','๋ค ์ ๊ฑฐ ํด๋ณด๊ธฐ
print(setup.strip('.'))
print(setup.strip('into')) #์ฌ์ด์ ๋ into ๋ฅผ ์ฐพ์์์๋๊ฑธ๋ก๋ด์ ๋ฐ๋ก์๊ณผ ๋ค์ ์์น๋ง ์ฑ๋ฆฝ๋๋๋ฏ
# ์ฒซ๋จ์ด๋ฅผ ๋๋ฌธ์๋ก ๋ง๋ค๊ธฐ
print(setup.capitalize())
# ๋ชจ๋ ๋จ์ด์ ์ฒซ๊ธ์๋ฅผ ๋ค ๋๋ฌธ์๋ก
print(setup.title())
# ๊ธ์๋ฅผ ๋ชจ๋ ๋๋ฌธ์๋ก
print(setup.upper())
# ๊ธ์๋ฅผ ๋ชจ๋ ์๋ฌธ์๋ก
print(setup.lower())
# duck ๋ฅผ marmoset ์ผ๋ก ๋ฐ๊พธ๊ธฐ
print(setup.replace('duck','marmoset'))
"""5 ํ์ด์ฌ์์๋ ๋๊ฐ์ง ๋ค๋ฅธ ์ํ์ค(์์๋๋ก ์ ์ฅ๋๋) ๊ตฌ์กฐ๊ฐ ์๋ค. ํํ๊ณผ ๋ฆฌ์คํธ์ด๋ค. ํ์ด์ฌ์ ์ ์ด๋๊ฐ์ง๋ฅผ #๋ฆฌ์คํธ
๋ชจ๋ ํฌํจํ๊ณ ์์๊น?
ํํ์ ๋ถ๋ณํ๋ค. ์ฆ ํํ์ ํญ๋ชฉ์ ํ ๋นํ๊ณ ์ด๋ฅผ ๋ฐ๊ฟ์ ์๋ค. - ํ์ฉ :์ ๋๋ฐ๋๋ฉด์๋๋๊ฐ์ ๋ง๋ค๋
๋ฆฌ์คํธ๋ ํญ๋ชฉ์ ํ ๋นํ๊ณ ์์ ๋กญ๊ฒ ์์ ํ๊ฑฐ๋ ์ญ์ ํ ์ ์๋ค.
์ฐ์ ๋ฆฌ์คํธ๋ถํฐ
"""
#5๋ฆฌ์คํธ : ํญ๋ชฉํ ๋นํ ์์ ๋กญ๊ฒ ์์ , ๋ฆฌ์คํธ๋ ๋ฐ์ดํฐ๋ฅผ ์์ฐจ์ ์ผ๋ก ํ์
ํ๋๋ฐ ์ ์ฉํ๋ค. ํนํ ๋ฐ์ดํฐ์ ์์๊ฐ ๋ฐ๋์ ์๋ค๋
#์ ์์ ์ ์ฉํ๋ค. ๋ฆฌ์คํธ๋ฅผ ๋ฌถ์๋ [ ] <<์ฌ์ฉํ๋ค
empty_list = []
weekdays = ['MON','TUE','WED','THU','FRI']
big_birds = ['emu','ostrich','cassowary']
first_name = ['graham','john','terry','Michael']
another_empty_list = list()
print('----------5------------')
print(empty_list)
print(weekdays)
print(big_birds)
print(first_name)
print(another_empty_list)
# 5-1 ๋ฐ์ดํฐ ํ์
>> ๋ฆฌ์คํธ
# 5-2 ํํ >> ๋ฆฌ์คํธ
# 5-3 split()์ผ๋ก๋๋ ๋ฌธ์์ด >> ๋ฆฌ์คํธ
print('---------5-1------')
print(list('cat'))
print('---------5-2------')
a_tuple = ('ready','fire','aim')
print(list(a_tuple))
print('---------5-3------')
birth_day = '9/1/1993'
print(birth_day.split('/')) #split() ์์ฒด๊ฐ ๋ฆฌ์คํธ๋ฅผ ๋ง๋ค์ด์ฃผ๋๊ฑฐ๊ฐ์.
# 5-4 ๋ฆฌ์คํธ๋ ์คํ์
์ผ๋ก ํ๋์ ํน์ ๊ฐ์ ์ถ์ถํ ์ ์๋ค .
# A=[์คํ์
๋๋ฒ]
print('---------5-4------')
marxes = ['groucho','Chico', 'Harpo'] # ์ธ๋ฑ์ค 1,2,3 ์ด์๋๋ผ 0, 1, 2 ์ด๋ฐ์
print(marxes[2])
print(marxes[-1])
print(marxes[1])
'''5-5
๋ฆฌ์คํธ๋ ๋ค์๊ณผ ๊ฐ์ด ๋ฆฌ์คํธ ๋ฟ๋ง์๋๋ผ ๋ค๋ฅธํ์
์ ์์๋ ํฌํจํ ์ ์๋ค. (๋ฆฌ์คํธ ์์ ๋ฆฌ์คํธ ์ค๋ณต๊ฐ๋ฅ)
๊ทธ๋ฆฌ๊ณ ์ธ๋ฑ์ค ์ถ์ถ์ name_of_list[index_1][index_2] ์ด๋ฐ์ '''
print('---------5-5------')
small_birds = ['hummingbird','finch']
extinct_birds = ['dodo','passenger pigeon','Norwegian Blue']
carol_birds = [3,'French hens','2','turtledoves']
all_birds = [small_birds, extinct_birds, 'rmacaw', carol_birds]
print(all_birds)
print(all_birds[1][0]) #dodo >> ๋์ธ๋ฑ์ค ์ฌ์ฉํด์ ์ถ์ถํ๊ธฐ .
""" 5-6
์คํ์
์ผ๋ก ํญ๋ชฉ์ ์ป์ด์ ๋ฐ๊ฟ์์๋ค.
์ฌ๋ผ์ด์ค๋ก ํญ๋ชฉ ์ถ์ถํ๊ธฐ
append()ํจ์๋ฅผ ์ฌ์ฉํ์ฌ ๋ฆฌ์คํธ์ ๋์ ํญ๋ชฉ์ถ๊ฐํ๊ธฐ """
print('---------5-6------')
marxes = ['Groucho','Chico','Harpo']
marxes[2] = 'Wanda' #marxes ๋ฆฌ์คํธ์ 2๋ฒ์งธ ์ธ๋ฑ์ค์๋ฆฌ์์ค๋ ํญ๋ชฉ์ 'wanda'๋ก ๋ฐ๊ฟ
print(marxes)
print(marxes[0:2]) # ์ฌ๋ผ์ด์ค๋ก ์ด๋ฒ์๋ด์์๋ ํญ๋ชฉ๋ง ์ถ์ถํ๊ธฐ
marxes.append('Zeppo') #๊ฐ์ฅ์๋ฆฌ์ 'Zeppo' ์ถ๊ฐํ๊ธฐ
print(marxes)
""" 5-7
Extend() ๋ฅผ ์ฌ์ฉํ์ฌ ๋ค๋ฅธ๋ฆฌ์คํธ๋ฅผ ๋ณํฉํด๋ณด์
+= ๋ก๋ ๋ณํฉํ ์์๋ค.
append() ๋ฅผ ์ฌ์ฉํ๋ฉด ํญ๋ชฉ์ ๋ณํฉํ์ง์๊ณ ๋ฆฌ์คํธ ์ ์ฒด๊ฐ ์ถ๊ฐ๋๋ค.
**extend() ์ append() ๊ฐ ๊ฐ๊ฐ ์ด๋ค๊ฒฐ๊ณผ๋ฅผ ์ถ์ถํ๋์ง ํ์ธํ์**
insert() ๋ก ํญ๋ชฉ์ถ๊ฐํ๊ธฐ """
print('---------5-7------')
marxes = ['Groucho','Chico','Harpo','Zeppo']
others = ['Gummo','Karl']
marxes.extend(others)
print(marxes)
print('--------cf------')
marxes = ['Groucho','Chico','Harpo','Zeppo']
others = ['Gummo','Karl']
marxes += others
print(marxes)
print('------cf_1-----')
marxes = ['Groucho','Chico','Harpo','Zeppo']
others = ['Gummo','Karl']
marxes.append(others)
print(marxes)
print('------cf_2-----')
marxes = ['Groucho','Chico','Harpo','Zeppo']
marxes.insert(3,'Gummo') # insert(์์น,์ถ๊ฐํ ํญ๋ชฉ)
print(marxes)
print('------cf_3-----')
# cf_1 ์์ ๋ฆฌ์คํธ ์์์๋ ๋ฆฌ์คํธ์ ํญ๋ชฉ์ถ๊ฐํ๊ธฐ
marxes =['Groucho', 'Chico', 'Harpo', 'Zeppo', ['Gummo', 'Karl']]
marxes[4].insert(1,'๊ฐ์')
print(marxes)
""" 5-8
์คํ์
์ผ๋ก ํญ๋ชฉ์ญ์ ํ๊ธฐ del name_of_list[index]
์ญ์ ํ ํญ๋ชฉ์ ์ธ๋ฑ์ค๋ฅผ ๋ชจ๋ฅผ๋ remove(" ")
ํญ๋ชฉ์ ๊ฐ์ ธ์ค๋ ๋์์ ๊ทธํญ๋ชฉ์ ์ญ์ ํ๋ pop() """
print('---------5-8------')
marxes = ['Groucho', 'Chico', 'Harpo', 'Zeppo', 'Gummo', 'Karl']
del marxes[-1]
print(marxes)
print('------cf-1-------')
marxes = ['Groucho', 'Chico', 'Harpo', 'Zeppo', 'Gummo', 'Karl']
marxes.remove("Karl")
print(marxes)
print('-----cf-2----------')
marxes = ['Groucho', 'Chico', 'Harpo', 'Zeppo', 'Gummo', 'Karl']
print(marxes.pop()) #ํญ๋ชฉ์ ๊ฐ์ ธ์ค๋ ๋์์ ์ญ์ ํจ , pop() ๊ดํธ์์ ์๋ฌด๊ฒ๋ ์์๋ ๋ง์ง๋งํญ๋ชฉ์ ๊ฐ์ ธ์ค๊ณ ์ฌ๋ผ์ง
print(marxes)
""" 5-9
ํญ๋ชฉ๊ฐ์ ๋ฆฌ์คํธ ์คํ์
์ ์๊ณ ์ถ๋ค๋ฉด index()๋ฅผ ์ฌ์ฉํ๋ฉด๋๋ค.
๋ฆฌ์คํธ์ ์ด๋ค๊ฐ์ ์กด์ฌ๋ฅผ ํ์ธํ ๋ ค๋ฉด in ์ ์ฌ์ฉํ๋ฉด๋๋ค
๋ฆฌ์คํธ์์ ๊ฐ์ด ์ ์ด๋ ํ๋์ด๋ฉด ์กด์ฌํ๋ฉด in ์ true ๋ฅผ ๋ฐํํ๋ค. ์ฆ(๊ฐ์๊ฐ์ด 2๊ฐ ์ด์์์ด๋ Ture ๋ฅผ ๋ฐํํจ
๋ฆฌ์คํธ์์ ํญ๋ชฉ์๋ฅผ ์๊ณ ์ถ๋ค๋ฉด len() ํจ์๋ฅผ์ฐ์ """
print('---------5-9------')
marxes = ['Groucho', 'Chico', 'Harpo', 'Zeppo','Zeppo']
print(marxes.index('Chico'),':'," ์ธ๋ฑ์ค๋๋ฒ") #์ธ๋ฑ์ค ํ์ธ
print('Groucho' in marxes) # ์กด์ฌ์ฌ๋ถํ์ธ
print('BoB' in marxes)
print('Zeppo' in marxes) # ๋์ผํ ํญ๋ชฉ์ด 2๊ฐ์ด์์ด์ฌ๋ True ๋ฅผ ๋ฐํํจ
print(len(marxes))
""" 5-10
sort ์ sorted ์ ์ฐจ์ด
๊ธฐ๋ณธ์ ์ผ๋ก ๋๊ธฐ๋ฅ์ ๋ฆฌ์คํธ ์์ฒด๋ฅผ ๋ด๋ถ์ ์ธ ์์(์ซ์: ์ค๋ฆ์ฐจ์ , ๋ฌธ์ : ์ํ๋ฒณ์ )์์ํด ์ ๋ ฌํด์ค๋ค
sort : ๊ธฐ์กด์ ๋ฆฌ์คํธ ๊ฐ ์ฆ ์์๊ฐ ๋ฐ๋ [[ name_of_list.sort() ]]
sorted ๊ธฐ์กด์ ์๋ ์์๋ ์๋ฐ๋ ์ฆ ์ ๋ ฌ๋ ๋ณต์ฌ๋ณธ์ ๋ฐํํจ [[ list_copied = sorted(name_of_list
"""
print('---------5-10------')
A = ['b','c','a']
A.sort()
print(A)
print('------cf_1----')
A = ['b','c','a']
B = sorted(A)
print(B)
print(A)
""" 5-11 b = a ์ ๊ด๊ณ์๋ํด
์์ ํ ์๋ก์ด ๋ฆฌ์คํธ๋ก ๋ณต์ฌํ๊ณ ์ถ์ผ๋ฉด name_of_list.copy() , new_list = list(name_of_list)
"""
print('----------5-11------')
a = [1,2,3]
b = a # a ๋ฅผ ๋น์ ๋ฃ๋๋ค๋ ๋ป์, ๋ฐ์ดํฐ๋ฅผ ๋ฃ์์๋ ๊ฐ์ ๋ฉ๋ชจ๋ฆฌ์์ ์ ์ฅ๋จ
print(b)
a[0] = 'surprise'
print(a) # ๊ทธ๊ฒฐ๊ณผ๋ก a ์ ๊ฐ๋ ๋ณ๊ฒฝ๋จ
print(b)
"""5-12
copy() ํจ์
list() ๋ณํ ํจ์
์ฌ๋ผ์ด๋ [:]
"""
print('--------5-12-----')
a = [1,2,3]
b = a.copy() #name_of_list.copy()
c = list(a) # list(name_of_list)
d = a[:]
a[0] = 'integer lists are boring'
print(a) # b,c,d ๊ฐ์ ๋ํดํธ๋ฅผ ์ ์งํ๋ค.
print(b)
print(c)
print(d)
""" 6_1 , 6_2, 6_3 6_4 ํํ
ํํ!! ์์์ ์ธ ํญ๋ชฉ์ ์ํ์ค, ๋ฆฌ์คํธ์ ๋ค๋ฅด๊ฒ ํํ์ ๋ถ๋ณํ๋ค. ์ฆ ํํ์ ์ ์ํํ์๋
์ถ๊ฐ, ์ญ์ , ์์ ์ ํ ์์๋ค๋ ๊ฒ์ ์๋ฏธํ๋ค.๊ทธ๋ฌ๋ฏ๋ก ํํ์ ์์์ ๋ฆฌ์คํธ๋ผ๊ณ ํ ์์๋ค.
-ํํ์์ฑ
-ํํ์ธํจํน
-๊ฐ๊ตํ ํํ
-๋ค๋ฅธ๊ฐ์ฒด๋ฅผ ํํ๋ก ๋ง๋ค๊ธฐ
"""
print('------6-1-----')
empty_tuple = ()
print(empty_tuple)
print('------cf_1----')
one_marx = 'Groucho'
one_marx_tuple = 'Groucho', # ํ๋ ์ด์์ ์์๊ฐ ์๋ ํํ์ ๋ง๋ค๊ธฐ ์ํด์๋ ๊ฐ ์์ ๋ค์ ์ฝค๋ง(,)๋ฅผ ๋ถ์ธ๋ค.
print(one_marx) # ํ๊ฐ๋ง ์์๋๋ ๋ค์ , ๊ฐ์๋ค๋๊ฒ ํํ
print(one_marx_tuple)
print('------cf_2-----')
marx_tuple = 'Groucho','Chico','Harpo' #๋๊ฐ ์ด์์ ์์๊ฐ ์์๊ฒฝ์ฐ, ๋ง์ง๋ง ์์์๋ ์ฝค๋ง๋ฅผ ๋ถ์ด์ง ์๋๋ค.
print(marx_tuple)
marx_tuple = ('Groucho','Chico','Harpo') #ํ์ด์ฌ์ ํํ์ ์ถ๋ ฅํ ๋ ๊ดํธ๋ฅผ ํฌํจํ๋ค.
print(marx_tuple)
print('-------6_2----')
marx_tuple = ('Groucho','Chico','Harpo')
a,b,c = marx_tuple # ํํ์ ํ ๋ฒ์ ์ฌ๋ฌ ๋ณ์๋ฅผ ํ ๋นํ ์ ์๋ค. -ํํ ์ธํจํน-
print(a)
print(b)
print(c)
print('------6_3-----')
password = '1234567810'
icecream = 'tuttifrutti'
password,icecream = icecream,password # ํ๋ฌธ์ฅ์์ ๊ฐ์ ๊ตํํ๊ธฐ์ํด ์์๋ณ์๋ฅผ ์ฌ์ฉํ์ง ์๊ณ ํํ์ ์ฌ์ฉํ ์์๋ค.
print(password)
print(icecream)
A = [1] #๋ฆฌ์คํธ๋๋๋ค ?
B = [2]
A,B=B,A
print(A , '๋ฆฌ์คํธ ๊ฐ๋ณ๊ฒฝ')
print(B , "๋ฆฌ์คํธ ๊ฐ๋ณ๊ฒฝ")
print('------6_4-----')
marx_list = ['Groucho','Chico','Harpo'] #list>>tuple tulpe() ์ ๋ค๋ฅธ๊ฐ์ฒด๋ฅผ ํํ๋ก ๋ง๋ค์ด์ค๋ค.
tuple(marx_list)
""" ํํ๊ณผ ๋ฆฌ์คํธ
๋ฆฌ์คํธ๋ฅผ ๋์ ํด์ ํํ์ ์ฌ์ฉํ ์๊ฐ ์๋ค. ํ์ง๋ง ํํ์ ๋ฆฌ์คํธ์ append(),insert()๋ฑ๊ณผ ๊ฐ์ ํจ์๊ฐ์๊ณ
ํจ์์ ์๊ฐ ๋งค์ฐ ์ ๋ค. ํํ์ ์์ฑํ ํ์๋ ์์ ํ ์๊ฐ ์๊ธฐ ๋๋ฌธ์ด๋ค ๊ทธ๋ฌ๋ฉด ๋ฆฌ์คํธ๋ฅผ ์ฌ์ฉํ๋ฉด ๋์ง,
์ํํ์ ์ฌ์ฉํ ๊น?
- ํํ์ ๋ ์ ์ ๊ณต๊ฐ์ ์ฌ์ฉํ๋ค
- ์ค์๋ก ํํ ํญ๋ชฉ์ด ์์๋ ์ผ๋ ค๊ฐ ์๋ค.
- ํํ์ ๋์
๋๋ฆฌ ํค๋ก ์ฌ์ฉํ ์์๋ค.
- Named tuple ์ ๊ฐ์ฒด์ ๋จ์ํ ๋์์ด ๋ ์ ์๋ค.
- ํจ์์ ์ธ์๋ค์ ํํ๋ก ์ ๋ฌ๋๋ค .**** #๊ทผ๋ฐ ์ผ๋ฐ์ ์ผ๋ก ๋ฆฌ์คํธ์ ๋์
๋๋ฆฌ๋ฅผ ๋ง์ด์
"""
"""๋์
๋๋ฆฌ
๋์
๋๋ฆฌ๋ ๋ฆฌ์คํธ์ ๋น์ทํ๋ค. ๋ค๋ฅธ ์ ์ ํญ๋ชฉ์ ์์๋ฅผ ๋ฐ์ง์ง ์์ผ๋ฉฐ (there is no index) 0๋๋ 1๊ณผ๊ฐ์ ์คํ์
์ผ๋ก ํญ๋ชฉ
์์ ํํ ์ ์๋ค. ๋์ ๊ฐ์ ์์ํ๋ ๊ณ ์ ํ ํค(๋ณดํต์ ๋ฌธ์์ด)๋ฅผ ์ ์ฅํ๋ค. ์ดํค๋ ๋๋ถ๋ถ ๋ฌธ์์ด์ด์ง๋ง,
๋ถ๋ณํ๋ ํ์ด์ฌ์ ์ด๋ค ํ์
์ด ๋ ์์๋ค. ๋์
๋๋ฆฌ๋ ๋ณ๊ฒฝ ๊ฐ๋ฅํ๋ฏ๋ก ํค-๊ฐ ์์๋ฅผ ์ถ๊ฐ, ์ญ์ , ์์ ํ ์์๋ค.
๋ค๋ฅธ ์ธ์ด์์๋ ๋์
๋๋ฆฌ๋ฅผ ์ฐ๊ด ๋ฐฐ์ด ํด์ ํด์๋งต ์ด๋ผ๊ณ ๋ถ๋ฅธ๋ค
"""
""" 7-1 7-2
๋์
๋๋ฆฌ ์์ฑํ๊ธฐ
๋์
๋๋ฆฌ ๋ณํํ๊ธฐ
"""
print('---------7-1------')
empty_dict = {}
print(empty_dict)
bierce = {
"day" : "A period of twenty-four hours, mostly misspent",
"positive" : "Mistaken at the top of one's voice",
"misfortune" : "The kind of fortune that never misses",
}
print(bierce)
print('-------7-2------')
# dict() < ์ฌ์ฉ = ๋์
๋๋ฆฌ๋ก ๋ณํ
lol = [['a','b'],['c','d'],['e','f']] #๋ฆฌ์คํธ๋ก๋ ๋ฆฌ์คํธ
lot = [('a','b'),('c','d'),('e','f')] #ํํ๋ก๋ ๋ฆฌ์คํธ
tol = (['a','b'],['c','d'],['e','f']) #๋ฆฌ์คํธ๋ก๋ ํํ
los = ['ab','cd','ef'] #๋ฌธ์์ด๋ก๋ ๋ฆฌ์คํธ
tos = ('ab','cd','ef') #๋ฌธ์์ด๋ก๋ ํํ
print(lol)
print(lot)
print(tol)
print(los)
print(tos)
print(dict(lol))
print(dict(lot))
print(dict(tol))
print(dict(los))
print(dict(tos))
"""7-3 7-4 ๋์
๋๋ฆฌ
๋์
๋๋ฆฌ์ ํญ๋ชฉ์ถ๊ฐํ๊ธฐ
๋์
๋๋ฆฌ์ ํญ๋ชฉ์ ์ถ๊ฐํ๋ ๊ฒ์ ๊ฐ๋จํ๋ค. ํค์ ์ํด ์ฐธ์กฐ๋๋ ํญ๋ชฉ์ ๊ฐ์ ํ ๋นํ๋ฉด ๋๋ค.
ํค๊ฐ ์ด๋ฏธ ์๋ ๊ฒฝ์ฐ๋ ๊ทธ๊ฐ์ ์๊ฐ์ผ๋ก ๋์ฒด๋๋ค.
-ํญ๋ชฉ์ถ๊ฐ
-update() ํจ์์ ๋น๊ตํด๋ณด๊ธฐ
"""
print('----------7-3-------')
pythons = {
'Chapman' : 'Graham',
'Cleese' : 'John',
'Idle' : 'Eric',
'Jones' : 'Terry',
'Palin' : 'Michael',
}
print(pythons)
print('------------------------------------------------------------------------------------------------------------')
pythons["Gilliam"] = "Gerry" #๊ฐ๋ณ๊ฒฝ name_of_dict['key'] = 'value'
print(pythons)
print('------------------------------------------------------------------------------------------------------------')
pythons["Gilliam"] = "Terry" #๋์
๋๋ฆฌ์ ํค๋ค์ ๋ฐ๋์ ์ ์ผํด์ผํจ, ๋ง์ฝ ๊ฐ์ ํค๋ฅผ ๋๋ฒ ์ด์ ์ฌ์ฉํ๋ฉด ๋ง์ง๋ง ๊ฐ์ด ๋ฎ์ด์
print(pythons)
print('---------7-4---------')
pythons = {
'Chapman' : 'Graham',
'Cleese' : 'John',
'Giliam' : 'Terry',
'Idle' : 'Eric',
'Jones' : 'Terry',
'Palin' : 'Michael',
}
print(pythons)
print('-----------------------')
others= {'Marx':'Groucho','Howard':'Moe'}
pythons.update(others) #name_of_dict.update(new_dict)
print(pythons) #๋ค๋ฅธ ๋์
๋๋ฆฌ๋ฅผ ๊ฒฐํฉํ ๋ ์ฌ์ฉํจ
""" 7-5
1.ํค์ del๋ก ํญ๋ชฉ์ญ์ ํ๊ธฐ. del name_of_dict['key']
2.๋ชจ๋ ํญ๋ชฉ์ญ์ ํ๊ธฐ. name_of_dict()
3.in ์ผ๋ก ํค๋ฉค๋ฒ์ญ ํ
์คํธํ๊ธฐ 'key' in name_of_dict
4.์๋ฌ ๋ฐฉ์ง๋ฅผ ์ํด in ์ ์ฌ์ฉํ์
(A['key'] ๋ํค๋ฅผ์ฐพ์์์์ง๋ง ๋ง์ฝ ํค๊ฐ์์ผ๋ฉด traceback ์ค๋ฅ๊ฐ๋จ
๊ทธ๋์ ์ผ๋งํ๋ฉด in์ ์จ์ ์๋ฌ๋ฅผ ๋ฐฉ์งํ์.
5. get() ํจ์ ์ฌ์ฉํ๋๋ฐฉ๋ฒ ํค๊ฐ ์กด์ฌํ์ง ์์๋ ์ต์
๊ฐ์ ์ง์ ํด์ ์ด๋ฅผ ์ถ๋ ฅํ๋ค.
"""
print('-----------7-5-1---------')
del pythons['Marx']
print(pythons) #'Marx'์ ๊ทธ์ value ๊ฐ ์ญ์ ํ๊ธฐ
del pythons['Howard']
print(pythons)
print('---------7-5-2-----------')
pythons.clear() # ๋ชจ๋ ํญ๋ชฉ์ญ์ A.clear()
print(pythons)
print('--------7-5-3------------')
pythons = {'A':'a','B':'b','C':'c'}
print('A' in pythons) #'key' in A
print('B' in pythons) #๊ฒฐ๊ณผ๋ True
print('c' in pythons) # False
print('-----------7-5-4----------')
print(pythons['A'])
#print(pythons['Z']) << ๋์
๋๋ฆฌ์ ํค๊ฐ ์กด์ฌํ์ง ์์ผ๋ฉด ์๋ฌ๊ฐ๋จ !!!
#์ด๋ฌํ ์ ๋ฌ๋ฅผ ๋ฐฉ์งํ๊ธฐ์ํด in ์ผ๋ก ํค์ ๋ํ ๋ฉค๋ฒ์ญ ํ
์คํธ๋ฅผ ํ๋๋ฐฉ๋ฒ์ด๋ค.
print('---------7-5-5-------------')
print(pythons.get('B'))
print(pythons.get('Z','Not a python')) # ๋ง์ฝ ํค๊ฐ ๋์
๋๋ฆฌ์ ์์๋ ์ค๋ฅ๊ฐ์๋๊ณ ์ต์
๊ฐ์ ํธ์ถํจ
print(pythons.get('Z')) # ์ต์
๊ฐ์ ์ง์ ํ์ง์์ผ๋ฉด None์ ์ป์
""" 7-6
1. ๋ชจ๋ ํค ๊ฐ์ ธ์ค๊ธฐ name_of_dict.key()
2. ๋ชจ๋ ๊ฐ ๊ฐ์ ธ์ค๊ธฐ name_of_dict.value()
3. ๋ชจ๋ ์์ ํค์๊ฐ ๊ฐ์ ธ์ค๊ธฐ name_of_dict.items()
"""
signals = {'green':'go','yellow':'go faster','red':'stop'}
print('-------7-6-1--------')
print(signals.keys())
print(list(signals.keys())) #๋์
๋๋ฆฌ๋ฅผ ๋ฆฌ์คํธ๋ก ๋ณํํ๊ธฐ์ํด
print('-------7-6-2--------')
print(list(signals.values()))
print('-------7-6-3--------')
print(list(signals.items()))
"""์
์
์ ๊ฐ์ ๋ฒ๋ฆฌ๊ณ ํค๋ง ๋จ์ ๋์
๋๋ฆฌ๋ผ๊ณ ์๊ฐํ๋ฉด๋๋ค. ๋์
๋๋ฆฌ์ ๋ง์ฐฌ๊ฐ์ง๋ก ๊ฐ ํค๋ ์ ์ผํด์ผํ๋ค. ์ด๋ค ๊ฒ์ด ์
์กด์ฌํ๋์ง ์ฌ๋ถ๋ง ํ๋จํ๊ธฐ ์ํด์๋ ์
์์ฌ์ฉํ๋ค ๊ทธ๋ฆฌ๊ณ ์ฌ๊ธฐ์ ์ด๋ค ์ ๋ณด๋ฅผ ์ฒจ๋ถํด์ ๊ทธ ๊ฒฐ๊ณผ๋ฅผ ์ป๊ณ ์ถ์ผ๋ฉด ๋์
๋๋ฆฌ๋ฅผ
์ฌ์ฉํ๋ค.
"""
""" 8-1
์
์ ์์ฑํ ๋๋ set()ํจ์ ํน์ {}์์ ์ฝฅ๋ง๋ก ๊ตฌ๋ถ๋ ํ๋ ์ด์์ ๊ฐ์ ๋ฃ์ผ๋ฉด๋๋ค.
"""
print('--------8-1-----------')
empty_set = set()
print(empty_set)
print('---------')
even_number = {0,2,4,6,8} # ๋์
๋๋ฆฌ ํค์ ๋ง์ฐฌ๊ฐ์ง๋ก ์
์ ์์๊ฐ ์๋ค.
print(even_number)
print('---------')
print(set('letter')) #์ค๋ณต๋ ๊ฐ์ ๋ฒ๋ฆฐ ์
์ ์์ฑํ๋ค [[ set() ]]
""" 8-2
1. ๋ฆฌ์คํธ >> ์
์ผ๋ก
2. ํํ์ ์
์ผ๋ก
3. ๋์
๋๋ฆฌ >> ์
์ผ๋ก (๋์
๋๋ฆฌ์ set()์ ์ฌ์ฉํ๋ฉด ํค๋ง ์ฌ์ฉํ๋ค)
set( ('string') )
( (list) )
( (tuple) )
({'a:b','c:d'})
"""
print('-------8-2-1--------')
print(set(['Dasher','Dancer','Prancer','Mason-Dixon']))
print('-------8-2-2--------')
print(set(("Ummagumma","Echoes","Atom Heart Mother")))
print('-------8-2-3--------')
print(set({"apple":"red","orange":"orange","cherry":"red"})) # ํค๊ฐ๋ง ํธ์ถ
""" 8-3 ์
์ฐ์ฐ
์
์ฐ์ฐ
1. & ์ฐ์ฐ์์ intersection() ํจ์ &:๊ต์งํฉ a&b = a.intersection(b)
2. | ์ฐ์ฐ์์ union() ํจ์ |:ํฉ์งํฉ a|b = a.union(b)
3. - ์ฐ์ฐ์์ difference() ํจ์ - : ์ฐจ์งํฉ a-b = a.different(b)
4. ^ ์ฐ์ฐ์์ symmetric_difference() ํจ์ ^ : ๋์นญ์ฐจ์งํฉ(๊ต์งํฉ์์ ์ธํ๋๋จธ์ง) a^b = a.symmetric_difference(b)
5.<= ์ฐ์ฐ์์ issubset()ํจ์ <= : a์
์ด b์
์ ๋ถ๋ถ์งํฉ a<=b = a.issubset(b)
"""
print('--------8-3-1------------')
a = {1,2}
b = {2,3}
print(a&b)
print(a.intersection(b))
print('--------8-3-2-------------')
print(a|b)
print(a.union(b))
print('--------8-3-3-------------')
print(a-b)
print(a.difference(b))
print('------8-3-4--------------')
print(a^b)
print(a.symmetric_difference(b))
print('-------8-3-5-------------')
print(a <= b)
print(a.issubset(b))
A ={2,3}
B ={2,3,4,5}
print('---8-3-5(cf)')
print(A <= b)
print(A.issubset(B)) | [
"jkoon2013@gmail.com"
] | jkoon2013@gmail.com |
196c4f27e79af13c60f2efd7ff86c0e6b8733c45 | f31391ec70caf12b5c04634c6375f768b7ddc854 | /Full_Project/PyMongo/Main.py | 808e97469b36fdbbec9610da01b09f07a5f9b9e7 | [] | no_license | poklj/Python | 8daebeff851a494b35c3ef0561bd7dfb5ac4ea94 | acbf3b8705220fb7c0afe8ccb40381f9e337838d | refs/heads/master | 2021-08-23T10:53:44.729608 | 2017-12-04T15:38:07 | 2017-12-04T15:38:07 | 112,198,401 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 86 | py | from PyMongo import *
def Main():
pass
if __name__ == "__main__":
Main() | [
"compgeek223@gmail.com"
] | compgeek223@gmail.com |
49915c6689b5cfb63c853499fc46782cbfb4e004 | 02d6aa27cffce7620975cc1750b1fdc33cfb4a52 | /gsf/processed/gsf_sub_routine_run_seqs/func_code.py | cf44c59a8295fee0c9b6d81b1e4b41edab648578 | [] | no_license | AlgorithmicAmoeba/picklejar | d8d54216e35b8f10c814a8837b536d480e3ced63 | 8b44829149f39c6e7538b52ae1fae62be3270d93 | refs/heads/master | 2022-12-28T18:33:25.397765 | 2020-10-21T07:43:53 | 2020-10-21T07:43:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 568 | py | # first line: 501
@PickleJar.pickle(path='gsf/processed')
def gsf_sub_routine_run_seqs():
"""Returns the run sequences for the predict, update and resample subroutines
Returns
-------
run_seqss : List
[predict; update; resample] x [N_particles; run_seq]
"""
N_particles_gpu = numpy.array([int(i) for i in 2**numpy.arange(1, 19, 0.5)])
run_seqss = [
predict_subs_run_seq(N_particles_gpu, 100),
update_subs_run_seq(N_particles_gpu, 100),
resample_subs_run_seq(N_particles_gpu, 100)
]
return run_seqss
| [
"29543948+darren-roos@users.noreply.github.com"
] | 29543948+darren-roos@users.noreply.github.com |
8ab113cf60a3a4a75b3d8b50adeeef8e0c253799 | 22b78677bfe20f4c548a8c6cadfaeebcc635a22e | /venv/bin/pip2 | e606e424a07a9bdbdd662dc790e5b6d64708c181 | [] | no_license | mr-kaveh/flasksocialapp | 57778db7bab285d514502d4dd0ef43245a0f1d5c | d9fa096c53b3a202191d2d9e0373ff1b39663421 | refs/heads/master | 2020-04-19T02:33:34.151348 | 2019-02-01T05:12:25 | 2019-02-01T05:12:25 | 167,907,095 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 249 | #!/home/hossein/myScripts/socialApp/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"mr.hdavoodi@gmail.com"
] | mr.hdavoodi@gmail.com | |
a47988e12caea650f9b6dc78153c6e2a74602047 | 5aa0e5f32d529c3321c28d37b0a12a8cf69cfea8 | /client/local_objects/ClientPlayerManager.py | 8acf4ecba25471df1e138e3be612cc0741d8054f | [] | no_license | sheepsy90/survive | 26495f1ff2d8247fbb9470882f8be9f5272e7f2c | 0eddf637be0eacd34415761b78fc2c9d50bc1528 | refs/heads/master | 2021-01-09T05:55:16.546762 | 2017-02-03T20:15:28 | 2017-02-03T20:15:28 | 80,864,391 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 974 | py | # -*- coding:utf-8 -*-
from client.local_objects.PlayerModel import PlayerModel
class ClientPlayerManager(object):
def __init__(self):
self.players = {}
self.me = None
def add_new_player_position(self, player_id, player_name, position, is_moving, is_me):
if player_id not in self.players:
self.players[player_id] = PlayerModel(player_id, player_name, position, is_moving)
else:
self.players[player_id].update_position(position, is_moving)
if is_me:
self.me = self.players[player_id]
def has_me(self):
return self.me is not None
def get_players(self):
return self.players.values()
def remove_player(self, name):
print "REMOVE PLAYER FROM CLIENT"
del self.players[name]
def get_me(self):
return self.me
def set_my_character_condition(self, blurriness, redness):
self.me.set_character_condition(blurriness, redness) | [
"robert.kessler@klarna.com"
] | robert.kessler@klarna.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.