blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 281 | content_id stringlengths 40 40 | detected_licenses listlengths 0 57 | license_type stringclasses 2 values | repo_name stringlengths 6 116 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 313 values | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 18.2k 668M ⌀ | star_events_count int64 0 102k | fork_events_count int64 0 38.2k | gha_license_id stringclasses 17 values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 107 values | src_encoding stringclasses 20 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 4 6.02M | extension stringclasses 78 values | content stringlengths 2 6.02M | authors listlengths 1 1 | author stringlengths 0 175 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1b8bf4767e7e81816c259accadb336a80b752300 | 1a13cf55de87bf9fd7cd5911ab7bd9d9c1f88241 | /tests/programscache.py | 2382802032fded66be7aa14c186b0adc155ddd14 | [
"Apache-2.0"
] | permissive | jeperez/winreg-kb | c59ead2d593b4ec375b77d7a9c49fbec35b9f156 | a50fcfc89e3fac282f276b12fb67807ddb56ef10 | refs/heads/master | 2021-01-19T11:34:33.293074 | 2017-04-03T06:28:00 | 2017-04-03T06:28:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,086 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""Tests for the Programs Cache information collector."""
import unittest
from dfwinreg import registry as dfwinreg_registry
from winregrc import collector
from winregrc import output_writer
from winregrc import programscache
from tests import test_lib as shared_test_lib
class TestOutputWriter(output_writer.StdoutOutputWriter):
  """Output writer for tests that records text instead of printing it.

  Attributes:
    text (list[str]): every string passed to WriteText, in order.
  """

  def __init__(self):
    """Initializes the test output writer."""
    super(TestOutputWriter, self).__init__()
    self.text = []

  def WriteText(self, text):
    """Records text that would otherwise go to stdout.

    Args:
      text: the text to write.
    """
    self.text = self.text + [text]
class ProgramsCacheDataParserTest(shared_test_lib.BaseTestCase):
  """Tests for the Programs Cache data parser."""

  # TODO: add tests. This class is currently a placeholder so the test
  # module mirrors the parser/collector split of the module under test.
class ProgramsCacheCollectorTest(shared_test_lib.BaseTestCase):
  """Tests for the Programs Cache information collector."""

  @shared_test_lib.skipUnlessHasTestFile([u'NTUSER.DAT'])
  def testCollect(self):
    """Tests the Collect function."""
    registry_collector = collector.WindowsRegistryCollector()
    test_path = self._GetTestFilePath([u'NTUSER.DAT'])
    registry_collector.ScanForWindowsVolume(test_path)
    self.assertIsNotNone(registry_collector.registry)
    collector_object = programscache.ProgramsCacheCollector()
    # Capture the collector's output in-memory instead of stdout.
    test_output_writer = TestOutputWriter()
    collector_object.Collect(registry_collector.registry, test_output_writer)
    test_output_writer.Close()
    # TODO: fix test.
    # NOTE(review): asserting empty output looks like a stand-in until the
    # expected output for this fixture is pinned down.
    self.assertEqual(test_output_writer.text, [])

  def testCollectEmpty(self):
    """Tests the Collect function on an empty Registry."""
    registry = dfwinreg_registry.WinRegistry()
    collector_object = programscache.ProgramsCacheCollector()
    test_output_writer = TestOutputWriter()
    # An empty Registry should produce no output at all.
    collector_object.Collect(registry, test_output_writer)
    test_output_writer.Close()
    self.assertEqual(len(test_output_writer.text), 0)
# Allow running this test module directly.
if __name__ == '__main__':
  unittest.main()
| [
"joachim.metz@gmail.com"
] | joachim.metz@gmail.com |
c2708ee7ab95a79f4a154cf48f91ac0a4fc2e003 | 9a00a586da1f78e2136e8576fe68a83664313f68 | /food/users/routes.py | 7c174cf83c7df5551550547ba47061a002a778b9 | [] | no_license | moinmir/Food-for-Thought | b76648c2053bd21a6221c87bd562c6664d198016 | 1da536449dbcf91296898f2a29cba34d619d2e88 | refs/heads/main | 2022-12-26T02:13:35.591555 | 2020-10-10T18:16:10 | 2020-10-10T18:16:10 | 300,952,958 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 5,010 | py | from flask import render_template, url_for, flash, redirect, request, Blueprint
from flask_login import login_user, current_user, logout_user, login_required
from food import db, bcrypt
from food.models import User, Campaign
from food.users.forms import (
RegistrationForm,
LoginForm,
UpdateAccountForm,
RequestResetForm,
ResetPasswordForm,
)
from food.users.utils import save_picture, send_reset_email
# Blueprint grouping all user-account related routes (register/login/etc.).
users = Blueprint("users", __name__)
@users.route("/register", methods=["GET", "POST"])
def register():
    """Create a new user account.

    GET renders the registration form; POST validates it, stores the new
    user with a bcrypt-hashed password, and redirects to the login page.
    Already-authenticated users are sent straight to the home page.
    """
    if current_user.is_authenticated:
        return redirect(url_for("main.home"))
    form = RegistrationForm()
    if form.validate_on_submit():
        # Hash the password before it ever touches the database.
        hashed_password = bcrypt.generate_password_hash(form.password.data).decode(
            "utf-8"
        )
        user = User(
            username=form.username.data,
            email=form.email.data,
            password=hashed_password,
            urole=form.urole.data,
        )
        db.session.add(user)
        db.session.commit()
        flash(
            message="Your account has been created! You can now log in",
            category="success",
        )
        return redirect(url_for("users.login"))
    return render_template("register.html", title="Register", form=form)
@users.route("/login", methods=["GET", "POST"])
def login():
    """Authenticate a user and start a session.

    GET renders the login form; POST checks the bcrypt password hash and,
    on success, honours an optional ``?next=<path>`` redirect target set by
    ``@login_required``.
    """
    if current_user.is_authenticated:
        return redirect(url_for("main.home"))
    form = LoginForm()
    if form.validate_on_submit():
        user = User.query.filter_by(email=form.email.data).first()
        if user and bcrypt.check_password_hash(user.password, form.password.data):
            login_user(user, remember=form.remember.data)
            next_page = request.args.get("next")
            # BUG FIX: ``next`` carries a URL *path* (e.g. "/account"), not an
            # endpoint name, so the original url_for(next_page[1:]) raised
            # BuildError for blueprint endpoints such as "users.account".
            # Redirect to the path itself, but only when it is a local path
            # (guards against open-redirect attacks).
            if next_page and next_page.startswith("/"):
                return redirect(next_page)
            return redirect(url_for("main.home"))
        else:
            flash("Login Unsuccessful. Please check email and password.", "danger")
    return render_template("login.html", title="Login", form=form)
@users.route("/logout")
def logout():
    """End the current user's session and return to the home page."""
    logout_user()
    home_url = url_for("main.home")
    return redirect(home_url)
@users.route("/account", methods=["GET", "POST"])
@login_required
def account():
    """Display and update the logged-in user's profile.

    POST saves the (optionally new) profile picture, username, email and
    role; GET pre-fills the form with the current values.
    """
    form = UpdateAccountForm()
    if form.validate_on_submit():
        if form.picture.data:
            current_user.image_file = save_picture(form.picture.data)
        current_user.username = form.username.data
        current_user.email = form.email.data
        current_user.urole = form.urole.data
        db.session.commit()
        flash("Your account has been updated!", "success")
        # Redirect-after-POST avoids form re-submission on browser refresh.
        return redirect(url_for("users.account"))
    elif request.method == "GET":
        form.username.data = current_user.username
        form.email.data = current_user.email
        form.urole.data = current_user.urole
    image_file = url_for("static", filename="profile_pics/" + current_user.image_file)
    return render_template(
        "account.html", title="Account", image_file=image_file, form=form
    )
@users.route("/user/<string:username>")
def user_campaigns(username):
    """List a user's campaigns, newest first, paginated five per page.

    404s when no user with that username exists.
    """
    page = request.args.get("page", 1, type=int)
    user = User.query.filter_by(username=username).first_or_404()
    campaigns = (
        Campaign.query.filter_by(owner=user)
        .order_by(Campaign.date_posted.desc())
        .paginate(per_page=5, page=page)
    )
    return render_template("user_campaigns.html", campaigns=campaigns, user=user)
@users.route("/reset_password", methods=["GET", "POST"])
def reset_request():
    """E-mail a password-reset link to the submitted address."""
    if current_user.is_authenticated:
        return redirect(url_for("main.home"))
    form = RequestResetForm()
    if form.validate_on_submit():
        user = User.query.filter_by(email=form.email.data).first()
        # NOTE(review): assumes RequestResetForm validates that an account
        # exists for this address; otherwise `user` may be None here — confirm.
        send_reset_email(user)
        flash("An email has been sent with instructions to reset your password", "info")
        return redirect(url_for("users.login"))
    return render_template("reset_request.html", title="Reset Password", form=form)
@users.route("/reset_password/<token>", methods=["GET", "POST"])
def reset_token(token):
    """Reset a user's password given a valid, unexpired reset token."""
    if current_user.is_authenticated:
        return redirect(url_for("main.home"))
    user = User.verify_reset_token(token)
    if user is None:
        flash("That is an invalid or expired token", "warning")
        return redirect(url_for("users.reset_request"))
    form = ResetPasswordForm()
    if form.validate_on_submit():
        # Replace the stored hash with the newly chosen password.
        hashed_password = bcrypt.generate_password_hash(form.password.data).decode(
            "utf-8"
        )
        user.password = hashed_password
        db.session.commit()
        flash(
            message="Your password has been updated! You can now log in",
            category="success",
        )
        return redirect(url_for("users.login"))
    return render_template("reset_token.html", title="Reset Password", form=form)
| [
"moin@princeton.edu"
] | moin@princeton.edu |
9c5ff2715749470a7b98c0feea06a408bf368b99 | a7261331ed7e8b80e035c037979d0eb32b1c43ae | /tests/terraform_ci/test_strip_backend.py | e2e27fd2e15d335d757ccbbd9ecb489811859177 | [] | no_license | tamaskozak/terraform-ci | d03baee25c54e7a3f391fa9041ede97209dbcb02 | bc5aec0b8a5caf90e559431dbdb5e21a4032b47a | refs/heads/master | 2020-08-01T08:35:00.400627 | 2020-04-20T04:03:54 | 2020-04-20T04:03:54 | 210,934,296 | 0 | 0 | null | 2019-09-25T20:26:31 | 2019-09-25T20:26:31 | null | UTF-8 | Python | false | false | 439 | py | """Tests for strip_backend() function."""
from os import path as osp
from terraform_ci import strip_backend
def test_strip_backend(tmpdir):
    """Check that main.tf is copied."""
    # Build a throwaway Terraform directory with an empty main.tf.
    original_tf_dir = tmpdir.mkdir("original")
    tf_file = original_tf_dir.join("main.tf")
    tf_file.write("")
    # strip_backend() should yield a temporary copy that still has main.tf.
    with strip_backend(str(original_tf_dir)) as tmp_tf_dir:
        assert osp.exists(
            osp.join(tmp_tf_dir, "main.tf")
        )
| [
"noreply@github.com"
] | noreply@github.com |
fc1bfcd8f32729a560c5de4916cce98dd96143b0 | bded5c00b2d7239cc4cbc4908c125bb759ff7de4 | /keras_frcnn/squeezenet.py | 4987d2b28bb539ae2ca2ff8bb6f5635f6bb6179f | [] | no_license | Shiro-LK/keras_frcnn | 08167039ab665cc80be2f2ed655e49acacb5b15c | 4f5a75f3b0161cff4504c2f0113b3f9983ee1324 | refs/heads/master | 2020-03-17T09:28:52.785653 | 2018-07-06T19:04:11 | 2018-07-06T19:04:11 | 133,476,851 | 1 | 0 | null | 2018-05-15T07:26:57 | 2018-05-15T07:26:57 | null | UTF-8 | Python | false | false | 9,774 | py | # -*- coding: utf-8 -*-
"""
Created on Sun Mar 25 15:30:03 2018
@author: shiro
"""
from __future__ import print_function
from __future__ import absolute_import
import keras.backend as K
import warnings
from keras.applications.mobilenet import MobileNet
from keras.models import Model
from keras.layers import Flatten, Dense, Input, Conv2D, MaxPooling2D, Dropout, Concatenate, Activation
from keras.layers import GlobalAveragePooling2D, GlobalMaxPooling2D, TimeDistributed
from keras.engine.topology import get_source_inputs
from keras.utils import layer_utils
from keras.utils.data_utils import get_file
from keras import backend as K
from keras_frcnn.roi_pooling_conv import RoiPoolingConv
import math
from keras.layers import Convolution2D,AveragePooling2D
from keras.layers import BatchNormalization
from keras.applications.imagenet_utils import _obtain_input_shape
WEIGHTS_PATH = 'https://github.com/wohlert/keras-squeezenet/releases/download/v0.1/squeezenet_weights.h5'
def get_weight_path():
    """Return the local path of the SqueezeNet ImageNet weight file.

    Downloads the file into the Keras cache on first use. Implicitly
    returns None under Theano image ordering, for which no pretrained
    weights are available.
    """
    if K.image_dim_ordering() == 'th':
        print('pretrained weights not available for VGG with theano backend')
        return
    else:
        weights_path = get_file('squeezenet_weights.h5', WEIGHTS_PATH, cache_subdir='models')
        return weights_path
def copy_weights(oldmodel, newmodel):
    """Copy weights between models for every layer whose name matches.

    Layers named 'softmax' or 'input' are skipped. Each copied layer's
    name is printed as progress feedback. Returns newmodel.
    """
    weights_by_name = {layer.name: layer.get_weights() for layer in oldmodel.layers}
    for index, layer in enumerate(newmodel.layers):
        layer_name = layer.name
        if layer_name in weights_by_name and layer_name not in ('softmax', 'input'):
            newmodel.layers[index].set_weights(weights_by_name[layer_name])
            print(layer_name)
    return newmodel
def _fire(x, filters, name="fire", BN=True, trainable=False):
    """SqueezeNet fire module: 1x1 squeeze feeding parallel 1x1/3x3 expands.

    Args:
        x: input tensor.
        filters: (squeeze, expand1x1, expand3x3) filter counts.
        name: prefix for the layer names.
        BN: append a BatchNormalization layer to the output when True.
        trainable: whether the conv layers are trainable.
    """
    sq_filters, ex1_filters, ex2_filters = filters
    squeeze = Convolution2D(sq_filters, (1, 1), activation='relu', trainable=trainable, padding='same', name=name + "_squeeze1x1")(x)
    expand1 = Convolution2D(ex1_filters, (1, 1), activation='relu', trainable=trainable, padding='same', name=name + "_expand1x1")(squeeze)
    expand2 = Convolution2D(ex2_filters, (3, 3), activation='relu', trainable=trainable, padding='same', name=name + "_expand3x3")(squeeze)
    # Concatenate the two expand branches along the channel axis.
    x = Concatenate(axis=-1, name=name)([expand1, expand2])
    if BN == True:
        x = BatchNormalization(name=name+'_bn')(x)
    return x
def _fire_td(x, filters, name="fire_td", BN=True, trainable=False):
    """TimeDistributed fire module, applied independently to each RoI slice.

    Same structure as _fire(), wrapped in TimeDistributed so it runs over
    the extra leading RoI dimension used by the detection head.
    """
    sq_filters, ex1_filters, ex2_filters = filters
    squeeze = TimeDistributed(Convolution2D(sq_filters, (1, 1), activation='relu', trainable=trainable, padding='same', name=name + "_squeeze1x1"), name='TimeDistributed_' + name + "_squeeze1x1")(x)
    expand1 = TimeDistributed(Convolution2D(ex1_filters, (1, 1), activation='relu', trainable=trainable, padding='same', name=name + "_expand1x1"), name='TimeDistributed_' + name + "_expand1x1")(squeeze)
    expand2 = TimeDistributed(Convolution2D(ex2_filters, (3, 3), activation='relu', trainable=trainable, padding='same', name=name + "_expand3x3"), name='TimeDistributed_' + name + "_expand3x3")(squeeze)
    x = Concatenate(axis=-1, name=name)([expand1, expand2])
    if BN == True:
        x = TimeDistributed(BatchNormalization(name=name+'_bn'), name='TimeDistributed_' + name + '_bn')(x)
    return x
def SqueezeNet(include_top=True, weights="imagenet", input_tensor=None, input_shape=None, pooling=None, classes=1000, channels=3, trainable=False):
    """Build the SqueezeNet convolutional body and return its output tensor.

    NOTE(review): despite the Keras-application-style signature this returns
    the output *tensor*, not a Model — the Model construction and weight
    loading below are commented out, and `pooling` is only used by the
    commented-out branch.
    """
    if weights not in {'imagenet', None}:
        raise ValueError('The `weights` argument should be either '
                         '`None` (random initialization) or `imagenet` '
                         '(pre-training on ImageNet).')
    if weights == 'imagenet' and include_top and classes != 1000:
        raise ValueError('If using `weights` as imagenet with `include_top`'
                         ' as true, `classes` should be 1000')
    # Determine proper input shape
    input_shape = _obtain_input_shape(input_shape,
                                      default_size=224,
                                      min_size=48,
                                      data_format=K.image_data_format(),
                                      require_flatten=False)
    if input_tensor is None:
        img_input = Input(shape=input_shape)
    else:
        if not K.is_keras_tensor(input_tensor):
            img_input = Input(tensor=input_tensor, shape=input_shape)
        else:
            img_input = input_tensor
    # Non-RGB inputs get a differently named stem conv — presumably so that
    # by-name pretrained RGB weights are not loaded into it; confirm.
    if channels != 3:
        x = Convolution2D(64, kernel_size=(3, 3), strides=(2, 2), padding="same", trainable=trainable, activation="relu", name='conv1_channels_'+str(channels))(img_input)
    else:
        x = Convolution2D(64, kernel_size=(3, 3), strides=(2, 2), padding="same", trainable=trainable, activation="relu", name='conv1')(img_input)
    x = MaxPooling2D(pool_size=(3, 3), strides=(2, 2), name='maxpool1', padding="valid")(x)
    x = BatchNormalization(name= 'maxpool1_bn')(x)
    x = _fire(x, (16, 64, 64), name="fire2", trainable=trainable)
    x = _fire(x, (16, 64, 64), name="fire3", BN = False, trainable=trainable)
    x = MaxPooling2D(pool_size=(3, 3), strides=(2, 2), name='maxpool3', padding="valid", trainable=trainable)(x)
    x = BatchNormalization(name= 'maxpool3_bn')(x)
    x = _fire(x, (32, 128, 128), name="fire4", trainable=trainable)
    x = _fire(x, (32, 128, 128), name="fire5", BN=False, trainable=trainable)
    x = MaxPooling2D(pool_size=(3, 3), strides=(2, 2), name='maxpool5', padding="valid")(x)
    x = BatchNormalization(name= 'maxpool5_bn')(x)
    x = _fire(x, (48, 192, 192), name="fire6", trainable=trainable)
    x = _fire(x, (48, 192, 192), name="fire7", trainable=trainable)
    # Classification head (only built when include_top is True).
    if include_top:
        x = Dropout(0.5, name='dropout9')(x)
        x = Convolution2D(classes, (1, 1), padding='valid', name='conv10')(x)
        x = AveragePooling2D(pool_size=(13, 13), name='avgpool10')(x)
        x = Flatten(name='flatten10')(x)
        x = Activation("softmax", name='softmax')(x)
    # else:
    #     if pooling == "avg":
    #         x = GlobalAveragePooling2D(name="avgpool10")(x)
    #     else:
    #         x = GlobalMaxPooling2D(name="maxpool10")(x)
    # model = Model(img_input, x, name="squeezenet")
    # if weights == 'imagenet':
    #     weights_path = get_file('squeezenet_weights.h5',
    #                             WEIGHTS_PATH,
    #                             cache_subdir='models')
    #
    #     model.load_weights(weights_path, by_name=True)
    return x
def divide(x):
    """Map an input spatial size to the backbone's output feature-map size.

    Applies one ceil-halving (the stride-2 stem conv with 'same' padding)
    followed by three "halve then subtract one" steps (the three stride-2
    valid-padding max pools). Uses the identity
    floor(s/2) + s%2 == ceil(s/2) for integer s.
    """
    size = math.ceil(x / 2)
    for _ in range(3):
        size = math.ceil(size / 2) - 1
    return int(size)
def get_img_output_length(width, height):
    """Return (output_width, output_height) of the backbone feature map
    for an input image of the given width and height."""
    return divide(width), divide(height)
def nn_base(input_tensor=None, trainable=False, channels=3):
    """Build the shared SqueezeNet feature-extractor (backbone) tensor.

    Args:
        input_tensor: optional existing Keras tensor to build on.
        trainable: whether the backbone conv layers are trainable.
        channels: number of input image channels.
    """
    # Determine proper input shape (channels-first under Theano ordering).
    if K.image_dim_ordering() == 'th':
        input_shape = (channels, None, None)
    else:
        input_shape = (None, None, channels)
    if input_tensor is None:
        img_input = Input(shape=input_shape)
    else:
        if not K.is_keras_tensor(input_tensor):
            img_input = Input(tensor=input_tensor, shape=input_shape)
        else:
            img_input = input_tensor
    # Headless SqueezeNet; weights are loaded separately by the caller.
    x = SqueezeNet(input_tensor=img_input, include_top=False, weights=None, channels=channels, trainable=trainable)
    #x.summary()
    return x
def rpn(base_layers, num_anchors):
    """Region Proposal Network head on top of the shared feature map.

    Returns [x_class, x_regr, base_layers]: one sigmoid objectness score
    per anchor and four linear box-regression values per anchor.
    """
    x = Conv2D(512, (3, 3), padding='same', activation='relu', kernel_initializer='normal', name='rpn_conv1')(
        base_layers)
    x_class = Conv2D(num_anchors, (1, 1), activation='sigmoid', kernel_initializer='uniform', name='rpn_out_class')(x)
    x_regr = Conv2D(num_anchors * 4, (1, 1), activation='linear', kernel_initializer='zero', name='rpn_out_regress')(x)
    return [x_class, x_regr, base_layers]
def classifier_layers(x_, trainable=False):
    """Two TimeDistributed fire blocks applied to the pooled RoI features."""
    out = _fire_td(x_, (64, 256, 256), name="fire8_td", trainable=trainable)
    out = _fire_td(out, (64, 256, 256), name="fire_td9", trainable=trainable)
    return out
def classifier(base_layers, input_rois, num_rois, nb_classes=21, trainable=False):
    """Faster R-CNN detection head: RoI pooling, fire blocks, two outputs.

    Returns:
        [out_class, out_regr]: per-RoI class softmax and bounding-box
        regression (4 values per non-background class).
    """
    # compile times on theano tend to be very high, so we use smaller ROI pooling regions to workaround
    if K.backend() == 'tensorflow':
        pooling_regions = 7
        input_shape = (num_rois, 7, 7, 512)
    elif K.backend() == 'theano':
        pooling_regions = 7
        input_shape = (num_rois, 512, 7, 7)
    out_roi_pool = RoiPoolingConv(pooling_regions, num_rois)([base_layers, input_rois])
    out = classifier_layers(out_roi_pool, trainable=trainable)
    out = TimeDistributed(AveragePooling2D(name='Global_average_Pooling_classifier_layer'), name='TimeDistributed_AVG')(out)
    out = TimeDistributed(Flatten(name='flatten'), name='TimeDistributed_flatten')(out)
    #out = TimeDistributed(Dense(4096, activation='relu', name='fc1'))(out)
    #out = TimeDistributed(Dense(4096, activation='relu', name='fc2'))(out)
    out_class = TimeDistributed(Dense(nb_classes, activation='softmax', kernel_initializer='zero', name='dense_class'),
                                name='dense_class_{}'.format(nb_classes))(out)
    # note: no regression target for bg class
    out_regr = TimeDistributed(Dense(4 * (nb_classes - 1), activation='linear', kernel_initializer='zero', name='dense_regr'),
                               name='dense_regress_{}'.format(nb_classes))(out)
    return [out_class, out_regr]
| [
"kunludovic@europe.essilor.group"
] | kunludovic@europe.essilor.group |
a21dda371ce4e2c2fd2dd233af514f872d8204cd | 0f2b7f4117d03a101ffb5b3646bfd7094c19907b | /Model/PyTorch/Predict Winner Model/Predict_Model.py | a2cb6b3a6e611666510e6b7c2f72f33f4df10a43 | [] | no_license | antoniojkim/Ultimate-Tic-Tac-Toe-Zero | f046b82cfd6c22c59380d98f8008c5c8f5173d91 | 66548ae61347a9726bd888da817a22c3f5f50ba1 | refs/heads/master | 2021-06-09T08:43:21.700796 | 2021-02-06T21:07:26 | 2021-02-06T21:07:26 | 110,311,636 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 4,690 | py |
import platform
import sys
if platform.platform() == "Windows":
sys.path.append("..\\..\\..\\")
else:
sys.path.append("../../../")
import torch
import torch.nn.functional as F
import numpy as np
from numba import jit
import random
from UTTT import *
'''
Policy Idea:
    Simple Artificial Neural Network with the following feature inputs:

    Feature:                                             | Num Features:
    =====================================================|=======================
    Current Length (passed separately, not a feature)    | 1              =   1
    Whether or not they have the quadrants               | 9 * 2          =  18
    Whether or not they have a pair of quadrants         | 9C2 * 2        =  72
    Whether or not they can take a quadrant (planned,    | 9 * 2          =  18
        not implemented in extract_features)             |
    Whether or not they win if they take the quadrant    | 9 * 2          =  18
        (planned, not implemented in extract_features)   |
    Whether or not they have the square in the quadrant  | 81 * 2         = 162
    Whether or not they have a pair within a quadrant    | (9 * 9C2) * 2  = 648

    Total implemented (matches input_size below):        | 18+72+162+648  = 900
'''
@jit(cache=True)
def extract_features(quadrants, board):
    """Encode a UTTT position as a flat binary feature vector.

    Args:
        quadrants: length-9 array of quadrant owners, compared against the
            UTTT constants P1/P2.
        board: 9x9 array of square owners (quadrant index, square index).

    Returns:
        numpy array of 900 floats (0.0/1.0): 18 quadrant-ownership,
        72 quadrant-pair, 162 square-ownership and 648 within-quadrant-pair
        features; in each group the P1 features precede the P2 features.
    """
    features = []
    # Quadrant ownership: 9 quadrants x 2 players.
    features.extend([
        quadrant == P1 for quadrant in quadrants
    ])
    features.extend([
        quadrant == P2 for quadrant in quadrants
    ])
    # Unordered pairs of owned quadrants: C(9,2) x 2 players.
    features.extend([
        P1 == q1 == q2 for i, q1 in enumerate(quadrants) for j, q2 in enumerate(quadrants) if i < j
    ])
    features.extend([
        P2 == q1 == q2 for i, q1 in enumerate(quadrants) for j, q2 in enumerate(quadrants) if i < j
    ])
    # Individual square ownership: 81 squares x 2 players.
    features.extend([
        board[i][j] == P1 for i in range(9) for j in range(9)
    ])
    features.extend([
        board[i][j] == P2 for i in range(9) for j in range(9)
    ])
    # Pairs of owned squares inside the same quadrant: 9 * C(9,2) x 2 players.
    features.extend([
        board[i][j] == board[i][k] == P1 for i in range(9) for j in range(9) for k in range(j)
    ])
    features.extend([
        board[i][j] == board[i][k] == P2 for i in range(9) for j in range(9) for k in range(j)
    ])
    # Multiply by 1.0 to convert the booleans to a float array.
    return np.array(features)*1.0
# Number of binary features produced by extract_features():
# 18 + 72 + 162 + 648 = 900.
input_size = 900
class Predict_Model(torch.nn.Module):
    """Winner predictor: a length-bucketed input layer, one hidden layer,
    and a softmax over the three game outcomes.

    The game length selects which of five input layers is used, so each
    game phase gets its own first-layer weights.
    """

    def __init__(self, state_dict_path=None):
        """Build the network; optionally restore weights from a file.

        Layer creation order is kept stable so random initialisation is
        reproducible under a fixed torch seed.
        """
        super(Predict_Model, self).__init__()
        self.fc1__0_15 = torch.nn.Linear(input_size, 256).double()
        self.fc1__15_30 = torch.nn.Linear(input_size, 256).double()
        self.fc1__30_45 = torch.nn.Linear(input_size, 256).double()
        self.fc1__45_60 = torch.nn.Linear(input_size, 256).double()
        self.fc1__60_ = torch.nn.Linear(input_size, 256).double()
        self.fc2 = torch.nn.Linear(256, 3).double()
        self.softmax = torch.nn.Softmax(dim=-1)
        if state_dict_path is not None:
            self.load_state_dict(torch.load(state_dict_path))
            self.eval()

    def save_weights(self, state_dict_path: str):
        """Persist the model parameters to disk."""
        torch.save(self.state_dict(), state_dict_path)

    def _first_layer(self, length):
        """Select the input layer for the current game length bucket."""
        if length < 15:
            return self.fc1__0_15
        if length < 30:
            return self.fc1__15_30
        if length < 45:
            return self.fc1__30_45
        if length < 60:
            return self.fc1__45_60
        return self.fc1__60_

    def forward(self, x, length):
        """Run the network; returns a softmax over the 3 outcome classes."""
        hidden = F.relu(self._first_layer(length)(x))
        logits = F.relu(self.fc2(hidden))
        return self.softmax(logits)

    def predict(self, x, length):
        """Run the network on a NumPy feature vector; returns a NumPy array."""
        features = torch.tensor(torch.from_numpy(x), dtype=torch.double)
        return self.forward(features, length).detach().numpy()
if __name__ == "__main__":
    # Smoke test: build an untrained model and run it on an empty board.
    model1 = Predict_Model()
    features = extract_features(np.zeros(9), np.zeros((9, 9)))
    # BUG FIX: predict() requires the current game length as its second
    # argument; the original call omitted it and raised TypeError.
    # An empty board corresponds to length 0.
    prediction = model1.predict(features, 0)
    print(prediction)
| [
"antoniok9130@gmail.com"
] | antoniok9130@gmail.com |
8ed747a448740c96f0fa5303b1fcd8b932cbf842 | ad1c5d99e808418f4a61e9a066767f39e18eecfa | /Main.py | c4cc3603ac3e35b6edcfbf8b0b3af9cbda941c00 | [] | no_license | chowshovisu/research_paper_classification | 8c84f8c67ab0471ad2d5c0305a77d89071ac2569 | e02b94a43799ac5fc5221dc245db2a2567787139 | refs/heads/main | 2023-08-30T01:17:34.333500 | 2021-10-30T02:06:46 | 2021-10-30T02:06:46 | 422,752,941 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,470 | py | import Read_Data
import Pre_Processing
import SVM_Classification
from sklearn.model_selection import train_test_split
def check_for_new_test(classifier):
    """Interactively classify user-entered strings until a blank line.

    Args:
        classifier: fitted estimator with a predict() method that accepts
            the tokenized corpus produced by Pre_Processing.
    """
    while True:
        new_str = input('Give a sample input string to Test:')
        # BUG FIX: input() strips the trailing newline, so the original
        # sentinel check `new_str == '\n'` could never be True and the
        # loop had no exit. Stop on an empty line instead.
        if not new_str:
            break
        tokenized_input = Pre_Processing.tokenize_preprocess_corpus([new_str])
        predicted_output = classifier.predict(tokenized_input)
        print('The Prediction Is: {}'.format(predicted_output))
if __name__ == '__main__':
    # reads and processes the csv file
    csv_dataframe = Read_Data.read_csv(file_name='rt-polaritydata/Plus_800.csv', separator=',')
    reports, labels = Read_Data.process_data(data=csv_dataframe)
    # Print the class distribution of the labels.
    unique_list = sorted(list(set(labels)))
    for item in unique_list:
        print('{} = {}'.format(item, labels.count(item)))
    # tokenize and pre-process reports
    tokenized_reports = Pre_Processing.tokenize_preprocess_corpus(reports)
    print(tokenized_reports[0])
    # Divide the reports and labels into Training and Test Documents.
    train_reports,test_reports,train_labels,test_labels = train_test_split(tokenized_reports, labels, test_size = 0.33, random_state=42)
    # Train and evaluate an SVM with TF-IDF features, then loop on user input.
    classifier=SVM_Classification.SVM_Normal(trainDoc=train_reports, trainClass=train_labels,
                                             testDoc=test_reports, testClass=test_labels, tfIdf=True)
    check_for_new_test(classifier)
| [
"chowshov@isu.edu"
] | chowshov@isu.edu |
d1e235f90fe632da6056a1699aa45cd1791dfaf4 | f607b81e445c3e38f74ad635f97314c036c9f1d8 | /newjarvis.py | a1c9db184a35b3f502c44f02221baec26ade3351 | [] | no_license | Shivansh1200/Python_world | 655d3bf327dd0d9ebaf86c9907098273f689fe3c | 19fc07bdf3d73055f4d1d4fc4eacc4b678e1fc1d | refs/heads/master | 2023-07-06T12:56:43.680624 | 2021-08-02T19:36:59 | 2021-08-02T19:36:59 | 259,650,431 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,429 | py | import pyttsx3
import datetime
import speech_recognition as sr
import wikipedia
import webbrowser
import os
import smtplib
import random
import time
import requests
# Initialise the Windows SAPI5 text-to-speech engine and select the
# second installed voice (index 1).
engine = pyttsx3.init('sapi5')
voices = engine.getProperty('voices')
engine.setProperty('voice', voices[1].id)
def speak(audio):
    """Speak the given string aloud via the pyttsx3 engine (blocking)."""
    engine.say(audio)
    engine.runAndWait()
def wishMe():
    """Greet the user according to the current local time, then introduce
    the assistant."""
    current_hour = datetime.datetime.now().hour
    if 0 <= current_hour < 12:
        greeting = "Good Morning!"
    elif 12 <= current_hour < 16:
        greeting = "Good Afternoon!"
    else:
        greeting = "Good Evening!"
    speak(greeting)
    speak("I am Jarvis Sir. Please tell me how may I help you.")
def takeCommand():
    """Capture one utterance from the microphone and return it as text.

    Returns:
        The recognized string, or the literal string "None" when
        recognition fails (callers substring-match against the result).
    """
    r = sr.Recognizer()
    with sr.Microphone() as source:
        clear()
        print("Listening....\r", end="")
        r.pause_threshold = 1
        audio = r.listen(source)  # it converts the audio input into string
    try:
        clear()
        print("Recognizing...\r", end="")
        # Google's online recognizer; requires network access.
        query = r.recognize_google(audio, language='en-in')
        # query = r.recognize_sphinx(audio) #instead of that we can use this is offline but accuray very poor
        print(f"User said: {query}\r", end="")
        time.sleep(2)
    except:
        # NOTE(review): bare except hides real errors; narrowing to
        # sr.UnknownValueError / sr.RequestError would be safer.
        clear()
        print("Say that again please....\r", end="")
        time.sleep(1)
        return "None"
    return query
def sendEmail(to, content):
    """Send a plain-text e-mail via Gmail SMTP (STARTTLS on port 587).

    Args:
        to: recipient address.
        content: message body.
    """
    server = smtplib.SMTP('smtp.gmail.com', 587)
    server.ehlo()
    server.starttls()
    # SECURITY(review): credentials are hard-coded in source; move them to
    # environment variables or a config file outside version control.
    server.login('singh_821916@student.nitw.ac.in',
                 'keep_your_password')  # Enter your password
    server.sendmail('singh_821916@student.nitw.ac.in', to, content)
    server.close()
def findReceiver(name):
    """Look up the e-mail address for a known contact name.

    Args:
        name: lower-case contact name as extracted from the spoken query.

    Returns:
        The contact's e-mail address, or 0 when the name is unknown
        (callers compare the result against 0).
    """
    contacts = {"abhay": "yourcontacts@gmail.com",
                "abhishek": "navodayanabhishek@gmail.com", "vishal": "yourcontacts@gmail.com"}
    # dict.get with a default replaces the original try/except KeyError.
    return contacts.get(name, 0)
def givenews():
    """Fetch India's top headlines from NewsAPI and read the first ten aloud."""
    # NOTE(review): API key is hard-coded; consider an environment variable.
    apiKey = '49e391e7066c4158937096fb5e55fb5d'
    url = f"https://newsapi.org/v2/top-headlines?country=in&apiKey={apiKey}"
    r = requests.get(url)
    data = r.json()
    data = data["articles"]
    flag = True  # True until the intro line has been spoken once.
    count = 0
    for items in data:
        count += 1
        if count > 10:
            break
        print(items["title"])
        # Titles arrive as "headline - source"; speak only the headline part.
        to_speak = items["title"].split(" - ")[0]
        if flag:
            speak("Today's top ten Headline are : ")
            flag = False
        else:
            speak("Next news :")
        speak(to_speak)
def clear():
    """Clear the console (Windows `cls`) before printing the next status."""
    os.system('cls')
if __name__ == '__main__':
    wishMe()
    # Main command loop: listen, match keywords, act. Each elif handles one
    # voice command family; matching is simple substring search.
    while True:
        # if 1:
        query = takeCommand().lower()
        # Logic for executing tasks based on query
        if "how are you" in query:
            speak("I'm fine sir, how can i help you ?")
        elif "who are you" in query:
            speak("I'm jarvis desktop assistant made by Mr Abhishek.")
        elif 'wikipedia' in query:
            # sentences=2 means return the first two sentences of the summary.
            # NOTE(review): the raw query (including the word "wikipedia") is
            # passed as the search term — confirm this is intended.
            results = wikipedia.summary(query, sentences=2)
            speak("According to wikipedia")
            # print("According to wikipedia")
            # print(results)
            speak(results)
        elif 'open word' in query or 'open ms word' in query or 'ms word' in query:
            wordPath = "C:\\ProgramData\\Microsoft\\Windows\\Start Menu\\Programs\\Word.lnk"
            os.startfile(wordPath)
        elif 'open youtube' in query:
            webbrowser.open('http://www.youtube.com')
        elif 'open google' in query:
            webbrowser.open('https://www.google.co.in/')
        elif 'open stackoverflow' in query:
            webbrowser.open('https://stackoverflow.com/')
        elif 'play music' in query or 'play song' in query or 'play some music' in query or 'play another music' in query or 'change song' in query or 'next song' in query:
            # Pick a random track from the local music folder.
            music_dir = 'C:\\Pen Drive\\MP3\\Old songs'
            songs = os.listdir(music_dir)
            os.startfile(os.path.join(
                music_dir, songs[random.randint(0, len(songs)-1)]))
        elif 'the time' in query or 'time' in query:
            strTime = datetime.datetime.now().strftime("%H:%M:%S")
            speak(f"Sir, the time is {strTime}")
        elif 'open code' in query or 'open visual studio' in query:
            codePath = "C:\\Users\\Abhishek Pratap\\AppData\\Local\\Programs\\Microsoft VS Code\\Code.exe"
            os.startfile(codePath)
        elif 'email to' in query or 'send a mail' in query or 'mail to' in query:
            # This will send mail only if there is any matching name in last of query
            # like "email to abhishek" or "mail to abhishek" or "send a mail to my freind abhishek"
            # notice last word in all strings contain a name which is exist as key in contacts (line 72)
            receiver = query.split(" ")[len(query.split(" "))-1]
            to = findReceiver(receiver)
            if to != 0:
                try:
                    speak("What is your message ?")
                    content = takeCommand()
                    # to = "navodayanabhishek@gmail.com"
                    sendEmail(to, content)
                    speak("Email has been sent")
                except Exception as e:
                    print(e)
                    speak(
                        "Sorry bro, something went wrong and i am not able to send your email right now.")
        elif 'headlines' in query or 'news' in query or 'headline' in query:
            givenews()
        elif 'jarvis quit' in query or 'exit' in query or 'close' in query:
            speak("Thanks for using Jarvis!!!")
            exit()
        elif 'awesome' in query or 'wow' in query or 'amazing' in query or 'wonderful' in query:
            speak("Thank you sir, i am here for you")
        elif 'what' in query or 'who' in query or 'where' in query or 'can you' in query:
            # Fallback: open a web search and speak the wikipedia summary.
            webbrowser.open(f"https://www.google.com/search?&q={query}")
            speak(wikipedia.summary(query, sentences=2))
"noreply@github.com"
] | noreply@github.com |
c7cc1d54cf51603fe23648a534d153efafdaa73b | fa351318b655b98c62b10a5f6fa675c878b4dbaa | /ck101.py | 34f12b736b963b38c2e7253ee80182a6b4a202cf | [
"Apache-2.0"
] | permissive | nsdown/DriveIt | e236acdec25e6d7d82e268875a5e64b98aeb9d08 | 1f001c8f54bd8a864070182aec6532519b874ce4 | refs/heads/master | 2021-01-17T23:10:32.755938 | 2016-01-29T14:35:05 | 2016-01-29T14:35:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,213 | py | from base import SharedBase
import re
class Ck101(SharedBase):
    """Scraper for comics hosted on comic.ck101.com.

    Fetches and caches the comic's flyleaf (overview) page once, then
    exposes helpers to enumerate chapters/volumes, count pages, resolve
    image URLs and download page images.
    """

    def __init__(self, url):
        """Fetch and cache the decoded flyleaf page for `url`."""
        self.flyleaf_url = url
        self.flyleaf_data = self.get_data(self.flyleaf_url).decode('utf-8')

    def get_name(self):
        """Return the comic title parsed from the flyleaf markup."""
        name = re.findall(r'<li><h1\sitemprop="name">(.+?)<\/h1><\/li>', self.flyleaf_data)[0]
        return name

    def get_chapter_info(self):
        """Return a mapping of chapter number -> content id.

        BUG FIX: the original guard compared the *string* `chapter` against
        the dict's *int* keys, so it was always True and duplicate chapter
        listings silently overwrote the earlier id. Keep the first id seen
        per chapter, as the guard intended.
        """
        ref_box = {}
        chapter_match = re.compile(r'\'詳情頁\-lists\'\,\'.+?\s(\d+?)集\'\,\'\/vols\/(\d+?)\/')
        for chapter, cid in chapter_match.findall(self.flyleaf_data):
            chapter_number = int(chapter)
            if chapter_number not in ref_box:
                ref_box[chapter_number] = int(cid)
        return ref_box

    def get_page_info(self, cid):
        """Return the number of pages in the chapter/volume `cid`."""
        inner_data = self.get_data('http://comic.ck101.com/vols/%s/1' % cid).decode('utf-8')
        pages = re.findall(r'第(\d+)頁', inner_data)
        # The page navigation links end with the highest page number.
        return int(pages[-1])

    def get_image_link(self, cid, page):
        """Return the image URL for `page` of `cid`.

        Side effect: stores the image file extension in self.ext for use
        by down().
        """
        inner_page_data = self.get_data('http://m.comic.ck101.com/vols/%s/%s' % (cid, page)).decode('utf-8')
        link_and_extension = re.findall('img src\=\"(http:[^\s]*?(jpg|png|gif))', inner_page_data)[0]
        self.ext = link_and_extension[1]
        return link_and_extension[0]

    def down(self, name, cid, link, parent, page, is_volume=False):
        """Download one page image into the comic's folder.

        Volume downloads are prefixed with 'V' to keep them apart from
        chapter downloads. Requires self.ext set by get_image_link().
        """
        img_data = self.get_data(link, 'http://m.comic.ck101.com/vols/%s/' % cid)
        if is_volume is False:
            file_path = self.get_path(name, parent, page, self.ext)
        else:
            file_path = self.get_path(name, 'V' + str(parent), page, self.ext)
        with open(file_path, 'wb+') as file:
            file.write(img_data)

    def is_volume(self):
        """Return True when the flyleaf lists chapter (集) entries."""
        if re.findall(r'\'詳情頁\-lists\'\,\'.+?\s(\d+?)集\'\,\'\/vols\/(\d+?)\/', self.flyleaf_data):
            return True
        else:
            return False

    def get_volume_info(self):
        """Return a mapping of volume number -> content id.

        Same str-vs-int key fix as get_chapter_info(): first id seen per
        volume is kept.
        """
        ref_box_volume = {}
        chapter_match = re.compile(r'\'詳情頁\-lists\'\,\'.+?\s(\d+?)卷\'\,\'\/vols\/(\d+?)\/')
        for chapter, cid in chapter_match.findall(self.flyleaf_data):
            volume_number = int(chapter)
            if volume_number not in ref_box_volume:
                ref_box_volume[volume_number] = int(cid)
        return ref_box_volume
| [
"admin@niconiconi.rocks"
] | admin@niconiconi.rocks |
e6b1231d380c090b085d6fbd5dd39d9f6f7f2b19 | 63db68b84ce01c38ab5ebe902f1c29cae6a97c79 | /crawler/test03.py | c2ff873ad378be2321918b460d14a0d5a91c4c0e | [] | no_license | f1ybird/spider-test | d590870c441222f1a9ea230808644e2ed6cd226b | 7176e92fe1de8f6e8420a076d32f1b7ae800e915 | refs/heads/master | 2020-03-31T05:43:37.155179 | 2018-10-14T13:02:22 | 2018-10-14T13:02:22 | 151,956,832 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,606 | py | # !/usr/bin/python
# -*- coding:UTF-8 -*-
import requests
from lxml import etree
'''
Python爬取大量数据时,如何防止IP被封
以下是爬取猪八戒的被封IP的代码
'''
def getUrl():
    # Walk listing pages 1..70 and hand each page URL to the page scraper.
    for page_no in range(1, 71):
        page_url = 'http://task.zbj.com/t-ppsj/page{}.html'.format(page_no)
        spiderPage(page_url)
def spiderPage(url):
if url is None:
return None
htmlText = requests.get(url).text
selector = etree.HTML(htmlText)
divs = selector.xpath('//*[@id="utopia_widget_7"]/div[1]/div')
try:
for div in divs:
title = div.xpath('./a/p[1]/span[2]/text()')
href = div.xpath('./a/@href')
price = div.xpath('./a/p[2]/b/text()')
subTitle = div.xpath('./a/p[3]/text()')
deadline = div.xpath('./a/p[2]/span/text()')
price = price[0] if len(price) > 0 else '' # python的三目运算 :为真时的结果 if 判定条件 else 为假时的结果
title = title[0] if len(title) > 0 else ''
href = href[0] if len(href) > 0 else ''
subTitle = subTitle[0] if len(subTitle) > 0 else ''
deadline = deadline[0] if len(deadline) > 0 else ''
print title, price, 'https:' + href, subTitle, deadline
print '---------------------------------------------------------------------------------------'
# spiderDetail(href)
except:
print '出错'
def spiderDetail(url):
if url is None:
return None
try:
htmlText = requests.get(url).text
selector = etree.HTML(htmlText)
aboutHref = selector.xpath('//*[@id="utopia_widget_10"]/div[1]/div/div/div/p[1]/a/@href')
price = selector.xpath('//*[@id="utopia_widget_10"]/div[1]/div/div/div/p[1]/text()')
title = selector.xpath('//*[@id="utopia_widget_10"]/div[1]/div/div/h2/text()')
contentDetail = selector.xpath('//*[@id="utopia_widget_10"]/div[2]/div/div[1]/div[1]/text()')
publishDate = selector.xpath('//*[@id="utopia_widget_10"]/div[2]/div/div[1]/p/text()')
aboutHref = aboutHref[0] if len(aboutHref) > 0 else '' # python的三目运算 :为真时的结果 if 判定条件 else 为假时的结果
price = price[0] if len(price) > 0 else ''
title = title[0] if len(title) > 0 else ''
contentDetail = contentDetail[0] if len(contentDetail) > 0 else ''
publishDate = publishDate[0] if len(publishDate) > 0 else ''
print aboutHref, price, title, contentDetail, publishDate
except:
print '出错'
# FIX: `if '_main_':` tests a non-empty string literal, which is always
# truthy, so the crawl started even when this module was merely imported.
# Use the standard entry-point guard instead.
if __name__ == '__main__':
    getUrl()
| [
"kevin_love_it@163.com"
] | kevin_love_it@163.com |
cff296cda398e409006e98b2fad275b35328f553 | bad8f98c907afe26bb4e57597f8d1f91489f8bf3 | /main.py | 207b9df2fe66635f6e4b5135a77446b7bd745708 | [] | no_license | Splagoon/ScreenCloud-Uguu | cdb103ccd1e12de6b2c8d50b88799e9ec7bf3d65 | 3be560eaaecb31f646d37b188c2a0187cd367529 | refs/heads/master | 2021-06-11T06:53:22.929986 | 2017-02-06T22:07:08 | 2017-02-06T22:07:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,783 | py | import ScreenCloud
from PythonQt.QtCore import QFile, QSettings, QStandardPaths
from PythonQt.QtGui import QMessageBox
from PythonQt.QtUiTools import QUiLoader
import time, requests
class UguuUploader():
    """ScreenCloud uploader plugin that posts screenshots to uguu.se."""

    def __init__(self):
        self.uil = QUiLoader()
        self.loadSettings()

    def isConfigured(self):
        # uguu.se needs no credentials, so the plugin is always ready.
        return True

    def loadSettings(self):
        """Read the persisted plugin settings (filename format, clipboard flag)."""
        settings = QSettings()
        settings.beginGroup("uploaders")
        settings.beginGroup("uguu.se")
        self.nameFormat = settings.value("name-format", "screenshot-%H-%M-%S")
        # QSettings may round-trip the bool as the string "true".
        self.copyLink = settings.value("copy-link", "true") in ["true", True]
        settings.endGroup()
        settings.endGroup()

    def saveSettings(self):
        """Persist the values currently shown in the settings dialog."""
        settings = QSettings()
        settings.beginGroup("uploaders")
        settings.beginGroup("uguu.se")
        settings.setValue("name-format", self.settingsDialog.group_name.input_name.text)
        settings.setValue("copy-link", self.settingsDialog.group_clipboard.checkbox_copy_link.checked)
        settings.endGroup()
        settings.endGroup()

    def getFilename(self):
        """Return the filename for the next screenshot, expanded from the format string."""
        self.loadSettings()
        return ScreenCloud.formatFilename(self.nameFormat)

    def showSettingsUI(self, parentWidget):
        """Load and open the settings dialog."""
        self.parentWidget = parentWidget
        self.settingsDialog = self.uil.load(QFile(workingDir + "/settings.ui"), parentWidget)
        self.settingsDialog.group_name.input_name.connect("textChanged(QString)", self.nameFormatEdited)
        self.settingsDialog.connect("accepted()", self.saveSettings)
        self.updateUi()
        self.settingsDialog.open()

    def updateUi(self):
        """Refresh the dialog widgets from the stored settings."""
        self.loadSettings()
        self.settingsDialog.group_name.input_name.setText(self.nameFormat)
        self.settingsDialog.group_clipboard.checkbox_copy_link.setChecked(self.copyLink)

    def upload(self, screenshot, name):
        """Save *screenshot* to a temp file and POST it to uguu.se.

        Returns True on success; on failure reports the error to ScreenCloud
        and returns False.
        """
        self.loadSettings()
        tmpFilename = QStandardPaths.writableLocation(QStandardPaths.TempLocation) + "/" + ScreenCloud.formatFilename(str(time.time()))
        screenshot.save(QFile(tmpFilename), ScreenCloud.getScreenshotFormat())
        data = {"name": name}
        try:
            # FIX: the file handle used to be opened without ever being
            # closed; the `with` block releases it deterministically.
            with open(tmpFilename, "rb") as screenshot_file:
                files = {"file": screenshot_file}
                response = requests.post("https://uguu.se/api.php?d=upload-tool", data=data, files=files)
            response.raise_for_status()
            if self.copyLink:
                ScreenCloud.setUrl(response.text)
        except requests.RequestException as e:
            # FIX: `RequestException` was referenced unqualified but never
            # imported (only `requests` is), so any upload failure raised a
            # NameError instead of being reported.  `e.message` is also
            # Python-2-only; str(e) works everywhere.
            ScreenCloud.setError("Failed to upload to Uguu.se: " + str(e))
            return False
        return True

    def nameFormatEdited(self, nameFormat):
        """Live-preview the expanded filename while the user types."""
        self.settingsDialog.group_name.label_example.setText(ScreenCloud.formatFilename(nameFormat, False))
| [
"rob@kelle.tt"
] | rob@kelle.tt |
ade057decffd38ff7aa4e2488f4d46cdce9592c0 | 1d7b91472a193718bd569b23d607b5fbc55efce9 | /chapter-3.py | b1b03937da905d6c15b7dc526ccb7b2f36a00a91 | [
"MIT"
] | permissive | PrateekshaRebecca/opencv-computerVision | b990f5cdd79f3adf0798535ee8015f979e63fd8d | b2446c7c99e9a7c8d06c9cfaf096140203408f4b | refs/heads/master | 2023-03-20T04:52:01.434711 | 2020-07-03T16:47:18 | 2020-07-03T16:47:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,826 | py |
#Accessing the webcam
'''
import cv2
cap = cv2.VideoCapture(0)
# Check if the webcam is opened correctly
if not cap.isOpened():
raise IOError("Cannot open webcam")
while True:
ret, frame = cap.read()
frame = cv2.resize(frame, None, fx=0.5, fy=0.5,interpolation=cv2.INTER_AREA)
cv2.imshow('Input', frame)
c = cv2.waitKey(1)
if c == 27:
break
cap.release()
cv2.destroyAllWindows()
'''
#Keyboard Inputs
'''
import argparse
import cv2
def argument_parser():
parser = argparse.ArgumentParser(description="Change color space of the input video stream using keyboard controls. The control keys are: Grayscale - 'g', YUV - 'y', HSV - 'h'")
return parser
if __name__=='__main__':
args = argument_parser().parse_args()
cap = cv2.VideoCapture(0)
# Check if the webcam is opened correctly
if not cap.isOpened():
raise IOError("Cannot open webcam")
cur_char = -1
prev_char = -1
while True:
# Read the current frame from webcam
ret, frame = cap.read()
# Resize the captured image
frame = cv2.resize(frame, None, fx=0.5, fy=0.5,interpolation=cv2.INTER_AREA)
#Listen to the keyboard events
c = cv2.waitKey(1)#returns the ASCII value of the keyboard input
if c == 27:
break
if c > -1 and c != prev_char:
cur_char = c
prev_char = c
if cur_char == ord('g'):
output = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
elif cur_char == ord('y'):
output = cv2.cvtColor(frame, cv2.COLOR_BGR2YUV)
elif cur_char == ord('h'):
output = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
else:
output = frame
cv2.imshow('Webcam', output)
cap.release()
cv2.destroyAllWindows()
'''
#Mouse inputs
'''
import cv2
import numpy as np
def detect_quadrant(event, x, y, flags, param):#x,y coordinate is obtained after mouse clicking
if event == cv2.EVENT_LBUTTONDOWN:
if x > width/2:
if y > height/2:
point_top_left = (int(width/2), int(height/2))
point_bottom_right = (width-1, height-1)
else:
point_top_left = (int(width/2), 0)
point_bottom_right = (width-1, int(height/2))
else:
if y > height/2:
point_top_left = (0, int(height/2))
point_bottom_right = (int(width/2), height-1)
else:
point_top_left = (0, 0)
point_bottom_right = (int(width/2), int(height/2))
cv2.rectangle(img, (0,0), (width-1,height-1), (255,255,255), -1)#white rectangle
cv2.rectangle(img, point_top_left, point_bottom_right, (0,100,0),-1)#green rectangle
if __name__=='__main__':
width, height = 640, 480
img = 255 * np.ones((height, width, 3), dtype=np.uint8)
cv2.namedWindow('Input window')
cv2.setMouseCallback('Input window', detect_quadrant)
while True:
cv2.imshow('Input window', img)
c = cv2.waitKey(10)
if c == 27:break
cv2.destroyAllWindows()
'''
#To see list of all mouse events
'''
import cv2
print([x for x in dir(cv2) if x.startswith('EVENT')])
events=['EVENT_FLAG_ALTKEY', 'EVENT_FLAG_CTRLKEY', 'EVENT_FLAG_LBUTTON', 'EVENT_FLAG_MBUTTON', 'EVENT_FLAG_RBUTTON', 'EVENT_FLAG_SHIFTKEY', 'EVENT_LBUTTONDBLCLK', 'EVENT_LBUTTONDOWN', 'EVENT_LBUTTONUP', 'EVENT_MBUTTONDBLCLK', 'EVENT_MBUTTONDOWN', 'EVENT_MBUTTONUP', 'EVENT_MOUSEHWHEEL', 'EVENT_MOUSEMOVE', 'EVENT_MOUSEWHEEL', 'EVENT_RBUTTONDBLCLK', 'EVENT_RBUTTONDOWN', 'EVENT_RBUTTONUP']
'''
#Interacting with a live video stream
'''
import cv2
import numpy as np
def draw_rectangle(event, x, y, flags, params):
"""
Whenever we draw a rectangle using the
mouse, we basically have to detect three types of mouse events: mouse click, mouse
movement, and mouse button release. This is exactly what we do in this function.
Whenever we detect a mouse click event, we initialize the top left point of the rectangle.
As we move the mouse, we select the region of interest by keeping the current position as
the bottom right point of the rectangle.
Once we have the region of interest, we just invert the pixels to apply the “negative film”
effect. We subtract the current pixel value from 255 and this gives us the desired effect.
When the mouse movement stops and button-up event is detected, we stop updating the
bottom right position of the rectangle. We just keep displaying this image until another
mouse click event is detected.
"""
global x_init, y_init, drawing, top_left_pt, bottom_right_pt
if event == cv2.EVENT_LBUTTONDOWN:
drawing = True
x_init, y_init = x, y
elif event == cv2.EVENT_MOUSEMOVE:
if drawing:
top_left_pt = (min(x_init, x), min(y_init, y))
bottom_right_pt = (max(x_init, x), max(y_init, y))
img[y_init:y, x_init:x] = 255 - img[y_init:y, x_init:x]
elif event == cv2.EVENT_LBUTTONUP:
drawing = False
top_left_pt = (min(x_init, x), min(y_init, y))
bottom_right_pt = (max(x_init, x), max(y_init, y))
img[y_init:y, x_init:x] = 255 - img[y_init:y, x_init:x]
if __name__=='__main__':
drawing = False
top_left_pt, bottom_right_pt = (-1,-1), (-1,-1)
cap = cv2.VideoCapture(0)
# Check if the webcam is opened correctly
if not cap.isOpened():
raise IOError("Cannot open webcam")
cv2.namedWindow('Webcam')
cv2.setMouseCallback('Webcam', draw_rectangle)
while True:
ret, frame = cap.read()
img = cv2.resize(frame, None, fx=0.5, fy=0.5,interpolation=cv2.INTER_AREA)
(x0,y0), (x1,y1) = top_left_pt, bottom_right_pt
img[y0:y1, x0:x1] = 255 - img[y0:y1, x0:x1]
cv2.imshow('Webcam', img)
c = cv2.waitKey(1)
if c == 27:
break
cap.release()
cv2.destroyAllWindows()
'''
#Apply median filter to an image
'''
import cv2
import numpy as np
img = cv2.imread('input.png')
output = cv2.medianBlur(img, 7)#size of kernel related to neighborhood size
cv2.imshow('Input', img)
cv2.imshow('Median filter', output)
cv2.waitKey()
'''
#####################################
# #
# Cartoonizing an image ######
#####################################
import cv2
import numpy as np
def cartoonize_image(img, ds_factor=4, sketch_mode=False):
    """Return a cartoonized version of BGR image *img*.

    In sketch mode the result is a black-and-white pencil sketch; otherwise
    the colours are smoothed with repeated bilateral filtering and the thick
    edge mask is overlaid on top.

    ds_factor: downscale factor used while running the expensive bilateral
    filter; the image is scaled back up afterwards.
    """
    # Grayscale + median blur suppresses salt-and-pepper noise before edges.
    img_gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    img_gray = cv2.medianBlur(img_gray, 7)
    # Detect edges and threshold them into a binary "ink" mask
    # (inverted: edges become black strokes on white).
    edges = cv2.Laplacian(img_gray, cv2.CV_8U, ksize=5)
    ret, mask = cv2.threshold(edges, 100, 255, cv2.THRESH_BINARY_INV)
    if sketch_mode:
        # Thicken the pencil strokes slightly, then smooth the result.
        img_sketch = cv2.cvtColor(mask, cv2.COLOR_GRAY2BGR)
        kernel = np.ones((3,3), np.uint8)
        img_eroded = cv2.erode(img_sketch, kernel, iterations=1)
        return cv2.medianBlur(img_eroded, 5)
    # Downscale before the repeated bilateral filtering for speed.
    img_small = cv2.resize(img, None, fx=1.0/ds_factor, fy=1.0/ds_factor,
        interpolation=cv2.INTER_AREA)
    num_repetitions = 10
    sigma_color = 5
    sigma_space = 7
    size = 5
    # Repeated small bilateral filters approximate one large edge-preserving smooth.
    for i in range(num_repetitions):
        img_small = cv2.bilateralFilter(img_small, size, sigma_color, sigma_space)
    img_output = cv2.resize(img_small, None, fx=ds_factor, fy=ds_factor,
        interpolation=cv2.INTER_LINEAR)
    # Overlay the thick edge mask on the smoothed colours.  (Removed the old
    # dead `dst = np.zeros(img_gray.shape)` pre-allocation: the very next
    # statement rebound `dst` anyway.)
    dst = cv2.bitwise_and(img_output, img_output, mask=mask)
    return dst
if __name__=='__main__':
    # Live webcam demo: press 's' for sketch mode, 'c' for colour cartoon,
    # Esc to quit.  Any other state shows the raw frame.
    cap = cv2.VideoCapture(0)  # default webcam
    cur_char = -1
    prev_char = -1
    while True:
        ret, frame = cap.read()
        # Halve the frame size to keep the cartoon filter responsive.
        frame = cv2.resize(frame, None, fx=0.5, fy=0.5,interpolation=cv2.INTER_AREA)
        c = cv2.waitKey(1)
        if c == 27:break  # Esc
        # Latch the last distinct key pressed so the chosen mode persists
        # between frames.
        if c > -1 and c != prev_char:
            cur_char = c
            prev_char = c
        if cur_char == ord('s'):
            cv2.imshow('Cartoonize', cartoonize_image(frame,sketch_mode=True))
        elif cur_char == ord('c'):
            cv2.imshow('Cartoonize', cartoonize_image(frame,sketch_mode=False))
        else:
            cv2.imshow('Cartoonize', frame)
    cap.release()
    cv2.destroyAllWindows()
| [
"noreply@github.com"
] | noreply@github.com |
58bf54ea5b49b65fb07e0ed13be7583dec589531 | 8575196670d2bb1b5fb6ef41ca165614165441c5 | /app/user/tests/test_user_api.py | 7b0cb57a41198dcfae30e2cd5b5caf048ea93114 | [
"MIT"
] | permissive | pandey-mohan/recipe-app-api | f6a9dce807cfb0161fb62a9977139a1cba268a77 | 154a4289bbeeace4b84e863c5adcea57e66dbab9 | refs/heads/master | 2023-08-22T06:08:43.401723 | 2020-07-12T15:28:52 | 2020-07-12T15:28:52 | 272,213,292 | 0 | 0 | MIT | 2021-09-22T19:14:38 | 2020-06-14T14:00:38 | Python | UTF-8 | Python | false | false | 5,012 | py | from django.test import TestCase
from django.contrib.auth import get_user_model
from django.urls import reverse
from rest_framework.test import APIClient
from rest_framework import status
CREATE_USER_URL = reverse('user:create')
TOKEN_URL = reverse('user:token')
ME_URL = reverse('user:me')
def create_user(**params):
    """Create and return a user with the given fields."""
    user_model = get_user_model()
    return user_model.objects.create_user(**params)
class PublicUserApiTests(TestCase):
    """Tests for the user API endpoints that require no authentication."""

    def setUp(self):
        self.client = APIClient()

    def test_create_valid_user_success(self):
        """A valid payload creates the user and omits the password."""
        payload = dict(
            email='test@gmail.com',
            password='testpass',
            name='test user',
        )
        response = self.client.post(CREATE_USER_URL, payload)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        created = get_user_model().objects.get(**response.data)
        self.assertTrue(created.check_password(payload['password']))
        self.assertNotIn('password', response.data)

    def test_user_exists(self):
        """Creating a user that already exists fails."""
        payload = dict(email='test@gmail.com', password='testpass', name='test')
        create_user(**payload)
        response = self.client.post(CREATE_USER_URL, payload)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_password_too_short(self):
        """Passwords of fewer than 5 characters are rejected."""
        payload = dict(email='test@gmail.com', password='pw', name='test')
        response = self.client.post(CREATE_USER_URL, payload)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        user_exists = get_user_model().objects.filter(
            email=payload['email']
        ).exists()
        self.assertFalse(user_exists)

    def test_create_token_for_user(self):
        """A token is issued for valid credentials."""
        payload = dict(email='test@gmail.com', password='testpass')
        create_user(**payload)
        response = self.client.post(TOKEN_URL, payload)
        self.assertIn('token', response.data)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_create_token_invalid_creds(self):
        """No token is issued when the password is wrong."""
        create_user(email='test@gmail.com', password='testpass')
        bad_payload = dict(email='test@gmail.com', password='wrong')
        response = self.client.post(TOKEN_URL, bad_payload)
        self.assertNotIn('token', response.data)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_create_token_no_user(self):
        """No token is issued for a user that does not exist."""
        payload = dict(email='test@gmail.com', password='testpass')
        response = self.client.post(TOKEN_URL, payload)
        self.assertNotIn('token', response.data)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_create_token_missing_field(self):
        """Both email and password are required to obtain a token."""
        response = self.client.post(TOKEN_URL,
                                    {'email': 'test@gmail.com', 'password': ''})
        self.assertNotIn('token', response.data)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_retrieve_user_unauthorised(self):
        """The me endpoint rejects unauthenticated requests."""
        response = self.client.get(ME_URL)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class PrivateUserApiTests(TestCase):
    """Tests for the user API endpoints that require authentication."""

    def setUp(self):
        self.user = create_user(
            email='test@gmail.com',
            password='testpass',
            name='Test user',
        )
        self.client = APIClient()
        self.client.force_authenticate(user=self.user)

    def test_reterive_profile_success(self):
        """The profile of the logged-in user is returned."""
        response = self.client.get(ME_URL)
        expected = {
            'name': self.user.name,
            'email': self.user.email,
        }
        self.assertEqual(response.data, expected)

    def test_post_me_not_allowed(self):
        """POST is not allowed on the me endpoint."""
        response = self.client.post(ME_URL, {})
        self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)

    def test_update_user_profile(self):
        """PATCH updates the authenticated user's profile."""
        new_values = dict(name='New name', password='newpassword')
        response = self.client.patch(ME_URL, new_values)
        self.user.refresh_from_db()
        self.assertEqual(self.user.name, new_values['name'])
        self.assertTrue(self.user.check_password(new_values['password']))
        self.assertEqual(response.status_code, status.HTTP_200_OK)
| [
"mohan.pandey@telekom-digital.com"
] | mohan.pandey@telekom-digital.com |
58a033ee28badd70d250749748d034a404e2e97b | f1f2c5333585c650f8a5e08b3bac16e5449b1948 | /puzzle/puzzle.py | b8f912847fd8628853d9a3fc072aabf2e97d8a0c | [] | no_license | AMIRmh/AI_search | 7f3e8c6eb1c36807ac106e2195778ffd556eaf33 | 0eee90352fbe347f309ad44616aa7ead21d1da19 | refs/heads/master | 2021-08-20T05:24:09.203453 | 2017-11-28T07:49:06 | 2017-11-28T07:49:06 | 112,302,452 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,636 | py | class state:
    def __init__(self):
        """Search node for the 8-puzzle A* solver."""
        self.path = []  # sequence of moves taken to reach this state
        self.val = [[] for i in range(3)]  # 3x3 board as rows of tile strings ('0' = blank)
        self.heuristic = 0  # h(n): estimated remaining cost
        self.paid = 0  # g(n): cost already spent to reach this state
    def full_paid(self):
        # f(n) = h(n) + g(n): the A* priority of this state.
        return self.heuristic + self.paid
    def __lt__(self, other):
        # Order states by f(n) so they can live in a priority queue / heap.
        return self.full_paid() < other.full_paid()
    def __eq__(self, other):
        # States compare equal when the boards match; path and costs are
        # ignored.  NOTE(review): defining __eq__ without __hash__ makes
        # instances unhashable in Python 3 — confirm states are never used
        # as dict/set keys.
        return self.val == other.val
def initialize():
    """Load the start board from 'puzzle/input_puzzle' and wrap it in a state.

    Each of the three lines in the file is one board row; spaces and the
    trailing newline are stripped, leaving one character per tile.
    """
    board = [[] for _ in range(3)]
    with open("puzzle/input_puzzle", "r") as f:
        for row, line in enumerate(f):
            cleaned = line.replace(' ', '').replace('\n', '')
            board[row].extend(cleaned)
    init_state = state()
    init_state.val = board
    init_state.heuristic = heuristic(init_state)
    return init_state
def actions(state):
    """Return the list of legal moves for the blank tile ('0').

    Moves name the direction a neighbouring tile slides into the blank:
    'L'/'R' for horizontal, 'U'/'D' for vertical.
    """
    moves = []
    for row in range(3):
        for col in range(3):
            if state.val[row][col] != '0':
                continue
            if col == 0:
                moves.append('R')
            elif col == 2:
                moves.append('L')
            else:
                moves.extend('LR')
            if row == 0:
                moves.append('D')
            elif row == 2:
                moves.append('U')
            else:
                moves.extend('DU')
    return moves
def goal(state):
    """Return True when the board matches the solved configuration."""
    target = [['0', '1', '2'], ['3', '4', '5'], ['6', '7', '8']]
    # Return the comparison directly instead of the if/True/else/False ladder.
    return state.val == target
def result(state, action):
    """Apply *action* ('U', 'R', 'D' or 'L') to the blank tile in place.

    Returns the (mutated) state.  An unrecognised action leaves the board
    untouched.

    BUG FIX: the original kept scanning after performing the swap, so for
    'R' and 'D' moves the relocated blank was found again later in the same
    scan and moved repeatedly, sliding it all the way to the board edge.
    We now stop as soon as the blank has been handled.
    """
    for i in range(3):
        for j in range(3):
            if state.val[i][j] == '0':
                if action == 'U':
                    state.val[i-1][j], state.val[i][j] = state.val[i][j], state.val[i-1][j]
                elif action == 'R':
                    state.val[i][j+1], state.val[i][j] = state.val[i][j], state.val[i][j+1]
                elif action == 'D':
                    state.val[i+1][j], state.val[i][j] = state.val[i][j], state.val[i+1][j]
                elif action == 'L':
                    state.val[i][j-1], state.val[i][j] = state.val[i][j], state.val[i][j-1]
                return state
    return state
def cost_step(state, action):
    """Uniform step cost: every move costs 1 regardless of state or action."""
    return 1
def heuristic(state):
    """Sum of Manhattan distances of every tile from its solved position."""
    # Tile t belongs at (t // 3, t % 3); build the same string-keyed lookup
    # table the original spelled out by hand.
    goal = {str(n): divmod(n, 3) for n in range(9)}
    cost = 0
    for i in range(3):
        for j in range(3):
            tile = state.val[i][j]
            if tile == '0':
                continue  # the blank does not contribute
            gi, gj = goal[tile]
            cost = cost + abs(gi - i) + abs(gj - j)
    return cost
| [
"amir.haghollahi@gmail.com"
] | amir.haghollahi@gmail.com |
07149dd3258d1a4bcf5c06dd29bf4920a2913ff3 | 62fba3c7fe99d29985f12d2915853c4269f5d2fc | /test/functional/feature_nulldummy.py | 8c6f5d9e4d7eea4da36d75a1008353a7fd11920f | [
"MIT"
] | permissive | cnccdev/wes_coind | fe88222d5f5ac9f0b91b54d2b97e34700175c912 | a15d64caa24dec050f997fe2031d518ee1d76836 | refs/heads/master | 2020-05-22T23:49:06.733413 | 2019-05-22T01:53:20 | 2019-05-22T01:53:20 | 186,550,211 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,442 | py | #!/usr/bin/env python3
# Copyright (c) 2016-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test NULLDUMMY softfork.
Connect to a single node.
Generate 2 blocks (save the coinbases for later).
Generate 427 more blocks.
[Policy/Consensus] Check that NULLDUMMY compliant transactions are accepted in the 430th block.
[Policy] Check that non-NULLDUMMY transactions are rejected before activation.
[Consensus] Check that the new NULLDUMMY rules are not enforced on the 431st block.
[Policy/Consensus] Check that the new NULLDUMMY rules are enforced on the 432nd block.
"""
from test_framework.blocktools import create_coinbase, create_block, create_transaction, add_witness_commitment
from test_framework.messages import CTransaction
from test_framework.script import CScript
from test_framework.test_framework import WescoinTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error, bytes_to_hex_str
import time
NULLDUMMY_ERROR = "non-mandatory-script-verify-flag (Dummy CHECKMULTISIG argument must be zero) (code 64)"
def trueDummy(tx):
    """Make the first input non-NULLDUMMY-compliant.

    The first element of a CHECKMULTISIG scriptSig must be an empty push
    (the "dummy"); this replaces it with OP_1 (0x51) so the transaction
    violates the NULLDUMMY rule.
    """
    rebuilt = []
    for index, element in enumerate(CScript(tx.vin[0].scriptSig)):
        if index == 0:
            # The dummy element must currently be the canonical empty push.
            assert(len(element) == 0)
            rebuilt.append(b'\x51')
        else:
            rebuilt.append(element)
    tx.vin[0].scriptSig = CScript(rebuilt)
    tx.rehash()
class NULLDUMMYTest(WescoinTestFramework):
    """Drive NULLDUMMY (BIP147) activation on a single node and check mempool
    and block acceptance rules before and after the activation height."""

    def set_test_params(self):
        self.num_nodes = 1
        self.setup_clean_chain = True
        # This script tests NULLDUMMY activation, which is part of the 'segwit' deployment, so we go through
        # normal segwit activation here (and don't use the default always-on behaviour).
        self.extra_args = [['-whitelist=127.0.0.1', '-vbparams=segwit:0:999999999999', '-addresstype=legacy', "-deprecatedrpc=addwitnessaddress"]]

    def skip_test_if_missing_module(self):
        self.skip_if_no_wallet()

    def run_test(self):
        """Mine to just before activation, then run the six scenarios from the module docstring."""
        self.address = self.nodes[0].getnewaddress()
        self.ms_address = self.nodes[0].addmultisigaddress(1, [self.address])['address']
        self.wit_address = self.nodes[0].addwitnessaddress(self.address)
        self.wit_ms_address = self.nodes[0].addmultisigaddress(1, [self.address], '', 'p2sh-segwit')['address']
        # Two blocks whose coinbases fund the test transactions.
        self.coinbase_blocks = self.nodes[0].generate(2) # Block 2
        coinbase_txid = []
        for i in self.coinbase_blocks:
            coinbase_txid.append(self.nodes[0].getblock(i)['tx'][0])
        self.nodes[0].generate(427) # Block 429
        # Track the chain tip manually so block_submit() can keep building on it.
        self.lastblockhash = self.nodes[0].getbestblockhash()
        self.tip = int("0x" + self.lastblockhash, 0)
        self.lastblockheight = 429
        self.lastblocktime = int(time.time()) + 429
        self.log.info("Test 1: NULLDUMMY compliant base transactions should be accepted to mempool and mined before activation [430]")
        test1txs = [create_transaction(self.nodes[0], coinbase_txid[0], self.ms_address, amount=49)]
        txid1 = self.nodes[0].sendrawtransaction(bytes_to_hex_str(test1txs[0].serialize_with_witness()), True)
        test1txs.append(create_transaction(self.nodes[0], txid1, self.ms_address, amount=48))
        txid2 = self.nodes[0].sendrawtransaction(bytes_to_hex_str(test1txs[1].serialize_with_witness()), True)
        test1txs.append(create_transaction(self.nodes[0], coinbase_txid[1], self.wit_ms_address, amount=49))
        txid3 = self.nodes[0].sendrawtransaction(bytes_to_hex_str(test1txs[2].serialize_with_witness()), True)
        self.block_submit(self.nodes[0], test1txs, False, True)
        self.log.info("Test 2: Non-NULLDUMMY base multisig transaction should not be accepted to mempool before activation")
        # trueDummy() corrupts the multisig dummy element, violating NULLDUMMY.
        test2tx = create_transaction(self.nodes[0], txid2, self.ms_address, amount=47)
        trueDummy(test2tx)
        assert_raises_rpc_error(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, bytes_to_hex_str(test2tx.serialize_with_witness()), True)
        self.log.info("Test 3: Non-NULLDUMMY base transactions should be accepted in a block before activation [431]")
        self.block_submit(self.nodes[0], [test2tx], False, True)
        self.log.info("Test 4: Non-NULLDUMMY base multisig transaction is invalid after activation")
        # test6txs keeps uncorrupted copies for the final compliant-acceptance check.
        test4tx = create_transaction(self.nodes[0], test2tx.hash, self.address, amount=46)
        test6txs=[CTransaction(test4tx)]
        trueDummy(test4tx)
        assert_raises_rpc_error(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, bytes_to_hex_str(test4tx.serialize_with_witness()), True)
        self.block_submit(self.nodes[0], [test4tx])
        self.log.info("Test 5: Non-NULLDUMMY P2WSH multisig transaction invalid after activation")
        test5tx = create_transaction(self.nodes[0], txid3, self.wit_address, amount=48)
        test6txs.append(CTransaction(test5tx))
        test5tx.wit.vtxinwit[0].scriptWitness.stack[0] = b'\x01'
        assert_raises_rpc_error(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, bytes_to_hex_str(test5tx.serialize_with_witness()), True)
        self.block_submit(self.nodes[0], [test5tx], True)
        self.log.info("Test 6: NULLDUMMY compliant base/witness transactions should be accepted to mempool and in block after activation [432]")
        for i in test6txs:
            self.nodes[0].sendrawtransaction(bytes_to_hex_str(i.serialize_with_witness()), True)
        self.block_submit(self.nodes[0], test6txs, True, True)

    def block_submit(self, node, txs, witness = False, accept = False):
        """Build a block containing *txs* on the tracked tip, submit it, and
        assert that the node accepted (or rejected) it as expected."""
        block = create_block(self.tip, create_coinbase(self.lastblockheight + 1), self.lastblocktime + 1)
        block.nVersion = 4
        for tx in txs:
            tx.rehash()
            block.vtx.append(tx)
        block.hashMerkleRoot = block.calc_merkle_root()
        # Short-circuit: only adds the witness commitment when witness=True.
        witness and add_witness_commitment(block)
        block.rehash()
        block.solve()
        node.submitblock(bytes_to_hex_str(block.serialize(True)))
        if (accept):
            # Block accepted: advance the locally tracked tip state.
            assert_equal(node.getbestblockhash(), block.hash)
            self.tip = block.sha256
            self.lastblockhash = block.hash
            self.lastblocktime += 1
            self.lastblockheight += 1
        else:
            # Block rejected: the best hash must be unchanged.
            assert_equal(node.getbestblockhash(), self.lastblockhash)
# Standard functional-test entry point.
if __name__ == '__main__':
    NULLDUMMYTest().main()
| [
"cncc.dev@gmail.com"
] | cncc.dev@gmail.com |
263884e8abf41927260ccc9371f5b38ee469439b | cf8b3b4c1cf1325c757424a8ea54c6ee53519648 | /Python/3、Tree/pre_in_Tree.py | 68b92268a066f41f25fcadb58f6d64a813ddc423 | [] | no_license | YeQiang1075736553/DataStructureAndAlgorithm | df7dea036dcaed6ef1ea94e7402b3f5a17cc2d8e | 1d6651e49b56980c7ef44e2c679c81038db5a3f8 | refs/heads/master | 2020-03-26T23:43:22.890795 | 2018-08-21T23:18:44 | 2018-08-21T23:18:44 | 145,562,245 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,130 | py | # -*- coding:UTF-8 -*-
'''
前序序列和中序序列构造二叉树
思路:
参见
编程环境:
Python3.5.2
作者:
CSDN博客:https://my.csdn.net/yeqiang19910412
Github:https://github.com/YeQiang1075736553
日期:
2018.8.13
'''
class Node():
    """Binary-tree node holding a payload and two child links."""
    def __init__(self,data=None,lchild=None,rchild=None):
        self.data = data # payload stored at this node
        self.lchild = lchild # left subtree (Node or None)
        self.rchild = rchild # right subtree (Node or None)
class BinaryTree():
    """Binary tree rebuilt from its preorder and inorder traversal sequences."""

    def pre_in_construct_tree(self, pre_order, mid_order):
        """Recursively construct the tree from preorder + inorder sequences.

        The first preorder element is the root; its index in the inorder
        sequence splits that sequence into the left and right subtrees.
        Returns the root Node, or None for empty input.
        """
        if not pre_order or not mid_order:
            return None
        root_data = pre_order[0]
        root = Node(root_data)
        i = mid_order.index(root_data)
        root.lchild = self.pre_in_construct_tree(pre_order[1:1 + i], mid_order[:i])
        root.rchild = self.pre_in_construct_tree(pre_order[1 + i:], mid_order[i + 1:])
        return root

    def pre_order_traverse(self, root):
        """Print the tree in preorder (root, left, right), recursively."""
        if root is None:
            return
        print(root.data, end=" ")
        self.pre_order_traverse(root.lchild)
        self.pre_order_traverse(root.rchild)

    def in_order_traverse(self, root):
        """Print the tree in inorder (left, root, right), recursively."""
        if root is None:
            return
        self.in_order_traverse(root.lchild)
        print(root.data, end=" ")
        self.in_order_traverse(root.rchild)

    def post_order_traverse(self, root):
        """Print the tree in postorder (left, right, root), recursively."""
        if root is None:
            return
        self.post_order_traverse(root.lchild)
        self.post_order_traverse(root.rchild)
        print(root.data, end=" ")

    def level_order_traverse(self, root):
        """Print the tree level by level (breadth-first) using a queue."""
        from collections import deque  # local import keeps this patch self-contained
        if root is None:
            return
        # deque.popleft() is O(1); the previous list.pop(0) cost O(n) per node.
        queue = deque([root])
        while queue:
            node = queue.popleft()
            print(node.data, end=" ")
            if node.lchild is not None:
                queue.append(node.lchild)
            if node.rchild is not None:
                queue.append(node.rchild)
if __name__ == '__main__':
    # Demo: rebuild the tree from its preorder/inorder strings and print
    # every traversal order.
    str_pre = "abdgcefh"
    str_mid = "dgbaechf"
    pre_order = list(str_pre)
    mid_order = list(str_mid)
    binary_tree = BinaryTree()
    root = binary_tree.pre_in_construct_tree(pre_order, mid_order)
    print("递归实现前序遍历")
    binary_tree.pre_order_traverse(root) # recursive preorder traversal
    print("\n")
    print("递归实现中序遍历")
    binary_tree.in_order_traverse(root) # recursive inorder traversal
    print("\n")
    print("递归实现后序遍历")
    binary_tree.post_order_traverse(root) # recursive postorder traversal
    print("\n")
    print("队列实现层序遍历") # queue-based level-order traversal
    binary_tree.level_order_traverse(root)
| [
"1075736553@qq.com"
] | 1075736553@qq.com |
a123b05a0d90ab5de3dad6be2814c36888231339 | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/8/uzi.py | aa6e8089d0fcbff6ad8c6136fb49730dbf1ce425 | [] | no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
if len(lineRemaining) > 2:
#data to print
lineRemaining = lineRemaining[1:-1]
print ' '.join(lineRemaining)
else:
print
def main(fileName):
    # Interpret the file line by line: lines starting with the keyword
    # `uZI` are print directives; any other line aborts with ERROR.
    # NOTE(review): a blank line makes data[0] raise IndexError — confirm
    # input files never contain one.  (Python 2 source.)
    with open(fileName) as f:
        for line in f:
            data = line.split()
            if data[0] == 'uZI':
                printFunction(data[1:])
            else:
                print 'ERROR'
                return
main(sys.argv[1]) | [
"juliettaylorswift@gmail.com"
] | juliettaylorswift@gmail.com |
3aa830318348cdd53f43460df111b7d29145f640 | 283eb1c6c7ddbb19a0f99715ac8f4c29930de3da | /setup.py | ac5c9c4928bf978cf89b6852cf1cc9667a4b1454 | [
"MIT"
] | permissive | seekingalpha/aws-roleshell | b15db53f5e0e45098c7ea11ba294f14ff6e28bff | d3c8ecbecc9019767eec4c823a2f650b229d30b4 | refs/heads/master | 2020-04-05T23:31:53.396744 | 2016-09-06T09:33:13 | 2016-10-09T12:08:33 | 52,449,900 | 1 | 0 | null | 2016-02-24T14:49:21 | 2016-02-24T14:49:20 | null | UTF-8 | Python | false | false | 201 | py | from setuptools import setup
setup(name='aws-roleshell',
version='0.0.1',
author="Josh Snyder",
py_modules=['aws_roleshell'],
author_email="hashbrowncipher@gmail.com",
)
| [
"josh@code406.com"
] | josh@code406.com |
bfca5740597c9e3e15c4023c95ef63b75418ae47 | 43950752bd0559ca04676c164e08cf398d0f28a6 | /passengers/views.py | f7eb5bf0b1645e8489ef3415d38b7f5ca907ffdd | [] | no_license | Asceryx/Covoiturage | 68ce7dbf10a9eed829cd81e0c9331c8d1a7cb35c | 770503f5a38ab38a579fd84ef919703a2820056a | refs/heads/master | 2020-12-10T00:17:19.199660 | 2020-04-13T14:43:53 | 2020-04-13T14:43:53 | 233,455,752 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 193 | py | from django.http import HttpResponse
from django.contrib.auth.decorators import login_required
@login_required()
def passenger(request):
return HttpResponse("Main interface for passenger")
| [
"jhayrault@gmail.com"
] | jhayrault@gmail.com |
d1b54684da8c3cac3eb4b8f014adcc67f8fb93cd | 7025e8602cd7fa4632339b9716c2d7a67d03d906 | /nextpage.py | 78fc2bb0773debb065475dedfcb012564abaf61c | [] | no_license | Likho1969/tkinter-login | b2688de16b1dde4227b1bd19f4f130ee379b8f89 | 59b24a98f8276be078ba3d994832ac899391c7cb | refs/heads/main | 2023-04-30T23:31:52.514144 | 2021-05-24T14:59:28 | 2021-05-24T14:59:28 | 370,390,331 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,563 | py |
from tkinter import * # importing everything in the tkinter module
from tkinter import messagebox # import the messagebox from the tkinter module
root = Tk() # create window
root.title("NextPage") # name the window
root.geometry("600x600") # setting the window size
root.config(bg="dark slate gray") # changing the window background-color
# Python program for implementation of Insertion Sort
# Function to do insertion sort
def insertionSort(arr):
# Loop from the second element of the array until
# the last element
# Traverse through 1 to len(arr)
for i in range(1, len(arr)): # sets up the loop that determines the key_item that the function will position during each iteration
# This is the element we want to position in its
# correct place
key = arr[i] # initializes key_item with the item that the function is trying to place
# Move elements of arr[0..i-1], that are
# greater than key, to one position ahead
# of their current position
# or
# Initialize the variable that will be used to
# find the correct position of the element referenced
# by `key_item`
j = i-1 # initializes a variable that will consecutively point to each element to the left of key item
# Run through the list of items (the left
# portion of the array) and find the correct position
# of the element referenced by `key_item`. Do this only
# if `key_item` is smaller than its adjacent values.
while j >= 0 and key < arr[j]: # compares key_item with each value to its left using a while loop, shifting the elements to make room to place key_item
# Shift the value one position to the left
# and reposition j to point to the next element
# (from right to left)
arr[j + 1] = arr[j]
j -= 1
# When you finish shifting the elements, you can position
# `key_item` in its correct location
arr[j + 1] = key # positions key_item in its correct place after the algorithm shifts all the larger values to the right
# Driver code to test above
arr = [42, 12, 13, 89, 63, 11]
insertionSort(arr)
sorted_list = Label(root, text="11, 12, 13, 42, 63, 89")
sorted_list.place(x=200, y=100)
def exit():
root.destroy()
exit_btn = Button(root, text="Exit Program", borderwidth="10", bg="Aqua", font=("Consolas 15 bold"), command=exit)
exit_btn.place(x=200, y=300)
# starting the app
root.mainloop()
| [
"likhokapesi04@gmail.com"
] | likhokapesi04@gmail.com |
15261973dd39d79408de00f20eb9f216067056de | e56214188faae8ebfb36a463e34fc8324935b3c2 | /test/test_hyperflex_st_platform_cluster_resiliency_info_ref.py | b4d5a1014524481f5a6bfa8835557ef078252d03 | [
"Apache-2.0"
] | permissive | CiscoUcs/intersight-python | 866d6c63e0cb8c33440771efd93541d679bb1ecc | a92fccb1c8df4332ba1f05a0e784efbb4f2efdc4 | refs/heads/master | 2021-11-07T12:54:41.888973 | 2021-10-25T16:15:50 | 2021-10-25T16:15:50 | 115,440,875 | 25 | 18 | Apache-2.0 | 2020-03-02T16:19:49 | 2017-12-26T17:14:03 | Python | UTF-8 | Python | false | false | 2,123 | py | # coding: utf-8
"""
Cisco Intersight
Cisco Intersight is a management platform delivered as a service with embedded analytics for your Cisco and 3rd party IT infrastructure. This platform offers an intelligent level of management that enables IT organizations to analyze, simplify, and automate their environments in more advanced ways than the prior generations of tools. Cisco Intersight provides an integrated and intuitive management experience for resources in the traditional data center as well as at the edge. With flexible deployment options to address complex security needs, getting started with Intersight is quick and easy. Cisco Intersight has deep integration with Cisco UCS and HyperFlex systems allowing for remote deployment, configuration, and ongoing maintenance. The model-based deployment works for a single system in a remote location or hundreds of systems in a data center and enables rapid, standardized configuration and deployment. It also streamlines maintaining those systems whether you are working with small or very large configurations. # noqa: E501
The version of the OpenAPI document: 1.0.9-1295
Contact: intersight@cisco.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import intersight
from intersight.models.hyperflex_st_platform_cluster_resiliency_info_ref import HyperflexStPlatformClusterResiliencyInfoRef # noqa: E501
from intersight.rest import ApiException
class TestHyperflexStPlatformClusterResiliencyInfoRef(unittest.TestCase):
"""HyperflexStPlatformClusterResiliencyInfoRef unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testHyperflexStPlatformClusterResiliencyInfoRef(self):
"""Test HyperflexStPlatformClusterResiliencyInfoRef"""
# FIXME: construct object with mandatory attributes with example values
# model = intersight.models.hyperflex_st_platform_cluster_resiliency_info_ref.HyperflexStPlatformClusterResiliencyInfoRef() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"ucs-build@github.com"
] | ucs-build@github.com |
6592fd4e7614f3aa792b9305227977f7af952754 | 6994917b9d22e9e15e578a0e5c75dcf4ce3cb022 | /perfil/migrations/0025_auto_20200724_2157.py | e5dd79e636927224e5bd7b39a7907b3d99b39094 | [] | no_license | linikerunk/rh-ticket | 59ad6411a3d08c90c2704b37ba9bba67ea7f7754 | bd8edd3eb1ea6cfe04fee03a4f41049a84c1e14a | refs/heads/master | 2023-01-06T21:25:06.851369 | 2020-10-29T20:32:53 | 2020-10-29T20:32:53 | 250,346,547 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 543 | py | # Generated by Django 2.2.9 on 2020-07-25 00:57
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('perfil', '0024_auto_20200724_2049'),
]
operations = [
migrations.AlterField(
model_name='funcionario',
name='centro_de_custo_link',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, to='perfil.CentroDeCusto', verbose_name='Centro de Custo link'),
),
]
| [
"linikerenem@gmail.com"
] | linikerenem@gmail.com |
b2e93044857996da4f7864f49e1ad69a6546cb0b | 7c15f211adc9e9eb9f66ccdd570c9f38dff7ea8d | /packages/autorest.python/test/vanilla/version-tolerant/Expected/AcceptanceTests/HttpVersionTolerant/httpinfrastructureversiontolerant/aio/__init__.py | 154a90083e55b0b0e6ba806098e8782f6d24a683 | [
"LicenseRef-scancode-generic-cla",
"MIT"
] | permissive | Azure/autorest.python | cc4bfbf91ae11535731cad37cedd6b733edf1ebd | a00d7aaa3753ef05cb5a0d38c664a90869478d44 | refs/heads/main | 2023-09-03T06:58:44.246200 | 2023-08-31T20:11:51 | 2023-08-31T20:11:51 | 100,315,955 | 47 | 40 | MIT | 2023-09-14T21:00:21 | 2017-08-14T22:58:33 | Python | UTF-8 | Python | false | false | 865 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from ._client import AutoRestHttpInfrastructureTestService
try:
from ._patch import __all__ as _patch_all
from ._patch import * # pylint: disable=unused-wildcard-import
except ImportError:
_patch_all = []
from ._patch import patch_sdk as _patch_sdk
__all__ = [
"AutoRestHttpInfrastructureTestService",
]
__all__.extend([p for p in _patch_all if p not in __all__])
_patch_sdk()
| [
"noreply@github.com"
] | noreply@github.com |
890b7cd1fdb1476cebafe5b3f330f5fda0a0a879 | 35498bf65ff403e05c6ffcd06c0fd980b81cc921 | /web_app/app.py | 2ed35dfc0613e71b2a162dfd0384d429edb8fc4d | [] | no_license | teguhteja/flask01 | 6cd297a054071e2dd7c8f9ece18d0a874fe20e3d | 6b589001cf15217ccf9e48958a5f5924aac14a46 | refs/heads/master | 2020-12-15T19:02:00.135111 | 2017-12-15T06:42:38 | 2017-12-15T06:42:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,154 | py | from flask import Flask, render_template
from flask_admin import Admin
from flask_admin.contrib.sqla import ModelView
from models import db, Page, Menu
from views import PageModelView
def create_app():
app = Flask(__name__)
app.config.from_pyfile('settings.py')
db.init_app(app)
admin = Admin(app, name='Flask01', template_mode='bootstrap3')
admin.add_view(PageModelView(Page, db.session))
admin.add_view(ModelView(Menu, db.session))
@app.route('/')
@app.route('/<url>')
def index(url=None):
print('here', url)
if url is not None:
# contoh /about
page = Page.query.filter_by(url=url).first()
else:
# contoh /
page = Page.query.filter_by(is_homepage=True).first()
if page is None:
# TODO cute 404
return 'Page not found for {} or homepage not set'.format(url)
contents = 'empty'
if page is not None:
contents = page.contents
menu = Menu.query.order_by('order')
return render_template('index.html', TITLE='Flask-01', CONTENT=contents, menu=menu)
return app | [
"eko@monetizemore.com"
] | eko@monetizemore.com |
f728396dde2da2e70035d43cde8fbfc230b9f0e2 | 1c5d8e6d1fa23ee899862d1f8d5bae58897902bc | /docs/conf.py | e399d1d2f9a27c4e8a70e5d99bf696e317b344b7 | [
"MIT"
] | permissive | mbe9a/EndpointPlotter | 9aa4ffcd8eb64a8864caa97562c5183a8b258a56 | 037cf1b71c75b659b0fde48fca0c4b0eb8395fa7 | refs/heads/master | 2020-12-08T04:42:05.070206 | 2020-08-21T16:02:08 | 2020-08-21T16:02:08 | 232,888,268 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,584 | py | # Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
# -- Project information -----------------------------------------------------
project = 'Oxford Endpoint Plotter'
copyright = '2020, Michael Eller'
author = 'Michael Eller'
# The full version, including alpha/beta/rc tags
release = '0.0.0'
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
# 'autoapi.extension',
'sphinx.ext.coverage',
'sphinx.ext.doctest',
'sphinx.ext.extlinks',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode',
# 'sphinx.ext.linkcode'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
autodoc_default_options = {
'special-members': '__init__',
'member-order': 'bysource'
}
# autoapi_dirs = ['../']
def linkcode_resolve(domain, info):
if domain != 'py':
return None
if not info['module']:
return None
filename = info['module'].replace('.', '/')
return "../../../%s.py" % filename
| [
"mbe9a@virginia.edu"
] | mbe9a@virginia.edu |
457b3c5c3ef7b6c87f2a2cb3e30e182a396b8713 | 7eadfc1711278719d4f02cb506f1d1df88cc66af | /model/supervised/cnn.py | 4110058df864f680190dd1ee7dbbb410a1114a2a | [] | no_license | fagan2888/Trading_by_Imitation_Learning | 33a88060e45e38d83b9b0972072cc1bcddcf0bdc | 0f6820609c64dd0a1e697ec2ac4566b60478025d | refs/heads/master | 2020-11-30T23:06:21.280800 | 2019-05-16T06:27:01 | 2019-05-16T06:27:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,477 | py | """
Train a supervised CNN model using optimal stock as label
"""
from keras.models import Sequential
from keras.layers import Dense, Dropout, Flatten
from keras.layers import Conv2D
from keras.models import load_model
from keras.optimizers import Adam
from ..base_model import BaseModel
from utils.data import normalize
import numpy as np
import tensorflow as tf
class StockCNN(BaseModel):
def __init__(self, nb_classes, window_length, weights_file='weights/cnn.h5'):
self.model = None
self.weights_file = weights_file
self.nb_classes = nb_classes
self.window_length = window_length
def build_model(self, load_weights=True):
""" Load training history from path
Args:
load_weights (Bool): True to resume training from file or just deploying.
Otherwise, training from scratch.
Returns:
"""
if load_weights:
self.model = load_model(self.weights_file)
print('Successfully loaded model')
else:
self.model = Sequential()
self.model.add(
Conv2D(filters=32, kernel_size=(1, 3), input_shape=(self.nb_classes, self.window_length, 1),
activation='relu'))
self.model.add(Dropout(0.5))
self.model.add(Conv2D(filters=32, kernel_size=(1, self.window_length - 2), activation='relu'))
self.model.add(Dropout(0.5))
self.model.add(Flatten())
self.model.add(Dense(64, activation='relu'))
self.model.add(Dropout(0.5))
self.model.add(Dense(64, activation='relu'))
self.model.add(Dropout(0.5))
self.model.add(Dense(self.nb_classes, activation='softmax'))
self.model.compile(loss='categorical_crossentropy',
optimizer=Adam(lr=1e-3),
metrics=['accuracy'])
print('Built model from scratch')
self.model._make_predict_function()
self.graph = tf.get_default_graph()
def train(self, X_train, Y_train, X_val, Y_val, verbose=True):
continue_train = True
while continue_train:
self.model.fit(X_train, Y_train, batch_size=128, epochs=10, validation_data=(X_val, Y_val),
shuffle=True, verbose=verbose)
save_weights = input('Type True to save weights\n')
if save_weights:
self.model.save(self.weights_file)
continue_train = input("True to continue train, otherwise stop training...\n")
print('Finish.')
def evaluate(self, X_test, Y_test, verbose=False):
return self.model.evaluate(X_test, Y_test, verbose=verbose)
def predict(self, X_test, verbose=False):
return self.model.predict(X_test, verbose=verbose)
def predict_single(self, observation):
""" Predict the action of a single observation
Args:
observation: (num_stocks + 1, window_length)
Returns: a single action array with shape (num_stocks + 1,)
"""
obsX = observation[:, -self.window_length:, 3:4] / observation[:, -self.window_length:, 0:1]
obsX = normalize(obsX)
obsX = np.expand_dims(obsX, axis=0)
with self.graph.as_default():
return np.squeeze(self.model.predict(obsX), axis=0)
| [
"noreply@github.com"
] | noreply@github.com |
a467cc78728963d989a66e2ae338212b606e652f | 037d5d18b9b81205305e158d7d9fdad131d318cb | /tests/test_custom_version_base_class.py | cc5e981dbda96d06ba8914b794c8350a37b1e0a2 | [] | permissive | kvesteri/sqlalchemy-continuum | ee7acf2c961b27eab3dd8f61598d9159d801ee21 | a7a6bd7952185b1f82af985c0271834d886a617c | refs/heads/master | 2023-08-24T09:14:33.515416 | 2022-11-17T05:41:09 | 2023-07-24T23:37:12 | 10,312,759 | 479 | 134 | BSD-3-Clause | 2023-09-12T20:07:04 | 2013-05-27T10:30:55 | Python | UTF-8 | Python | false | false | 1,114 | py | import sqlalchemy as sa
from sqlalchemy_continuum import version_class
from tests import TestCase
class TestCommonBaseClass(TestCase):
def create_models(self):
class TextItem(self.Model):
__tablename__ = 'text_item'
__versioned__ = {}
id = sa.Column(sa.Integer, autoincrement=True, primary_key=True)
class ArticleVersionBase(self.Model):
__abstract__ = True
class Article(self.Model):
__tablename__ = 'article'
__versioned__ = {
'base_classes': (ArticleVersionBase, )
}
id = sa.Column(sa.Integer, autoincrement=True, primary_key=True)
self.TextItem = TextItem
self.Article = Article
self.ArticleVersionBase = ArticleVersionBase
def test_each_class_has_distinct_translation_class(self):
class_ = version_class(self.TextItem)
assert class_.__name__ == 'TextItemVersion'
class_ = version_class(self.Article)
assert class_.__name__ == 'ArticleVersion'
assert issubclass(class_, self.ArticleVersionBase)
| [
"konsta.vesterinen@gmail.com"
] | konsta.vesterinen@gmail.com |
e1fee3842a2ba41bf122a82af2236ea8f8fad717 | 6fcfb638fa725b6d21083ec54e3609fc1b287d9e | /python/v_sniper/sniper-master/cron.py | 39485faf71647c6676d5605dd32b483c4befdc2b | [] | no_license | LiuFang816/SALSTM_py_data | 6db258e51858aeff14af38898fef715b46980ac1 | d494b3041069d377d6a7a9c296a14334f2fa5acc | refs/heads/master | 2022-12-25T06:39:52.222097 | 2019-12-12T08:49:07 | 2019-12-12T08:49:07 | 227,546,525 | 10 | 7 | null | 2022-12-19T02:53:01 | 2019-12-12T07:29:39 | Python | UTF-8 | Python | false | false | 3,777 | py | #!/usr/bin/env python
""" This represents the cronjob that runs to check for course openings"""
from flaskext.mail import Message
import urllib
from models import db, Snipe
from soc import Soc
from app import mail, app
import datetime
from collections import namedtuple
soc = Soc()
EMAIL_SENDER = "Course Sniper <sniper@rutgers.io>"
Section = namedtuple('Section', ['number', 'index'])
def poll(subject, result=False):
""" Poll a subject for open courses. """
app.logger.warning("Polling for %s" % (subject))
# get all the course data from SOC
courses = soc.get_courses(subject)
# build information about which courses/sections are currently open.
open_data = {}
if courses is not None:
for course in courses:
course_number = course['courseNumber']
# remove leading zeroes
if course_number.isdigit():
course_number = str(int(course_number))
open_data[course_number] = []
for section in course['sections']:
section_number = section['number']
if section_number.isdigit():
section_number = str(int(section_number))
# section is open
if section['openStatus']:
open_data[course_number].append(Section(section_number, section['index']))
# all of these course numbers are open
open_courses = [course for course, open_sections in open_data.iteritems() if open_sections]
if result:
return open_data
if open_courses:
# Notify people that were looking for these courses
snipes = Snipe.query.filter(Snipe.course_number.in_(open_courses), Snipe.subject==str(subject))
for snipe in snipes:
for section in open_data[snipe.course_number]:
if section.number == snipe.section:
notify(snipe, section.index)
else:
app.logger.warning('Subject "%s" has no open courses' % (subject))
else:
app.logger.warning('Subject "%s" is not valid' % (subject))
def notify(snipe, index):
""" Notify this snipe that their course is open"""
course = '%s:%s:%s' % (snipe.subject, snipe.course_number, snipe.section)
if snipe.user.email:
attributes = {
'email': snipe.user.email,
'subject': snipe.subject,
'course_number': snipe.course_number,
'section': snipe.section,
}
# build the url for prepopulated form
url = 'http://sniper.rutgers.io/?%s' % (urllib.urlencode(attributes))
register_url = 'https://sims.rutgers.edu/webreg/editSchedule.htm?login=cas&semesterSelection=12017&indexList=%s' % (index)
email_text = 'A course (%s) that you were watching looks open. Its index number is %s. Click the link below to register for it!\n\n %s \n\n If you don\'t get in, visit this URL: \n\n %s \n\n to continue watching it.\n\n Send any feedback to sniper@rutgers.io' % (course, index, register_url, url)
# send out the email
message = Message('[Course Sniper](%s) is open' %(course), sender=EMAIL_SENDER)
message.body = email_text
message.add_recipient(snipe.user.email)
message.add_recipient(snipe.user.email)
mail.send(message)
db.session.delete(snipe)
db.session.commit()
app.logger.warning('Notified user: %s about snipe %s' % (snipe.user, snipe))
if __name__ == '__main__':
# get all the courses that should be queried.
app.logger.warning("----------- Running the Cron %s " % (str(datetime.datetime.now())))
subjects = db.session.query(Snipe.subject).distinct().all()
for subject in subjects:
poll(subject[0])
| [
"659338505@qq.com"
] | 659338505@qq.com |
e4f5036c71ab319072332f4c65c6f40154c2c1c7 | 54d408d9be3a99f2d2f02c48fd092e96eb267e6f | /src/Libraries.py | 62ceb49d25e199b77158fa871e05513295e22d23 | [] | no_license | liipeandre/pacman | b2a73eb1c25076c9c69f3b388059f4d8f9b8b907 | aba31e90195902abd56db5fc5b72f959398b56b1 | refs/heads/master | 2023-04-01T14:19:51.693538 | 2021-04-08T21:13:06 | 2021-04-08T21:13:06 | 151,121,246 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 181 | py | # blbliotecas utilizadas
from pygame import *
from random import shuffle, choice
from os import system, environ
from typing import *
from threading import Thread
from numpy import * | [
"andrefelipepereira@hotmail.com"
] | andrefelipepereira@hotmail.com |
1794efdeb3c983dd7ab79c42f207bfedf837745d | adb1cab5223ecfe8643f40b846c270b072ff9912 | /tests/components/isy994/test_config_flow.py | bf08e6526ba10874ec7c574d39df66d53a54b675 | [
"Apache-2.0"
] | permissive | Chris-V/home-assistant | ec7e139595d0412323acad267800673d0b1545a8 | 7224012016b8f0965081edd546fa200902432f71 | refs/heads/dev | 2023-03-11T01:43:00.458758 | 2021-05-13T09:15:02 | 2021-05-13T09:15:02 | 72,964,787 | 0 | 0 | Apache-2.0 | 2023-02-22T06:15:14 | 2016-11-06T02:08:03 | Python | UTF-8 | Python | false | false | 13,770 | py | """Test the Universal Devices ISY994 config flow."""
from unittest.mock import patch
from homeassistant import config_entries, data_entry_flow, setup
from homeassistant.components import dhcp, ssdp
from homeassistant.components.isy994.config_flow import CannotConnect
from homeassistant.components.isy994.const import (
CONF_IGNORE_STRING,
CONF_RESTORE_LIGHT_STATE,
CONF_SENSOR_STRING,
CONF_TLS_VER,
CONF_VAR_SENSOR_STRING,
DOMAIN,
ISY_URL_POSTFIX,
UDN_UUID_PREFIX,
)
from homeassistant.config_entries import SOURCE_DHCP, SOURCE_IMPORT, SOURCE_SSDP
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant
from tests.common import MockConfigEntry
# Connection details shared by all of the mocked flows below.
MOCK_HOSTNAME = "1.1.1.1"
MOCK_USERNAME = "test-username"
MOCK_PASSWORD = "test-password"

# Don't use the integration defaults here to make sure they're being set correctly.
MOCK_TLS_VERSION = 1.2
MOCK_IGNORE_STRING = "{IGNOREME}"
MOCK_RESTORE_LIGHT_STATE = True
MOCK_SENSOR_STRING = "IMASENSOR"
MOCK_VARIABLE_SENSOR_STRING = "HomeAssistant."

# Payload submitted through the "user" step of the config flow.
MOCK_USER_INPUT = {
    CONF_HOST: f"http://{MOCK_HOSTNAME}",
    CONF_USERNAME: MOCK_USERNAME,
    CONF_PASSWORD: MOCK_PASSWORD,
    CONF_TLS_VER: MOCK_TLS_VERSION,
}
# YAML-import payload using an https:// host URL.
MOCK_IMPORT_WITH_SSL = {
    CONF_HOST: f"https://{MOCK_HOSTNAME}",
    CONF_USERNAME: MOCK_USERNAME,
    CONF_PASSWORD: MOCK_PASSWORD,
    CONF_TLS_VER: MOCK_TLS_VERSION,
}
# Minimal YAML-import payload containing only the required keys.
MOCK_IMPORT_BASIC_CONFIG = {
    CONF_HOST: f"http://{MOCK_HOSTNAME}",
    CONF_USERNAME: MOCK_USERNAME,
    CONF_PASSWORD: MOCK_PASSWORD,
}
# YAML-import payload exercising every supported option.
MOCK_IMPORT_FULL_CONFIG = {
    CONF_HOST: f"http://{MOCK_HOSTNAME}",
    CONF_USERNAME: MOCK_USERNAME,
    CONF_PASSWORD: MOCK_PASSWORD,
    CONF_IGNORE_STRING: MOCK_IGNORE_STRING,
    CONF_RESTORE_LIGHT_STATE: MOCK_RESTORE_LIGHT_STATE,
    CONF_SENSOR_STRING: MOCK_SENSOR_STRING,
    CONF_TLS_VER: MOCK_TLS_VERSION,
    CONF_VAR_SENSOR_STRING: MOCK_VARIABLE_SENSOR_STRING,
}

# Device identity returned by the mocked ISY configuration/validation call.
MOCK_DEVICE_NAME = "Name of the device"
MOCK_UUID = "CE:FB:72:31:B7:B9"
MOCK_VALIDATED_RESPONSE = {"name": MOCK_DEVICE_NAME, "uuid": MOCK_UUID}

# Patch targets for the PyISY objects and the integration's setup hooks.
PATCH_CONFIGURATION = "homeassistant.components.isy994.config_flow.Configuration"
PATCH_CONNECTION = "homeassistant.components.isy994.config_flow.Connection"
PATCH_ASYNC_SETUP = "homeassistant.components.isy994.async_setup"
PATCH_ASYNC_SETUP_ENTRY = "homeassistant.components.isy994.async_setup_entry"
async def test_form(hass: HomeAssistant):
    """Test we get the form."""
    await setup.async_setup_component(hass, "persistent_notification", {})
    init_result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    assert init_result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert init_result["errors"] == {}

    with patch(PATCH_CONFIGURATION) as config_cls, patch(
        PATCH_CONNECTION
    ) as connection_cls, patch(
        PATCH_ASYNC_SETUP, return_value=True
    ) as setup_mock, patch(
        PATCH_ASYNC_SETUP_ENTRY,
        return_value=True,
    ) as setup_entry_mock:
        # Simulate a reachable ISY that validates successfully.
        connection_cls.return_value.get_config.return_value = None
        config_cls.return_value = MOCK_VALIDATED_RESPONSE
        entry_result = await hass.config_entries.flow.async_configure(
            init_result["flow_id"],
            MOCK_USER_INPUT,
        )
        await hass.async_block_till_done()

    assert entry_result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    assert entry_result["title"] == f"{MOCK_DEVICE_NAME} ({MOCK_HOSTNAME})"
    assert entry_result["result"].unique_id == MOCK_UUID
    assert entry_result["data"] == MOCK_USER_INPUT
    assert len(setup_mock.mock_calls) == 1
    assert len(setup_entry_mock.mock_calls) == 1
async def test_form_invalid_host(hass: HomeAssistant):
    """Test we handle a host that is missing the http(s):// protocol prefix."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    result2 = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        {
            # Use the CONF_* constants for consistency with the other tests
            # in this module; the missing protocol triggers the host error.
            CONF_HOST: MOCK_HOSTNAME,  # Test with missing protocol (http://)
            CONF_USERNAME: MOCK_USERNAME,
            CONF_PASSWORD: MOCK_PASSWORD,
            CONF_TLS_VER: MOCK_TLS_VERSION,
        },
    )

    assert result2["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result2["errors"] == {"base": "invalid_host"}
async def test_form_invalid_auth(hass: HomeAssistant):
    """Test we handle invalid auth."""
    init_result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )

    # PyISY signals rejected credentials by raising ValueError from Connection().
    with patch(PATCH_CONFIGURATION), patch(
        PATCH_CONNECTION,
        side_effect=ValueError("PyISY could not connect to the ISY."),
    ):
        flow_result = await hass.config_entries.flow.async_configure(
            init_result["flow_id"],
            MOCK_USER_INPUT,
        )

    assert flow_result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert flow_result["errors"] == {"base": "invalid_auth"}
async def test_form_cannot_connect(hass: HomeAssistant):
    """Test we handle cannot connect error."""
    init_result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )

    # A CannotConnect raised during validation should surface as a form error.
    with patch(PATCH_CONFIGURATION), patch(
        PATCH_CONNECTION,
        side_effect=CannotConnect,
    ):
        flow_result = await hass.config_entries.flow.async_configure(
            init_result["flow_id"],
            MOCK_USER_INPUT,
        )

    assert flow_result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert flow_result["errors"] == {"base": "cannot_connect"}
async def test_form_existing_config_entry(hass: HomeAssistant):
    """Test the flow aborts when the device's unique ID is already configured."""
    MockConfigEntry(domain=DOMAIN, unique_id=MOCK_UUID).add_to_hass(hass)
    await setup.async_setup_component(hass, "persistent_notification", {})
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["errors"] == {}

    with patch(PATCH_CONFIGURATION) as mock_config_class, patch(
        PATCH_CONNECTION
    ) as mock_connection_class:
        # Validation succeeds and reports the same UUID as the existing entry.
        isy_conn = mock_connection_class.return_value
        isy_conn.get_config.return_value = None
        mock_config_class.return_value = MOCK_VALIDATED_RESPONSE
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            MOCK_USER_INPUT,
        )
    assert result2["type"] == data_entry_flow.RESULT_TYPE_ABORT
    # Also pin the abort reason produced by the duplicate unique ID check.
    assert result2["reason"] == "already_configured"
async def test_import_flow_some_fields(hass: HomeAssistant) -> None:
    """Test import config flow with just the basic fields."""
    with patch(PATCH_CONFIGURATION) as config_cls, patch(
        PATCH_CONNECTION
    ) as connection_cls, patch(PATCH_ASYNC_SETUP, return_value=True), patch(
        PATCH_ASYNC_SETUP_ENTRY,
        return_value=True,
    ):
        # Pretend the ISY is reachable and validates cleanly.
        connection_cls.return_value.get_config.return_value = None
        config_cls.return_value = MOCK_VALIDATED_RESPONSE
        result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": SOURCE_IMPORT},
            data=MOCK_IMPORT_BASIC_CONFIG,
        )

    assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    entry_data = result["data"]
    assert entry_data[CONF_HOST] == f"http://{MOCK_HOSTNAME}"
    assert entry_data[CONF_USERNAME] == MOCK_USERNAME
    assert entry_data[CONF_PASSWORD] == MOCK_PASSWORD
async def test_import_flow_with_https(hass: HomeAssistant) -> None:
    """Test import config with https."""
    with patch(PATCH_CONFIGURATION) as config_cls, patch(
        PATCH_CONNECTION
    ) as connection_cls, patch(PATCH_ASYNC_SETUP, return_value=True), patch(
        PATCH_ASYNC_SETUP_ENTRY,
        return_value=True,
    ):
        # Pretend the ISY validated successfully over SSL.
        connection_cls.return_value.get_config.return_value = None
        config_cls.return_value = MOCK_VALIDATED_RESPONSE
        result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": SOURCE_IMPORT},
            data=MOCK_IMPORT_WITH_SSL,
        )

    assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    entry_data = result["data"]
    assert entry_data[CONF_HOST] == f"https://{MOCK_HOSTNAME}"
    assert entry_data[CONF_USERNAME] == MOCK_USERNAME
    assert entry_data[CONF_PASSWORD] == MOCK_PASSWORD
async def test_import_flow_all_fields(hass: HomeAssistant) -> None:
    """Test import config flow with all fields."""
    with patch(PATCH_CONFIGURATION) as config_cls, patch(
        PATCH_CONNECTION
    ) as connection_cls, patch(PATCH_ASYNC_SETUP, return_value=True), patch(
        PATCH_ASYNC_SETUP_ENTRY,
        return_value=True,
    ):
        # Pretend the ISY is reachable and validates cleanly.
        connection_cls.return_value.get_config.return_value = None
        config_cls.return_value = MOCK_VALIDATED_RESPONSE
        result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": SOURCE_IMPORT},
            data=MOCK_IMPORT_FULL_CONFIG,
        )

    assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    # Every imported option should be carried into the created entry.
    expected = {
        CONF_HOST: f"http://{MOCK_HOSTNAME}",
        CONF_USERNAME: MOCK_USERNAME,
        CONF_PASSWORD: MOCK_PASSWORD,
        CONF_IGNORE_STRING: MOCK_IGNORE_STRING,
        CONF_RESTORE_LIGHT_STATE: MOCK_RESTORE_LIGHT_STATE,
        CONF_SENSOR_STRING: MOCK_SENSOR_STRING,
        CONF_VAR_SENSOR_STRING: MOCK_VARIABLE_SENSOR_STRING,
        CONF_TLS_VER: MOCK_TLS_VERSION,
    }
    for key, value in expected.items():
        assert result["data"][key] == value
async def test_form_ssdp_already_configured(hass: HomeAssistant) -> None:
    """Test ssdp abort when the serial number is already configured."""
    await setup.async_setup_component(hass, "persistent_notification", {})
    MockConfigEntry(
        domain=DOMAIN,
        data={CONF_HOST: f"http://{MOCK_HOSTNAME}{ISY_URL_POSTFIX}"},
        unique_id=MOCK_UUID,
    ).add_to_hass(hass)

    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": SOURCE_SSDP},
        data={
            ssdp.ATTR_SSDP_LOCATION: f"http://{MOCK_HOSTNAME}{ISY_URL_POSTFIX}",
            ssdp.ATTR_UPNP_FRIENDLY_NAME: "myisy",
            ssdp.ATTR_UPNP_UDN: f"{UDN_UUID_PREFIX}{MOCK_UUID}",
        },
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
    # Also pin the abort reason: discovery matched an already-configured UUID.
    assert result["reason"] == "already_configured"
async def test_form_ssdp(hass: HomeAssistant):
    """Test we can setup from ssdp."""
    await setup.async_setup_component(hass, "persistent_notification", {})
    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": SOURCE_SSDP},
        data={
            ssdp.ATTR_SSDP_LOCATION: f"http://{MOCK_HOSTNAME}{ISY_URL_POSTFIX}",
            ssdp.ATTR_UPNP_FRIENDLY_NAME: "myisy",
            ssdp.ATTR_UPNP_UDN: f"{UDN_UUID_PREFIX}{MOCK_UUID}",
        },
    )
    # Discovery should land the user on the credentials form with no errors.
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "user"
    assert result["errors"] == {}
    # Stub the PyISY objects and the component setup, then submit credentials.
    with patch(PATCH_CONFIGURATION) as mock_config_class, patch(
        PATCH_CONNECTION
    ) as mock_connection_class, patch(
        PATCH_ASYNC_SETUP, return_value=True
    ) as mock_setup, patch(
        PATCH_ASYNC_SETUP_ENTRY,
        return_value=True,
    ) as mock_setup_entry:
        isy_conn = mock_connection_class.return_value
        isy_conn.get_config.return_value = None
        mock_config_class.return_value = MOCK_VALIDATED_RESPONSE
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            MOCK_USER_INPUT,
        )
        await hass.async_block_till_done()
    # Completing the form creates the entry, keyed by the discovered UUID,
    # and sets up the integration exactly once.
    assert result2["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    assert result2["title"] == f"{MOCK_DEVICE_NAME} ({MOCK_HOSTNAME})"
    assert result2["result"].unique_id == MOCK_UUID
    assert result2["data"] == MOCK_USER_INPUT
    assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1
async def test_form_dhcp(hass: HomeAssistant):
    """Test we can setup from dhcp."""
    await setup.async_setup_component(hass, "persistent_notification", {})
    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": SOURCE_DHCP},
        data={
            dhcp.IP_ADDRESS: "1.2.3.4",
            dhcp.HOSTNAME: "isy994-ems",
            dhcp.MAC_ADDRESS: MOCK_UUID,
        },
    )
    # DHCP discovery should land the user on the credentials form.
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "user"
    assert result["errors"] == {}
    # Stub the PyISY objects and the component setup, then submit credentials.
    with patch(PATCH_CONFIGURATION) as mock_config_class, patch(
        PATCH_CONNECTION
    ) as mock_connection_class, patch(
        PATCH_ASYNC_SETUP, return_value=True
    ) as mock_setup, patch(
        PATCH_ASYNC_SETUP_ENTRY,
        return_value=True,
    ) as mock_setup_entry:
        isy_conn = mock_connection_class.return_value
        isy_conn.get_config.return_value = None
        mock_config_class.return_value = MOCK_VALIDATED_RESPONSE
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            MOCK_USER_INPUT,
        )
        await hass.async_block_till_done()
    # The entry is created from the submitted input, keyed by the MAC/UUID.
    assert result2["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    assert result2["title"] == f"{MOCK_DEVICE_NAME} ({MOCK_HOSTNAME})"
    assert result2["result"].unique_id == MOCK_UUID
    assert result2["data"] == MOCK_USER_INPUT
    assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1
| [
"noreply@github.com"
] | noreply@github.com |
db70f690f06a6f4c0580e1bdd538a5ad29836c6b | 8ea657e6fc53378038093cb5a3a914786fe98ad4 | /basicConcepts.py | beea7bff13d184f854653e63a284905e9c969d74 | [] | no_license | KianaAguilar/Concept_Python | 2319f91ed714fb8eb9c39c1bfa8d026c6308c12d | 8386e1fc092e38dfd5ef98cd088da8880508b00e | refs/heads/main | 2023-02-22T00:05:49.103672 | 2021-01-25T01:53:35 | 2021-01-25T01:53:35 | 332,602,854 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 312 | py |
# how do you write comments? with a hash sign! #
print(3+2)
# arithmetic operators such as +, -, /, *, remainder, quotient
print((3+3)-5)
print((6*5)/2)
# exponentiation operator
print(2**5)
# floor-division (quotient) operator
print(20//4)
# remainder (modulo) operator
print(20 % 6)
# Booleans: a two-state variable
isSwitchActivated = False;
| [
"58793146+KianaAguilar@users.noreply.github.com"
] | 58793146+KianaAguilar@users.noreply.github.com |
8e962c66a9d91dae37bddea35a9bff26c992c521 | 7a550d2268bc4bc7e2fec608ffb1db4b2e5e94a0 | /1101-1200/1110-Delete Nodes And Return Forest/1110-Delete Nodes And Return Forest.py | 3219a3ad56f0c557fe3ebbf9025c4afa1c801801 | [
"MIT"
] | permissive | jiadaizhao/LeetCode | be31bd0db50cc6835d9c9eff8e0175747098afc6 | 4ddea0a532fe7c5d053ffbd6870174ec99fc2d60 | refs/heads/master | 2021-11-05T04:38:47.252590 | 2021-10-31T09:54:53 | 2021-10-31T09:54:53 | 99,655,604 | 52 | 28 | MIT | 2020-10-02T12:47:47 | 2017-08-08T05:57:26 | C++ | UTF-8 | Python | false | false | 803 | py | # Definition for a binary tree node.
class TreeNode:
    """Node of a binary tree: a payload value plus optional children."""

    def __init__(self, x):
        self.val = x
        # A freshly created node is a leaf until children are attached.
        self.left = self.right = None
class Solution:
    def delNodes(self, root: TreeNode, to_delete: List[int]) -> List[TreeNode]:
        """Remove every node whose value is in ``to_delete``.

        Returns the roots of the remaining forest. A surviving node becomes a
        new root when its parent was deleted (the original root counts as
        having a deleted parent). Runs in O(n) time over the tree.
        """
        targets = set(to_delete)  # O(1) membership tests
        forest = []

        def dfs(node, parent_deleted):
            # Returns the (possibly pruned) subtree rooted at `node`.
            if node is None:
                return None
            if node.val in targets:
                # Node is deleted: its children are processed as orphans.
                dfs(node.left, True)
                dfs(node.right, True)
                return None
            # Idiomatic truthiness test (was `prevDel == True`); early
            # handling above removes the need for an else branch.
            if parent_deleted:
                forest.append(node)
            node.left = dfs(node.left, False)
            node.right = dfs(node.right, False)
            return node

        dfs(root, True)
        return forest
| [
"jiadaizhao@gmail.com"
] | jiadaizhao@gmail.com |
526bb90dae918179636a5a32825b05ab796d9e57 | f9629630eaa5367f6943aa35def2e4f3fd9aaeb2 | /server/home_network.py | 86e4ce0630ce15b04aae61ba921b4f3178cd68fb | [] | no_license | maxtkl/marioKart | 723b5f7a4c7dccda88fd687939963bc0d25fdd84 | 3e55c2ad2d61228676d97637edb506f7412f2046 | refs/heads/master | 2020-04-16T12:12:33.680895 | 2019-01-14T00:15:58 | 2019-01-14T00:15:58 | 165,568,911 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 544 | py | from struct import pack
from struct import unpack
from struct import calcsize as sizeof
from struct import error
# Protocol op codes carried in the packet header.
CONNECT_TO_CAR = 1
MOVE_CAR = 2
GET_MALUS = 3
SEND_MALUS = 4
# Movement directions used as the payload of a MOVE_CAR packet.
FORWARD = 10
BACKWARD = 11
LEFT = 12
RIGHT = 13
def create_packet(op_code, data):
    """Serialize an op code and a text payload into wire format.

    Layout: big-endian unsigned int header followed by the encoded payload.
    """
    payload = data.encode()
    fmt = "!I%ds" % len(data)
    return pack(fmt, op_code, payload)
def process_packet(packet):
    """Decode a packet into an (op_code, payload) tuple.

    On a malformed packet, logs the problem and returns (None, None).
    """
    body_len = len(packet) - sizeof("!I")
    try:
        return unpack("!I%ds" % body_len, packet)
    except error:
        print("Packet Format Error : {}".format(packet))
        return None, None
| [
"noreply@github.com"
] | noreply@github.com |
2555d67c9356f76316ca075fb7052e0bc3678ccc | 0c43ae8365998144ebc23156c12768711114e6f9 | /web_flask/4-number_route.py | 1aa7ec504db5b6beaf7e14f834086abdbf92165a | [] | no_license | Nesgomez9/AirBnB_clone_v2 | 74e343ade1c418b49c8ebaee79f6319f8e971ff6 | 055c4e92c819fd0e9dec369e687c1601f243f02c | refs/heads/master | 2021-05-19T12:50:35.656686 | 2020-04-23T03:33:41 | 2020-04-23T03:33:41 | 251,707,487 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 746 | py | #!/usr/bin/python3
from flask import Flask
app = Flask(__name__)
@app.route("/", strict_slashes=False)
def hello_route():
    """Root route: return a static greeting."""
    return "Hello HBNB!"
@app.route("/hbnb", strict_slashes=False)
def hbnb_route():
    """Return the plain string HBNB."""
    return "HBNB"
@app.route("/c/<text>", strict_slashes=False)
def c_route(text):
    """Display "C " followed by the URL text, underscores shown as spaces."""
    return "C {}".format(text.replace("_", " "))
@app.route("/python/<text>", strict_slashes=False)
@app.route("/python", strict_slashes=False)
def python_route(text="is cool"):
    """Display "Python " + text, underscores shown as spaces.

    The bare /python route falls back to the default "is cool".
    """
    text = text.replace("_", " ")
    return "Python {}".format(text)
@app.route("/number/<int:n>", strict_slashes=False)
def number_route(n):
    """Display "<n> is a number"; the int converter rejects non-integers."""
    return "{:d} is a number".format(n)
if __name__ == "__main__":
    # Listen on all interfaces; note the port is passed as a string here.
    app.run(host="0.0.0.0", port="5000")
| [
"nicolico99@hotmail.com"
] | nicolico99@hotmail.com |
4db824ca06fcb183b3cdd4afb8c1407541610ba5 | d8c1f119d1349dd8ad2e48619a8c258967cd9a31 | /Baekjun/Dijkstra/10282. 해킹.py | 57994f6df0e2f7d52580d8b02723c1259ebd1a75 | [] | no_license | Seonghyeony/DataStructure-Algorithm | c7c006ee705b68fc4d2d04dc6baaf0aeb80fc83e | 4121289cafd0050bda408934fcb14d88052c956f | refs/heads/master | 2023-04-12T16:30:17.039109 | 2021-05-08T10:31:05 | 2021-05-08T10:31:05 | 286,371,604 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,060 | py | import heapq
def dijkstra(graph, n, start):
    """Single-source shortest paths over an adjacency list (1-based nodes).

    Prints two numbers: how many of nodes 1..n are reachable from `start`,
    and the largest finite shortest-path distance among them.
    """
    INF = float('inf')
    dist = [INF] * (n + 1)
    dist[start] = 0
    pq = [[0, start]]
    while pq:
        d, node = heapq.heappop(pq)
        # Skip stale queue entries that were superseded by a shorter path.
        if d > dist[node]:
            continue
        for nxt, weight in graph[node]:
            cand = d + weight
            if cand < dist[nxt]:
                dist[nxt] = cand
                heapq.heappush(pq, [cand, nxt])
    reachable = 0
    farthest = float('-inf')
    for i in range(1, n + 1):
        if dist[i] != INF:
            reachable += 1
            farthest = max(farthest, dist[i])
    print(reachable, farthest)
# Driver: for each test case read n nodes, d dependencies, start node c.
test_case = int(input())
for _ in range(test_case):
    n, d, c = map(int, input().split())
    adj = [[] for _ in range(n + 1)]
    for _ in range(d):
        a, b, s = map(int, input().split())
        # Edge stored reversed (b -> a): presumably "a depends on b" means
        # the effect propagates from b to a -- confirm against the problem.
        adj[b].append([a, s])
    dijkstra(adj, n, c)
| [
"sunghyun7949@naver.com"
] | sunghyun7949@naver.com |
20dc5890b9b93d045ce94089c746abe19eb71702 | 8a526c3f6d3eedfd309a17d1fbf185385e779472 | /字典和集合/字典/带计数的字典.py | bb80bfbea08e99481799da3ef0a8e77e8778a5f9 | [] | no_license | link9c/FluentPython | a8eda93a27f3dd4e0cdb7c6ac76a2a3eceb03d5a | 059c9dfeff0115a2361a6ff4bc4ea24ab9113a5f | refs/heads/master | 2020-11-28T05:19:35.959694 | 2019-12-26T13:51:41 | 2019-12-26T13:51:41 | 229,714,580 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 118 | py | import collections
# Counter tallies how many times each character occurs in the string.
ct = collections.Counter('abcdeeaa')
print(ct)
# update() adds counts from another iterable instead of replacing them.
ct.update('aaa')
print(ct)
# most_common(2): the two (element, count) pairs with the highest counts.
print(ct.most_common(2))
| [
"chenyulei@MicroPort.local"
] | chenyulei@MicroPort.local |
18376e8e4a3c1295d113d89e9a7f184fafa79207 | 48e3facc2aca286beae29d9f3fe8713c0a603064 | /insertSubcommentReaction.py | 083d2087bb05685f37391e3b7c0a7a1d6e2f8023 | [] | no_license | marie-elia/facebook-mongodb-database | 84011bece0a5b459d297ce88d5921376f019ad35 | fb804a10478e84144852c26a913def2a8e53b4be | refs/heads/master | 2020-07-28T13:39:47.188011 | 2019-09-19T00:24:29 | 2019-09-19T00:24:29 | 209,427,311 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 834 | py | import datetime
import pymongo
from pymongo import MongoClient
from bson.objectid import ObjectId
# NOTE(review): the first client is immediately overwritten by the explicit
# localhost connection below; the first line is dead code.
client = MongoClient()
client = MongoClient('localhost', 27017)
db = client.facebook.accounts
# NOTE(review): `id` shadows the builtin of the same name.
id = "firstpost"
# Filters selecting the account documents to update (by name + nested id).
number1 = {
    "name" : "Marie Elia",
    "pics.img_id": "firstimage"
}
number2 = {
    "name" : "Marie Elia",
    "posts.post_id": id
}
# Reaction sub-documents to append.
React1 = {
    "reaction_id": "subreactimg",
    "reaction": "Angry",
    "reactor":"Marie Elia"
}
React2 = {
    "reaction_id": "subreactpost",
    "reaction": "Angry",
    "reactor":"Marie Elia"
}
# $push appends to the reactions array of the FIRST pic/post's FIRST
# comment's FIRST subcomment (the positional `.0.` path segments).
update1 = {
    "$push": {"pics.0.comments.0.subcomments.0.reactions" : React1}
}
update2 = {
    "$push": {"posts.0.comments.0.subcomments.0.reactions" : React2}
}
db.update_one(number1, update1)
db.update_one(number2, update2)
| [
"noreply@github.com"
] | noreply@github.com |
ae0940b35e81f01bdb421943a6cebeb0d4228588 | 627c0492f21d5fc4a35469cf4297f53e5ef887ac | /webpersonal/webpersonal/settings.py | f955fed803bd05ff0fd117823e408d3d8160af1b | [] | no_license | rulomoscoso/django3Webs | 50a278a1ceb94b9f84ae80caf5d58add208b5049 | 04337c45e1cba20f7c81645ab0dbd00f3cc54b96 | refs/heads/main | 2023-02-20T23:43:05.261057 | 2021-01-20T14:59:50 | 2021-01-20T14:59:50 | 329,947,011 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,232 | py | """
Django settings for webpersonal project.
Generated by 'django-admin startproject' using Django 2.0.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control; rotate it and load
# it from the environment before any production deployment.
SECRET_KEY = '2zvw_mguae)kq$0=*8vk82@=he6-kri%htp12k$%jh1$=ijlz('
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Hosts/domains this site may serve; must be filled in when DEBUG is False.
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'core',
'portfolio.apps.PortfolioConfig',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'webpersonal.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'webpersonal.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = 'es'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_URL = '/static/'
# Media Files
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
| [
"raul.moscoso84@gmail.com"
] | raul.moscoso84@gmail.com |
645ad372e31431d01fda2367f9d0be99efc010dd | 4a65275fcf23a4b881aa730ece5495956fa596ed | /venv/Scripts/easy_install-3.8-script.py | 491617aed90a381b0c120e01f6785df40e3a4a71 | [] | no_license | Abhijit-ops/WebApp | 57f8cc5a0c48a590ac02e66da4438d2e00d1b1e1 | 0d9d9edaa0077a08dffbeacfa54e2380b269c025 | refs/heads/master | 2022-12-28T03:15:56.547527 | 2020-09-17T21:00:12 | 2020-09-17T21:00:12 | 296,429,182 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 459 | py | #!C:\Users\Abhijit\PycharmProjects\WebApp\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==40.8.0','console_scripts','easy_install-3.8'
__requires__ = 'setuptools==40.8.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
    # Auto-generated setuptools entry-point wrapper: strip the "-script.py" /
    # ".exe" suffix from argv[0], then exit with easy_install's return code.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('setuptools==40.8.0', 'console_scripts', 'easy_install-3.8')()
    )
| [
"abhijitkalita1984@gmail.com"
] | abhijitkalita1984@gmail.com |
5e14ac1175f45b85ceb256c7a8522a73237bc1f4 | 2aa84f9553a0593593afff720f7dfd8c6df3adde | /tests/test_query_parser.py | 3c2d26f25e088261c212386f85d9a8ce25602370 | [] | no_license | Pavel-Guseynov/sqlalchemy-searchable | c492f37e10e0e9054914af1f20cf799a58b9e8aa | 6baa13193f2f2a39ba96b231ee7f88843bdd6fd0 | refs/heads/master | 2021-07-25T14:02:07.876195 | 2017-11-06T16:17:05 | 2017-11-06T16:17:05 | 109,388,493 | 0 | 0 | null | 2017-11-03T11:35:01 | 2017-11-03T11:35:01 | null | UTF-8 | Python | false | false | 1,746 | py | # -*- coding: utf-8 -*-
from pyparsing import ParseException
from pytest import raises
from sqlalchemy_searchable.parser import SearchQueryParser
class TestSearchQueryParser(object):
    """Tests for SearchQueryParser: plain-text queries are translated into
    PostgreSQL ``to_tsquery`` syntax (prefix terms ``x:*`` joined with
    ``&``/``|``/``!``).
    """
    def setup_method(self, method):
        # Fresh parser per test; pytest calls this before every test method.
        self.parser = SearchQueryParser()
    def test_unicode(self):
        assert self.parser.parse(u'안녕가は') == u'안녕가は:*'
    def test_empty_string(self):
        # An empty query is a parse error, not an empty tsquery.
        with raises(ParseException):
            self.parser.parse('')
    def test_or(self):
        assert self.parser.parse('star or wars') == 'star:* | wars:*'
    def test_multiple_ors(self):
        # Redundant "or" keywords collapse into a single | operator.
        assert self.parser.parse('star or or or wars') == 'star:* | wars:*'
    def test_space_as_and(self):
        assert self.parser.parse('star wars') == 'star:* & wars:*'
    def test_multiple_spaces_as_and(self):
        assert (
            self.parser.parse('star wars luke') ==
            'star:* & wars:* & luke:*'
        )
    def test_parenthesis(self):
        assert self.parser.parse('(star wars) or luke') == (
            '(star:* & wars:*) | luke:*'
        )
    def test_or_and(self):
        assert (
            self.parser.parse('star or wars luke or solo') ==
            'star:* | wars:* & luke:* | solo:*'
        )
    def test_empty_parenthesis(self):
        with raises(ParseException):
            assert self.parser.parse('()')
    def test_nested_parenthesis(self):
        # Redundant nesting is flattened to a single parenthesized group.
        assert self.parser.parse('((star wars)) or luke') == (
            '(star:* & wars:*) | luke:*'
        )
    def test_not(self):
        # Leading dash negates a term.
        assert self.parser.parse('-star') == (
            '! star:*'
        )
    def test_not_with_parenthesis(self):
        assert self.parser.parse('-(star wars)') == '! (star:* & wars:*)'
| [
"konsta.vesterinen@gmail.com"
] | konsta.vesterinen@gmail.com |
65cc4b40f81149e478236ca1e329f99ffc8fcb82 | e63c1e59b2d1bfb5c03d7bf9178cf3b8302ce551 | /uri/uri_python/ad_hoc/p2456.py | fd6b70e0c37682c826aa6690b0340d8b59f32ede | [] | no_license | GabrielEstevam/icpc_contest_training | b8d97184ace8a0e13e1c0bf442baa36c853a6837 | 012796c2ceb901cf7aa25d44a93614696a7d9c58 | refs/heads/master | 2020-04-24T06:15:16.826669 | 2019-10-08T23:13:15 | 2019-10-08T23:13:15 | 171,758,893 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 241 | py | entry = input().split(" ")
# `entry` is the whitespace-split stdin line read above.
a = int(entry[0])
b = int(entry[1])
c = int(entry[2])
d = int(entry[3])
e = int(entry[4])
# Classify the 5-number sequence: strictly increasing 'C', strictly
# decreasing 'D', otherwise 'N'. Python's chained comparisons replace the
# original pairwise `and` chains with the idiomatic equivalent.
if a < b < c < d < e:
    print('C')
elif a > b > c > d > e:
    print('D')
else:
    print('N')
"gabrielestevam@hotmail.com"
] | gabrielestevam@hotmail.com |
d28d63f6fdadbd9ee21a1e5ac93114fe39836063 | e8bf6c148196a5a9d85f02ca6e392dbc580c9caa | /servico/migrations/0004_auto_20190728_1156.py | 92fcdbcbbabbe4b29f7a152b4958fd41b1930a46 | [] | no_license | elisvaldol/servicos | 8cf34b4daf52f05c47286e3c4faf63418d689ca4 | a6e7d164cf9a5f3aaf6058760ac76d33738f075e | refs/heads/master | 2022-05-31T17:41:59.629090 | 2019-07-30T02:46:22 | 2019-07-30T02:46:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,952 | py | # Generated by Django 2.2.3 on 2019-07-28 14:56
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated Django migration (do not edit applied operations).

    Renames several cliente/servicos fields to capitalized names and adds
    two cliente fields. NOTE(review): the renames introduce inconsistent
    casing ('CPf', 'Telefone1'), and renaming 'cidade' (city) to
    'Telefone1' (phone) looks like a field being repurposed while keeping
    its data -- confirm this was intended.
    """
    dependencies = [
        ('servico', '0003_auto_20190728_1121'),
    ]
    operations = [
        migrations.RenameField(
            model_name='cliente',
            old_name='cpf_cnpj',
            new_name='CPf',
        ),
        migrations.RenameField(
            model_name='cliente',
            old_name='cidade',
            new_name='Telefone1',
        ),
        migrations.RenameField(
            model_name='servicos',
            old_name='data_criacao',
            new_name='Data_criacao',
        ),
        migrations.RenameField(
            model_name='servicos',
            old_name='data_execucao',
            new_name='Data_execucao',
        ),
        migrations.RenameField(
            model_name='servicos',
            old_name='descricao',
            new_name='Descricao',
        ),
        migrations.RenameField(
            model_name='servicos',
            old_name='obs',
            new_name='Obs',
        ),
        migrations.RenameField(
            model_name='servicos',
            old_name='valor',
            new_name='Valor',
        ),
        migrations.AddField(
            model_name='cliente',
            name='Cidade',
            field=models.CharField(default=1, max_length=100),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='cliente',
            name='Telefone2',
            field=models.CharField(default=1, max_length=10),
            preserve_default=False,
        ),
        migrations.AlterField(
            model_name='cliente',
            name='Bairro',
            field=models.CharField(max_length=100),
        ),
        migrations.AlterField(
            model_name='cliente',
            name='Endereço',
            field=models.CharField(max_length=100),
        ),
    ]
| [
"elisvaldolj@gmail.com"
] | elisvaldolj@gmail.com |
683dad2c7f40652de53cc3bd61a9377094975e2a | 4f856a87be2ca95330416d8a1d461a03b8590674 | /django_rs/rs/migrations/0004_session_number_users.py | 29846dd78db4d11d0bbabcfc64273dd6f5e7cd10 | [] | no_license | oscarcorreag/PhD-code | ea71f3b7cdbd0e42f9f0a141790f73b1bfdd13bb | 2a1a9bb22f5cd0332f6cf8491be9fa801966e89a | refs/heads/master | 2021-06-26T12:47:20.497517 | 2020-11-04T12:03:56 | 2020-11-04T12:03:56 | 143,695,016 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 493 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.10 on 2018-10-19 00:23
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: add a required `number_users` counter to Session.

    preserve_default=False means the default=0 is only used to backfill
    existing rows during this migration.
    """
    dependencies = [
        ('rs', '0003_sessionuser_vehicle'),
    ]
    operations = [
        migrations.AddField(
            model_name='session',
            name='number_users',
            field=models.SmallIntegerField(default=0),
            preserve_default=False,
        ),
    ]
| [
"oscarcorreag@gmail.com"
] | oscarcorreag@gmail.com |
042595c5cf1ec0ffd2c74728f0032e11e1534636 | 778092d2415fb6e812790c45e9e02f7572ef78e7 | /data-node.py | d861638e98c5f70eb542e929767de001bc9a71df | [] | no_license | JpGallegos/simple_dfs | 0ea0302ab2edb393eee2bd3f5fa0e712dcf7a9f9 | 8e7066bf8799d3c359f3b6a0cc6c7556ff29a76b | refs/heads/master | 2016-09-11T06:14:35.071906 | 2014-12-12T17:55:10 | 2014-12-12T17:55:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,830 | py | ###############################################################################
#
# Filename: data-node.py
# Author: Jose R. Ortiz and ... (hopefully some students contribution)
#
# Description:
# data node server for the DFS
#
from Packet import *
import sys
import socket
import SocketServer
import uuid
import os
import os.path
def usage():
    """Print command-line usage and terminate with exit status 0."""
    print """Usage: python %s <server> <port> <data path> <metadata port,default=8000>""" % sys.argv[0]
    sys.exit(0)
def register(meta_ip, meta_port, data_ip, data_port):
    """Register this data node with the metadata server.

    Builds a registration packet carrying this node's serving address and
    sends it to the metadata server, retrying while the server answers "NAK".

    meta_ip/meta_port -- address of the metadata server.
    data_ip/data_port -- address this data node serves blocks on.
    """
    response = "NAK"
    while response == "NAK":
        # BUG FIX: the original reused a single socket and called connect()
        # inside the retry loop; a second connect() on an already-connected
        # socket raises. Use a fresh socket per attempt and always close it.
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            sp = Packet()
            sp.BuildRegPacket(data_ip, data_port)
            sock.connect((meta_ip, meta_port))
            sock.sendall(sp.getEncodedPacket())
            response = sock.recv(1024)
            if response == "DUP":
                print("Duplicate Registration")
            if response == "NAK":
                print("Registratation ERROR")
        finally:
            sock.close()
class DataNodeTCPHandler(SocketServer.BaseRequestHandler):
    """Serves one client request: "put" stores a data block, "get" reads one.

    NOTE(review): both handlers are skeletons -- the "Fill code" sections
    (receiving the block bytes, replying with the block id, and the whole
    read-back path of handle_get) are not implemented yet.
    """
    def handle_put(self, p):
        """Receives a block of data from a copy client, and
           saves it with an unique ID. The ID is sent back to the
           copy client.
        """
        rawfname, fsize = p.getFileInfo()
        dirname = os.path.dirname(rawfname)
        fname = os.path.basename(rawfname)  # currently unused
        self.request.send("OK")
        # Generates an unique block id.
        blockid = str(uuid.uuid1())
        # Open the file for the new data block.
        if not os.path.exists(dirname):
            os.mkdir(dirname)
        # NOTE(review): mode 'bw' is rejected by Python 2's open();
        # the conventional order is 'wb' -- confirm the target runtime.
        fp = open(rawfname, 'bw')
        # Receive the data block.
        # Send the block id back
        # Fill code
        p = Packet()
        p.BuildGetDataBlockPacket(blockid)
        fp.close()
    def handle_get(self, p):
        """Serve a stored block back to the client (not implemented)."""
        # Get the block id from the packet
        blockid = p.getBlockID()
        # Read the file with the block id data
        # Send it back to the copy client.
        # Fill code
    def handle(self):
        """Decode one request packet and dispatch on its command."""
        msg = self.request.recv(1024)
        print msg, type(msg)
        p = Packet()
        p.DecodePacket(msg)
        cmd = p.getCommand()
        if cmd == "put":
            self.handle_put(p)
        elif cmd == "get":
            self.handle_get(p)
if __name__ == "__main__":
    META_PORT = 8000
    if len(sys.argv) < 4:
        usage()
    try:
        HOST = sys.argv[1]
        PORT = int(sys.argv[2])
        DATA_PATH = sys.argv[3]
        # BUG FIX: was `len(sys.argv > 4)` -- it compared the list itself to 4
        # and took len() of the result, so the optional port was never read
        # and every run ended up in usage().
        if len(sys.argv) > 4:
            META_PORT = int(sys.argv[4])
        # BUG FIX: create the directory first if missing; the original tested
        # isdir() before exists(), so a missing path always aborted and the
        # mkdir() branch was unreachable.
        if not os.path.exists(DATA_PATH):
            os.mkdir(DATA_PATH)
        if not os.path.isdir(DATA_PATH):
            print("Error: Data path %s is not a directory." % DATA_PATH)
            usage()
    except (ValueError, OSError):
        # Bad port number, or the data directory could not be created.
        # (Narrowed from a bare except, which also swallowed usage()'s
        # SystemExit and any unrelated failure.)
        usage()
    register("localhost", META_PORT, HOST, PORT)
    server = SocketServer.TCPServer((HOST, PORT), DataNodeTCPHandler)
    # Activate the server; this will keep running until you
    # interrupt the program with Ctrl-C
    server.serve_forever()
| [
"jp.gallegos.diaz@gmail.com"
] | jp.gallegos.diaz@gmail.com |
16eb4f0e51d45f39b17d70bcf2a407765c928ad8 | 5bd4893a793ed739127f15becd9558cacf461540 | /scripts/hit_endpoint.py | b86fbea7d7419186ef9c482e8f1b00b6d7f17c8a | [] | no_license | hauensteina/ahn-repo | d3aa665eeef846e426b866d587e8649c8283e74c | 93bd7c54548a083f39510fc562c9e7540c4f672a | refs/heads/master | 2023-07-24T05:34:51.289699 | 2023-07-13T16:10:25 | 2023-07-13T16:10:25 | 99,860,476 | 0 | 1 | null | 2023-07-15T01:33:35 | 2017-08-09T23:20:28 | Python | UTF-8 | Python | false | false | 739 | py | #!/usr/bin/env python
# Python 3 script hitting a REST endpoint
# AHN, Jun 2019
import requests
from pdb import set_trace as BP
URL = 'https://ahaux.com/leela_server/select-move/leela_gtp_bot?tt=1234'
ARGS = {'board_size':19,'moves':[],'config':{'randomness':0.5,'request_id':'0.6834311059880898'}}
#-------------
def main():
    """Query the configured endpoint once and print the JSON response."""
    res = hit_endpoint( URL, ARGS)
    print( res)
# Hit an endpoint with a POST request
#----------------------------------------
def hit_endpoint( url, args):
try:
resp = requests.post( url, json=args)
res = resp.json()
return res
except Exception as e:
print( 'ERROR: hit_endpoint() failed: %s' % str(e))
sys.exit(1)
if __name__ == '__main__':
main()
| [
"hauensteina@gmail.com"
] | hauensteina@gmail.com |
782877b0172ab41578299f3423b6a6d8d69965e9 | fccffc0de27d30fc508201bcac5ecb189c109075 | /portfolio/admin.py | 014c4d41db0297d16a868e5804aec42da57d2b82 | [
"MIT"
] | permissive | robalford/ahi_django | 7fa847a73fc5fa0f5d822c751b19992dc049d1dd | 3e32be3a90140774c34f29f9f907e8217afe788b | refs/heads/master | 2021-06-13T10:22:18.496744 | 2017-02-12T23:14:58 | 2017-02-12T23:14:58 | 80,779,352 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,371 | py | from django.contrib import admin
from imagekit.admin import AdminThumbnail
from .models import LandingPage, AboutPage, Project, Photo, Press
class LandingPageAdmin(admin.ModelAdmin):
    """Admin for LandingPage showing an image thumbnail in the change list."""
    admin_thumbnail = AdminThumbnail(image_field='thumbnail')
    list_display = ('__str__', 'admin_thumbnail')
    # The thumbnail is derived from the image, so it is never editable.
    readonly_fields = (
        'admin_thumbnail',
    )
class PhotoInline(admin.TabularInline):
    """Inline editor for a project's photos, with a preview thumbnail."""
    model = Photo
    admin_thumbnail = AdminThumbnail(image_field='thumbnail')
    # NOTE(review): list_display is a ModelAdmin option; it appears to have
    # no effect on a TabularInline -- confirm and remove if so.
    list_display = ('__str__', 'admin_thumbnail')
    readonly_fields = (
        'admin_thumbnail',
    )
    fields = (
        'photo',
        'admin_thumbnail',
        'orientation',
        'display_order',
        'credit',
    )
class ProjectAdmin(admin.ModelAdmin):
    """Admin for Project: thumbnail in the list, inline photo editing."""
    admin_thumbnail = AdminThumbnail(image_field='thumbnail')
    list_display = ('__str__', 'admin_thumbnail')
    readonly_fields = (
        'admin_thumbnail',
    )
    # Explicit field order for the change form.
    fields = (
        'title',
        'slug',
        'display_order',
        'photo',
        'admin_thumbnail',
        'photo_credit',
        'text',
        'architect',
        'awards',
    )
    # Auto-fill the slug from the title as the user types.
    prepopulated_fields = {"slug": ("title",)}
    inlines = [
        PhotoInline,
    ]
# Register models with the admin site; AboutPage and Press use the defaults.
admin.site.register(LandingPage, LandingPageAdmin)
admin.site.register(AboutPage)
admin.site.register(Project, ProjectAdmin)
admin.site.register(Press)
| [
"robertsalford@gmail.com"
] | robertsalford@gmail.com |
f10137f2cd422b97ed7dcd9b1260ef629779f649 | 5638b76686262e5c9f1a8540de83dcd7a17d61f3 | /api/urls.py | e37f1ce62c302e9860ffd98a53313287600b2c99 | [] | no_license | whitehat0917/test-rest-api | 0802e66611018073ac3bfd1f5c8849fbaa4f5281 | e3a4d3f158c3e282246eea5a6ddd4a95a76f99a1 | refs/heads/main | 2023-03-29T06:26:09.320138 | 2021-03-29T15:35:40 | 2021-03-29T15:35:40 | 352,693,715 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 118 | py | from django.conf.urls import url
from . import views
urlpatterns = [
    # NOTE(review): the pattern r'users' is unanchored (no ^...$), so it
    # matches any path containing "users"; consider r'^users$'. The route
    # name 'index' also does not match the path -- confirm it is intended.
    url(r'users', views.users, name='index'),
]
"whitehat0917@gmail.com"
] | whitehat0917@gmail.com |
8952aa9f1b61af489e11144d5f44fa1ef4518d3c | 962d42197c56346d348d334dd7435224d4aed99d | /Inventationery/apps/Inventory/migrations/0012_auto_20151227_1216.py | fbed11045d9ab2d771817012eb1c4368885e13e6 | [
"BSD-3-Clause"
] | permissive | huarmenta/Inventationery | e5750274c0b281208fa1f33590f0402349f7e8da | 1bf9ee2c56492ab66947886590b7ec17fa3a6195 | refs/heads/master | 2021-05-30T08:31:15.368712 | 2016-01-06T03:23:45 | 2016-01-06T03:23:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,732 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated schema migration for the Inventory app.

    Creates the EcoResProductModel table and renames
    MovementHistoryModel.Note to Notes.
    """
    # Must run after the initial Vendor migration (for the LastVendor FK)
    # and the preceding Inventory migration.
    dependencies = [
        ('Vendor', '0001_initial'),
        ('Inventory', '0011_auto_20151226_2308'),
    ]
    operations = [
        migrations.CreateModel(
            name='EcoResProductModel',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('modified', models.DateTimeField(auto_now=True)),
                ('BarCode', models.CharField(default=None, max_length=50, null=True, blank=True)),
                ('StdUnit', models.CharField(default=b'pza.', max_length=20, null=True, blank=True)),
                ('SalesUnit', models.CharField(default=b'pza.', max_length=20, null=True, blank=True)),
                ('PurchUnit', models.CharField(default=b'pza.', max_length=20, null=True, blank=True)),
                ('Notes', models.TextField(max_length=100, null=True, blank=True)),
                # NOTE(review): 'DefaulLocation' looks like a typo for
                # 'DefaultLocation', but it is part of committed schema
                # history -- never edit an applied migration; fix it with a
                # follow-up RenameField migration instead.
                ('DefaulLocation', models.ForeignKey(related_name='DefaultLocation', blank=True, to='Inventory.LocationModel', null=True)),
                ('Item', models.ForeignKey(default=None, blank=True, to='Inventory.ItemModel', null=True)),
                ('LastVendor', models.ForeignKey(default=None, blank=True, to='Vendor.VendorModel', null=True)),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.RenameField(
            model_name='movementhistorymodel',
            old_name='Note',
            new_name='Notes',
        ),
    ]
| [
"h.alexarmenta@gmail.com"
] | h.alexarmenta@gmail.com |
35bf4c7a9b21ab23ef52b6c6f0f8175b0648633a | 61afd923551491846ae827821f55c4fb5fd04c98 | /packages/levylab_lib_levylab_instruments/levylab_lib_levylab_instruments-1.4.2.53.spec | 04f92bcc228f4e1c73706452894072f1ab9ff991 | [
"BSD-3-Clause"
] | permissive | laserengineer/levylabpitt.github.io | b74b711aff2a5eb1b46f880a1071ac0873f1a9ac | cdf9aeb6faaf136211291ce2232c239229d85bbe | refs/heads/master | 2023-04-29T02:36:48.736236 | 2021-05-14T19:20:40 | 2021-05-14T19:20:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 21,436 | spec | [Package]
Name="levylab_lib_levylab_instruments"
Version="1.4.2.53"
Release=""
ID=26edd37d85e86c8ffd7cf08a57fa4313
File Format="vip"
Format Version="2017"
Display Name="Instruments"
[Description]
Description="Abstract instrument.lvclass"
Summary=""
License="BSD-3"
Copyright="Copyright (c) 2020, Levylab"
Distribution=""
Vendor="Levylab"
URL=""
Packager="Patrick Irvin"
Demo="FALSE"
Release Notes="[1.4.2.53]\0A- Add "Finished?" boolean to Get Magnet/Temperature/Delay\0A- Change the way the Instrument SMO is configured (dev must override SMO Name/Port/Public API/RC Tyle - constant.vi)"
System Package="FALSE"
Sub Package="FALSE"
License Agreement="TRUE"
[LabVIEW]
close labview before install="FALSE"
restart labview after install="FALSE"
skip mass compile after install="FALSE"
[Platform]
Exclusive_LabVIEW_Version="LabVIEW>=16.0"
Exclusive_LabVIEW_System="ALL"
Exclusive_OS="ALL"
[Script VIs]
PreInstall=""
PostInstall=""
PreUninstall=""
PostUninstall=""
Verify=""
PreBuild=""
PostBuild=""
[Dependencies]
AutoReqProv=FALSE
Requires="jki_lib_state_machine>=2018.0.7.45,jki_statemachineobjects>=1.3.0.56,mgi_lib_application_control>=1.1.1.10,mgi_lib_error_handling>=1.1.1.3,mgi_lib_error_reporter>=1.0.2.5,national_instruments_lib_guid_generator>=1.0.2.3,ni_lib_stm>=3.1.0.9,oglib_appcontrol>=4.1.0.7,oglib_error>=4.2.0.23,oglib_file>=4.0.1.22,oglib_lvdata>=4.2.0.21,oglib_numeric>=4.1.0.8,oglib_string>=4.1.0.12,oglib_time>=4.0.1.3,oglib_variantconfig>=4.0.0.5"
Conflicts=""
[Activation]
License File=""
Licensed Library=""
[Files]
Num File Groups="3"
Sub-Packages=""
Namespaces=""
[File Group 0]
Target Dir="<application>"
Replace Mode="Always"
Num Files=202
File 0="user.lib/Levylab/Levylab Instruments/instrument.lvproj"
File 1="user.lib/Levylab/Levylab Instruments/LICENSE"
File 2="user.lib/Levylab/Levylab Instruments/README.md"
File 3="user.lib/Levylab/Levylab Instruments/SMOs/SCPI/SCPI Decode.vi"
File 4="user.lib/Levylab/Levylab Instruments/SMOs/SCPI/SCPI Encode.vi"
File 5="user.lib/Levylab/Levylab Instruments/SMOs/SCPI/SCPI.lvclass"
File 6="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl.STM/Close Connection.vi"
File 7="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl.STM/Connection Monitor - Loop.vi"
File 8="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl.STM/Connection Monitor - Stop.vi"
File 9="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl.STM/onCreate.vi"
File 10="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl.STM/Open Client Connection.vi"
File 11="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl.STM/Open Server Connection.vi"
File 12="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl.STM/Read listener ID.vi"
File 13="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl.STM/Read Message.vi"
File 14="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl.STM/Read STM connection info.vi"
File 15="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl.STM/RemoteControl.STM.lvclass"
File 16="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl.STM/RemoteControl.STM.TestLauncher.vi"
File 17="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl.STM/Send Message.vi"
File 18="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl.STM/STM_Client.vi"
File 19="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl.STM/STM_Client_OO.vi"
File 20="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl.STM/STM_Client_SM.vi"
File 21="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl.STM/STM_Server.vi"
File 22="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl.STM/STM_Server_part_OO.vi"
File 23="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl.STM/Write listener ID.vi"
File 24="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl.STM/Write STM connection info.vi"
File 25="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Client - JKI SM or SMO.vi"
File 26="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Close Connection.vi"
File 27="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Connection Monitor - Loop.vi"
File 28="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Connection Monitor - Stop.vi"
File 29="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Create RC Client.vi"
File 30="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Create RC Server.vi"
File 31="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/CreatePrivateEvents.vi"
File 32="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/CreatePublicEvents.vi"
File 33="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/DestroyPrivateEvents.vi"
File 34="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/DestroyPublicEvents.vi"
File 35="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Error Dialog.vi"
File 36="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/onCreate.vi"
File 37="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Open Client Connection.vi"
File 38="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Open Server Connection.vi"
File 39="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Process.vi"
File 40="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Process_Backup.vi"
File 41="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Read address.vi"
File 42="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Read Commands.vi"
File 43="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Read Message.vi"
File 44="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Read Port.vi"
File 45="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Remote Client.vi"
File 46="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/RemoteControl.Configure.vi"
File 47="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/RemoteControl.GetPrivateEvents.vi"
File 48="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/RemoteControl.GetPublicEvents.vi"
File 49="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/RemoteControl.lvclass"
File 50="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/RemoteControl.SendMessageFromProcess.vi"
File 51="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/RemoteControl.SendMessageToProcess.vi"
File 52="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/RemoteControl.TestLauncher.ConnectionMonitor.vi"
File 53="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/RemoteControl.TestLauncher.Server.vi"
File 54="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/RemoteControlGlobal.vi"
File 55="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Send and Receive Message.vi"
File 56="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Send Message.vi"
File 57="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Time Message Events.vi"
File 58="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/VirtualTestInstrument.vi"
File 59="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Write address.vi"
File 60="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Write Commands.vi"
File 61="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Write Port.vi"
File 62="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Typedefs/Message--Cluster.ctl"
File 63="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Typedefs/PrivateEvents--Cluster.ctl"
File 64="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Typedefs/PrivateEvents--RemoteControl.Configure.ctl"
File 65="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Typedefs/PrivateEvents--RemoteControl.Reply Message.ctl"
File 66="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Typedefs/PrivateEvents--RemoteControl.SendMessageToProcess.ctl"
File 67="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Typedefs/PublicEvents--Cluster.ctl"
File 68="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Typedefs/PublicEvents--RemoteControl.Reply Remote Message.ctl"
File 69="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Typedefs/PublicEvents--RemoteControl.SendMessageFromProcess.ctl"
File 70="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Typedefs/RC Process Type--Enum.ctl"
File 71="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Typedefs/Variant with Message ID--cluster.ctl"
File 72="user.lib/Levylab/Levylab Instruments/SMOs/Logger.Error/CreatePrivateEvents.vi"
File 73="user.lib/Levylab/Levylab Instruments/SMOs/Logger.Error/DestroyPrivateEvents.vi"
File 74="user.lib/Levylab/Levylab Instruments/SMOs/Logger.Error/Error Log Generator Example 1.vi"
File 75="user.lib/Levylab/Levylab Instruments/SMOs/Logger.Error/Log Error.vi"
File 76="user.lib/Levylab/Levylab Instruments/SMOs/Logger.Error/Logger.Error.GetPrivateEvents.vi"
File 77="user.lib/Levylab/Levylab Instruments/SMOs/Logger.Error/Logger.Error.LogError.vi"
File 78="user.lib/Levylab/Levylab Instruments/SMOs/Logger.Error/Logger.Error.lvclass"
File 79="user.lib/Levylab/Levylab Instruments/SMOs/Logger.Error/Logger.Error.TestLauncher.vi"
File 80="user.lib/Levylab/Levylab Instruments/SMOs/Logger.Error/Process.vi"
File 81="user.lib/Levylab/Levylab Instruments/SMOs/Logger.Error/Typedefs/PrivateEvents--Cluster.ctl"
File 82="user.lib/Levylab/Levylab Instruments/SMOs/Logger.Error/Typedefs/PrivateEvents--Logger.Error.Log Error.ctl"
File 83="user.lib/Levylab/Levylab Instruments/SMOs/Logger.DSC/CreatePrivateEvents.vi"
File 84="user.lib/Levylab/Levylab Instruments/SMOs/Logger.DSC/CreatePublicEvents.vi"
File 85="user.lib/Levylab/Levylab Instruments/SMOs/Logger.DSC/DestroyPrivateEvents.vi"
File 86="user.lib/Levylab/Levylab Instruments/SMOs/Logger.DSC/DestroyPublicEvents.vi"
File 87="user.lib/Levylab/Levylab Instruments/SMOs/Logger.DSC/Logger.DSC.GetPublicEvents.vi"
File 88="user.lib/Levylab/Levylab Instruments/SMOs/Logger.DSC/Logger.DSC.lvclass"
File 89="user.lib/Levylab/Levylab Instruments/SMOs/Logger.DSC/Logger.DSC.TestLauncher.vi"
File 90="user.lib/Levylab/Levylab Instruments/SMOs/Logger.DSC/Logger.DSC.WriteVariable.vi"
File 91="user.lib/Levylab/Levylab Instruments/SMOs/Logger.DSC/Logger.GetPrivateEvents.vi"
File 92="user.lib/Levylab/Levylab Instruments/SMOs/Logger.DSC/Logger.ReadVariable.vi"
File 93="user.lib/Levylab/Levylab Instruments/SMOs/Logger.DSC/Process.vi"
File 94="user.lib/Levylab/Levylab Instruments/SMOs/Logger.DSC/Read from DSC (unused).vi"
File 95="user.lib/Levylab/Levylab Instruments/SMOs/Logger.DSC/Write Cluster to DSC.vi"
File 96="user.lib/Levylab/Levylab Instruments/SMOs/Logger.DSC/Write to DSC.vi"
File 97="user.lib/Levylab/Levylab Instruments/SMOs/Logger.DSC/Typedefs/PrivateEvents--Cluster.ctl"
File 98="user.lib/Levylab/Levylab Instruments/SMOs/Logger.DSC/Typedefs/PrivateEvents--Logger.DSC.Set Address.ctl"
File 99="user.lib/Levylab/Levylab Instruments/SMOs/Logger.DSC/Typedefs/PrivateEvents--Logger.DSC.Write Variable.ctl"
File 100="user.lib/Levylab/Levylab Instruments/SMOs/Logger.DSC/Typedefs/PublicEvents--Cluster.ctl"
File 101="user.lib/Levylab/Levylab Instruments/SMOs/Logger.DSC/Typedefs/PublicEvents--Logger.DSC.Read Variable.ctl"
File 102="user.lib/Levylab/Levylab Instruments/SMOs/Logger/Logger.lvclass"
File 103="user.lib/Levylab/Levylab Instruments/SMOs/Logger/Logger.TestLauncher.vi"
File 104="user.lib/Levylab/Levylab Instruments/SMOs/Logger/Process.vi"
File 105="user.lib/Levylab/Levylab Instruments/SMOs/Logger/Read path.vi"
File 106="user.lib/Levylab/Levylab Instruments/SMOs/Logger/Write path.vi"
File 107="user.lib/Levylab/Levylab Instruments/SMOs/LevyLab/Handle Error.vi"
File 108="user.lib/Levylab/Levylab Instruments/SMOs/LevyLab/LevyLab.lvclass"
File 109="user.lib/Levylab/Levylab Instruments/SMOs/LevyLab/LevyLab.TestLauncher.vi"
File 110="user.lib/Levylab/Levylab Instruments/SMOs/LevyLab/Process.vi"
File 111="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/API Commands--constant.vi"
File 112="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Close Instrument.vi"
File 113="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Configure Instrument.vi"
File 114="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Create Instrument.vi"
File 115="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/CreatePrivateEvents.vi"
File 116="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/CreatePublicEvents.vi"
File 117="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/DestroyPrivateEvents.vi"
File 118="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/DestroyPublicEvents.vi"
File 119="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/enumerateStaticDependencies.vi"
File 120="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Error Dialog.vi"
File 121="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Get Instrument Dependencies.vi"
File 122="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/getAll.vi"
File 123="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Handle Command.vi"
File 124="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Handle Error.vi"
File 125="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Instrument.Configuration Window.vi"
File 126="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Instrument.GetPrivateEvents.vi"
File 127="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Instrument.GetPublicEvents.vi"
File 128="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Instrument.lvclass"
File 129="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Instrument.MessageFromProcess.vi"
File 130="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Instrument.MessageToProcess.vi"
File 131="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Instrument.Read Configuration File.vi"
File 132="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Instrument.Read Configuration.vi"
File 133="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Instrument.TestLauncher.vi"
File 134="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Instrument.Write Configuration File.vi"
File 135="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Instrument.Write Configuration.vi"
File 136="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/List Devices.vi"
File 137="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Open Instrument.vi"
File 138="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Process.vi"
File 139="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Read Configuration Class.vi"
File 140="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Read Hardware Address.vi"
File 141="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Remote Client.vi"
File 142="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/SMO Name - constant.vi"
File 143="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/SMO Port - constant.vi"
File 144="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/SMO Public API - constant.vi"
File 145="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/SMO RC Type - constant.vi"
File 146="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Write Configuration Class.vi"
File 147="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Write Configuration Path.vi"
File 148="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Write Hardware Address.vi"
File 149="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Write Process Name.vi"
File 150="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Write SMO Configuration.vi"
File 151="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Typedefs/Configuration--Cluster.ctl"
File 152="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Typedefs/DSC Configuration--Cluster.ctl"
File 153="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Typedefs/HW Configuration--Cluster.ctl"
File 154="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Typedefs/PrivateEvents--Cluster.ctl"
File 155="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Typedefs/PrivateEvents--Instrument.MessageToProcess.ctl"
File 156="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Typedefs/PublicEvents--Cluster.ctl"
File 157="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Typedefs/PublicEvents--Instrument.get all.ctl"
File 158="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Typedefs/PublicEvents--Instrument.MessageFromProcess.ctl"
File 159="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Typedefs/SMO Configuration--Cluster.ctl"
File 160="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Typedefs/SMO RC Type--enum.ctl"
File 161="user.lib/Levylab/Levylab Instruments/SMOs/Configuration/Configuration.lvclass"
File 162="user.lib/Levylab/Levylab Instruments/SMOs/Configuration/Read Configuration.vi"
File 163="user.lib/Levylab/Levylab Instruments/SMOs/Configuration/Write Configuration.vi"
File 164="user.lib/Levylab/Levylab Instruments/SMOs/Configuration/Write Path.vi"
File 165="user.lib/Levylab/Levylab Instruments/Instrument Types/VSource/Get Bias Voltage.vi"
File 166="user.lib/Levylab/Levylab Instruments/Instrument Types/VSource/Instrument.VSource.lvclass"
File 167="user.lib/Levylab/Levylab Instruments/Instrument Types/VSource/Set Bias Voltage.vi"
File 168="user.lib/Levylab/Levylab Instruments/Instrument Types/VNA/Get Data.vi"
File 169="user.lib/Levylab/Levylab Instruments/Instrument Types/VNA/Instrument.VNA.lvclass"
File 170="user.lib/Levylab/Levylab Instruments/Instrument Types/VNA/Set Average.vi"
File 171="user.lib/Levylab/Levylab Instruments/Instrument Types/VNA/Set Format.vi"
File 172="user.lib/Levylab/Levylab Instruments/Instrument Types/VNA/Set Measurement.vi"
File 173="user.lib/Levylab/Levylab Instruments/Instrument Types/VNA/Set Power.vi"
File 174="user.lib/Levylab/Levylab Instruments/Instrument Types/VNA/Set Sweep.vi"
File 175="user.lib/Levylab/Levylab Instruments/Instrument Types/VNA/Typedefs/Conversion--enum.ctl"
File 176="user.lib/Levylab/Levylab Instruments/Instrument Types/VNA/Typedefs/Format--enum.ctl"
File 177="user.lib/Levylab/Levylab Instruments/Instrument Types/VNA/Typedefs/Measurement--enum.ctl"
File 178="user.lib/Levylab/Levylab Instruments/Instrument Types/VNA/Typedefs/Sweep Type--enum.ctl"
File 179="user.lib/Levylab/Levylab Instruments/Instrument Types/Strain/Get Strain.vi"
File 180="user.lib/Levylab/Levylab Instruments/Instrument Types/Strain/Instrument.Strain.lvclass"
File 181="user.lib/Levylab/Levylab Instruments/Instrument Types/Strain/Set Strain.vi"
File 182="user.lib/Levylab/Levylab Instruments/Instrument Types/Optical Delay Line/Get Delay.vi"
File 183="user.lib/Levylab/Levylab Instruments/Instrument Types/Optical Delay Line/Instrument.OpticalDelayLine.lvclass"
File 184="user.lib/Levylab/Levylab Instruments/Instrument Types/Optical Delay Line/Set Delay.vi"
File 185="user.lib/Levylab/Levylab Instruments/Instrument Types/Cryostat/Get Angle.vi"
File 186="user.lib/Levylab/Levylab Instruments/Instrument Types/Cryostat/Get Helium Level.vi"
File 187="user.lib/Levylab/Levylab Instruments/Instrument Types/Cryostat/Get Magnet Field.vi"
File 188="user.lib/Levylab/Levylab Instruments/Instrument Types/Cryostat/Get Nitrogen Level.vi"
File 189="user.lib/Levylab/Levylab Instruments/Instrument Types/Cryostat/Get Pressure.vi"
File 190="user.lib/Levylab/Levylab Instruments/Instrument Types/Cryostat/Get Temperature.vi"
File 191="user.lib/Levylab/Levylab Instruments/Instrument Types/Cryostat/Instrument.Cryostat.lvclass"
File 192="user.lib/Levylab/Levylab Instruments/Instrument Types/Cryostat/Set Angle.vi"
File 193="user.lib/Levylab/Levylab Instruments/Instrument Types/Cryostat/Set Magnet Field.vi"
File 194="user.lib/Levylab/Levylab Instruments/Instrument Types/Cryostat/Set Temperature.vi"
File 195="user.lib/Levylab/Levylab Instruments/Instrument Types/Cryostat/Wait for Magnet Setpoint.vi"
File 196="user.lib/Levylab/Levylab Instruments/Instrument Types/Cryostat/Wait for Temperature Setpoint.vi"
File 197="user.lib/Levylab/Levylab Instruments/Instrument Types/Cryostat/Typedefs/Magnet Axis--Enum.ctl"
File 198="user.lib/Levylab/Levylab Instruments/Instrument Types/Cryostat/Typedefs/Magnet Mode--Enum.ctl"
File 199="user.lib/Levylab/Levylab Instruments/Instrument Types/Cryostat/Typedefs/Rotator Axis--Enum.ctl"
File 200="user.lib/Levylab/Levylab Instruments/Instrument Types/CBridge/Get Capacitance.vi"
File 201="user.lib/Levylab/Levylab Instruments/Instrument Types/CBridge/Instrument.CBridge.lvclass"
[File Group 1]
Target Dir="<menus>/Categories/Levylab"
Replace Mode="Always"
Num Files=18
File 0="_functions_levylab_lib_levylab_instruments_1.mnu"
File 1="_functions_levylab_lib_levylab_instruments_10.mnu"
File 2="_functions_levylab_lib_levylab_instruments_11.mnu"
File 3="_functions_levylab_lib_levylab_instruments_12.mnu"
File 4="_functions_levylab_lib_levylab_instruments_13.mnu"
File 5="_functions_levylab_lib_levylab_instruments_14.mnu"
File 6="_functions_levylab_lib_levylab_instruments_15.mnu"
File 7="_functions_levylab_lib_levylab_instruments_16.mnu"
File 8="_functions_levylab_lib_levylab_instruments_17.mnu"
File 9="_functions_levylab_lib_levylab_instruments_2.mnu"
File 10="_functions_levylab_lib_levylab_instruments_3.mnu"
File 11="_functions_levylab_lib_levylab_instruments_4.mnu"
File 12="_functions_levylab_lib_levylab_instruments_5.mnu"
File 13="_functions_levylab_lib_levylab_instruments_6.mnu"
File 14="_functions_levylab_lib_levylab_instruments_7.mnu"
File 15="_functions_levylab_lib_levylab_instruments_8.mnu"
File 16="_functions_levylab_lib_levylab_instruments_9.mnu"
File 17="functions_Levylab_lib_Levylab_Instruments.mnu"
[File Group 2]
Target Dir="<menus>/Categories/Levylab"
Replace Mode="If Newer"
Num Files=1
File 0="dir.mnu"
| [
"p.irvin@levylab.org"
] | p.irvin@levylab.org |
56f367f795975a518e9c881a9cacb28fbc13c280 | 9d59088bb0fd2827aa3d4ecde4ed38184d74f25b | /basic_learn/hello.py | 936190aae461ee4f7be2f74409dc4247b949a488 | [] | no_license | mrwangsg/py_test | 1de1f9b141b1ccb3da18e19366455a2ec1e74de9 | 80674801da595e48549d72b1c35fdbfee480c9eb | refs/heads/master | 2022-11-10T11:22:55.176884 | 2018-01-21T14:13:02 | 2018-01-21T14:13:02 | 115,670,652 | 0 | 1 | null | 2022-10-25T08:58:50 | 2017-12-29T00:36:48 | Python | UTF-8 | Python | false | false | 25 | py | print("hello, 敏敏!") | [
"wangsg2@yonyou"
] | wangsg2@yonyou |
8563f1c564497428eb080f5585823d7eef26080c | fbda8e738aa75dd0bb3a6b0f323f7279ff231c32 | /base/web_driver_factory.py | 0a7d4196c9ef25849cd814877039a5b15c79591f | [] | no_license | prtv23/Psmi | ccc40c95796d1fcac037baf5701f70ef44ba491b | cdd59484210ecee3a69ff830ec9ac09495799301 | refs/heads/master | 2021-02-07T10:08:30.380803 | 2020-05-08T15:24:18 | 2020-05-08T15:24:18 | 244,012,841 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,058 | py | from selenium import webdriver
import os
class WebDriverFactory:
    """Factory that builds a configured Selenium WebDriver and opens the
    application under test."""

    def __init__(self, browser):
        # Browser name, e.g. 'firefox' or 'chrome'
        # (matched case-insensitively in get_driver_instance).
        self.browser = browser

    def get_driver_instance(self):
        """Create a WebDriver for ``self.browser``, load the base URL in a
        clean maximised session, and return the driver.

        Unrecognised (or empty) browser names fall back to Firefox,
        matching the original behaviour.
        """
        # Application under test.
        base_url = "http://50.108.46.90:6080/"
        # Driver binaries live in ./browser_drivers under the current
        # working directory.
        cur_work_dir = os.getcwd()
        browser_dir = os.path.join(cur_work_dir, 'browser_drivers')
        firefox_driver = os.path.join(browser_dir, 'geckodriver.exe')
        chrome_driver = os.path.join(browser_dir, 'chromedriver.exe')
        # Normalise so 'Chrome'/'FIREFOX' etc. also select the right driver
        # instead of silently falling back to Firefox.
        browser = (self.browser or '').lower()
        if browser == 'chrome':
            os.environ["webdriver.chrome.driver"] = chrome_driver
            driver = webdriver.Chrome(chrome_driver)
        else:
            # 'firefox' and any unrecognised value use Firefox (original
            # firefox and else branches were identical).
            driver = webdriver.Firefox(executable_path=firefox_driver)
        # Start from a clean, maximised session on the application URL.
        driver.maximize_window()
        driver.delete_all_cookies()
        driver.get(base_url)
        return driver
| [
"prithlak@gmail.com"
] | prithlak@gmail.com |
58aff4c1de4e599bea863f9887eb8a58999e695c | 62dc209888fef3c5c4f822bb0846df536d5b1949 | /octoprint_anywhere/utils.py | bc051ad25cb1272812c0af0d194f48c39f89e332 | [
"MIT"
] | permissive | luttapy/OctoPrint-Anywhere | c2bbb8675c397ff59b31259fb9f3bca34b92562c | 05c8b6b92a600fc28ac4206efe63fc67c7aae2c5 | refs/heads/master | 2020-03-31T22:24:35.991077 | 2018-10-04T00:00:54 | 2018-10-04T00:00:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 512 | py | # coding=utf-8
from __future__ import absolute_import
def ip_addr():
    """Return a list of this host's IP addresses.

    First tries ``hostname --all-ip-addresses`` (Linux-only flag); then
    determines the primary outbound address via a UDP socket and appends
    it if it is not already present.
    """
    ip_addresses = []
    try:
        from subprocess import check_output
        ip_addresses = check_output(['hostname', '--all-ip-addresses']).split()
    except Exception:
        # `hostname` may be missing or lack the flag (e.g. non-Linux);
        # fall back to the socket-based detection below. (Was a bare
        # `except:`, which also swallowed SystemExit/KeyboardInterrupt.)
        pass
    import socket
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        # connect() on a UDP socket sends no packet; it only selects the
        # local interface that would be used to reach the target.
        s.connect(("8.8.8.8", 53))
        primary_ip = s.getsockname()[0]
    finally:
        # Original leaked the socket if connect() raised.
        s.close()
    if primary_ip not in ip_addresses:
        ip_addresses.append(primary_ip)
    return ip_addresses
| [
"kenneth.jiang@gmail.com"
] | kenneth.jiang@gmail.com |
9cb7715468af782c612db7ce54ca692ec0455255 | dc1a3796c97438a9e62325ab928e81eea052aca0 | /spam_classifier/runMLModels.py | 4424df6b0b8040e8da57d509729e5655b277265e | [] | no_license | Prajakta16/Spam-email-classifier | e3ae6b9c43357b8f8809a2efb327c979a376548c | 9c37f63d30064c85b76493a77b2526fc6934f0a6 | refs/heads/master | 2022-12-13T20:07:41.446703 | 2020-09-17T22:57:37 | 2020-09-17T22:57:37 | 296,344,168 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,331 | py | from pandas import np
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import roc_auc_score
from sklearn.tree import DecisionTreeClassifier
from sklearn.naive_bayes import MultinomialNB
from getTrainTestData import prepareTrainingDataPart1, read_data_from_ES
from getTrainTestData import prepareTrainingDataPart2
part1Flag = True
def rank_docs(predicted_probability_nb, testing_index):
    """Rank test documents by spam probability and persist the top 100.

    Args:
        predicted_probability_nb: per-document spam probabilities, aligned
            element-for-element with ``testing_index``.
        testing_index: document identifiers (strings) for the test set.

    Side effects: prints the scores and the ranking, and writes one
    document id per line, highest probability first, to ``./top100SpamDocs``.
    """
    print(predicted_probability_nb)
    # Pair ids with scores; duplicate ids keep the last score, matching
    # the original index-based assignment.
    test_spam_score_dict = dict(zip(testing_index, predicted_probability_nb))
    # Highest-probability documents first; cap at 100.
    top_spam_docs = sorted(test_spam_score_dict.items(), key=lambda x: x[1], reverse=True)[:100]
    print("Top spam docs are: ")
    print(top_spam_docs)
    # `with` already closes the file; the original's extra close() and
    # trailing `pass` were dead code, and `file` shadowed the builtin.
    with open('./top100SpamDocs', mode='w') as out_file:
        for doc_id, _score in top_spam_docs:
            out_file.write(doc_id + "\n")
if __name__ == "__main__":
    # Load the prepared train/test split; part1Flag selects the pipeline.
    if part1Flag:
        fitted_x_train, y_train, transformed_x_test, y_test, testing_index = prepareTrainingDataPart1()
    else:
        fitted_x_train, y_train, transformed_x_test, y_test, testing_index = prepareTrainingDataPart2()
    # --- Logistic regression (L1-regularised, liblinear solver) ---
    print("Training for linear regression.........")
    lr = LogisticRegression(penalty='l1', solver="liblinear")
    lr.fit(fitted_x_train, y_train)
    predicted_probability = lr.predict_proba(transformed_x_test)
    print(predicted_probability)
    # Column 1 is the probability of the positive (spam) class.
    predicted_probability_lr = lr.predict_proba(transformed_x_test)[:, 1]
    score = roc_auc_score(np.array(y_test), predicted_probability_lr)
    print("Roc auc score for linear regression " + str(score))
    # --- Decision tree ---
    print("Training for decision tree........")
    dt = DecisionTreeClassifier()
    dt.fit(fitted_x_train, y_train)
    predicted_probability_dt = dt.predict_proba(transformed_x_test)[:, 1]
    score = roc_auc_score(np.array(y_test), predicted_probability_dt)
    print("Roc auc score for decision tree " + str(score))
    # --- Multinomial naive Bayes ---
    print("Training for naive bayes........")
    nb = MultinomialNB()
    nb.fit(fitted_x_train, y_train)
    predicted_probability_nb = nb.predict_proba(transformed_x_test)[:, 1]
    score = roc_auc_score(np.array(y_test), predicted_probability_nb)
    print("Roc auc score for naive bayes "+str(score))
    # NOTE(review): ranking is done with the *logistic regression*
    # probabilities, not the naive-bayes ones computed last -- confirm
    # this is intentional.
    rank_docs(predicted_probability_lr, testing_index)
| [
"prajakta@Prajaktas-MacBook-Pro.local"
] | prajakta@Prajaktas-MacBook-Pro.local |
ba8d9cd4e406de9132641c870ee69edbd84913a0 | cdc2a429197daab83d64072a5132d30b4ff273d0 | /gridworld.py | 5180492981bb77e57720b978d5f64a0d4034da00 | [] | no_license | gopeshh/Deep-Reinforcement-Learning | a4daf6959d94b423499203a1b90d71587a02e917 | f9a68e6120a27c455fd37352d11e05b1206f0da7 | refs/heads/master | 2020-08-30T07:37:40.631184 | 2019-10-29T14:36:19 | 2019-10-29T14:36:19 | 218,307,784 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,606 | py |
from __future__ import print_function
import numpy as np
import time
start_time = time.time()
WORLD_SIZE = 20
REWARD = -1.0
ACTION_PROB = 0.25
world = np.zeros((WORLD_SIZE, WORLD_SIZE))
# left, up, right, down
actions = ['L', 'U', 'R', 'D']
nextState = []
for i in range(0, WORLD_SIZE):
nextState.append([])
for j in range(0, WORLD_SIZE):
next = dict()
if i == 0:
next['U'] = [i, j]
else:
next['U'] = [i - 1, j]
if i == WORLD_SIZE - 1:
next['D'] = [i, j]
else:
next['D'] = [i + 1, j]
if j == 0:
next['L'] = [i, j]
else:
next['L'] = [i, j - 1]
if j == WORLD_SIZE - 1:
next['R'] = [i, j]
else:
next['R'] = [i, j + 1]
nextState[i].append(next)
states = []
for i in range(0, WORLD_SIZE):
for j in range(0, WORLD_SIZE):
if (i == 0 and j == 0) or (i == WORLD_SIZE - 1 and j == WORLD_SIZE - 1):
continue
else:
states.append([i, j])
# for figure 4.1
while True:
# keep iteration until convergence
newWorld = np.zeros((WORLD_SIZE, WORLD_SIZE))
for i, j in states:
for action in actions:
newPosition = nextState[i][j][action]
# bellman equation
newWorld[i, j] += ACTION_PROB * (REWARD + world[newPosition[0], newPosition[1]])
if np.sum(np.abs(world - newWorld)) < 1e-4:
print('Random Policy')
print(newWorld)
break
world = newWorld
print("--- %s seconds ---" % (time.time() - start_time))
| [
"noreply@github.com"
] | noreply@github.com |
f044e21b7decd864e43dd31f8936ab2b0969a9aa | 0ae22471220c5a2a0810d3034fea34aa8b033419 | /PE_Analysis.py | 8398f3dcc164f52911c1561183b1ab9bf7e791eb | [] | no_license | PepperYouth/PE_analysis | 136e89229bb18e1f3eabfa4f14f6f733429e61a9 | 8d2b6c219e1bf969a8edf75fdcd3550dada62491 | refs/heads/master | 2022-12-05T09:59:26.385609 | 2020-08-26T09:51:04 | 2020-08-26T09:51:04 | 132,348,241 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,539 | py | import sys
import collections
def PrintALL(dic):
    """Hex-dump ``dic['data']`` under a header line of ``dic['name']``.

    Each character is printed as two hex digits plus a space, with an
    extra space every 8 bytes and a newline every 16 bytes; two blank
    lines terminate the dump.
    """
    print(dic['name'] + ':\n' + '_' * 56)
    for count, ch in enumerate(dic['data'], start=1):
        sys.stdout.write('%02X ' % ord(ch))
        if count % 8 == 0:
            sys.stdout.write(' ')
        if count % 16 == 0:
            sys.stdout.write('\n')
    sys.stdout.write('\n\n')
def Print(dic):
    """Print every entry of ``dic`` as ``key value`` lines, except the
    'DataDirectory' entry, which is hex-dumped via PrintALL; a blank line
    follows the whole dictionary."""
    for key, value in dic.items():
        if key != 'DataDirectory':
            print(key, value)
        else:
            PrintALL(value)
    sys.stdout.write('\n')
def Cul(ad):
    """Interpret the character sequence *ad* as a little-endian unsigned
    integer (first character is the least-significant byte)."""
    return sum(ord(ch) << (8 * k) for k, ch in enumerate(ad))
# --- Read the target file ---------------------------------------------
print('Input file\'s name')
# NOTE(review): raw_input() means this script targets Python 2.
filename = raw_input()
file = open(filename,'rb')
DATA_STR = file.read()
file.close()
# DOS header: first 64 bytes of a PE file.
IMAGE_DOS_HEADER = {\
'name':'IMAGE_DOS_HEADER',\
'data':DATA_STR[:64]}
# e_lfanew: file offset of the NT header, little-endian dword at 0x3C.
e_lfanew = Cul(IMAGE_DOS_HEADER['data'][-4:])
# NOTE(review): the DOS stub normally spans [64:e_lfanew]; the
# [65:e_lfanew-1] slice looks off by one at both ends — confirm intent.
DOS_Stub = {\
'name':'DOS Stub',\
'data':DATA_STR[65:e_lfanew-1]}
# --- IMAGE_FILE_HEADER (COFF header), 20 bytes after the 4-byte signature
IMAGE_FILE_HEADER = collections.OrderedDict()
IMAGE_FILE_HEADER['Machine']=hex(Cul(DATA_STR[e_lfanew+4:e_lfanew+6]))
IMAGE_FILE_HEADER['NumberOfSections']=Cul(DATA_STR[e_lfanew+6:e_lfanew+8])
IMAGE_FILE_HEADER['TimeDateStamp']=Cul(DATA_STR[e_lfanew+8:e_lfanew+12])
IMAGE_FILE_HEADER['PointerToSymbolTable']=hex(Cul(DATA_STR[e_lfanew+12:e_lfanew+16]))
IMAGE_FILE_HEADER['NumberOfSymbols']=Cul(DATA_STR[e_lfanew+16:e_lfanew+20])
IMAGE_FILE_HEADER['SizeOfOptionalHeader']=Cul(DATA_STR[e_lfanew+20:e_lfanew+22])
IMAGE_FILE_HEADER['Characteristics']=Cul(DATA_STR[e_lfanew+22:e_lfanew+24])
# --- IMAGE_OPTIONAL_HEADER (32-bit layout) ----------------------------
# NOTE(review): "ADRESS" is a misspelling of "ADDRESS", kept as-is.
ADRESS = e_lfanew + 24
IMAGE_OPTIONAL_HEADER = collections.OrderedDict()
IMAGE_OPTIONAL_HEADER['Magic']=hex(Cul(DATA_STR[ADRESS:ADRESS+2]))
IMAGE_OPTIONAL_HEADER['MajorLinkerVersion']=Cul(DATA_STR[ADRESS+2:ADRESS+3])
IMAGE_OPTIONAL_HEADER['MinorLinkerVersion']=Cul(DATA_STR[ADRESS+3:ADRESS+4])
IMAGE_OPTIONAL_HEADER['SizeOfCode']=Cul(DATA_STR[ADRESS+4:ADRESS+8])
IMAGE_OPTIONAL_HEADER['SizeOfInitializedData']=Cul(DATA_STR[ADRESS+8:ADRESS+12])
IMAGE_OPTIONAL_HEADER['SizeOfUninitializedData']=Cul(DATA_STR[ADRESS+12:ADRESS+16])
IMAGE_OPTIONAL_HEADER['AddressOfEntryPoint']=hex(Cul(DATA_STR[ADRESS+16:ADRESS+20]))
IMAGE_OPTIONAL_HEADER['BaseOfCode']=Cul(DATA_STR[ADRESS+20:ADRESS+24])
IMAGE_OPTIONAL_HEADER['BaseOfData']=Cul(DATA_STR[ADRESS+24:ADRESS+28])
IMAGE_OPTIONAL_HEADER['ImageBase']=Cul(DATA_STR[ADRESS+28:ADRESS+32])
IMAGE_OPTIONAL_HEADER['SectionAlignment']=Cul(DATA_STR[ADRESS+32:ADRESS+36])
IMAGE_OPTIONAL_HEADER['FileAlignment']=Cul(DATA_STR[ADRESS+36:ADRESS+40])
IMAGE_OPTIONAL_HEADER['MajorOperatingSystemVersion']=Cul(DATA_STR[ADRESS+40:ADRESS+42])
IMAGE_OPTIONAL_HEADER['MinorOperatingSystemVersion']=Cul(DATA_STR[ADRESS+42:ADRESS+44])
IMAGE_OPTIONAL_HEADER['MajorImageVersion']=Cul(DATA_STR[ADRESS+44:ADRESS+46])
IMAGE_OPTIONAL_HEADER['MinorImageVersion']=Cul(DATA_STR[ADRESS+46:ADRESS+48])
IMAGE_OPTIONAL_HEADER['MajorSubsystemVersion']=Cul(DATA_STR[ADRESS+48:ADRESS+50])
IMAGE_OPTIONAL_HEADER['MinorSubsystemVersion']=Cul(DATA_STR[ADRESS+50:ADRESS+52])
IMAGE_OPTIONAL_HEADER['Win32VersionValue']=Cul(DATA_STR[ADRESS+52:ADRESS+56])
IMAGE_OPTIONAL_HEADER['SizeOfImage']=Cul(DATA_STR[ADRESS+56:ADRESS+60])
IMAGE_OPTIONAL_HEADER['SizeOfHeaders']=Cul(DATA_STR[ADRESS+60:ADRESS+64])
IMAGE_OPTIONAL_HEADER['CheckSum']=Cul(DATA_STR[ADRESS+64:ADRESS+68])
IMAGE_OPTIONAL_HEADER['Subsystem']=Cul(DATA_STR[ADRESS+68:ADRESS+70])
IMAGE_OPTIONAL_HEADER['DllCharacteristics']=Cul(DATA_STR[ADRESS+70:ADRESS+72])
IMAGE_OPTIONAL_HEADER['SizeOfStackReserve']=Cul(DATA_STR[ADRESS+72:ADRESS+76])
IMAGE_OPTIONAL_HEADER['SizeOfStackCommit']=Cul(DATA_STR[ADRESS+76:ADRESS+80])
IMAGE_OPTIONAL_HEADER['SizeOfHeapReserve']=Cul(DATA_STR[ADRESS+80:ADRESS+84])
IMAGE_OPTIONAL_HEADER['SizeOfHeapCommit']=Cul(DATA_STR[ADRESS+84:ADRESS+88])
IMAGE_OPTIONAL_HEADER['LoaderFlags']=Cul(DATA_STR[ADRESS+88:ADRESS+92])
IMAGE_OPTIONAL_HEADER['NumberOfRvaAndSizes']=Cul(DATA_STR[ADRESS+92:ADRESS+96])
# Raw 128-byte data-directory table (16 entries of 8 bytes), hex-dumped later.
IMAGE_OPTIONAL_HEADER['DataDirectory']={'name':'DataDirectory','data':DATA_STR[ADRESS+96:ADRESS+224]}
# NOTE(review): "Signture" is a typo in the output label (runtime string).
Signature = 'Signture: '+DATA_STR[e_lfanew:e_lfanew+4]
IMAGE_NT_HEADER = [Signature,IMAGE_FILE_HEADER,IMAGE_OPTIONAL_HEADER]
# --- Section headers: 40 bytes each, immediately after the NT headers --
ADRESS = e_lfanew+248
IMAGE_SECTION_HEADER = []
SECTIONS = []
for i in range(IMAGE_FILE_HEADER['NumberOfSections']):
    temp = collections.OrderedDict()
    temp['Name']=DATA_STR[ADRESS:ADRESS+8]
    temp['PhysicalAddress/VirtualSize']=hex(Cul(DATA_STR[ADRESS+8:ADRESS+12]))
    temp['VirtualAddress']=hex(Cul(DATA_STR[ADRESS+12:ADRESS+16]))
    temp['SizeOfRawData']=Cul(DATA_STR[ADRESS+16:ADRESS+20])
    temp['PointerToRawData']=hex(Cul(DATA_STR[ADRESS+20:ADRESS+24]))
    temp['PointerToRelocations']=hex(Cul(DATA_STR[ADRESS+24:ADRESS+28]))
    temp['PointerToLinenumbers']=hex(Cul(DATA_STR[ADRESS+28:ADRESS+32]))
    temp['NumberOfRelocations']=Cul(DATA_STR[ADRESS+32:ADRESS+34])
    temp['NumberOfLinenumbers']=Cul(DATA_STR[ADRESS+34:ADRESS+36])
    temp['Characteristics']=hex(Cul(DATA_STR[ADRESS+36:ADRESS+40]))
    IMAGE_SECTION_HEADER.append(temp)
    ADRESS+=40
    # NOTE(review): exec/eval to create numbered variables a0, a1, ... is
    # fragile and unsafe; a plain list append of the dict would suffice.
    exec('a' + '%d'%i + '= {\'name\':temp[\'Name\'],\
\'data\':DATA_STR[int(temp[\'PointerToRawData\'],16):\
int(temp[\'PointerToRawData\'],16)+temp[\'SizeOfRawData\']]}')
    SECTIONS.append(eval('a' + '%d'%i))
# --- Report -----------------------------------------------------------
PrintALL(IMAGE_DOS_HEADER)
PrintALL(DOS_Stub)
print('IMAGE_NT_HEADER'+':\n'+'_'*56)
print(IMAGE_NT_HEADER[0])
print('    IMAGE_FILE_HEADER'+':\n'+'_'*56)
Print(IMAGE_NT_HEADER[1])
print('    IMAGE_OPTIONAL_HEADER'+':\n'+'_'*56)
Print(IMAGE_NT_HEADER[2])
print('IMAGE_SECTION_HEADER'+':\n'+'_'*56)
# NOTE(review): reuses the loop variable `i` left over from the section
# loop above — fragile if that loop ever changes.
for i in range(i+1):
    Print(IMAGE_SECTION_HEADER[i])
print('SECTIONS'+':\n'+'_'*56)
for i in range(i+1):
PrintALL(SECTIONS[i]) | [
"591772502@qq.com"
] | 591772502@qq.com |
45c47b7c35fa0ce608bf392a516ea7c7190e9edc | ea0d8c98f2edadac853d7b42de8e243c2ee9ed1e | /minesweep/layout.py | 4563db0a1a8bac7c32f16d9e4ad0491fe45a853c | [] | no_license | fxs123456/Artificial_Intelligence | e790fda357c4ee6c804c74946cb6932f06052dc7 | 3cd746120a0da69d939da9f2484e7c3a6487cca9 | refs/heads/master | 2020-04-03T19:39:47.169315 | 2020-03-06T14:38:59 | 2020-03-06T14:38:59 | 155,530,623 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,230 | py | from tkinter import *
from tkinter.messagebox import *
import minesweep
class Layout:
    """Tk dialog asking the player for minesweeper map width and bomb
    count, then launching the game via minesweep.Minsweeping.

    Note: constructing Layout() blocks in Tk's mainloop until the
    window is destroyed.
    """

    def __init__(self):
        # Defaults used if the player never presses confirm.
        self.MAP_WIDTH = 10
        self.BOMBS_NUM = 10
        self.frame = Tk()
        self.frame.minsize(300, 200)
        self.frame.title('layout')
        self.createWights()
        # Blocks here until determine() destroys the window.
        self.frame.mainloop()

    def createWights(self):
        """Build the labelled entry fields and the confirm button.

        (Name looks like a typo for "createWidgets", kept for callers.)
        """
        selectLabelFrame = LabelFrame(self.frame, text="请自定义地图宽度和地雷数:")
        selectLabelFrame.pack(expand=YES)
        width_label = Label(selectLabelFrame, text='地图宽度')
        bombs_label = Label(selectLabelFrame, text='地雷数')
        self.width_input = Entry(selectLabelFrame)
        self.bombs_input = Entry(selectLabelFrame)
        determineButton = Button(selectLabelFrame, text="确认", command=self.determine)
        width_label.pack()
        self.width_input.pack()
        bombs_label.pack()
        self.bombs_input.pack()
        determineButton.pack()

    def determine(self):
        """Confirm handler: read the two entries, close the dialog and
        start the game with the chosen parameters.

        NOTE(review): int() raises ValueError on non-numeric input —
        no validation is performed here.
        """
        self.MAP_WIDTH = int(self.width_input.get())
        self.BOMBS_NUM = int(self.bombs_input.get())
        self.frame.destroy()
        minesweep.Minsweeping(self.MAP_WIDTH, self.BOMBS_NUM)


if __name__ == '__main__':
    Layout()
| [
"913239973@qq.com"
] | 913239973@qq.com |
3119ea6af239d63712279b5d9972ab77083b0507 | d906b38849fcb8eb26dc584dfb03d9ed5a133abb | /pendulum.py | cdc8f0dcaa2b2591c1eb4e512b073da368453a54 | [] | no_license | basicmachines/sim-dynamics | dd5213f10b7a8bbc325e492b41714ceee45e0c1c | 3430651a5b684ecca4a0ceb282213070f379c2fd | refs/heads/master | 2020-03-08T14:11:40.689788 | 2018-04-10T05:53:21 | 2018-04-10T05:53:21 | 128,178,397 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,355 | py | #!/usr/bin/env python
"""Dynamic simulation of a pendulum.
"""
from pygame.locals import K_z, K_x, K_c, K_b, K_n, K_m
from simulator import Simulator
# Import the model you want to simulate
from models import Pendulum
# Import the controller(s) you want to simulate
from controllers import PIDController, KeyboardInput
TARGET_FPS = 30
# Initialize model
model = Pendulum(position=(16, 12))
# ----------- Setup Keyboard Controller ---------------
# Map the keys to the model inputs
key_actions = {
K_m: 'TP3', # Positive torque values (counter-clockwise)
K_n: 'TP2',
K_b: 'TP1',
K_z: 'TN3', # Negative torque values (clockwise)
K_x: 'TN2',
K_c: 'TN1'
}
kbd_controller = KeyboardInput(model.inputs, key_actions=key_actions)
key_instructions = [
'z, x, c - apply anti-clockwise torque',
'b, n, m - apply clockwise torque'
]
# ----------- Setup PID Controller ---------------
pid_controller = PIDController(
cv=model.outputs['a'],
mv=model.inputs,
kp=75.0,
ki=8.0,
kd=300.0,
set_point=0.0,
mv_max=7,
mv_min=-7,
bool_outputs=model.torque_settings,
time_step=1.0 / TARGET_FPS
)
# ------------- Run Simulation -----------------
simulator = Simulator(
model=model,
controllers=[kbd_controller, pid_controller],
key_instructions=key_instructions
)
simulator.run()
| [
"bill.tubbs@me.com"
] | bill.tubbs@me.com |
073d927bf2d53a5643dc50ffb5497163554adf63 | 28bf56eb5f37414d034e2744a12fe7be1c34b6b7 | /Zadania Programowanie/4. Miejsca zerowe trójmianu.py | 52305a8b71b55ed4335e33545f552c17c967e275 | [] | no_license | lucjaesterastaniek/Zaliczenie---programowanie- | 9c25f770e43e4a9b290c0bc0ca5005c3476a3be4 | a5feffb18b6b64686ffa74fc5a5cd93e020c31c6 | refs/heads/master | 2020-12-22T14:27:18.336510 | 2020-01-28T20:17:19 | 2020-01-28T20:17:19 | 236,822,690 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 561 | py | print ("Ten program znajduje miejsca zerowe trójmianu kwadratowego.")
print ("Jeśli twój trójmian zapisany jest w postaci: a * x^2 + bx + c = 0")
print ("Podaj a:")
a=int(input())
print ("Podaj b:")
b=int(input())
print ("Podaj c:")
c=int(input())
delta = (b**2) - 4*a*c
if (delta > 0):
x1 = (-b + delta**0.5) / (2 * a)
x2 = (-b - delta**0.5) / (2 * a)
print ("X1: {0} X2: {1}". format(x1,x2))
elif (delta==0):
x = (-b) / (2 * a)
print ("X1: {0}".format(x))
else:
print (" Brak miejsc zerowych. ")
input() | [
"noreply@github.com"
] | noreply@github.com |
b5aa9f19a9d125eb3707e06d131800bfce86c1d8 | 50be5bf642b14bc04a1bccb087468eb708016bbf | /kili/mutations/label/__init__.py | d2a4cfd8920037072dd4a1edb70797896035b733 | [
"Apache-2.0"
] | permissive | marcenacp/kili-playground | 706ac55d3e83da60fb15c6fabafe18b1bb53c002 | d3a711d1908725d127cc99ceb504301482bf41d8 | refs/heads/master | 2022-04-20T09:33:58.662465 | 2020-04-10T15:54:29 | 2020-04-10T15:54:29 | 254,937,647 | 0 | 0 | Apache-2.0 | 2020-04-11T19:13:51 | 2020-04-11T19:13:50 | null | UTF-8 | Python | false | false | 2,837 | py | from json import dumps
from typing import List
from ...helpers import format_result
from .queries import (GQL_APPEND_TO_LABELS, GQL_CREATE_HONEYPOT,
GQL_CREATE_PREDICTIONS, GQL_UPDATE_LABEL,
GQL_UPDATE_PROPERTIES_IN_LABEL)
def create_predictions(client, project_id: str, external_id_array: List[str], model_name_array: List[str], json_response_array: List[dict]):
    """Upload model predictions for a batch of assets in one mutation.

    The three parallel arrays must have the same length; each JSON
    response is serialized before being sent.
    """
    n_ids = len(external_id_array)
    assert n_ids == len(json_response_array), "IDs list and predictions list should have the same length"
    assert n_ids == len(model_name_array), "IDs list and model names list should have the same length"
    serialized_responses = [dumps(response) for response in json_response_array]
    variables = {
        'projectID': project_id,
        'externalIDArray': external_id_array,
        'modelNameArray': model_name_array,
        'jsonResponseArray': serialized_responses,
    }
    return format_result('data', client.execute(GQL_CREATE_PREDICTIONS, variables))
def append_to_labels(client, author_id: str, json_response: dict, label_asset_id: str, label_type: str, seconds_to_label: int, skipped: bool = False):
    """Attach a new label to an asset on behalf of *author_id*."""
    payload = dict(
        authorID=author_id,
        jsonResponse=dumps(json_response),
        labelAssetID=label_asset_id,
        labelType=label_type,
        secondsToLabel=seconds_to_label,
        skipped=skipped,
    )
    return format_result('data', client.execute(GQL_APPEND_TO_LABELS, payload))
def update_label(client, label_id: str, label_asset_id: str, review_asset_id: str, author_id: str, label_type: str, json_response: dict, seconds_to_label: int):
    """Overwrite an existing label's fields via the update mutation."""
    payload = dict(
        labelID=label_id,
        labelAssetID=label_asset_id,
        reviewAssetID=review_asset_id,
        authorID=author_id,
        labelType=label_type,
        jsonResponse=dumps(json_response),
        secondsToLabel=seconds_to_label,
    )
    return format_result('data', client.execute(GQL_UPDATE_LABEL, payload))
def update_properties_in_label(client, label_id: str, seconds_to_label: int = None, model_name: str = None, json_response: dict = None):
    """Update selected properties of an existing label.

    The JSON response is serialized only when provided; other fields are
    passed through as-is (possibly None).
    """
    serialized = dumps(json_response) if json_response is not None else None
    payload = dict(
        labelID=label_id,
        secondsToLabel=seconds_to_label,
        modelName=model_name,
        jsonResponse=serialized,
    )
    return format_result('data', client.execute(GQL_UPDATE_PROPERTIES_IN_LABEL, payload))
return format_result('data', result)
def create_honeypot(client, asset_id: str, json_response: dict):
    """Create a honeypot label for the given asset."""
    payload = dict(
        assetID=asset_id,
        jsonResponse=dumps(json_response),
    )
    return format_result('data', client.execute(GQL_CREATE_HONEYPOT, payload))
| [
"edouard@kili-technology.com"
] | edouard@kili-technology.com |
073109a1808364b51f6c6f0eca2a28dead822370 | 1045a1b49c91ba6b57c2f82fbb6b70ee74e8fa0f | /ApiAutoTest/utils/__init__.py | e12566fb0e8390d3e5390aae969517548a3c0118 | [] | no_license | NewDragonZhong/AutomationTest | acd3fd861971d23f60703126f2c42f7f6663ccc1 | 7636ed8c6e0260c7c1b8937599c597ab9a152802 | refs/heads/master | 2020-07-28T02:27:31.861430 | 2019-10-16T07:30:38 | 2019-10-16T07:30:38 | 209,280,237 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 155 | py | # -*- coding: utf-8 -*-
#@Time : 2018/5/3 14:44
#@Author : chenjinlin
#@Email : chenjinlin@bbdservice.com
#@File : __init__.py
#@Software: PyCharm | [
"change@qq.com"
] | change@qq.com |
2c8dac7180c8c32bed7b659ef471e00ce7431b22 | 7daadc7fe1c7276fd4b2cf253d04f18b51baab13 | /src/emotion.py | ad7b3fcbc14482b028c35b36d09a96973b109f21 | [] | no_license | Pbatch/FacebookEmotions | bb21a286115909f67eb48034db83e254d3ba2af2 | f54edb929de8f1efc9d356385ef244ccfe48ece8 | refs/heads/main | 2023-02-25T13:14:45.608964 | 2021-02-04T18:13:45 | 2021-02-04T18:13:45 | 336,028,787 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,286 | py | from transformers import BertTokenizer
from model import BertForMultiLabelClassification
from multilabel_pipeline import MultiLabelPipeline
from tqdm import tqdm
class Emotion:
    """Single-label wrapper around a multi-label BERT emotion model
    (``monologg/bert-base-cased-goemotions-ekman``).

    predict() returns one label per input text: the unique emotion whose
    probability exceeds *threshold*, or 'N/A' when zero or several do.
    """

    def __init__(self, threshold=0.8, batch_size=5):
        # threshold: minimum class probability for a label to count.
        # batch_size: number of texts fed to the pipeline per call.
        self.threshold = threshold
        self.batch_size = batch_size
        self.model_path = 'monologg/bert-base-cased-goemotions-ekman'
        # Downloads/loads the pretrained checkpoint (network on first use).
        self.tokenizer = BertTokenizer.from_pretrained(self.model_path)
        self.model = BertForMultiLabelClassification.from_pretrained(self.model_path)
        self.pipeline = MultiLabelPipeline(
            model=self.model,
            tokenizer=self.tokenizer,
        )

    def predict(self, texts):
        """Classify *texts*; returns a list of label strings, same length
        and order as the input."""
        labels = []
        pbar = tqdm(total=len(texts))
        for i in range(0, len(texts), self.batch_size):
            probs = self.pipeline(texts[i: i + self.batch_size])
            for p in probs:
                # Indices of all classes above the confidence threshold.
                # (The comprehension's `i` is scoped to the comprehension
                # in Python 3 and does not clobber the batch index.)
                top_idx = [i
                           for i in range(len(p))
                           if p[i] > self.threshold]
                if len(top_idx) != 1:
                    # Ambiguous (several emotions) or none above threshold.
                    label = 'N/A'
                else:
                    label = self.model.config.id2label[top_idx[0]]
                labels.append(label)
            # NOTE(review): the final update may overshoot len(texts) when
            # it is not a multiple of batch_size; cosmetic for tqdm only.
            pbar.update(self.batch_size)
        return labels
| [
"peter_batchelor1@hotmail.com"
] | peter_batchelor1@hotmail.com |
552ac5116d0dbc29272076004d4a9b916cb2a96e | a9fc496e0724866093dbb9cba70a8fdce12b67a9 | /scripts/field/q59000_tuto.py | 5925ba6e70b43edd7737755c40baf93619dad4ae | [
"MIT"
] | permissive | ryantpayton/Swordie | b2cd6b605f7f08f725f5e35d23ba3c22ef2ae7c0 | ca6f42dd43f63b1d2e6bb5cdc8fc051c277f326e | refs/heads/master | 2022-12-01T09:46:47.138072 | 2020-03-24T10:32:20 | 2020-03-24T10:32:20 | 253,997,319 | 2 | 0 | MIT | 2022-11-24T08:17:54 | 2020-04-08T05:50:22 | Java | UTF-8 | Python | false | false | 1,222 | py | # Arboren : Stump Town
# Quest tutorial script; `sm` is the script manager injected by the game
# server at execution time — TODO confirm against the scripting engine.
if not sm.hasQuest(59000):  # The Town Prankster
    if not sm.hasQuestCompleted(59000):  # The Town Prankster
        sm.removeEscapeButton()
        if sm.sendAskYesNo("Would you like to skip the tutorial cutscenes?"):
            #todo add after skipping tutorial
            sm.dispose()
        else:
            # Intro monologue spoken by the player character.
            sm.setPlayerAsSpeaker()
            sm.sendNext("Dun, dun, dun. Hero theme song! I'm #h #, I'm from a town hidden deeep within Arboren Forest!")
            sm.sendNext("I've got the coolest ears and tail, dun dun dun. They're super heroic, dun dun dun.")
            sm.sendNext("And I'm gonna be a hero somedaaaaay. A hero somedaaaaay! Drumroll!")
            sm.sendNext("For reals. Granny Rosanna tells me bedtime stories every night...")
            sm.sendNext("Stories about the #bfive brave heroes#k, who sealed away the terrifying #bBlack Mage#k! \r\n Pew, pew, kaboom! I'm gonna be a hero just like 'em someday soon!")
            # Switch the speaker to the NPC (id 9390305).
            sm.setSpeakerID(9390305)
            #todo effects
            sm.sendNext("Who'd dig a hole here!?")
            sm.setPlayerAsSpeaker()
            sm.sendNext("Uh oh, what's this? I smell... the need for a Hero!")
sm.dispose() | [
"thijsenellen@outlook.com"
] | thijsenellen@outlook.com |
74642e8877743f8591bc0e8ec061ab3c92d67f5a | 6a803f0be359651a68107ccc2452be58e178d54b | /test/test_tojson.py | ba97f228529b58dca993c52a07690caadab47f87 | [
"MIT"
] | permissive | pombredanne/javaproperties-cli | 14c8a067ec8a4af6bb8ac25e64117fafb7e0238e | 192f96a9ffa504ed3c0fd9636f7a321b65f8cad4 | refs/heads/master | 2020-12-25T18:42:39.656917 | 2017-05-17T13:10:48 | 2017-05-17T13:10:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,347 | py | from click.testing import CliRunner
from javaproperties_cli.tojson import properties2json
def test_properties2json_empty():
    """Empty input yields an empty JSON object."""
    r = CliRunner().invoke(properties2json, input=b'')
    assert r.exit_code == 0
    assert r.output_bytes == b'{}\n'

def test_properties2json_comment_only():
    """Comment-only input yields an empty JSON object."""
    r = CliRunner().invoke(properties2json, input=b'#This is a comment.\n')
    assert r.exit_code == 0
    assert r.output_bytes == b'{}\n'

def test_properties2json_simple():
    """All three key/value separators (=, :, space) are parsed; output keys sorted."""
    r = CliRunner().invoke(properties2json, input=b'''
#Mon Nov 07 15:29:40 EST 2016
key = value
foo: bar
zebra apple
''')
    assert r.exit_code == 0
    assert r.output_bytes == b'''{
    "foo": "bar",
    "key": "value",
    "zebra": "apple"
}
'''

def test_properties2json_scalarlike():
    """Scalar-looking values (ints, floats, null, booleans) stay strings."""
    r = CliRunner().invoke(properties2json, input=b'''
#Mon Nov 07 15:29:40 EST 2016
key = 42
foo: 3.14
zebra null
true=false
''')
    assert r.exit_code == 0
    assert r.output_bytes == b'''{
    "foo": "3.14",
    "key": "42",
    "true": "false",
    "zebra": "null"
}
'''

def test_properties2json_empty_value():
    """Keys with empty or missing values map to empty strings."""
    r = CliRunner().invoke(properties2json, input=b'''
#Mon Nov 07 15:29:40 EST 2016
empty=
missing
''')
    assert r.exit_code == 0
    assert r.output_bytes == b'''{
    "empty": "",
    "missing": ""
}
'''

def test_properties2json_escaped_nonascii_input():
    """\\uXXXX escapes in the input survive round-trip as JSON escapes."""
    r = CliRunner().invoke(properties2json, input=b'''
#Mon Nov 07 15:29:40 EST 2016
edh: \\u00F0
snowman: \\u2603
goat: \\uD83D\\uDC10
\\u00F0: edh
\\uD83D\\uDC10: goat
\\u2603: snowman
''')
    assert r.exit_code == 0
    assert r.output_bytes == b'''{
    "edh": "\\u00f0",
    "goat": "\\ud83d\\udc10",
    "snowman": "\\u2603",
    "\\u00f0": "edh",
    "\\u2603": "snowman",
    "\\ud83d\\udc10": "goat"
}
'''

def test_properties2json_utf8_input_no_encoding():
    """Without --encoding, UTF-8 bytes are read as Latin-1 (mojibake expected)."""
    r = CliRunner().invoke(properties2json, input=b'''
#Mon Nov 07 15:29:40 EST 2016
edh: \xC3\xB0
snowman: \xE2\x98\x83
goat: \xF0\x9F\x90\x90
\xC3\xB0: edh
\xF0\x9F\x90\x90: goat
\xE2\x98\x83: snowman
''')
    assert r.exit_code == 0
    assert r.output_bytes == b'''{
    "edh": "\\u00c3\\u00b0",
    "goat": "\\u00f0\\u009f\\u0090\\u0090",
    "snowman": "\\u00e2\\u0098\\u0083",
    "\\u00c3\\u00b0": "edh",
    "\\u00e2\\u0098\\u0083": "snowman",
    "\\u00f0\\u009f\\u0090\\u0090": "goat"
}
'''

def test_properties2json_utf8_input():
    """With --encoding utf-8, multibyte characters decode correctly."""
    r = CliRunner().invoke(properties2json, ['--encoding', 'utf-8'], input=b'''
#Mon Nov 07 15:29:40 EST 2016
edh: \xC3\xB0
snowman: \xE2\x98\x83
goat: \xF0\x9F\x90\x90
\xC3\xB0: edh
\xF0\x9F\x90\x90: goat
\xE2\x98\x83: snowman
''')
    assert r.exit_code == 0
    assert r.output_bytes == b'''{
    "edh": "\\u00f0",
    "goat": "\\ud83d\\udc10",
    "snowman": "\\u2603",
    "\\u00f0": "edh",
    "\\u2603": "snowman",
    "\\ud83d\\udc10": "goat"
}
'''

def test_properties2json_utf16_input():
    """UTF-16BE input decodes correctly when --encoding is given."""
    r = CliRunner().invoke(properties2json, ['--encoding', 'utf-16BE'], input=u'''
#Mon Nov 07 15:29:40 EST 2016
edh: \u00F0
snowman: \u2603
goat: \U0001F410
\u00F0: edh
\U0001F410: goat
\u2603: snowman
'''.encode('UTF-16BE'))
    assert r.exit_code == 0
    assert r.output_bytes == b'''{
    "edh": "\\u00f0",
    "goat": "\\ud83d\\udc10",
    "snowman": "\\u2603",
    "\\u00f0": "edh",
    "\\u2603": "snowman",
    "\\ud83d\\udc10": "goat"
}
'''
# repeated keys?
# invalid \u escape
| [
"git@varonathe.org"
] | git@varonathe.org |
def computeHCF(x,y):
    """Return the highest common factor (GCD) of x and y.

    Uses Euclid's algorithm — O(log min(x, y)) instead of the original
    linear scan — and also handles 0 and negative inputs correctly
    (the original raised NameError when the smaller value was < 1,
    because ``hcf`` was never assigned).
    """
    x, y = abs(x), abs(y)
    while y:
        x, y = y, x % y
    return x
# Interactive driver: read two integers and report their H.C.F.
num1=int(input("Enter first number:"))
# Fixed typo in the prompt: "nunber" -> "number".
num2=int(input("Enter second number:"))
print("H.C.F of",num1,"and",num2,"is",computeHCF(num1,num2))
| [
"samarthchadda@gmail.com"
] | samarthchadda@gmail.com |
fe7ec225c5974261c1e5b62ab61c82217aea3869 | 50435cc917161e9feaf3bf4c4034dec317827b15 | /Comb.py | 44fbe5a67f25537a177bb7f56009cde982c30973 | [] | no_license | MirzaRaiyan/Program-For-Comb-Sort.py | a679f6b2a62c1a6b8704049205dff2c001e4160a | f3ca97bd54f7c35230ed7104c92c32061ee69686 | refs/heads/main | 2023-02-16T19:39:53.765818 | 2021-01-17T09:10:48 | 2021-01-17T09:10:48 | 330,352,379 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,164 | py | # Python program for implementation of CombSort
# To find next gap from current
def getNextGap(gap):
    """Shrink *gap* by Comb Sort's factor of 1.3, flooring at 1.

    Uses integer (floor) division: under Python 3 the original
    ``(gap * 10)/13`` produced a float, which later crashed
    ``range()`` and list indexing inside combSort.
    """
    gap = (gap * 10) // 13
    if gap < 1:
        return 1
    return gap
# Function to sort arr[] using Comb Sort
def combSort(arr):
    """Sort *arr* in place using Comb Sort; returns None.

    Repeatedly compares elements a shrinking gap apart; once the gap
    reaches 1 this degenerates into bubble sort and terminates after a
    pass with no swaps.
    """
    n = len(arr)
    # Start with a gap spanning the whole list.
    gap = n
    # Force at least one pass.
    swapped = True
    while gap != 1 or swapped:
        # int() guards against getNextGap returning a float under
        # Python 3 true division (range()/indexing require an int).
        gap = int(getNextGap(gap))
        swapped = False
        # One comparison pass at the current gap.
        for i in range(0, n - gap):
            if arr[i] > arr[i + gap]:
                arr[i], arr[i + gap] = arr[i + gap], arr[i]
                swapped = True
# Driver code to test above
arr = [ 8, 4, 1, 3, -44, 23, -6, 28, 0]
# combSort sorts in place; arr itself is reordered.
combSort(arr)
print ("Sorted array:")
for i in range(len(arr)):
print (arr[i]), | [
"noreply@github.com"
] | noreply@github.com |
d742b93351c54cfb4adc14548d05f3ad528ce73b | f13e28c12c3bc56b44b952d80b67cc5579cfb50a | /source/validation.py | de230416f125ad9a138d9e8308da5e01f4f9761e | [
"MIT"
def month_hardcode_split(pandas_df, month_column='month_n'):
    """Time-based train/test split: the latest month becomes the test set.

    Parameters
    ----------
    pandas_df : pandas.DataFrame containing an orderable month column.
    month_column : str, name of that column (default ``'month_n'``).

    Returns
    -------
    (train, test) : rows with month < max(month), and rows with
    month == max(month); both are independent copies, the input frame
    is left untouched.
    """
    # Mask the input directly and copy only the selected rows — the
    # original made a full copy of the whole frame first, duplicating
    # every row twice for no behavioral gain.
    latest_month = pandas_df[month_column].max()
    train = pandas_df[pandas_df[month_column] < latest_month].copy()
    test = pandas_df[pandas_df[month_column] == latest_month].copy()
    return train, test
"yaantok1@mts.ru"
] | yaantok1@mts.ru |
23fc787c792a031893f18ff36949854abf5f4aa8 | ed5758e192cc608f5e00ef0028b94c8d7b7a34f2 | /GIBSDownloader/tile.py | 3305ff71944125bf720b7a93cf6b3d02355e63e3 | [] | no_license | bigdatasciencegroup/GIBS-Downloader | 8757557038548a2efa3f3c73ffbb767a642a90e6 | 707aac63a53800200ab70a92a4ec66df13b962fc | refs/heads/main | 2023-08-03T09:45:35.105815 | 2021-09-21T14:46:54 | 2021-09-21T14:46:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 219 | py | # Stores tiling information
class Tile():
    """Value object holding tiling parameters for the downloader.

    Attributes (semantics inferred from names — confirm against callers):
    width/height are the tile dimensions, overlap the amount shared
    between adjacent tiles, handling the boundary-handling mode.
    """

    def __init__(self, width, height, overlap, handling):
        self.width = width
        self.height = height
        self.overlap = overlap
        self.handling = handling

    def __repr__(self):
        # Debug-friendly representation (the original bare object had none).
        return (f"{type(self).__name__}(width={self.width!r}, "
                f"height={self.height!r}, overlap={self.overlap!r}, "
                f"handling={self.handling!r})")
"flisboa@fnal.gov"
] | flisboa@fnal.gov |
ec75d5d8759e948bbae77fefe5d6d84140b13b9d | 066c0043afd9f8f2a0178f1864dc5169e1930d05 | /house/migrations/0027_house_favorite.py | b5b71cb8af083c9b1a7057a9a7a832b7747f8478 | [] | no_license | okcenmmustafa/DjangoProject | 1941cfda4fd8c7a8e8ac60b7ac6b6a4876da25b7 | b6a210417021a03b8884ece337b7c74919579db0 | refs/heads/master | 2022-09-02T20:47:31.281567 | 2020-05-26T13:44:45 | 2020-05-26T13:44:45 | 251,619,107 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 540 | py | # Generated by Django 3.0.4 on 2020-05-25 00:02
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the many-to-many ``favorite`` field linking House to users.

    Auto-generated by Django (makemigrations); applied migrations
    should not be hand-edited.
    """

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('house', '0026_auto_20200525_0009'),
    ]

    operations = [
        migrations.AddField(
            model_name='house',
            name='favorite',
            field=models.ManyToManyField(blank=True, related_name='favorite', to=settings.AUTH_USER_MODEL),
        ),
    ]
| [
"mustafaokcen@gmail.com"
] | mustafaokcen@gmail.com |
ec7fa1f86c2a000110ed3e35ad2f81201ff443b7 | cb062c48280311134fe22573a41f9c4d6631b795 | /src/xm/core/txs/multisig/MultiSigVote.py | a05b6202d10dcb74e58f0c5ec2605ebb1c0396e9 | [
"MIT"
] | permissive | xm-blockchain/xm-core | da1e6bb4ceb8ab642e5d507796e2cc630ed23e0f | 2282b435a02f061424d656155756d8f50238bcfd | refs/heads/main | 2023-01-15T19:08:31.399219 | 2020-11-19T03:54:19 | 2020-11-19T03:54:19 | 314,127,428 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,528 | py | from pyxmlib.pyxmlib import bin2hstr
from xm.core.State import State
from xm.core.StateContainer import StateContainer
from xm.core.misc import logger
from xm.core.txs.Transaction import Transaction
from xm.crypto.misc import sha256
class MultiSigVote(Transaction):
    """
    Casts (or withdraws) one signatory's vote on a pending MultiSigSpend
    transaction, identified by its shared key.

    (Original docstring was copy-pasted from MultiSigSpend and described
    the wrong transaction type.)
    """

    def __init__(self, protobuf_transaction=None):
        super(MultiSigVote, self).__init__(protobuf_transaction)

    @property
    def shared_key(self):
        # Key identifying the MultiSigSpend transaction being voted on.
        return self._data.multi_sig_vote.shared_key

    @property
    def unvote(self):
        # True when this transaction withdraws a previously cast vote.
        return self._data.multi_sig_vote.unvote

    @property
    def prev_tx_hash(self):
        return self._data.multi_sig_vote.prev_tx_hash

    def set_prev_tx_hash(self, prev_tx_hash: bytes):
        self._data.multi_sig_vote.prev_tx_hash = prev_tx_hash

    def get_data_hash(self):
        """Hash of the signed payload: master_addr | fee | shared_key | unvote."""
        tmp_tx_hash = (self.master_addr +
                       self.fee.to_bytes(8, byteorder='big', signed=False) +
                       self.shared_key +
                       self.unvote.to_bytes(1, byteorder='big', signed=False))

        return sha256(tmp_tx_hash)

    @staticmethod
    def create(shared_key: bytes,
               unvote: bool,
               fee: int,
               xmss_pk,
               master_addr: bytes = None):
        """Build and structurally validate a new MultiSigVote transaction."""
        multi_sig_vote = MultiSigVote()

        if master_addr:
            multi_sig_vote._data.master_addr = master_addr

        multi_sig_vote._data.public_key = bytes(xmss_pk)

        multi_sig_vote._data.multi_sig_vote.shared_key = shared_key
        multi_sig_vote._data.multi_sig_vote.unvote = unvote
        multi_sig_vote._data.fee = int(fee)

        # Raises on invalid data; signature is added later, so skip it here.
        multi_sig_vote.validate_or_raise(verify_signature=False)

        return multi_sig_vote

    def _validate_custom(self):
        """Stateless validation: only the fee sign is checked here."""
        if self.fee < 0:
            logger.warning('MultiSigVote [%s] Invalid Fee = %d', bin2hstr(self.txhash), self.fee)
            return False

        return True

    def _validate_extended(self, state_container: StateContainer):
        """Stateful validation against the current chain state.

        Rejects votes before the hard fork, on unknown/executed/expired
        MultiSigSpend transactions, from non-signatories, duplicate vote
        types, and senders who cannot cover the fee.
        """
        if state_container.block_number < state_container.current_dev_config.hard_fork_heights[0]:
            logger.warning("[MultiSigVote] Hard Fork Feature not yet activated")
            return False

        addr_from_state = state_container.addresses_state[self.addr_from]
        vote_stats = state_container.votes_stats[self.shared_key]

        if vote_stats is None:
            logger.warning("[MultiSigVote] Invalid Shared key %s", bin2hstr(self.shared_key))
            return False

        multi_sig_spend_tx = state_container.multi_sig_spend_txs[self.shared_key]
        block_number = state_container.block_number

        if vote_stats.executed:
            logger.warning("[MultiSigVote] Invalid Tx as MultiSigSpend has already been executed")
            return False

        if multi_sig_spend_tx is None:
            logger.warning("MultiSigSpend not found, Shared Key %s", bin2hstr(self.shared_key))
            return False

        if block_number > multi_sig_spend_tx.expiry_block_number:
            logger.warning("[MultiSigVote] Voted for expired Multi Sig Spend Txn")
            logger.warning("Expiry Block Number: %s, Current Block Number: %s",
                           multi_sig_spend_tx.expiry_block_number,
                           block_number)
            return False

        if self.addr_from not in vote_stats.signatories:
            logger.warning("Address not found in signatory list")
            logger.warning("Address %s, Shared Key %s, Multi Sig Address %s",
                           bin2hstr(self.addr_from),
                           bin2hstr(self.shared_key),
                           bin2hstr(vote_stats.multi_sig_address))
            return False

        index = vote_stats.get_address_index(self.addr_from)
        # Re-casting the same vote type (vote/unvote) is a no-op and rejected.
        if vote_stats.unvotes[index] == self.unvote:
            logger.warning("[MultiSigVote] Invalid as Vote type already executed")
            logger.warning("Vote type %s", self.unvote)
            return False

        tx_balance = addr_from_state.balance

        if tx_balance < self.fee:
            logger.warning('[MultiSigVote] State validation failed for %s because: Insufficient funds',
                           bin2hstr(self.txhash))
            logger.warning('balance: %s, fee: %s', tx_balance, self.fee)
            return False

        return True

    def set_affected_address(self, addresses_set: set):
        super().set_affected_address(addresses_set)

    def apply(self,
              state: State,
              state_container: StateContainer) -> bool:
        """Apply this vote: deduct the fee, record the tx hash and update
        the weighted vote statistics for the shared key."""
        address_state = state_container.addresses_state[self.addr_from]
        address_state.update_balance(state_container, self.fee, subtract=True)

        state_container.paginated_tx_hash.insert(address_state, self.txhash)

        vote_stats = state_container.votes_stats[self.shared_key]
        multi_sig_address = vote_stats.multi_sig_address
        weight, found = state_container.addresses_state[multi_sig_address].get_weight_by_signatory(self.addr_from)

        if not found:
            logger.info("[MultiSigVote] Address is not the signatory for the multi sig address")
            return False

        # Remember the signatory's previous vote tx so revert() can restore it.
        self.set_prev_tx_hash(vote_stats.get_vote_tx_hash_by_signatory_address(self.addr_from))
        if not vote_stats.apply_vote_stats(self, weight, state_container):
            logger.info("[MultiSigVote] Failed to apply vote_stats")
            return False

        return self._apply_state_changes_for_PK(state_container)

    def revert(self,
              state: State,
              state_container: StateContainer) -> bool:
        """Undo apply(): roll back vote statistics, refund the fee and
        remove the tx hash (mirror image of apply, reverse order)."""
        vote_stats = state_container.votes_stats[self.shared_key]
        multi_sig_address = vote_stats.multi_sig_address
        weight, found = state_container.addresses_state[multi_sig_address].get_weight_by_signatory(self.addr_from)

        if not found:
            logger.info("[MultiSigVote] Address is not the signatory for the multi sig address")
            return False

        if not vote_stats.revert_vote_stats(self, weight, state_container):
            logger.info("[MultiSigVote] Failed to revert vote_stats")
            return False

        address_state = state_container.addresses_state[self.addr_from]
        address_state.update_balance(state_container, self.fee)

        state_container.paginated_tx_hash.remove(address_state, self.txhash)

        return self._revert_state_changes_for_PK(state_container)
| [
"74695206+xm-blockchain@users.noreply.github.com"
] | 74695206+xm-blockchain@users.noreply.github.com |
57719b6c5d96224d88bb804591da97a639e2342e | e0a243c3302bad31a31d18aac7e77290fc94f1ff | /MySpider/MySpider/items.py | f9bf3b8869ad9645d1aa279e63eb37a949d5a944 | [] | no_license | SibasisDash/Webscrape-Build-API | f36d57dcb1f9e5357c1b1564336e3f5dc547db74 | 9fb03c0b2f2330e9ede6a5587b2756fedaea28cf | refs/heads/master | 2022-12-14T23:27:08.363468 | 2020-09-23T13:16:17 | 2020-09-23T13:16:17 | 297,968,020 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 481 | py | # -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# https://docs.scrapy.org/en/latest/topics/items.html
import scrapy
class RedditItem(scrapy.Item):
    '''
    Storage container for one scraped Reddit post.

    Field names suggest: post date (raw and string form), subreddit,
    title, URL, score and the comments-page URL — confirm against the
    spider that populates this item.
    '''
    date = scrapy.Field()
    date_str = scrapy.Field()
    sub = scrapy.Field()
    title = scrapy.Field()
    url = scrapy.Field()
    score = scrapy.Field()
    commentsUrl = scrapy.Field()
| [
"sibasis@LAPTOP-7GC8JEUG.localdomain"
] | sibasis@LAPTOP-7GC8JEUG.localdomain |
a1aadb6d641395324d851879b73a6b06411f9e31 | 49c65ec1525873452d3dd2c9a08021124e89ab67 | /usb_video.py | b22e170c709875a0e562bb7760d8b96a693f8cd3 | [] | no_license | heyiheng1024/SimpleFaceRec | 2f170f712045e53126a84e96396c20b19916df13 | c4c0a01cbde3b520d07f726f0173fa6d42ea986f | refs/heads/master | 2020-09-17T10:07:26.009782 | 2019-11-26T01:57:40 | 2019-11-26T01:57:40 | 224,072,179 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,083 | py | #coding utf-8
import cv2
import sys
# Video stream source (webcam or local file).
# NOTE(review): a .jpg is passed here, so OpenCV treats the image as a
# one-frame "video" -- confirm whether a real video path was intended.
cap = cv2.VideoCapture("迪丽热巴.jpg")
# cap.set(cv2.CAP_PROP_FRAME_WIDTH, 720)
# cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 480)
# Haar-cascade face classifier.
classfire = cv2.CascadeClassifier("Classifier/haarcascade_frontalface_alt2.xml")
# Colors in BGR order (OpenCV convention).
blue = (255,0,0)
green = (0,255,0)
red = (0,0,255)
while cap.isOpened():
    ok , frame =cap.read()# read the current frame
    if not ok:
        break
    # Convert the frame to grayscale to reduce the amount of data.
    gery = cv2.cvtColor(frame,cv2.COLOR_BGR2GRAY)
    # Detect faces in the grayscale frame.
    faceRects= classfire.detectMultiScale(gery,scaleFactor=1.2,minNeighbors=4,minSize=(20,20))
    if len(faceRects)>0:
        for face in faceRects :
            x,y,w,h = face
            print(x,y,w,h)
            cv2.rectangle(frame,(x,y),(x+w,y+h),green,thickness=2)
            # NOTE(review): every detection overwrites the same '1.jpg'
            # with the full annotated frame -- only the last one survives.
            face_img = '1.jpg'
            cv2.imwrite(face_img, frame)
    cv2.imshow('frame',frame)
    if cv2.waitKey(10) & 0xFF == ord('q'):
        break
# Release the capture and close all windows on exit.
cap.release()
cv2.destroyAllWindows()
| [
"noreply@github.com"
] | noreply@github.com |
2c1f894ce32d930a564aa1411b74807e59aa783e | 4803130273c8eaf06485fb8db309c15732037104 | /3rdday/mysite/mysite/settings.py | 82f5a169df7fec921807c5066b7bd362dd84680e | [] | no_license | imimran/django-practice | b385650495c11880cf202d300caefe5172e7682c | 7307855ca0906e05f4710323175f6fb8a13d074c | refs/heads/master | 2021-09-12T17:59:01.846483 | 2018-04-19T14:30:47 | 2018-04-19T14:30:47 | 113,576,619 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,116 | py | """
Django settings for mysite project.
Generated by 'django-admin startproject' using Django 2.0.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): a real SECRET_KEY is committed to source control here;
# rotate it and load it from the environment before any deployment.
SECRET_KEY = 'mg35klja4p%qiu_fhzu!+m#v2#92+!k6nfza20vqxgh^ahmr@i'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'polls.apps.PollsConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mysite.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'mysite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_URL = '/static/'
| [
"engr.aih@gmail.com"
] | engr.aih@gmail.com |
def hashw7(filepath):
    """Compute the Windows 7 prefetch-file path hash (SCCA hash) of *filepath*.

    The path is upper-cased and expanded to UTF-16-LE form (each character
    followed by a NUL byte), then hashed with Windows' 32-bit algorithm:
    8-byte chunks are mixed with the fixed multipliers below, and any
    trailing bytes are folded in with a ``37*h + byte`` loop.

    Fixes over the previous version: Python-3 compatible ``print()`` calls,
    explicit floor division, and no shadowing of the ``hash`` builtin.

    :param filepath: device path, e.g. ``\\DEVICE\\HARDDISKVOLUME2\\...``
    :return: the hash as a lowercase hex string without the ``0x`` prefix
    """
    # Convert filepath to uppercase "unicode" form: char + NUL byte.
    path = ""
    for character in filepath:
        path += character.upper() + '\x00'
    # Windows' algorithm begins here.
    h = 314159
    numof8 = len(path) // 8
    # The majority of the path is processed in 8-byte chunks.
    for i in range(numof8):
        char0 = ord(path[i * 8 + 0])
        char1 = ord(path[i * 8 + 1])
        char2 = ord(path[i * 8 + 2])
        char3 = ord(path[i * 8 + 3])
        char4 = ord(path[i * 8 + 4])
        char5 = ord(path[i * 8 + 5])
        char6 = ord(path[i * 8 + 6])
        char7 = ord(path[i * 8 + 7])
        h = (442596621 * char0 + 37 * (char6 + 37 * (char5 + 37 * (char4 + 37 * (char3 + 37 * (char2 + 37 * char1))))) - 803794207 * h + char7) % 4294967296
        # Debug trace: running hash after each 8-byte chunk.
        print(path[i * 8:i * 8 + 8], h)
    # The final <8 bytes are folded in one byte at a time.
    for k in range(len(path) % 8):
        h = (37 * h + ord(path[numof8 * 8 + k])) % 4294967296
    # The hash is returned as a hex string (no "0x" prefix).
    return format(h, 'x')

print(hashw7('\\DEVICE\\HARDDISKVOLUME2\\WINDOWS\\EXPLORER.EXE'))
| [
"brett@jerry"
] | brett@jerry |
0a3c21e99a75a04a3fc14ba53e02be8c8efebdcc | 8e81186012ecbee07ad901ae9dbb79a7e3e4a72d | /Snake.py | 5cfe623c7291c3247f68f72394014d30700d457d | [] | no_license | spnarkdnark/snek | 9dbb67a47ff21c32341421debba00195fe0e8c59 | 048436e7cd578b9f80782d0902a306ece6012a60 | refs/heads/master | 2020-04-15T04:29:21.427487 | 2019-01-09T02:46:30 | 2019-01-09T02:46:30 | 164,385,526 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,892 | py | import operator
UP = (0,1)
class Snake:
    """A snake: an ordered list of board cells plus a heading vector."""

    def __init__(self, body, direction):
        """Create a snake.

        :param body: list of (x, y) tuples traversed during the game;
            the last element is the head
        :param direction: a unit vector such as UP/DOWN/LEFT/RIGHT
        """
        self.body = body
        self.direction = direction
        self.length = len(body)
        self.control = {'UP': (0, 1), 'DOWN': (0, -1), 'LEFT': (-1, 0), 'RIGHT': (1, 0)}
        self.alive = True

    def get_position(self):
        """Return the cell the head will occupy after one step."""
        head_x, head_y = self.head()
        step_x, step_y = self.direction
        return (head_x + step_x, head_y + step_y)

    def take_step(self):
        """Advance one cell: grow a new head and drop the tail segment."""
        self.body.append(self.get_position())
        del self.body[0]

    def set_direction(self, direction):
        """Point the snake in a new direction (a unit-vector tuple)."""
        self.direction = direction

    def head(self):
        """Return the current head cell (last element of the body)."""
        return self.body[-1]

    def check_self_collide(self):
        """Mark the snake dead if any cell appears twice in its body."""
        if len(set(self.body)) < len(self.body):
            self.alive = False
#testuple = tuple(map(lambda x: x%10,snek.position)) # This will give you the coordinate to use for wraparound
| [
"samuelfudala@gmail.com"
] | samuelfudala@gmail.com |
8ba38a6f5ef8558650a4ba5c6605d6a09cbf9ae5 | 860aa5bf3e9bd4de9d9b1d6826663e95c4363d0e | /Python/DynamicProgramming/LongestPalindromicSubSeq.py | 6d1f4b846479d46c4c6dcc0fe7b1c35c24a4a9de | [] | no_license | anirudh-deshpande/Old-Programming | e0e72e691ac094bdcc5180d3777010121f03b9ae | c5a1d9513264ebfe699af304b054ce9e477d04d9 | refs/heads/master | 2020-12-11T05:23:59.157076 | 2016-04-08T01:04:57 | 2016-04-08T01:04:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,142 | py | __author__ = 'Anirudh'
# Recursive
def LCSRec(s1, s2):
    """Return a longest common subsequence of s1 and s2, in REVERSED order.

    The subsequence is built back-to-front, so callers reverse it with
    ``[::-1]`` to obtain it in natural order.

    Fix: characters and lengths are compared with ``==`` instead of the
    identity operator ``is``, which only worked through CPython's
    small-int/one-char-string interning.
    """
    l1 = len(s1)
    l2 = len(s2)
    # Base case: an empty string has an empty LCS.
    if l1 == 0 or l2 == 0:
        return ""
    if s1[l1 - 1] == s2[l2 - 1]:
        # Last characters match: they belong to the LCS.
        return s1[l1 - 1] + LCSRec(s1[:l1 - 1], s2[:l2 - 1])
    # Otherwise drop the last character of either string and keep the
    # longer of the two results.
    l1rem = LCSRec(s1[:l1 - 1], s2)
    l2rem = LCSRec(s1, s2[:l2 - 1])
    return l1rem if len(l1rem) > len(l2rem) else l2rem
#Dynamic Programming
def computeLCS(s1, s2):
    """Build the LCS backtracking matrix for s1 x s2.

    Returns a ``len(s1) x len(s2)`` "pointer" matrix understood by
    ``LCSDynamicProgramming``:

      * 0 -- ``s1[i] == s2[j]``: the character belongs to the LCS (move diagonally)
      * 1 -- skip ``s2[j]`` (move to ``j - 1``)
      * 2 -- skip ``s1[i]`` (move to ``i - 1``)

    Fixes over the previous version:
      * matched cells now store ``diagonal + 1`` (the length was never incremented)
      * value/pointer pairs are consistent: taking the value from the left
        cell now also emits the "move left" pointer (they disagreed before)
      * a mismatch at (0, 0) no longer leaves a bogus "match" pointer,
        which made e.g. LCS("a", "b") come out as "a"
      * characters are compared with ``==`` instead of identity ``is``
    """
    matrix = [[0 for _ in range(len(s2))] for _ in range(len(s1))]
    pointer = [[0 for _ in range(len(s2))] for _ in range(len(s1))]
    const_pick_s1_s2 = 0  # diagonal move: character is part of the LCS
    const_pick_s1 = 1     # move left (drop one character of s2)
    const_pick_s2 = 2     # move up (drop one character of s1)
    for i in range(len(s1)):
        for j in range(len(s2)):
            if s1[i] == s2[j]:
                # Extend the LCS ending at the previous diagonal cell.
                diag = matrix[i - 1][j - 1] if (i > 0 and j > 0) else 0
                matrix[i][j] = diag + 1
                pointer[i][j] = const_pick_s1_s2
            else:
                # Carry over the better of the left/up neighbours
                # (out-of-range neighbours count as length 0).
                left = matrix[i][j - 1] if j > 0 else 0
                up = matrix[i - 1][j] if i > 0 else 0
                if left >= up:
                    matrix[i][j] = left
                    pointer[i][j] = const_pick_s1
                else:
                    matrix[i][j] = up
                    pointer[i][j] = const_pick_s2
    return pointer
def LCSDynamicProgramming(s1,s2):
    """Reconstruct an LCS of s1 and s2 from computeLCS's pointer matrix.

    Walks from the bottom-right corner: pointer 0 emits s1[i] and moves
    diagonally, 1 moves left (j - 1), 2 moves up (i - 1).  The result is
    built back-to-front, so callers reverse it with [::-1].
    """
    pointer = computeLCS(s1,s2)
    # Start from the last character of each string.
    i=len(s1)-1
    j=len(s2)-1
    ret=""
    while i>=0 and j>=0:
        # NOTE(review): 'is 0' / 'is 1' rely on CPython small-int caching;
        # '==' would be the portable comparison.
        if pointer[i][j] is 0:
            # Diagonal pointer: s1[i] is part of the subsequence.
            ret=ret+s1[i]
            i=i-1
            j=j-1
        elif pointer[i][j] is 1:
            # Skip one character of s2.
            j=j-1
        else:
            # Skip one character of s1.
            i=i-1
    return ret
# Demo: both implementations should print "aceg".
# Fix: Python-2 print statements converted to Python-3 print() calls.
print(LCSRec("abcdefg", "aceg")[::-1])
print(LCSDynamicProgramming("abcdefg", "aceg")[::-1])
"deshpana@usc.edu"
] | deshpana@usc.edu |
5f7c0cdac07becdf70d55f1915794e2a91b1e177 | 8c51aff248eb6f463d62e934213660437c3a107b | /django_project/users/views.py | fe5316a90de09acd0372252d67941f52e802b19c | [] | no_license | wonjun0901/WJ_Develop_Individually | 5f839932c189adf2b2b34f7dadbdeaa8744f8d0e | e0402f5dbdda8ae8292cace124d381e29f707183 | refs/heads/master | 2021-01-02T00:13:38.851832 | 2020-02-18T01:10:15 | 2020-02-18T01:10:15 | 239,406,395 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 613 | py | from django.shortcuts import render, redirect
#from django.contrib.auth.forms import UserCreationForm
from django.contrib import messages
from .forms import UserRegisterForm
def register(request):
    """Sign-up view: render the registration form and create the account on a valid POST."""
    if request.method != 'POST':
        # Plain page view: hand out a fresh, unbound form.
        return render(request, 'users/register.html', {'form': UserRegisterForm()})
    form = UserRegisterForm(request.POST)
    if not form.is_valid():
        # Re-render with the bound form so validation errors are displayed.
        return render(request, 'users/register.html', {'form': form})
    form.save()
    username = form.cleaned_data.get('username')
    messages.success(request, f'Account created for {username}!')
    return redirect('blog-home')
| [
"wonjun0901@gmail.com"
] | wonjun0901@gmail.com |
2310513ea34ee18675a9e0138f458cbe5884acc4 | e169fd5ea9e79bb3be21e7148900eae700778a50 | /gis_utils/out_parsers/geojson_to_topojson.py | 6258ba5f0c3b1614e7b5b2490765fb29c0cd6e34 | [
"MIT"
] | permissive | Daniel-M/gis_utils | e1b8c8f1b6fd0949e57ae41fa17aa0b0c77eabf9 | a3497d5e235b57fb2db3f3e780e7efa6e4b4cbad | refs/heads/master | 2021-09-07T12:08:58.295120 | 2018-02-20T22:00:56 | 2018-02-20T22:00:56 | 122,255,346 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 792 | py | import os
import sys
import subprocess as sp
from pathlib import PurePath
PARSER_PATH = sys.modules[__name__].__file__
def geojson_to_topojson(infile, outfile):
    """Convert a GeoJSON file into a TopoJSON file via topojson-server's geo2topo.

    See https://github.com/topojson/topojson-server

    Fixes over the previous version: the availability probe no longer builds
    a shell command by string concatenation (which broke on paths containing
    spaces or shell metacharacters), and the conversion's ``Popen`` handle is
    returned instead of discarded, so callers can ``wait()`` for completion.

    :param infile: path of the input GeoJSON file
    :param outfile: path of the destination TopoJSON file
    :return: the running ``subprocess.Popen`` handle on success, else ``None``
    """
    cmd = os.path.join(PurePath(PARSER_PATH).parents[2],
                       "node_modules/topojson-server/bin/geo2topo")
    # Probe for the executable with an argument list (no shell involved).
    try:
        sp.run([cmd, "--version"], stdout=sp.DEVNULL, stderr=sp.DEVNULL, check=True)
    except (OSError, sp.CalledProcessError):
        print("Couldn't find geo2topo at {}".format(cmd))
        return None
    # Launch the conversion; hand back the process so it can be awaited/reaped.
    return sp.Popen([cmd, infile, "--out", outfile])
| [
"danielmejia55@gmail.com"
] | danielmejia55@gmail.com |
24789063096590a802a1056294cdd304c87b6226 | b07525d4470aa7f2f38e972e3c9b49b4b4f2d031 | /exercism/python/word-count/word_count_test.py | f22a7a3807d35183ee6214a4b0c781259582b954 | [] | no_license | Supakron123/testPython | 82721df5ce61e85967d1b61f96993bdea8fefcbf | 18303eb605a44fe68c02ed935f04f61b351eb28d | refs/heads/master | 2021-05-13T20:14:25.608935 | 2018-01-15T11:07:50 | 2018-01-15T11:07:50 | 116,908,345 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,967 | py | import unittest
from word_count import word_count
class WordCountTests(unittest.TestCase):
    """Exercises word_count(): case-folding, punctuation, apostrophes,
    whitespace handling and numeric tokens.

    Fix: the test method previously named ``test_count_ultiple_occurences``
    is renamed to ``test_count_multiple_occurrences`` (typo).
    """
    def test_count_one_word(self):
        self.assertEqual(
            word_count('word'),
            {'word': 1}
        )
    def test_count_one_of_each(self):
        self.assertEqual(
            word_count('one of each'),
            {'one': 1, 'of': 1, 'each': 1}
        )
    def test_count_multiple_occurrences(self):
        self.assertEqual(
            word_count('one fish two fish red fish blue fish'),
            {'one': 1, 'fish': 4, 'two': 1, 'red': 1, 'blue': 1}
        )
    def test_cramped_list(self):
        self.assertEqual(
            word_count('one,two,three'),
            {'one': 1, 'two': 1, 'three': 1}
        )
    def test_expanded_list(self):
        self.assertEqual(
            word_count('one,\ntwo,\nthree'),
            {'one': 1, 'two': 1, 'three': 1}
        )
    def test_ignores_punctuation(self):
        self.assertEqual(
            word_count('car : carpet as java : javascript!!&@$%^&'),
            {'car': 1, 'carpet': 1, 'as': 1, 'java': 1, 'javascript': 1}
        )
    def test_include_numbers(self):
        self.assertEqual(
            word_count('testing 1 2 testing'),
            {'testing': 2, '1': 1, '2': 1}
        )
    def test_mixed_case(self):
        self.assertEqual(
            word_count('go Go GO Stop stop'),
            {'go': 3, 'stop': 2}
        )
    def test_apostrophes(self):
        self.assertEqual(
            word_count("First: don't laugh. Then: don't cry."),
            {'first': 1, "don't": 2, 'laugh': 1, 'then': 1, 'cry': 1}
        )
    def test_quotations(self):
        self.assertEqual(
            word_count("Joe can't tell between 'large' and large."),
            {'joe': 1, "can't": 1, 'tell': 1, 'between': 1, 'large': 2,
             'and': 1}
        )
    # Additional tests for this track
    def test_multiple_spaces(self):
        self.assertEqual(
            word_count('wait for it'),
            {'wait': 1, 'for': 1, 'it': 1}
        )
    def test_newlines(self):
        self.assertEqual(
            word_count('rah rah ah ah ah\nroma roma ma\n'
                       'ga ga oh la la\nwant your bad romance'),
            {'rah': 2, 'ah': 3, 'roma': 2, 'ma': 1, 'ga': 2, 'oh': 1, 'la': 2,
             'want': 1, 'your': 1, 'bad': 1, 'romance': 1}
        )
    def test_tabs(self):
        self.assertEqual(
            word_count('rah rah ah ah ah\troma roma ma\tga ga oh la la\t'
                       'want your bad romance'),
            {'rah': 2, 'ah': 3, 'roma': 2, 'ma': 1, 'ga': 2, 'oh': 1, 'la': 2,
             'want': 1, 'your': 1, 'bad': 1, 'romance': 1}
        )
    def test_non_alphanumeric(self):
        self.assertEqual(
            word_count('hey,my_spacebar_is_broken.'),
            {'hey': 1, 'my': 1, 'spacebar': 1, 'is': 1, 'broken': 1}
        )
if __name__ == '__main__':
unittest.main()
| [
"supakron.o@codium.co"
] | supakron.o@codium.co |
7f85583384768b3415318e38004daf64dd0a996d | e2def11fa25b0852d4535d45384a6f1c9c762a02 | /Projeto/projetoTCC/projetoTCC/settings.py | db0aaebc745c5ccd3b04c71620ce5d508d6d89f2 | [] | no_license | Olinad21/APIDjango | 4de4ac76c2646632bab313ce4a53c2e8082b7777 | 0e1157855380d233e848e1b1ee7e5487c06c997c | refs/heads/master | 2020-04-01T06:36:45.587500 | 2019-02-06T19:58:53 | 2019-02-06T19:58:53 | 152,955,346 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,283 | py | """
Django settings for projetoTCC project.
Generated by 'django-admin startproject' using Django 2.1.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '7$=a9+h48qzx@$v!9j5cuyvqtucvh8ajhrru7=*&ds2_!0#amv'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

# ALLOWED_HOSTS entries are bare host names matched against the request's
# Host header; a trailing slash (or any path component) never matches, so
# the Heroku entries below were effectively dead. Slashes removed.
ALLOWED_HOSTS = ['api-django-tcc.herokuapp.com', 'localhost', 'api-uv-tcc.herokuapp.com']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'core.apps.CoreConfig',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'projetoTCC.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'projetoTCC.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'America/Sao_Paulo'
USE_I18N = True
USE_L10N = True
USE_TZ = False
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATIC_URL = '/static/'
| [
"danilooliveira79@gmail.com"
] | danilooliveira79@gmail.com |
ce84cfc7e6a9774842cef1a393e8ef625284ae06 | 7f189b1d917785da079276674276f68baa30df7f | /kmmall/pipelines.py | 1d30292b0e08e1835ac775ee2d640e6b34c9e8bb | [] | no_license | eninem123/kangmeimallspider | b79ed43781328d67e893652433e59ed094ec941a | b2bcca0efe0b634ca97f331242351e9cfd52c2f7 | refs/heads/master | 2022-12-24T04:31:31.169097 | 2018-09-20T09:58:41 | 2018-09-20T09:58:41 | 149,369,473 | 1 | 0 | null | 2022-12-08T00:45:48 | 2018-09-19T00:38:33 | Python | UTF-8 | Python | false | false | 2,637 | py | # -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
# import json
# from scrapy.exporters import CsvItemExporter
import csv
# class KmmallPipeline(object):
# def process_item(self, item, spider):
# return item
class KmCsvPipeline(object):
    def open_spider(self, spider):
        """Called once when the spider starts: open the CSV output file."""
        # Create the output file object.
        # NOTE(review): opened without newline=''/encoding, so the csv
        # module's quoting/newline handling depends on the platform.
        self.f = open("km2.csv", "w+")
        # Haven't figured out how to use this exporter approach yet:
        # self.csv_exporter = CsvItemExporter(self.f)
        # Start CSV exporting:
        # self.csv_exporter.start_exporting()
        # Create a csv writer object used to write item data into the file.
        self.csv_writer = csv.writer(self.f, delimiter=',')
    def process_item(self, item, spider):
        """Write one scraped item as a CSV row and pass the item on."""
        # Write the item's data into the CSV file via the csv writer.
        # Convert the item to a plain dict.
        item = dict(item)
        # If JSON output were needed, dump it here instead:
        # item = json.dumps(item, ensure_ascii=False)
        # self.csv_exporter.export_item(item.encode("utf8"))
        print('*******************************************************item:', item)
        print('*******************************************************item:', type(item))
        # print('*******************************************************item:', item['goods_url'])
        # The extracted object is a dict; the data looks like
        # {item:{"key1":"val1","key2":"val2"...}}.
        one=item['one']
        two=item['two']
        two_url=item['two_url']
        three=item['three']
        three_url=item['three_url']
        title=item['title']
        title_two=item['title_two']
        price=item['price']
        goods_url=item['goods_url']
        market_price = item['market_price']
        spec=item['spec']
        count_comment=item['count_comment']
        goods_name=item['goods_name']
        goods_no=item['goods_no']
        goods_pz=item['goods_pz']
        goods_logo=item['goods_logo']
        goods_spec=item['goods_spec']
        goods_jx=item['goods_jx']
        goods_cj=item['goods_cj']
        self.csv_writer.writerow([one,two,two_url,three,three_url,title,title_two,price,market_price,spec, goods_url,count_comment,goods_name,goods_no,goods_pz,goods_logo,goods_spec,goods_jx,goods_cj])
        return item
    def close_spider(self, spider):
        """Called once when the spider finishes: flush and close the CSV file."""
        # Finish CSV exporting (exporter approach, unused):
        # self.csv_exporter.finish_exporting()
        # Close the file, flushing buffered data in memory out to disk.
        self.f.close()
| [
"xwp_fullstack@163.com"
] | xwp_fullstack@163.com |
32ff723e88cc0136c0d4eff75b81dc9ae0a3acb7 | 91100e58fb55b1a0f17f4e0bf4d1be2430f910ef | /numpy_exercises.py | 0b5d8df1d8fc2e6524349fe95337334d0a76875b | [] | no_license | mdalton87/numpy-pandas-visualization-exercises | 54c0b964477d7e1dde2c59710160b5c6dfd53923 | 429794f30bbd755d35c3bebc48adae9236acb98d | refs/heads/main | 2023-04-25T22:20:57.307144 | 2021-05-02T12:31:58 | 2021-05-02T12:31:58 | 331,115,779 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,371 | py | import numpy as np
a = np.array([4, 10, 12, 23, -2, -1, 0, 0, 0, -6, 3, -7])
# 1. How many negative numbers are there?
len(a[a < 0])
# 2. How many positive numbers are there?
len(a[a > 0])
# 3. How many even positive numbers are there?
a_pos = a[a > 0]
len(a_pos[a_pos % 2 == 0])
# 4. If you were to add 3 to each data point, how many positive numbers would there be?
a_plus_3 = a + 3
len(a_plus_3[a_plus_3 > 0])
# 5. If you squared each number, what would the new mean and standard deviation be?
a_squared = a ** 2
print("Array a's mean:",a.mean())
print("Array a's std dev:", round(a.std(), 2))
print("Array a_squared's mean", a_squared.mean())
print("Array a_squared's std dev:", round(a_squared.std(), 2))
# 6. A common statistical operation on a dataset is centering.
# This means to adjust the data such that the mean of the data is 0.
# This is done by subtracting the mean from each data point.
# Center the data set.
a_centered = a - a.mean()
a_centered
#7. Calculate the z-score for each data point.
# Z = (x - mu) / stddev
z_score_of_a = a_centered / a.std()
z_score_of_a
# 8. Copy the setup and exercise directions from More Numpy Practice
# into your numpy_exercises.py and add your solutions.
import numpy as np
# Life w/o numpy to life with numpy
## Setup 1
a = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
# Use python's built in functionality/operators to determine the following:
# Exercise 1 - Make a variable called sum_of_a to hold the sum of all the numbers in above list
sum_of_a = sum(a)
# Exercise 2 - Make a variable named min_of_a to hold the minimum of all the numbers in the above list
min_of_a = min(a)
# Exercise 3 - Make a variable named max_of_a to hold the max number of all the numbers in the above list
max_of_a = max(a)
# Exercise 4 - Make a variable named mean_of_a to hold the average of all the numbers in the above list
mean_of_a = (sum(a) / len(a))
# Exercise 5 - Make a variable named product_of_a to hold the product of multiplying all the numbers in the above list together
def multiplying_list(lst):
    """Return the product of all numbers in lst (1 for an empty list)."""
    product = 1
    for value in lst:
        product = product * value
    return product
product_of_a = multiplying_list(a)
# Exercise 6 - Make a variable named squares_of_a. It should hold each number in a squared like [1, 4, 9, 16, 25...]
squares_of_a = [x ** 2 for x in a]
# Exercise 7 - Make a variable named odds_in_a. It should hold only the odd numbers
odds_in_a = [x for x in a if x % 2 != 0]
# Exercise 8 - Make a variable named evens_in_a. It should hold only the evens.
evens_in_a = [x for x in a if x % 2 == 0]
## What about life in two dimensions? A list of lists is matrix, a table, a spreadsheet, a chessboard...
## Setup 2: Consider what it would take to find the sum, min, max, average, sum, product, and list of squares for this list of two lists.
b = [
[3, 4, 5],
[6, 7, 8]
]
b = np.array([
[3, 4, 5],
[6, 7, 8]
])
# Exercise 1 - refactor the following to use numpy. Use sum_of_b as the variable. **Hint, you'll first need to make sure that the "b" variable is a numpy array**
sum_of_b = 0
for row in b:
sum_of_b += sum(row)
b.sum()
# Exercise 2 - refactor the following to use numpy.
min_of_b = min(b[0]) if min(b[0]) <= min(b[1]) else min(b[1])
b.min()
# Exercise 3 - refactor the following maximum calculation to find the answer with numpy.
max_of_b = max(b[0]) if max(b[0]) >= max(b[1]) else max(b[1])
b.max()
# Exercise 4 - refactor the following using numpy to find the mean of b
mean_of_b = (sum(b[0]) + sum(b[1])) / (len(b[0]) + len(b[1]))
b.mean()
# Exercise 5 - refactor the following to use numpy for calculating the product of all numbers multiplied together.
product_of_b = 1
for row in b:
for number in row:
product_of_b *= number
b.prod()
# Exercise 6 - refactor the following to use numpy to find the list of squares
squares_of_b = []
for row in b:
for number in row:
squares_of_b.append(number**2)
b ** 2
# Exercise 7 - refactor using numpy to determine the odds_in_b
odds_in_b = []
for row in b:
for number in row:
if(number % 2 != 0):
odds_in_b.append(number)
b[b % 2 != 0]
# Exercise 8 - refactor the following to use numpy to filter only the even numbers
evens_in_b = []
for row in b:
for number in row:
if(number % 2 == 0):
evens_in_b.append(number)
b[b % 2 == 0]
# Exercise 9 - print out the shape of the array b.
b.shape
# Exercise 10 - transpose the array b.
b.transpose()
# Exercise 11 - reshape the array b to be a single list of 6 numbers. (1 x 6)
b.reshape(1,6)
# Exercise 12 - reshape the array b to be a list of 6 lists, each containing only 1 number (6 x 1)
b.reshape(6,1)
## Setup 3
c = [
[1, 2, 3],
[4, 5, 6],
[7, 8, 9]
]
# HINT, you'll first need to make sure that the "c" variable is a numpy array prior to using numpy array methods.
c = np.array([
[1, 2, 3],
[4, 5, 6],
[7, 8, 9]
])
## Exercise 1 - Find the min, max, sum, and product of c.
c.min(), c.max(), c.sum(), c.prod()
# (1, 9, 45, 362880)
## Exercise 2 - Determine the standard deviation of c.
c.std()
# 2.581988897471611
## Exercise 3 - Determine the variance of c.
c.var()
# 6.666666666666667
## Exercise 4 - Print out the shape of the array c
c.shape
# (3,3)
## Exercise 5 - Transpose c and print out transposed result.
c.transpose()
# array([[1, 4, 7],
# [2, 5, 8],
# [3, 6, 9]])
## Exercise 6 - Get the dot product of the array c with c.
c.dot(c)
# array([[ 7560, 9288, 11016],
# [17118, 21033, 24948],
# [26676, 32778, 38880]])
## Exercise 7 - Write the code necessary to sum up the result of c times c transposed. Answer should be 261
(c * c.transpose()).sum()
# 261
## Exercise 8 - Write the code necessary to determine the product of c times c transposed. Answer should be 131681894400.
(c * c.transpose()).prod()
# 131681894400
## Setup 4
d = [
[90, 30, 45, 0, 120, 180],
[45, -90, -30, 270, 90, 0],
[60, 45, -45, 90, -45, 180]
]
d = np.array(d)
# Exercise 1 - Find the sine of all the numbers in d
np.sin(d)
# array([[ 0.89399666, -0.98803162, 0.85090352, 0. , 0.58061118, -0.80115264],
# [ 0.85090352, -0.89399666, 0.98803162, -0.17604595, 0.89399666, 0. ],
# [-0.30481062, 0.85090352, -0.85090352, 0.89399666, -0.85090352, -0.80115264]])
# in Radians
np.sin(d * (np.pi / 180))
# array([[ 1. , 0.5 , 0.7071, 0. , 0.866 , 0. ],
# [ 0.7071, -1. , -0.5 , -1. , 1. , 0. ],
# [ 0.866 , 0.7071, -0.7071, 1. , -0.7071, 0. ]])
# Exercise 2 - Find the cosine of all the numbers in d
np.cos(d)
# array([[-0.44807362, 0.15425145, 0.52532199, 1. , 0.81418097, -0.59846007],
# [ 0.52532199, -0.44807362, 0.15425145, 0.98438195, -0.44807362, 1. ],
# [-0.95241298, 0.52532199, 0.52532199, -0.44807362, 0.52532199, -0.59846007]])
# in Radians
np.cos(d * (np.pi / 180))
# array([[ 0. , 0.866 , 0.7071, 1. , -0.5 , -1. ],
# [ 0.7071, 0. , 0.866 , -0. , 0. , 1. ],
# [ 0.5 , 0.7071, 0.7071, 0. , 0.7071, -1. ]])
# Exercise 3 - Find the tangent of all the numbers in d
np.tan(d)
# array([[-1.99520041, -6.4053312 , 1.61977519, 0. , 0.71312301, 1.33869021],
# [ 1.61977519, 1.99520041, 6.4053312 , -0.17883906, -1.99520041, 0. ],
# [ 0.32004039, 1.61977519, -1.61977519, -1.99520041, -1.61977519, 1.33869021]])
# Exercise 4 - Find all the negative numbers in d
d[d < 0]
# array([-90, -30, -45, -45])
# Exercise 5 - Find all the positive numbers in d
d[d > 0]
# array([ 90, 30, 45, 120, 180, 45, 270, 90, 60, 45, 90, 180])
# Exercise 6 - Return an array of only the unique numbers in d.
np.unique(d)
# array([-90, -45, -30, 0, 30, 45, 60, 90, 120, 180, 270])
# Exercise 7 - Determine how many unique numbers there are in d.
len(np.unique(d))
# 11
# Exercise 8 - Print out the shape of d.
d.shape
# (3, 6)
# Exercise 9 - Transpose and then print out the shape of d.
d.transpose().shape
# (6, 3)
# Exercise 10 - Reshape d into an array of 9 x 2
d.reshape(9, 2)
# array([[ 90, 30],
# [ 45, 0],
# [120, 180],
# [ 45, -90],
# [-30, 270],
# [ 90, 0],
# [ 60, 45],
# [-45, 90],
# [-45, 180]]) | [
"matthew.w.dalton@gmail.com"
] | matthew.w.dalton@gmail.com |
e82585fce52c800d045ff51b94242a83f0126930 | 653a3d9d66f3d359083cb588fc7c9ece8bb48417 | /test/runtime/frontend_test/onnx_test/defs_test/math_test/max_test.py | 8aaf5db7ac2b8cfd0529aa58330acb3221cbd3dc | [
"Zlib",
"MIT"
] | permissive | leonskim/webdnn | fec510254b15f3dec00f5bed8f498737b372e470 | f97c798c9a659fe953f9dc8c8537b8917e4be7a2 | refs/heads/master | 2020-04-15T18:42:43.632244 | 2019-01-10T10:07:18 | 2019-01-10T10:07:18 | 164,921,764 | 0 | 0 | NOASSERTION | 2019-01-09T19:07:35 | 2019-01-09T19:07:30 | Python | UTF-8 | Python | false | false | 1,306 | py | import numpy as np
from test.runtime.frontend_test.onnx_test.util import make_node, make_tensor_value_info, make_model
from test.util import wrap_template, generate_kernel_test_case
from webdnn.frontend.onnx import ONNXConverter
@wrap_template
def template(n_x, x_shape, description: str = ""):
    """Build an ONNX Max graph with n_x random inputs of x_shape and emit
    a WebDNN kernel test case checking the converted graph's output.
    """
    # Random input tensors, one per ONNX input.
    vxs = [np.random.rand(*x_shape) for _ in range(n_x)]
    # Expected output: fold the inputs pairwise with np.maximum until one
    # tensor remains, i.e. the elementwise max over all inputs.
    vys = list(vxs)
    while len(vys) > 1:
        vx1, vx2 = vys.pop(0), vys.pop(0)
        vy = np.maximum(vx1, vx2)
        vys.append(vy)
    vy = vys[0]
    # Declare ONNX value infos for inputs and output.
    xs = [make_tensor_value_info(f"x{i}", vx.shape) for i, vx in enumerate(vxs)]
    y = make_tensor_value_info("y", vy.shape)
    # Single variadic Max node consuming every input.
    operator = make_node("Max", [x.name for x in xs], ["y"])
    model = make_model([operator], xs, [y])
    graph = ONNXConverter().convert(model)
    # Sanity check: converted output shape must match the numpy reference.
    assert tuple(vy.shape) == tuple(graph.outputs[0].shape), f"vy: {vy.shape}, graph.outputs[0]: {graph.outputs[0].shape}"
    generate_kernel_test_case(
        description=f"[ONNX] Max {description}",
        graph=graph,
        inputs={graph.inputs[i]: vx for i, vx in enumerate(vxs)},
        expected={graph.outputs[0]: vy},
    )
# Exercise the Max converter with 2, 3 and 4 inputs of the same shape.
def test_2():
    template(n_x=2, x_shape=[2, 3, 4, 5])
def test_3():
    template(n_x=3, x_shape=[2, 3, 4, 5])
def test_4():
    template(n_x=4, x_shape=[2, 3, 4, 5])
| [
"y.kikura@gmail.com"
] | y.kikura@gmail.com |
6c3a45b7e30af37035ddeb17f4a158de42f658c1 | af3b8f03ac53cafada0394a46306bdca5d4c8fce | /timaknormblog/asgi.py | 8ba0d46a0fdca52732f7375adc7b7dee409c9a2b | [] | no_license | timaknormtak/timaknormblog-project | 95869bc9d6186621e806a512c874beb5bec8c2f9 | 34c346cafa64196a2e80842d8569250b5b93cf97 | refs/heads/main | 2023-01-19T10:06:17.370618 | 2020-11-23T06:37:56 | 2020-11-23T06:37:56 | 312,988,272 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 403 | py | """
ASGI config for timaknormblog project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
# Must be set before the application is created so Django can find settings.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'timaknormblog.settings')
application = get_asgi_application()
| [
"timakov.1308@gmail.com"
] | timakov.1308@gmail.com |
bcf3423ae08422d25b002fef9f89225e22e04cd2 | f1cf923e6e819edf2444d3ae74e1f400415075ab | /dftbpp.py | 5ee69ec3b24091c6bc1b8b5edc70800d173cfd54 | [] | no_license | fbonafe/realtimedftb | f62b66883100f2161bb3aace70ef8edfb23d2c21 | 7e186bd03ac058751ec85799a94c1f04f5c62841 | refs/heads/master | 2020-03-25T00:33:31.114419 | 2018-08-01T18:31:11 | 2018-08-01T18:31:11 | 143,192,312 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 10,583 | py | import numpy as np
import os
class mycoords:
    """Trajectory container: x/y/z coordinates as (natoms x nframes) arrays.

    Attributes:
        name: per-atom labels (e.g. element symbols).
        x, y, z: numpy arrays of shape (natoms, nframes).
        natoms, nframes: dimensions inferred from x.
        time: optional per-frame time array (set only when given).
        vels: optional velocity data (set only when given).
    """
    def __init__(self, x, y, z, name, time=None, vels=None):
        self.name = name
        self.x = x
        self.y = y
        self.z = z
        self.nframes = x.shape[1]
        self.natoms = x.shape[0]
        if time is not None:
            self.time = time
        if vels is not None:
            self.vels = vels
    def centercoords(self):
        """Shift each frame so the (unweighted) centroid sits at the origin."""
        self.x = self.x - self.x.sum(axis=0)/self.x.shape[0]
        self.y = self.y - self.y.sum(axis=0)/self.y.shape[0]
        self.z = self.z - self.z.sum(axis=0)/self.z.shape[0]
    def dist(self, atom1, atom2, step):
        """Euclidean distance between atom1 and atom2 at frame `step`.

        BUGFIX: the original signature was missing `self`, so instance
        calls raised a TypeError and the body's `self` was undefined.
        """
        return np.sqrt((self.x[atom1,step] - self.x[atom2,step])**2 + \
            (self.y[atom1,step] - self.y[atom2,step])**2 + (self.z[atom1,step] - self.z[atom2,step])**2)
    def radius(self, atom, time):
        """Distance of `atom` from the origin at frame `time`."""
        return np.sqrt((self.x[atom, time])**2 + (self.y[atom, time])**2 + (self.z[atom, time])**2)
    def esfradius(self):
        """Radius vs time of the outer sphere (largest distance to atom 0)."""
        r = np.sqrt((self.x - self.x[0,:])**2 + (self.y - self.y[0,:])**2 + (self.z - self.z[0,:])**2)
        return abs(r).max(axis=0)
    def getsurfatoms(self, r):
        """Return indices of surface atoms (initial radius above a cutoff).

        NOTE(review): the cutoff table only covers clusters of 55/147/309
        atoms; any other natoms raises KeyError -- confirm intent.
        """
        rads = {55:4.47, 147:6.9, 309:9}
        surfats = []
        for i in range(self.natoms):
            if r[i,0] > rads[self.natoms]:
                surfats.append(i)
        return surfats
    def rad_esferas(self, surfatoms=None):
        """Radii vs time of the smallest enclosing sphere (radext) and of the
        largest sphere inscribed inside the surface atoms (radint).

        `surfatoms` is accepted for interface compatibility but recomputed
        internally, matching the original behavior.
        """
        r = np.sqrt((self.x - self.x[0,:])**2 + (self.y - self.y[0,:])**2 + (self.z - self.z[0,:])**2)
        surfatoms = self.getsurfatoms(r)
        rsurfall = np.array([r[i,:] for i in surfatoms])
        radext = abs(r).max(axis=0)
        radint = rsurfall.min(axis=0)
        return radext, radint
    def rad_promedio(self):
        """Average radius vs time over all surface atoms (<= rad_esferas outer)."""
        r = np.sqrt((self.x - self.x[0,:])**2 + (self.y - self.y[0,:])**2 + (self.z - self.z[0,:])**2)
        surfatoms = self.getsurfatoms(r)
        rsurfall = np.array([r[i,:] for i in surfatoms])
        return rsurfall.sum(axis=0)/len(surfatoms)
    def printoneframe(self, frame, filename):
        """Append one snapshot in xyz format. Hardcoded for silver (Ag)."""
        with open(filename, 'a') as out:
            out.write(str(self.natoms)+'\n')
            out.write('\n')
            for at in range(self.natoms):
                out.write('Ag {:15.8f} {:15.8f} {:15.8f}\n'.format(\
                    self.x[at,frame], self.y[at,frame], self.z[at,frame]))
    def printcoords(self, printevery, savetomanyfiles=False):
        """Write every `printevery`-th frame, either appended to one file
        ('tdcoords.pp.xyz') or into one new 'frameN' directory per frame."""
        if not savetomanyfiles:
            # start from a clean single output file
            if os.path.isfile('tdcoords.pp.xyz'):
                os.remove('tdcoords.pp.xyz')
        for idx, frame in enumerate(range(0, self.nframes, printevery)):
            if savetomanyfiles:
                os.mkdir('frame'+str(idx))
                self.printoneframe(frame, 'frame'+str(idx)+'/coords.xyz')
            else:
                self.printoneframe(frame, 'tdcoords.pp.xyz')
class myenergies:
    """Simple record grouping the energy components of a simulation run."""
    def __init__(self, tot, nonscc, scc, ext, rep, kin):
        self.tot, self.nonscc, self.scc = tot, nonscc, scc
        self.ext, self.rep, self.kin = ext, rep, kin
def getcoords_MD(file):
    """Read an xyz-style BOMD trajectory and return a mycoords object.

    The file layout is the multi-frame xyz format: an atom-count line, a
    comment line, then one `label x y z` line per atom, repeated per frame.

    BUGFIX: mycoords() requires a `name` argument, which the original call
    omitted (TypeError). Atom labels are now collected from the first
    frame. The file handle is also closed instead of leaked.
    """
    with open(file, 'r') as fd:
        md = fd.readlines()
    natoms = int(md[0].strip().split()[0])
    nframes = int(len(md)/(natoms+2))
    x = [[] for _ in range(natoms)]
    y = [[] for _ in range(natoms)]
    z = [[] for _ in range(natoms)]
    names = []
    for i in range(nframes):
        line = (natoms+2)*i+2
        for atom in range(natoms):
            fields = md[line+atom].strip().split()
            if i == 0:
                names.append(fields[0])
            x[atom].append(float(fields[1]))
            y[atom].append(float(fields[2]))
            z[atom].append(float(fields[3]))
    return mycoords(np.array(x), np.array(y), np.array(z), names)
def getcoords(file, bomd=False, veloc=False):
    """Function to read the coordinates and generate a mycoords object containing them.

    Frames are located by lines containing the 'MD' marker; when bomd is
    False the simulation time (5th field of the marker line) is collected,
    and when veloc is True per-atom velocities (fields 5-7) are collected.
    """
    # NOTE(review): the file handle is never closed explicitly.
    md = open(file,'r')
    md = md.readlines()
    natoms = int(md[0].strip().split()[0])
    x = [0]*natoms
    y = [0]*natoms
    z = [0]*natoms
    names = []
    if not bomd:
        time = []
    # one growing list of values per atom
    for i in range(natoms):
        x[i] = []
        y[i] = []
        z[i] = []
    if veloc:
        vx = [0]*natoms
        vy = [0]*natoms
        vz = [0]*natoms
        for i in range(natoms):
            vx[i] = []
            vy[i] = []
            vz[i] = []
    for i in range(len(md)):
        if 'MD' in md[i]:
            if not bomd:
                time.append(float(md[i].strip().split()[4]))
            # the first marker (i == 1) also harvests the atom names
            if i==1:
                names, thiscoords = readCoords(md, natoms, names, MDline=i)
            else:
                thiscoords = readCoords(md, natoms, names, MDline=i)
            for atom in range(natoms):
                x[atom].append(thiscoords[0][atom])
                y[atom].append(thiscoords[1][atom])
                z[atom].append(thiscoords[2][atom])
            if veloc:
                for atom in range(1,natoms+1):
                    vx[atom-1].append(float(md[i+atom].strip().split()[4]))
                    vy[atom-1].append(float(md[i+atom].strip().split()[5]))
                    vz[atom-1].append(float(md[i+atom].strip().split()[6]))
    if not bomd:
        if veloc:
            return mycoords(np.array(x), np.array(y), np.array(z), names, np.array(time), \
                np.array([vx, vy, vz]))
        else:
            return mycoords(np.array(x), np.array(y), np.array(z), names, np.array(time))
    else:
        return mycoords(np.array(x), np.array(y), np.array(z), names)
def readCoords(thisfile, natoms, names, MDline=1):
    """Parse one frame of coordinates from a list of file lines.

    When MDline == 1 (first frame) the atom labels are appended to
    `names` in place and (names, coords) is returned; otherwise only
    the [x, y, z] coordinate lists are returned.
    """
    first_frame = (MDline == 1)
    coords = [[], [], []]
    for offset in range(1, natoms + 1):
        fields = thisfile[MDline + offset].strip().split()
        if first_frame:
            names.append(fields[0])
        for axis in range(3):
            coords[axis].append(float(fields[axis + 1]))
    return (names, coords) if first_frame else coords
def getenergies(filename):
    """Read the energy-vs-time table and wrap the components in myenergies.

    Column layout: 0=time, 1=total, 2=non-SCC, 3=SCC, 5=external,
    6=repulsive, 7=kinetic (column 4 is intentionally skipped).
    """
    data = np.genfromtxt(filename)
    time = data[:, 0]
    components = myenergies(data[:, 1], data[:, 2], data[:, 3],
                            data[:, 5], data[:, 6], data[:, 7])
    return time, components
def cutArrays(array, maximo):
    """Trim the trailing (all-zero) columns off a bonds/histogram array.

    Returns array[:, :maximo] when every entry at or beyond column
    `maximo` is zero (or when there is nothing beyond it). Otherwise
    prints a warning and returns None, signalling that the number of
    bonds in range changed between time steps.

    BUGFIX: the original condition compared ``.all() == 0.`` (inverted
    logic) and applied ``not`` to a multi-element ndarray, which raises
    ValueError; both replaced with an explicit ``tail.any()`` test.
    """
    tail = array[:, maximo:]
    if tail.size == 0 or not tail.any():
        return array[:, :maximo]
    else:
        print('Number of bonds in range has changed! Check range')
def importDat(filename, saveevery, emin, emax):
    """Function to import energy per bond files when written in ASCII format.

    Keeps every `saveevery`-th line; per line, column 0 is time and columns
    2+ are bond energies, filtered to emin < |E| < emax. Returns (time,
    bonds) with the zero-padded tail columns trimmed by cutArrays().
    """
    with open(filename) as f:
        # columns beyond the first two are bond energies (natoms**2 of them)
        natomssqr = len(f.readline().split()) - 2
        nlines = sum(1 for line in f)
    time = np.zeros((int(nlines/saveevery)+1))
    bonds = np.zeros((int(nlines/saveevery)+1, natomssqr))
    with open(filename, 'r') as f:
        for i,line in enumerate(f):
            if i % saveevery == 0:
                index = int(i/saveevery)
                a = line.split()
                # keep only energies with magnitude strictly inside (emin, emax)
                aux = [float(x) for x in a[2:] if (abs(float(x)) < emax and abs(float(x)) > emin)]
                bonds[index,:len(aux)] = aux[:]
                time[index] = float(a[0])
    # NOTE(review): `aux` is whatever the last sampled line produced; an
    # empty input file would leave it undefined (NameError) -- confirm inputs.
    bonds = cutArrays(bonds, len(aux))
    return time, bonds
def importBin(filename, saveevery, emin, emax, natoms, nlines):
    """Function to import energy per bond files when written in binary format
    (unformatted), selecting only energies with emin < |E| < emax."""
    f = open(filename,'rb')
    # each record: [time, <unused>, natoms**2 bond energies] as float64
    field = np.fromfile(f,dtype='float64',count=(natoms**2+2)*(nlines+1))
    field = np.reshape(field,((nlines+1),(natoms**2+2)))
    f.close()
    # nlines = field.shape[0]
    time = np.zeros((nlines))
    bonds = np.zeros((nlines, natoms**2))
    for i in range(nlines):
        index = i #int(i/saveevery)
        # keep only energies with magnitude strictly inside (emin, emax)
        aux = [float(x) for x in field[i,2:] if (abs(float(x)) < emax and abs(float(x)) > emin)]
        bonds[index,:len(aux)] = aux[:]
        time[index] = field[i,0]
    # trim the zero-padded tail columns left by the filtering above
    bonds = cutArrays(bonds, len(aux))
    return time, bonds
def importBinAll(filename, saveevery, natoms, nlines):
    """Import an unformatted (binary float64) energy-per-bond file, unfiltered.

    Each record holds [time, <unused>, natoms**2 bond energies].

    Args:
        filename: path to the binary file.
        saveevery: kept for interface compatibility; every record is read.
        natoms: number of atoms (each record carries natoms**2 energies).
        nlines: number of records to return.

    Returns:
        (time, bonds): time has shape (nlines,), bonds (nlines, natoms, natoms).

    FIX: assigns each full (natoms, natoms) frame directly instead of the
    original ``bonds[i, :natoms**2] = ...``, which only worked because
    numpy silently clamps out-of-range slices; also closes the file via
    a context manager.
    """
    with open(filename, 'rb') as f:
        field = np.fromfile(f, dtype='float64', count=(natoms**2+2)*(nlines+1))
    field = np.reshape(field, ((nlines+1), (natoms**2+2)))
    time = np.zeros(nlines)
    bonds = np.zeros((nlines, natoms, natoms))
    for i in range(nlines):
        bonds[i] = field[i, 2:].reshape(natoms, natoms)
        time[i] = field[i, 0]
    return time, bonds
def createHistogram(bonds, binsize, binmin=None, binmax=None):
    """Function to create histograms from the bonds array.

    Bin edges span the value range of the first time step (overridable via
    binmin/binmax) in steps of binsize; one histogram row per time step.
    """
    hists = np.zeros_like(bonds)
    bmin = min(bonds[0,:]) # at time 0
    bmax = max(bonds[0,:]) # at time 0
    if binmin is not None:
        bmin = binmin
    if binmax is not None:
        bmax = binmax
    bins = np.arange(bmin, bmax+binsize, binsize)
    for i in range(bonds.shape[0]):
        hist, binsidx = np.histogram(bonds[i,:], bins)
        hists[i,:len(hist)] = hist
    # trim the unused trailing columns (the histogram has fewer bins than
    # the bonds array has columns)
    hists = cutArrays(hists, len(hist))
    return hists, binsidx, bins
| [
"fbonafe@unc.edu.ar"
] | fbonafe@unc.edu.ar |
f6e1d812b7b8da141c5f17cca2a99a1ec105bdae | 7b6b96be5e7dbc3cdda900f11614c161c2821200 | /lib_wp_remover_2014_10_24/get_items.py | ecdb77af4fb571e2ecb443396f686174a2d6fc33 | [] | no_license | polymorphm/wp-remover | 8c59715ebe63969ca467de93c2d8d549eb26c310 | e1efb8a458991c9774dc3c3c4b2c7a7d6450245c | refs/heads/master | 2021-01-01T16:13:46.928136 | 2014-10-24T16:03:23 | 2014-10-24T16:03:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,146 | py | # -*- mode: python; coding: utf-8 -*-
#
# Copyright (c) 2012, 2013, 2014 Andrej Antonov <polymorphm@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
assert str is not bytes
import os, os.path
import csv
import itertools
import random
import re
class NotFoundError(IOError):
    """Raised by items_open() when no file or directory matches a path."""
    pass
def file_items_open(path):
    """Yield one stripped, non-empty item per line of a UTF-8 text file."""
    with open(path, 'r', encoding='utf-8', newline='\n',
            errors='replace') as fd:
        for raw_line in fd:
            item = raw_line.strip()
            if item:
                yield item
def dir_items_open(path):
    """Yield one stripped item per non-empty ``*.txt`` file in `path`."""
    for entry in os.listdir(path):
        full_path = os.path.join(path, entry)
        if not full_path.endswith('.txt'):
            continue
        with open(full_path, 'r', encoding='utf-8', newline='\n',
                errors='replace') as fd:
            text = fd.read()
        item = text.strip()
        if item:
            yield item
def csv_items_open(path):
    """Yield rows (lists of strings) from a UTF-8 CSV file."""
    # NOTE: the csv docs recommend newline='' for reader files; keeping the
    # original newline='\n' behavior here.
    with open(path, 'r', encoding='utf-8', newline='\n',
            errors='replace') as fd:
        for row in csv.reader(fd):
            yield row
def items_open(path, is_csv=None):
    """Resolve `path` to an item generator.

    Resolution order: explicit CSV mode (with an automatic ``.csv``
    suffix fallback), existing directory, existing file, then the
    ``.d`` directory and ``.txt`` file fallbacks.

    Raises:
        NotFoundError: when nothing matches.
    """
    use_csv = bool(is_csv)
    if use_csv:
        csv_path = path if os.path.isfile(path) else '{}.csv'.format(path)
        return csv_items_open(csv_path)
    if os.path.isdir(path):
        return dir_items_open(path)
    if os.path.isfile(path):
        return file_items_open(path)
    dir_fallback = '{}.d'.format(path)
    if os.path.isdir(dir_fallback):
        return dir_items_open(dir_fallback)
    file_fallback = '{}.txt'.format(path)
    if os.path.isfile(file_fallback):
        return file_items_open(file_fallback)
    raise NotFoundError('No such file or directory: ' + repr(path))
def get_finite_items(path, is_csv=None):
    # Thin alias: a single pass over the items resolved from `path`.
    return items_open(path, is_csv=is_csv)
def get_infinite_items(path, is_csv=None):
    # Endless round-robin over the items in `path` (empty source -> empty generator).
    for item in itertools.cycle(items_open(path, is_csv=is_csv)):
        # TODO: for Python-3.3+ -- need fix to PEP-0380
        yield item
def get_random_finite_items(path, is_csv=None):
    """Yield every item from `path` exactly once, in random order."""
    shuffled = list(items_open(path, is_csv=is_csv))
    random.shuffle(shuffled)
    for item in shuffled:
        yield item
def get_random_infinite_items(path, is_csv=None):
    # Endless generator: reshuffle the full item list before each pass.
    items = []
    for item in items_open(path, is_csv=is_csv):
        items.append(item)
    # an empty source yields nothing (avoids an infinite busy loop below)
    if not items:
        return
    while True:
        random.shuffle(items)
        for item in items:
            # TODO: for Python-3.3+ -- need fix to PEP-0380
            yield item
def clean_title(title):
    """Strip a wrapping <h1>...</h1> from `title` when present and non-empty.

    Returns the inner text of the h1 element, or the original string when
    there is no full-string h1 wrapper or the wrapper is empty.
    """
    h1_pattern = r'^\<h1\>(?P<h1>[^<>]*)\<\/h1\>$'
    match = re.match(h1_pattern, title, re.S | re.U)
    if match is None:
        return title
    inner = match.group('h1')
    return inner if inner else title
def split_title_and_content(items_iter):
    """Yield (title, body) pairs from items whose first line is the title.

    Items with no second line, an empty title, or an empty body are skipped.
    """
    for item in items_iter:
        parts = item.lstrip().split('\n', 1)
        if len(parts) != 2:
            continue
        raw_title, raw_body = parts
        title = clean_title(raw_title.rstrip())
        body = raw_body.lstrip()
        if title and body:
            yield title, body
def get_title_and_content(get_func, title_path, content_path):
    """Yield (title, content) pairs from separate title/content sources.

    When `title_path` is the sentinel '__use_first_line__', titles are
    taken from the first line of each content item instead of a separate
    source.

    BUGFIXES: added the missing `return` after the sentinel branch (the
    original fell through and tried to open the literal sentinel path),
    and replaced the `while True: next(...)` loop with zip() so iterator
    exhaustion ends the generator cleanly instead of leaking
    StopIteration (a RuntimeError under PEP 479).
    """
    content_iter = get_func(content_path)
    if title_path == '__use_first_line__':
        for title, content in split_title_and_content(content_iter):
            yield title, content
        return
    title_iter = get_func(title_path)
    for title, content in zip(title_iter, content_iter):
        yield title, content
| [
"polymorphm@gmail.com"
] | polymorphm@gmail.com |
ffc250bf877ff503e92d4c78eb2aec0583876523 | 741df1fd9f835f86e864fe82d964c90e6e6a7d9a | /accounts/migrations/0001_initial.py | 09a59efd72c6d66b999ae43d4c685981fd6a534e | [] | no_license | RishavRaj19/OnlineExam | 8dd6c09cd643bd915eaec8498567286896fc064e | 358f6f0a34928fa1abe075d65083d8bbbe4ddef8 | refs/heads/master | 2022-11-13T00:32:51.617367 | 2020-07-03T16:37:10 | 2020-07-03T16:37:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,946 | py | # Generated by Django 2.2.13 on 2020-07-02 21:32
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the accounts app (auto-generated by Django's
    makemigrations): creates the Student, Teacher, Test and
    QuestionAnswer tables."""
    initial = True
    dependencies = [
    ]
    operations = [
        # students: identity plus class/section/roll bookkeeping, all text
        migrations.CreateModel(
            name='Student',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('first_name', models.CharField(max_length=100)),
                ('last_name', models.CharField(max_length=100)),
                ('username', models.CharField(max_length=100)),
                ('email', models.CharField(max_length=100)),
                ('classs', models.CharField(max_length=100)),
                ('section', models.CharField(max_length=100)),
                ('roll_no', models.CharField(max_length=100)),
                ('mobile_no', models.CharField(max_length=100)),
                ('school_code', models.CharField(max_length=100)),
            ],
        ),
        migrations.CreateModel(
            name='Teacher',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('first_name', models.CharField(max_length=100)),
                ('last_name', models.CharField(max_length=100)),
                ('username', models.CharField(max_length=100)),
                ('email', models.CharField(max_length=100)),
                ('mobile_no', models.CharField(max_length=100)),
                ('school_code', models.CharField(max_length=100)),
            ],
        ),
        # a Test is a scheduled exam window for a class at a school
        migrations.CreateModel(
            name='Test',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('subject', models.CharField(max_length=100)),
                ('classs', models.CharField(max_length=100)),
                ('title', models.CharField(max_length=100)),
                ('desc', models.TextField()),
                ('author', models.CharField(max_length=100)),
                ('starts_at', models.DateTimeField()),
                ('ends_at', models.DateTimeField()),
                ('school_code', models.CharField(max_length=100)),
            ],
        ),
        # each QuestionAnswer row is one MCQ belonging to a Test (cascade delete)
        migrations.CreateModel(
            name='QuestionAnswer',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('label', models.TextField()),
                ('op1', models.TextField()),
                ('op2', models.TextField()),
                ('op3', models.TextField()),
                ('op4', models.TextField()),
                ('ans', models.TextField()),
                ('test', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='accounts.Test')),
            ],
        ),
    ]
| [
"rishavraj.sbg@gmail.com"
] | rishavraj.sbg@gmail.com |
870f951e60904860957c4cbf1986c8aaa11fa262 | 7dab00e63b7193010344a0f05e0cc641d7091f5f | /students/david_russo/lesson07/RDBMS/RDBMS/personjobdepartment_model.py | 8b75c2d98afa74545922c5fed997878bc21a5157 | [] | no_license | aurel1212/Sp2018-Online | 9307e872c14c5ddd795bdc738b325de087895d55 | 263685ca90110609bfd05d621516727f8cd0028f | refs/heads/master | 2020-04-05T18:35:49.761140 | 2018-06-19T18:24:27 | 2018-06-19T18:24:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,855 | py | """
Simple database example with Peewee ORM, sqlite and Python
Here we define the schema
Use logging for messages so they can be turned off
"""
import logging
from peewee import *
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
logger.info('One off program to build the classes from the model in the database')
logger.info('Here we define our data (the schema)')
logger.info('First name and connect to a database (sqlite here)')
logger.info('The next 3 lines of code are the only database specific code')
database = SqliteDatabase('personjob.db')
database.connect()
database.execute_sql('PRAGMA foreign_keys = ON;') # needed for sqlite only
logger.info('This means we can easily switch to a different database')
logger.info('Enable the Peewee magic! This base class does it all')
logger.info('By inheritance only we keep our model (almost) technology neutral')
class BaseModel(Model):
    # Peewee base class: binds every model subclass to the sqlite database
    # configured at module import time.
    class Meta:
        database = database
class Person(BaseModel):
    """
    This class defines Person, which maintains details of someone
    for whom we want to research career to date.
    """
    logger.info('Note how we defined the class')
    logger.info('Specify the fields in our model, their lengths and if mandatory')
    logger.info('Must be a unique identifier for each person')
    # the person's name doubles as the natural primary key
    person_name = CharField(primary_key = True, max_length = 30)
    lives_in_town = CharField(max_length = 40)
    # nickname is optional (nullable)
    nickname = CharField(max_length = 20, null = True)
class Job(BaseModel):
    """
    This class defines Job, which maintains details of past Jobs
    held by a Person.
    """
    job_name = CharField(primary_key = True, max_length = 30)
    # NOTE(review): peewee's DateField `formats` expects strptime-style
    # patterns (e.g. '%Y-%m-%d'); 'YYYY-MM-DD' looks like a placeholder --
    # confirm dates parse as intended.
    start_date = DateField(formats = 'YYYY-MM-DD')
    end_date = DateField(formats = 'YYYY-MM-DD')
    # salary up to 99999.99
    salary = DecimalField(max_digits = 7, decimal_places = 2)
    # back-reference: person.was_filled_by lists this person's jobs
    person_employed = ForeignKeyField(Person, related_name='was_filled_by', null = False)
class PersonNumKey(BaseModel):
    """
    This class defines Person, which maintains details of someone
    for whom we want to research career to date.
    """
    logger.info('An alternate Person class')
    logger.info("Note: no primary key so we're give one 'for free'")
    # same fields as Person but with an implicit auto-increment primary key
    person_name = CharField(max_length = 30)
    lives_in_town = CharField(max_length = 40)
    nickname = CharField(max_length = 20, null = True)
class DepartmentID(Field):
    """
    This class defines a custom Department ID field. The first character
    must be a letter, and the DepartmentID must be 4 characters long.
    """
    logger.info('A custom field for the department class.')
    logger.info('The field forces DepartmentID to be 4 characters long.')
    logger.info('The field forces DepartmentID to start with an alpha')
    def db_value(self, value):
        """
        Ensure that value has 4 characters where the first character is a letter.
        """
        # reject anything that is not exactly 4 chars starting with a letter
        if len(value) != 4 or not value[0].isalpha():
            raise TypeError(
                "DepartmentID must be 4 characters long and start with an alpha. "
            )
        return value
class Department(BaseModel):
    """
    This class defines Department, which maintains details of the
    departments in which a person has held a job.
    """
    logger.info('Now we define the Department class')
    logger.info('First we enter the custom DepartmentID')
    # 4-character id validated by the custom DepartmentID field above
    department_number = DepartmentID()
    logger.info('Now we populate departmet name, manager, job duration, and title')
    department_name = CharField(max_length = 30)
    department_manager_name = CharField(max_length = 30)
    job_duration = IntegerField()
    # back-reference: job.title_of_job lists the departments for that job
    job_title = ForeignKeyField(Job, related_name = 'title_of_job', null = False)
# Build the tables for the models defined above, then close the connection.
database.create_tables([
    Job,
    Person,
    Department
])
database.close()
| [
"david.luiz.russo@gmail.com"
] | david.luiz.russo@gmail.com |
e41d0c75f331e9e116ec8f60283bcfd4f5d53701 | def49b9d97c44b54d5efd4e88c759b26949221b3 | /schemas.py | 44c72ad8072d2d3bca43c73e326f15bf21713190 | [] | no_license | Keovkevin/fast_api | 28576244213231f8e67bc0304231cf61fba2aebd | 4b71a197844c9bcc7c3f1fa85c5abd820ff544c7 | refs/heads/main | 2023-06-01T10:55:46.166407 | 2021-07-03T16:11:41 | 2021-07-03T16:11:41 | 382,623,253 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 478 | py | import pydantic as _pydantic
import datetime as _dt
class Store(_pydantic.BaseModel):
    """Read schema for a store record.

    BUGFIX: pydantic only honours an inner class named ``Config``
    (capitalised); the original lowercase ``config`` class was silently
    ignored, so ``orm_mode`` was never actually enabled.
    """
    id: int
    name: str
    code: str
    active: bool
    address: str
    gst_no: int
    email: str
    mobile_no: int
    service_tax: int
    commission_percentage: int
    is_online_payment: bool

    class Config:
        # allow construction from ORM objects (Store.from_orm(...))
        orm_mode = True
class StoreUpdate(_pydantic.BaseModel):
    """Schema for the mutable fields of a store-update request.

    BUGFIX: renamed the inner ``config`` class to ``Config`` so that
    pydantic actually applies ``orm_mode``.
    """
    name: str
    code: str
    address: str
    mobile_no: int
    email: str

    class Config:
        # allow construction from ORM objects (StoreUpdate.from_orm(...))
        orm_mode = True
| [
"shubhamshekhar089@gmail.com"
] | shubhamshekhar089@gmail.com |
229180ad1e2533852864dffc7899175eec257119 | 44175eeef9109b9879ca1318b7e73af8f50dbbc5 | /Lab2_11.py | 2ea93603a2bd578efee6485b1983ebc77fd1a73e | [] | no_license | Alhzz/Python3 | fb0dd79d8839bdfef5569958bfdbe289d30cf0aa | c20db5e6f14d4b2875846f56be55b1174fdb0eab | refs/heads/master | 2020-03-26T05:23:48.399950 | 2018-11-16T17:02:11 | 2018-11-16T17:02:11 | 144,554,764 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 637 | py | ''' FoodGrade I '''
def main():
    ''' Main Function: run four rounds of checks and print the total. '''
    total_good = check() + check() + check() + check()
    print(total_good)
def check():
    ''' Input and Check: read six integer scores from standard input and
    return how many fall in the acceptable range [50, 70]. '''
    good = 0
    # CLEANUP: the original repeated the read-and-test stanza six times;
    # this loop performs exactly the same reads and counting.
    for _ in range(6):
        num = int(input())
        if 50 <= num <= 70:
            good += 1
    return good
main()
| [
"Aloha_sonza@hotmail.com"
] | Aloha_sonza@hotmail.com |
18e350c9f21878bc1409a1ec2b3304e103c6c660 | 528c811306faa4a34bf51fca7955b7a24ac2e30c | /Python/Triangle.py | da2d8206529278895eea530d8c2d8f3d4bc40ef4 | [] | no_license | ganjingcatherine/LeetCode-1 | 1addbd7e4d9254a146601f9d5e28b8becb8235a6 | 488782d3f1e759da2d32b4e82dbf55b96c431244 | refs/heads/master | 2021-05-11T03:15:16.810035 | 2016-02-06T06:19:18 | 2016-02-06T06:19:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,529 | py | """
Given a triangle, find the minimum path sum from top to bottom. Each step you may move to adjacent numbers on the row below.
For example, given the following triangle
[
[2],
[3,4],
[6,5,7],
[4,1,8,3]
]
The minimum path sum from top to bottom is 11 (i.e., 2 + 3 + 5 + 1 = 11).
Note:
Bonus point if you are able to do this using only O(n) extra space, where n is the total number of rows in the triangle.
"""
class Solution:
    # @param triangle, a list of lists of integers
    # @return an integer
    def minimumTotal(self, triangle):
        """Top-down DP: d[i][j] is the minimum path sum ending at (i, j).

        FIXES: returns 0 for an empty triangle instead of raising
        IndexError, and takes min() of the final row instead of fully
        sorting it (O(n) vs O(n log n)).
        """
        if not triangle:
            return 0
        d = [[0] * len(row) for row in triangle]
        d[0][0] = triangle[0][0]
        for i in range(1, len(triangle)):
            width = len(triangle[i])
            for j in range(width):
                if j == 0:
                    # left edge: only one parent
                    d[i][j] = triangle[i][0] + d[i-1][0]
                elif j == width - 1:
                    # right edge: only one parent
                    d[i][j] = triangle[i][j] + d[i-1][j-1]
                else:
                    d[i][j] = min(d[i-1][j-1], d[i-1][j]) + triangle[i][j]
        return min(d[-1])
class Solution:
    # @param triangle, a list of lists of integers
    # @return an integer
    def minimumTotal(self, triangle):
        """Bottom-up DP using O(n) extra space.

        FIXES: copies the last row instead of aliasing it, so the
        caller's `triangle` is no longer mutated; returns 0 for an
        empty triangle instead of raising IndexError.
        """
        if not triangle:
            return 0
        N = len(triangle)
        d = list(triangle[N-1])
        for i in reversed(range(N-1)):
            for j in range(i+1):
                d[j] = min(d[j], d[j+1]) + triangle[i][j]
        return d[0]
| [
"anthonyjin0619@gmail.com"
] | anthonyjin0619@gmail.com |
717bca05b8a8d2018645de701f06918e166c9a41 | 7d8f781b4d20cb61f9acbad0be3af677183f2a45 | /main.py | 4a68b7219b6e8bcc17e332bd5365bebf4bb300d2 | [] | no_license | viicrow/yes_no | a0985d7243180299b5d10bc58f4375ef607046d2 | 783d0e7ce596dbddaadaeb14536b87db6d7ad709 | refs/heads/master | 2023-04-27T08:59:09.380913 | 2021-05-20T22:23:32 | 2021-05-20T22:23:32 | 369,353,575 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,136 | py |
# functions go here...
def yes_no(question):
    """Ask `question` repeatedly until the user answers yes/y or no/n.

    Returns:
        "yes" or "no" (normalized, lower case).

    BUGFIX: the original tested the undefined global
    `display_instructions` instead of the freshly read `response`,
    so any call raised a NameError.
    """
    valid = False
    while not valid:
        response = input(question).lower()
        if response == "yes" or response == "y":
            return "yes"
        elif response == "no" or response == "n":
            return "no"
        else:
            print("Please answer yes / no")
def instructions():
    """Print placeholder game instructions and return an empty string."""
    header = "**** how to Play ****"
    body = "The rules of the game go here"
    for line in (header, "", body, ""):
        print(line)
    return ""
# main routine goes here...
# BUGFIX: the original printed the undefined name `display_instructions`
# and had a stray ':' after a print(...) call, which was a SyntaxError.
played_before = yes_no("have you played the "
                       "game before")
print("you chose {}".format(played_before))
print()
having_fun = yes_no("are you having fun? ")
print("you said {} to having fun".format(having_fun))
display_instructions = ""
while display_instructions.lower() != "xxx":
    # ask the user if they have played before
    display_instructions = input("have you played this game "
                                 "before? ").lower()
    # If they say yes, output 'program continues'
    if display_instructions in ("yes", "y"):
        print("program continues")
    # If they say no, display the instructions
    elif display_instructions in ("no", "n"):
        instructions()
    # If the answer is invalid, print an error.
    elif display_instructions != "xxx":
        print("Please answer yes / no (or xxx to quit)")
# If the answer is invalid, print an error. | [
""
] | |
e15fa7f87879efa8feb072d967fe6fed1b84646c | be4f4362cca2761706e0b23321f7c456f78e49fc | /Algorithms/Quick Union-Find - Application (Percolation).py | 59c8362d733345a4b746f4645dde3628f3dda027 | [] | no_license | Suhail727/GeneralProgramming | 8947ded48c970fa8975445523c42e1d0371d0c44 | 80fb482960f5bd1cdace78b552d3cd08a914bd72 | refs/heads/master | 2021-11-28T07:12:51.198277 | 2021-10-06T15:38:17 | 2021-10-06T15:38:17 | 167,709,868 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,908 | py | #-----------------------------------------------------------------------
# percolation.py
#-----------------------------------------------------------------------
import stdio
import stdarray
#-----------------------------------------------------------------------
# isOpen is a matrix that represents the open sites of a system.
# isFull is a partially completed matrix that represents the full sites
# of that system. Update isFull by marking every site of that system
# that is open and reachable from site (i, j).
def _flow(isOpen, isFull, i, j):
n = len(isFull)
if (i < 0) or (i >= n):
return
if (j < 0) or (j >= n):
return
if not isOpen[i][j]:
return
if isFull[i][j]:
return
isFull[i][j] = True
_flow(isOpen, isFull, i+1, j ) # Down.
_flow(isOpen, isFull, i , j+1) # Right.
_flow(isOpen, isFull, i , j-1) # Left.
_flow(isOpen, isFull, i-1, j ) # Up.
#-----------------------------------------------------------------------
# isOpen is a matrix that represents the open sites of a system.
# Compute and return a matrix that represents the full sites of
# that system.
def flow(isOpen):
    # Start a flow from every open site in the top row and return the
    # resulting n x n matrix of full (reachable) sites.
    n = len(isOpen)
    isFull = stdarray.create2D(n, n, False)
    for j in range(n):
        _flow(isOpen, isFull, 0, j)
    return isFull
#-----------------------------------------------------------------------
# isOpen is matrix that represents the open sites of a system. Return
# True if that system percolates, and False otherwise.
def percolates(isOpen):
    """Return True when fluid poured on the top row reaches the bottom row."""
    isFull = flow(isOpen)
    bottom_row = isFull[len(isFull) - 1]
    return any(bottom_row)
#-----------------------------------------------------------------------
# Read from standard input a boolean matrix that represents the
# open sites of a system. Write to standard output a boolean
# matrix representing the full sites of the system. Then write
# True if the system percolates and False otherwise.
def main():
    # Read an n x n boolean matrix of open sites from stdin, write the
    # matrix of full sites, then write whether the system percolates.
    isOpen = stdarray.readBool2D()
    stdarray.write2D(flow(isOpen))
    stdio.writeln(percolates(isOpen))
#isOpen = stdarray.readBool2D()
#stdarray.write2D(flow(isOpen))
#draw(isOpen, False)
#stddraw.setPenColor(stddraw.BLUE)
#draw(flow(isOpen), True)
#stdio.writeln(percolates(isOpen))
#stddraw.show()
if __name__ == '__main__':
main()
#-----------------------------------------------------------------------
# python percolation.py < test5.txt
# 5 5
# 0 1 1 0 1
# 0 0 1 1 1
# 0 0 0 1 1
# 0 0 0 0 1
# 0 1 1 1 1
# True
# python percolation.py < test8.txt
# 8 8
# 0 0 1 1 1 0 0 0
# 0 0 0 1 1 1 1 1
# 0 0 0 0 0 1 1 0
# 0 0 0 0 0 1 1 1
# 0 0 0 0 0 1 1 0
# 0 0 0 0 0 0 1 1
# 0 0 0 0 1 1 1 1
# 0 0 0 0 0 1 0 0
# True | [
"noreply@github.com"
] | noreply@github.com |
3396d6c761448a379ff7daa72b1bf3da2deb0c49 | 00d7e9321d418a2d9a607fb9376b862119f2bd4e | /utils/appendix_figure_renderer.py | 3b234fb896d68ad1a3003e3c74ea40a594132ff7 | [
"MIT"
] | permissive | baluneboy/pims | 92b9b1f64ed658867186e44b92526867696e1923 | 5a07e02588b1b7c8ebf7458b10e81b8ecf84ad13 | refs/heads/master | 2021-11-16T01:55:39.223910 | 2021-08-13T15:19:48 | 2021-08-13T15:19:48 | 33,029,780 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,405 | py | #!/usr/bin/env python
import os
from collections import OrderedDict
from pims.files.pdfs.pdfjam import PdfjamCommand
from pims.files.pdfs.pdftk import convert_odt2pdf, PdftkCommand
from pims.files.pdfs.pdfjam import CpdfScalePageCommand, CpdfStampCommand
from pims.files.utils import listdir_filename_pattern
from appy.pod.renderer import Renderer
# FIXME see about totally avoiding ODT template and using cpdf for header and footer placeholder text, etc.
# FIXME see about why some pages (all portrait, 3-panel subplots get scaled differently)
# create PDF output file rendered from ODT template that has conditional text (appy.pod) placeholders
def render_pdf_background_from_odt_template(input_odt_template_file, header_dict, page_num, total_num, appendix_letter, fig_num, caption, pdf_out_dir):
    """create PDF output file rendered from ODT template that has conditional
    text (appy.pod) placeholders; returns the full path of the produced PDF"""
    # add specifics for this page to dict
    # NOTE(review): page_dict aliases header_dict, so the per-page keys
    # (PN/TN/AL/FN/Caption) leak back into the caller's dict between calls
    # -- confirm that is intended (dict(header_dict) would avoid it).
    page_dict = header_dict
    page_dict['PN'] = page_num
    page_dict['TN'] = total_num
    page_dict['AL'] = appendix_letter
    page_dict['FN'] = fig_num
    page_dict['Caption'] = caption
    # now page_dict contains all expected names for appy/pod template substitution
    # create output filename
    pagestr = '_page%03d' % page_num
    tmp_name = 'appendix' + appendix_letter.upper() + pagestr + '.odt'
    tmp_odt_file = os.path.join(pdf_out_dir, tmp_name)
    # render odt
    odt_renderer = Renderer( input_odt_template_file, page_dict, tmp_odt_file )
    odt_renderer.run()
    # convert to pdf
    convert_odt2pdf(tmp_odt_file)
    # return PDF fullfilename
    return tmp_odt_file.replace('.odt', '.pdf')
# return list of PDF files for this drop number (i.e. drop dir)
def get_analysis_template_plot_pdfs(drop_num):
    """return list of PDF files for this drop number (i.e. drop dir)"""
    # hard-coded publication directory for the 2016 ZGF campaign
    dirpath = '/misc/yoda/www/plots/user/urban/sams_zgf_2016/publication/drop%d' % drop_num
    fname_pat = 'drop.*\.pdf'
    tmp_list = listdir_filename_pattern(dirpath, fname_pat)
    # filter tmp_list to ignore previous run's _cpdf_ filenames
    return [ x for x in tmp_list if "_cpdf_" not in x ]
# return filename of scaled plot PDF file
def scale_plot_pdf_file(plot_pdf_file, xscale=0.8, yscale=0.8):
    """return filename of scaled plot PDF file"""
    # scale the page content via cpdf (defaults to 80% on both axes)
    cmd = CpdfScalePageCommand(plot_pdf_file, xscale=xscale, yscale=yscale)
    cmd.run()
    return cmd.outfile
# return filename of newly created background (header, page number, title, etc.)
def create_background_onto_file(page_num, total_num, appendix_letter, fig_num, caption, pdf_out_dir, title, subtitle, mon_year):
    """return filename of newly created background (header, page number, title, etc.)"""
    # trusted template ODT file
    odt_template_file = '/home/pims/Documents/appendix_plots_and_figures_template.odt'
    # these dict items apply to all pages (header lines) in appendices
    header_dict = {'title': title, 'subtitle': subtitle, 'mon_year': mon_year} # applies to all pages
    # delegate the actual ODT render + PDF conversion
    return render_pdf_background_from_odt_template(odt_template_file, header_dict, page_num, total_num, appendix_letter, fig_num, caption, pdf_out_dir)
def do_drop(drop_num):
    # NOTE(review): DROP_MAP is only defined as a local inside
    # three_appendices_at_once below, so calling do_drop as-is raises a
    # NameError -- the map likely belongs at module level; confirm.
    appendix_letter = DROP_MAP[drop_num]
    print 'Working on Drop %d for Appendix %s' % (drop_num, appendix_letter)
    # get pdf files for this drop
    drop_files = get_analysis_template_plot_pdfs(drop_num)
def three_appendices_at_once():
    """Build stamped pages for Drops 2-4 (Appendices C-E) in one pass.

    For every analysis-template plot PDF found across the three drop dirs:
    scale it, render a background page (header, page number, figure caption)
    and stamp the plot onto that background in /tmp. Page numbering follows
    the layout of the companion Word document.
    """
    FIRST_ATP_APP_PAGE_NUM = 27 # 1st page num from Word document's 1st "Analysis Template..." appendix
    INTERIM_PAGES_ADDED = 2 # one each for Drop 3's and Drop 4's first (non-fig) page
    NUM_PAGES_AFTER_LAST_ATP_PAGE = 3 # how many pages in Word doc come after last "Analysis Template..." appendix page
    DROP_MAP = {
    #DROP_NUM APPENDIX
    2: 'C',
    3: 'D',
    4: 'E',
    }
    # these dict items apply to all pages (header lines) in appendices
    title = 'Analysis of SAMS Measurements on M-Vehicle in Zero Gravity Research Facility for Characterization Drops from January 20 to February 3, 2016'
    subtitle = 'SAMS-DOC-013'
    mon_year = 'September, 2016'
    # get list of analysis template plot PDFs
    pdf_files = []
    for drop_num in [2, 3, 4]:
        drop_files = get_analysis_template_plot_pdfs(drop_num)
        pdf_files.extend(drop_files)
    #print pdf_files[0:3]
    #print pdf_files[-2:]
    #raise SystemExit
    # get list of captions; FIXME as this naively assumes one-to-one match with pdf_files gotten above
    captions_file = '/misc/yoda/www/plots/user/urban/sams_zgf_2016/publication/captions_for_analysis_template_plots.txt'
    with open(captions_file) as f:
        caption_lines = f.readlines()
    captions = [x.strip('\n') for x in caption_lines]
    if len(pdf_files) != len(captions):
        raise Exception('Abort: the number of PDFs found does not match the number of captions.')
    # total page count shown in each page footer/header
    total_num = FIRST_ATP_APP_PAGE_NUM + len(captions) + INTERIM_PAGES_ADDED + NUM_PAGES_AFTER_LAST_ATP_PAGE
    top_offset = 99 # used by CpdfStampCommand to offset scaled plot from top of page during stamping
    # for each plot PDF file, scale it and stamp on background PDF with header, page number, page total, etc.
    count = 0
    page_num = FIRST_ATP_APP_PAGE_NUM
    old_drop = None
    for tup in zip(pdf_files, captions):
        pdf_file = tup[0]
        caption = tup[1]
        count += 1
        # drop number is the 5th char of filenames like "dropN..."
        drop_num = int(os.path.basename(pdf_file)[4])
        appendix_letter = DROP_MAP[drop_num]
        # FIXME what is better, pythonic way to get page_num reset for each new appendix
        if old_drop and drop_num != old_drop:
            page_num += 1 # advance to allow for first Appendix (non-fig) page
            count = 1 # reset count within appendix
        # scale plot PDF (portrait) file and return scaled filename
        scaled_plot_pdf_file = scale_plot_pdf_file(pdf_file)
        # specifics for this page
        fig_num = count
        page_num += 1
        # FIXME use temp files and do clean-up of those
        # FIXME see about quieting down the logging output crap
        pdf_out_dir = '/tmp'
        onto_file = create_background_onto_file(page_num, total_num, appendix_letter, fig_num, caption, pdf_out_dir, title, subtitle, mon_year)
        cmd = CpdfStampCommand(scaled_plot_pdf_file, top_offset, onto_file)
        cmd.run()
        print 'p.%02d Fig.%s-%02d %s\n%s' % (page_num, appendix_letter, count, os.path.basename(pdf_file), cmd.outfile)
        old_drop = drop_num
    # manual post-processing reminder (join pages into one PDF per appendix)
    print 'NOW DO pdfjoin appendix*_cpdf_stamp-on_99.pdf IN /tmp DIR'
def one_appendix(drop_num, appendix_letter, fig1_page_num, total_doc_pages, title, subtitle, mon_year):
    """Stamp each of one drop's plot PDFs onto a rendered background page.

    :param drop_num: drop number whose plot PDFs are processed
    :param appendix_letter: appendix label used in figure numbers (e.g. 'C')
    :param fig1_page_num: document page number of this appendix's first figure
    :param total_doc_pages: total page count shown on each page
    :param title: document title shown in every page header
    :param subtitle: document subtitle shown in every page header
    :param mon_year: month/year string shown in every page header
    """
    # get list of analysis template plot PDFs
    pdf_files = get_analysis_template_plot_pdfs(drop_num)
    print 'Found %d PDF files for Drop %d in Appendix %s' % (len(pdf_files), drop_num, appendix_letter)
    # get list of captions; FIXME as this naively assumes one-to-one match with pdf_files gotten above
    captions_file = '/misc/yoda/www/plots/user/urban/sams_zgf_2016/publication/captions_for_analysis_template_plots_appendix_%s.txt' % appendix_letter.lower()
    with open(captions_file) as f:
        caption_lines = f.readlines()
    captions = [x.strip('\n') for x in caption_lines]
    if len(pdf_files) != len(captions):
        raise Exception('Abort: the number of PDFs found does not match the number of captions.')
    top_offset = 99 # used by CpdfStampCommand to offset scaled plot from top of page during stamping
    # for each plot PDF file, scale it and stamp on background PDF with header, page number, page total, etc.
    fig_num = 1
    page_num = fig1_page_num
    for tup in zip(pdf_files, captions):
        pdf_file = tup[0]
        caption = tup[1]
        # scale plot PDF (portrait) file and return scaled filename
        scaled_plot_pdf_file = scale_plot_pdf_file(pdf_file)
        # FIXME use temp files and do clean-up of those
        # FIXME see about quieting down the logging output crap
        pdf_out_dir = '/tmp'
        onto_file = create_background_onto_file(page_num, total_doc_pages, appendix_letter, fig_num, caption, pdf_out_dir, title, subtitle, mon_year)
        cmd = CpdfStampCommand(scaled_plot_pdf_file, top_offset, onto_file)
        cmd.run()
        print 'p.%02d Fig.%s-%02d %s\n%s' % (page_num, appendix_letter, fig_num, os.path.basename(pdf_file), cmd.outfile)
        fig_num += 1
        page_num += 1
    # manual post-processing steps: join pages, then repair with ghostscript
    print 'IN /tmp DIR, NOW DO FOLLOWING:'
    print 'pdfjoin appendixC_*_cpdf_stamp-on_99.pdf -o /tmp/appendixC.pdf'
    print 'pdfjoin appendixD_*_cpdf_stamp-on_99.pdf -o /tmp/appendixD.pdf'
    print 'pdfjoin appendixE_*_cpdf_stamp-on_99.pdf -o /tmp/appendixE.pdf'
    print 'THEN, THIS:'
    print '/usr/bin/gs -o gs-repaired-appendixC.pdf -dPDFSETTINGS=/prepress -sDEVICE=pdfwrite appendixC.pdf'
    print '/usr/bin/gs -o gs-repaired-appendixD.pdf -dPDFSETTINGS=/prepress -sDEVICE=pdfwrite appendixD.pdf'
    print '/usr/bin/gs -o gs-repaired-appendixE.pdf -dPDFSETTINGS=/prepress -sDEVICE=pdfwrite appendixE.pdf'
if __name__ == "__main__":
#three_appendices_at_once()
# FIXME -- modify these (maybe just mon_year and total_doc_pages)
# these dict items apply to all pages (header lines) in all appendices
title = 'Analysis of SAMS Measurements on M-Vehicle in Zero Gravity Research Facility for Characterization Drops from January 20 to February 3, 2016'
subtitle = 'SAMS-DOC-013'
mon_year = 'February, 2017'
total_doc_pages = 119
# FIXME -- modify these (in Word, try Ctrl+G and go to page nums shown and verify those are first fig pages)
# Check your Word doc to see how these value should be set:
drop_info = {
# DROP APPENDIX FIRST_FIG_PAGE_NUM
2: ('C', 28),
3: ('D', 57),
4: ('E', 86),
}
for drop_num, tup in drop_info.iteritems():
appendix_letter, fig1_page_num = tup[0], tup[1]
one_appendix(drop_num, appendix_letter, fig1_page_num, total_doc_pages, title, subtitle, mon_year)
print drop_num, appendix_letter, fig1_page_num, "done" | [
"silversnoopy2002@gmail.com"
] | silversnoopy2002@gmail.com |
adb17ca10b7a353fcc42e56c097b7fa3a4337871 | 43f0952a8e5f99e4e63d8155c27d7a3d4819bb05 | /model/decoder2.py | 7085389fbe968928e65c008c9a98ea28d2bab531 | [] | no_license | DeepLearnXMU/ABD-NMT | dc39b7f7a92d543111162958214cda800ba46766 | 833d28d0c55faf5b644e744837ac17124141736f | refs/heads/master | 2021-01-23T14:21:54.345472 | 2017-09-12T08:20:27 | 2017-09-12T08:20:27 | 102,686,231 | 35 | 9 | null | null | null | null | UTF-8 | Python | false | false | 9,157 | py | import itertools
import theano
import theano.tensor as T
import nn
import ops
from bridge import attention, map_key
class Decoder:
    """Abstract base class for attention-based RNN decoders.

    Subclasses implement the per-timestep recurrence (``step``), the output
    distribution (``prediction``) and the training ``forward`` pass; the
    sampling and alignment graphs are optional extras.
    """
    def __init__(self, n_src, dim_y, dim_hid, dim_key, dim_value, n_y_vocab, *args, **kwargs):
        """
        :param n_src: number of source-side attention inputs (encoders)
        :param dim_y: dimension of target word embeddings
        :param dim_hid: dimension of decoder's hidden state
        :param dim_key: dimension of query keys
        :param dim_value: dimension of context values
        :param n_y_vocab: target vocabulary size (not stored here; subclasses
            that need it keep their own copy)
        """
        self.n_src = n_src
        self.dim_y = dim_y
        self.dim_hid = dim_hid
        self.dim_key = dim_key
        self.dim_value = dim_value
    def step(self, y_prev, mask, state, keys, values, key_mask):
        """
        One forward recurrence step.
        Returns a (state, context) pair.
        """
        raise NotImplementedError
    def prediction(self, y_emb, state, context, keep_prob=1.0):
        # output distribution p(y_j | y_{j-1}, s_j, c_j); subclass hook
        raise NotImplementedError
    def build_sampling(self, src_seq, src_mask, target_embedding, target_bias, keys, values, initial_state):
        # sampling graph, this feature is optional
        raise NotImplementedError
    def build_attention(self, src_seq, src_mask, target_inputs, tgt_seq, tgt_mask, keys, values,
                        initial_state):
        # attention graph, this feature is optional
        raise NotImplementedError
    def get_cost(self, y_seq, mask, probs):
        """Cross-entropy cost.

        :param y_seq: integer matrix of target word ids; assumes time-major
            (time, batch) layout -- matches the axis-0 shift in subclasses'
            forward()
        :param mask: per-token validity mask, same shape as y_seq
        :param probs: softmax output flattened to (time*batch, vocab)
        :return: (mean cost over the batch, per-sentence cost vector)
        """
        assert probs.ndim == 2
        idx = T.arange(y_seq.flatten().shape[0])
        # probability assigned to each gold token
        ce = -T.log(probs[idx, y_seq.flatten()])
        ce = ce.reshape(y_seq.shape)
        # zero out padding, then sum over time: one loss value per sentence
        ce = T.sum(ce * mask, 0)
        cost = T.mean(ce)
        snt_cost = ce
        return cost, snt_cost
    def scan(self, y_emb, mask, keys, key_mask, values, initial_state):
        """
        Unroll ``step`` over the whole target sequence.
        :return: (states, contexts) sequences produced by the recurrence
        """
        seq = [y_emb, mask]
        outputs_info = [initial_state, None]
        # non-sequence ordering must match step()'s *args layout:
        # all keys first, then all values, then all key masks
        non_seq = keys + values + key_mask
        (states, contexts) = ops.scan(self.step, seq, outputs_info, non_seq)
        return states, contexts
    def forward(self, y_seq, y_emb, mask, keys, key_mask, values, initial_state, keep_prob=1.0):
        """
        Full training pass; returns (states, contexts, cost, snt_cost).
        """
        raise NotImplementedError
class DecoderGruCond(Decoder):
    """
    Conditional-GRU decoder (dl4mt "cGRU with attention"), extended to
    attend over multiple source encodings.

    prediction: s1, y0 -> y1
    recurrence: s0, y0 -> s1
    """
    def __init__(self, n_src,method, dim_y, dim_hid, dim_key, dim_value, dim_readout, n_y_vocab, *args, **kwargs):
        """
        see `https://github.com/nyu-dl/dl4mt-tutorial/blob/master/docs/cgru.pdf`
        1. s_j^{\prime} = GRU^1(y_{j-1}, s_{j-1})
        2. c_j = att(H, s_j^{\prime})
        3. s_j = GRU^2(c_j, s_j^{\prime})

        :param method: how per-source contexts are combined in step():
            "attn" (second-level attention over sources) or "concat"
        :param dim_readout: size of the pre-softmax readout layer
        """
        Decoder.__init__(self, n_src, dim_y, dim_hid, dim_key, dim_value, n_y_vocab)
        self.method=method
        self.dim_readout = dim_readout
        self.n_y_vocab = n_y_vocab
        # s_j^{\prime} = GRU^1(y_{j-1}, s_{j-1})
        self.cell1 = nn.rnn_cell.gru_cell([dim_y, dim_hid])
        # s_j = GRU^2(c_j, s_j^{\prime})
        self.cell2 = nn.rnn_cell.gru_cell([dim_value, dim_hid])
    def step(self, y_prev, mask, state, *args):
        # *args carries n_src keys, then n_src values, then n_src masks
        # (packing order defined in Decoder.scan)
        n_src = self.n_src
        assert len(args) == self.n_src * 3
        src_keys = args[:n_src]
        src_values = args[n_src:2 * n_src]
        src_masks = args[2 * n_src:]
        mask = mask[:, None]
        # s_j^{\prime} = GRU^1(y_{j-1}, s_{j-1})
        _, state_prime = self.cell1(y_prev, state, scope="gru1")
        # keep the previous state where the target token is padding
        state_prime = (1.0 - mask) * state + mask * state_prime
        # c_j = att(H, s_j^{\prime}): one attention context per source
        contexts = []
        for i, _key, _val, _mask in itertools.izip(itertools.count(), src_keys, src_values, src_masks):
            alpha = attention(state_prime, _key, _mask, self.dim_hid, self.dim_key, scope='attn_alpha_%d' % i)
            context = theano.tensor.sum(alpha[:, :, None] * _val, 0)
            contexts.append(context)
        if self.method=="attn":
            # second-level attention: weight the per-source contexts by beta
            contexts = T.reshape(T.concatenate(contexts, 0), [n_src] + list(contexts[0].shape))
            with ops.variable_scope("beta"):
                beta_keys = map_key(contexts, self.dim_value, self.dim_key)
                beta = attention(state_prime, beta_keys, T.ones(contexts.shape[:2]), self.dim_hid, self.dim_key,
                                 scope='beta')
                context = T.sum(beta[:, :, None] * contexts, 0)
        elif self.method=="concat":
            # simple concatenation of the per-source contexts
            context=T.concatenate(contexts,-1)
        # s_j = GRU^2(c_j, s_j^{\prime})
        output, next_state = self.cell2(context, state_prime, scope="gru2")
        next_state = (1.0 - mask) * state + mask * next_state
        return next_state, context
    def build_sampling(self, src_seq, src_mask, target_embedding, target_bias, keys, values, initial_state):
        # sampling graph, this feature is optional
        # NOTE(review): unlike step(), this graph assumes a single source
        # (one keys/values pair) and re-implements the recurrence inline;
        # confirm it is only used in the single-encoder setting
        max_len = T.iscalar()
        def sampling_loop(inputs, state, keys, values, key_mask):
            # one decode step: recurrence, attention, prediction, then
            # sample the next word and embed it for the following step
            _, state_prime = self.cell1(inputs, state, scope="gru1")
            alpha = attention(state_prime, keys, key_mask, self.dim_hid, self.dim_key)
            context = T.sum(alpha[:, :, None] * values, 0)
            output, next_state = self.cell2(context, state_prime, scope="gru2")
            probs = self.prediction(inputs, next_state, context)  # p(y_j) \propto f(y_{j-1}, c_j, s_j)
            next_words = ops.random.multinomial(probs).argmax(axis=1)
            new_inputs = nn.embedding_lookup(target_embedding, next_words)
            new_inputs = new_inputs + target_bias
            return [next_words, new_inputs, next_state]
        with ops.variable_scope("decoder"):
            batch = src_seq.shape[1]
            # start decoding from a zero "previous word" embedding
            initial_inputs = T.zeros([batch, self.dim_y], theano.config.floatX)
            outputs_info = [None, initial_inputs, initial_state]
            nonseq = [keys, values, src_mask]
            outputs, updates = theano.scan(sampling_loop, [], outputs_info,
                                           nonseq, n_steps=max_len)
            sampled_words = outputs[0]
        sampling_inputs = [src_seq, src_mask, max_len]
        sampling_outputs = sampled_words
        sample = theano.function(sampling_inputs, sampling_outputs,
                                 updates=updates)
        return sample
    def build_attention(self, src_seq, src_mask, target_inputs, tgt_seq, tgt_mask, keys, values, initial_state):
        # attention graph, this feature is optional
        # NOTE(review): like build_sampling, this assumes a single source
        def attention_loop(inputs, mask, state, keys, values, key_mask):
            mask = mask[:, None]
            # s_j^{\prime} = GRU^1(y_{j-1}, s_{j-1})
            _, state_prime = self.cell1(inputs, state, scope="gru1")
            # c_j = att(H, s_j^{\prime})
            alpha = attention(state_prime, keys, key_mask, self.dim_hid, self.dim_key)
            context = T.sum(alpha[:, :, None] * values, 0)
            # s_j = GRU^2(c_j, s_j^{\prime})
            output, next_state = self.cell2(context, state_prime, scope="gru2")
            next_state = (1.0 - mask) * state + mask * next_state
            return [alpha, next_state]
        with ops.variable_scope("decoder"):
            seq = [target_inputs, tgt_mask]
            outputs_info = [None, initial_state]
            nonseq = [keys, values, src_mask]
            # "updaptes" is a typo for "updates"; harmless since it is unused
            (alpha, state), updaptes = theano.scan(attention_loop, seq,
                                                   outputs_info, nonseq)
            attention_score = alpha
        alignment_inputs = [src_seq, src_mask, tgt_seq, tgt_mask]
        alignment_outputs = attention_score
        align = theano.function(alignment_inputs, alignment_outputs)
        return align
    def prediction(self, y_emb, state, context, keep_prob=1.0):
        """
        readout -> softmax
        p(y_j) \propto f(y_{j-1}, s_{j}, c_{j})
        :param y_emb: embedding of the previous target word
        :param state: decoder hidden state s_j
        :param context: attention context c_j
        :param keep_prob: dropout keep probability for the readout layer
        :return: softmax probabilities, flattened to 2-D when the inputs
            are full sequences
        """
        features = [state, y_emb, context]
        readout = nn.feedforward(features, [[self.dim_hid, self.dim_y, self.dim_value], self.dim_readout], True,
                                 activation=T.tanh,
                                 scope="readout")
        if keep_prob < 1.0:
            readout = nn.dropout(readout, keep_prob=keep_prob)
        logits = nn.linear(readout, [self.dim_readout, self.n_y_vocab], True,
                           scope="logits")
        # full sequences produce 3-D logits; flatten to 2-D for softmax
        if logits.ndim == 3:
            new_shape = [logits.shape[0] * logits.shape[1], -1]
            logits = logits.reshape(new_shape)
        probs = T.nnet.softmax(logits)
        return probs
    def forward(self, y_seq, y_emb, mask, keys, key_mask, values, initial_state, keep_prob=1.0):
        """Training pass; returns (states, contexts, cost, snt_cost)."""
        # shift embedding one step right so position j sees y_{j-1}
        y_shifted = T.zeros_like(y_emb)
        y_shifted = T.set_subtensor(y_shifted[1:], y_emb[:-1])
        y_emb = y_shifted
        # feed
        states, contexts = Decoder.scan(self, y_emb, mask, keys, key_mask, values, initial_state)
        # p(y_j) \propto f(y_{j-1}, s_{j}, c_{j})
        probs = self.prediction(y_emb, states, contexts, keep_prob)
        # compute cost
        cost, snt_cost = self.get_cost(y_seq, mask, probs)
        return states, contexts, cost, snt_cost
| [
"angel123goo@gmail.com"
] | angel123goo@gmail.com |
c0d01549392c14b63f25cf3ca994a4bb47d47047 | 770537437474c63f6878c26a10a5853a9687c649 | /Service/app/subscriber.py | fc7c5667b54bc22b29bbde8c6796ec4cd403f98a | [] | no_license | Makalolu/BRKACI-2945-CLUS | 89013da0a2c828abe43b2ab39f8bb85587c625ff | 197702202ca146e6c82cb39ad48fad8569d1393d | refs/heads/master | 2022-02-22T19:00:47.438095 | 2018-06-17T17:27:52 | 2018-06-17T17:27:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,783 | py |
import logging, sys
from .utils import (setup_logger, get_app, pretty_print, db_is_alive, init_db,
get_apic_session, get_class, subscribe,
)
# module level logging
logger = logging.getLogger(__name__)
def dns_subscriptions(db):
    """ build subscription to APIC dns objects and keep consistent values in
        database. On startup, simply wipe the db since we'll be pulling new
        objects (and any cached entries can be considered invalid on startup)

        dnsDomain
            - multiple domains supported, only one is 'default'
            - track 'name' and 'isDefault' (yes/no)
            - only support dnsp-default
        dnsProv
            - multiple providers supported, only one is preferred
            - track 'addr' which should be unique and 'preferred' (yes/no)
            - only support dnsp-default
    """
    # initialize db to clear out all existing objects
    init_db()

    # read initial state and insert into database
    (domains, providers) = ([], [])
    session = get_apic_session()
    if session is None:
        logger.error("unable to connect to APIC")
        return
    dnsDomain = get_class(session, "dnsDomain")
    dnsProv = get_class(session, "dnsProv")
    if dnsDomain is None or dnsProv is None:
        logger.error("failed to perform dns init")
        return
    for obj in dnsDomain:
        attr = obj[obj.keys()[0]]["attributes"]
        if "name" in attr and "dn" in attr and "isDefault" in attr:
            if "/dnsp-default/" in attr["dn"]:
                domains.append({
                    "dn": attr["dn"],
                    "name": attr["name"],
                    "isDefault": True if attr["isDefault"] == "yes" else False
                })
    for obj in dnsProv:
        attr = obj[obj.keys()[0]]["attributes"]
        if "addr" in attr and "dn" in attr and "preferred" in attr:
            if "/dnsp-default/" in attr["dn"]:
                providers.append({
                    "dn": attr["dn"],
                    "addr": attr["addr"],
                    "preferred": True if attr["preferred"] == "yes" else False
                })

    # insert domains and providers into database.  pymongo's insert_many
    # raises InvalidOperation for an empty document list, so guard against
    # an APIC with no dnsp-default domains/providers configured
    logger.debug("inserting domains: %s, and providers: %s"%(domains,providers))
    if domains:
        db.dnsDomain.insert_many(domains)
    if providers:
        db.dnsProv.insert_many(providers)

    # setup subscriptions to interesting objects; subscribe() blocks and
    # dispatches events to handle_dns_event until the subscription dies
    interests = {
        "dnsDomain": {"callback": handle_dns_event},
        "dnsProv": {"callback": handle_dns_event},
    }
    subscribe(interests)
    logger.error("subscription unexpectedly ended")
def handle_dns_event(event):
    """Apply a dnsDomain/dnsProv created/modified/deleted event to the db.

    Upserts or removes the corresponding document, and drops the dnsCache
    collection whenever an object is created or deleted (cached lookups may
    now be stale).
    """
    if "imdata" not in event or type(event["imdata"]) is not list:
        return
    for mo in event["imdata"]:
        cname = list(mo)[0]
        attr = mo[cname]["attributes"]
        # only well-formed create/modify/delete events are processed
        if "status" not in attr or "dn" not in attr or \
            attr["status"] not in ["created", "modified", "deleted"]:
            logger.warn("skipping invalid event for %s: %s" % (attr, cname))
            continue
        if cname not in ["dnsProv", "dnsDomain"]:
            logger.debug("skipping event for classname %s" % cname)
            continue
        # pick the attributes tracked for this class and normalize the
        # yes/no flags to booleans
        if cname == "dnsDomain":
            fields = ["dn", "name", "isDefault"]
        else:
            fields = ["dn", "addr", "preferred"]
        doc = dict((a, attr[a]) for a in fields if a in attr)
        for flag in ("isDefault", "preferred"):
            if flag in doc:
                doc[flag] = (doc[flag] == "yes")
        status = attr["status"]
        logger.debug("%s %s obj:%s" % (cname, status, doc))
        if status in ("created", "modified"):
            ret = db[cname].update_one(
                {"dn": attr["dn"]}, {"$set": doc}, upsert=True
            )
            logger.debug("update_one match/modify/upsert: [%s,%s,%s]" % (
                ret.matched_count, ret.modified_count, ret.upserted_id))
        if status == "deleted":
            ret = db[cname].delete_one({"dn": attr["dn"]})
            logger.debug("delete_one deleted: %s" % ret.deleted_count)
        if status in ("created", "deleted"):
            logger.debug("clearing dnsCache")
            db["dnsCache"].drop()
if __name__ == "__main__":
# main can be used to run subscription or just to test db access
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("--check_db", action="store_true", dest="check_db",
help="check for successful db connection")
args = parser.parse_args()
try:
# setup local logger along with 'app' logger
logger = setup_logger(logger, "subscriber.log", quiet=True)
setup_logger(logging.getLogger("app"), "subscriber.log", quiet=True)
# check db is alive before executing background subscriber
if not db_is_alive():
logger.error("unable to connect to db")
sys.exit(1)
if args.check_db:
# successfully checked db already
sys.exit(0)
# run subscriptions which only stop on error
app = get_app()
with app.app_context():
db = app.mongo.db
dns_subscriptions(db)
except KeyboardInterrupt as e:
print "\ngoodbye!\n"
sys.exit(1)
| [
"agossett@cisco.com"
] | agossett@cisco.com |
ac914612ce2117c1765de9ca6321750ef3079aef | 838b09e0e25280cccf754e788af8febbfb1275f7 | /app/__init__.py | f460a89951f37c5aa6fabc5ec077d2699df1e3c1 | [] | no_license | Pincenti/fakebook-march | acf1dcb0cb16770353026f3e7f112709cda8bc5e | 46be080bcc26ea661817bcb295804ff443a02b6d | refs/heads/main | 2023-04-09T05:06:16.000529 | 2021-04-23T19:02:10 | 2021-04-23T19:02:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 75 | py | from flask import Flask
# Flask application instance shared across the package.
app = Flask(__name__)

# Imported for side effects: registers route handlers and models on `app`.
# Must stay below the app creation to avoid a circular import at startup.
from .import routes, models
"derekh@codingtemple.com"
] | derekh@codingtemple.com |
68b03c73feb62075825cdf3e27d8073937e5af7c | 79ecfdc98e9d44cf9c7d698f057a66f2c2cad2f7 | /main.py | 8e13e343f031c4e5d9029b68c43e901c06e96f4b | [] | no_license | bohunicka14/Automorphism-and-symmetries | d099a5be4d8aa02ef2db3c2b5449dfd7baafe666 | c3307fe1db87f81026c6250c4988d93284939b8a | refs/heads/master | 2021-07-24T16:07:37.509945 | 2019-03-27T15:11:24 | 2019-03-27T15:11:24 | 174,871,729 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,678 | py |
class Node(object):
    """A simple tree node holding a value and a list of child nodes."""

    def __init__(self, value, children=None):
        """Create a node; defaults to no children.

        Uses None as the default instead of []: a mutable default list is
        evaluated once and would be shared by every Node created without
        an explicit children argument.
        """
        self.value = value
        self.children = [] if children is None else children

    def __str__(self, level=0):
        """Render the subtree rooted here, one tab of indent per level."""
        ret = "\t"*level+repr(self.value)+"\n"
        for child in self.children:
            ret += child.__str__(level+1)
        return ret

    def is_leaf(self):
        # NOTE(review): tests for exactly one child rather than zero; this
        # mirrors Graph.is_leaf (degree 1) but looks suspicious for a tree
        # node -- confirm the intended semantics before relying on it.
        return len(self.children) == 1
class Graph():
    """Undirected graph stored as an adjacency dict of vertex -> set."""

    def __init__(self):
        self.g = dict()

    def add_edge(self, v1, v2):
        """Add an undirected edge between v1 and v2."""
        self.g.setdefault(v1, set()).add(v2)
        self.g.setdefault(v2, set()).add(v1)

    def is_leaf(self, vertex, graph):
        """Return True if vertex has degree 1 in graph, None if absent."""
        if graph.get(vertex, None) is not None:
            return len(graph.get(vertex)) == 1
        else:
            return None

    def del_leaf(self, vertex, graph):
        """Remove a leaf vertex from graph and return its neighbour.

        Returns None when vertex is not a leaf (or not present).
        """
        if self.is_leaf(vertex, graph):
            adjacent = next(iter(graph.get(vertex)))
            del graph[vertex]
            graph[adjacent].remove(vertex)
            return adjacent
        return None

    def prufer_code(self):
        """Return the Pruefer code of the tree as a concatenated string.

        Repeatedly deletes the smallest-labelled leaf and records its
        neighbour until only two vertices remain (n-2 entries for a tree
        on n vertices).
        """
        # Deep-copy the adjacency sets: the original shallow dict(self.g)
        # shared the sets with self.g, so computing the code mutated the
        # graph and made repeated calls return wrong results.
        copy = {v: set(adj) for v, adj in self.g.items()}
        result = ''
        while len(copy) > 2:
            for vertex in sorted(copy):
                if self.is_leaf(vertex, copy):
                    result += str(self.del_leaf(vertex, copy))
                    break
        return result
if __name__ == '__main__':
    # Demo: build a small sample tree and print its Pruefer code.
    tree = Graph()
    for u, v in ((1, 4), (2, 4), (3, 4), (5, 4), (5, 6)):
        tree.add_edge(u, v)
    print(tree.prufer_code())
| [
"bohunicka.ingrid@gmail.com"
] | bohunicka.ingrid@gmail.com |
60c9b57a2eda6c2eaf8e4755cfd166017886804e | af172343e2d564dacefd382ea4689dd8c6646e7d | /backend/selenium/driverFactory.py | e1e510dd7a7052a6ffc03464008a437060419efa | [] | no_license | simalda/Quiz | ba6187d600e8b7f93640ecac82b00a6d165e43b8 | 5a66d85a4dce4dc98db67212c11cdf556c49c8a2 | refs/heads/master | 2021-06-23T15:28:45.422751 | 2020-06-02T11:47:06 | 2020-06-02T11:47:06 | 194,924,234 | 0 | 0 | null | 2021-03-12T10:12:10 | 2019-07-02T19:37:32 | JavaScript | UTF-8 | Python | false | false | 449 | py | from selenium import webdriver
# import LogInPage from "./LogInPage";
class DriverFactory():
    """Factory helpers for creating Selenium Chrome drivers for the tests."""

    def createDriverChrome(self):
        """Create a Chrome driver configured with a 10s implicit wait."""
        driver = webdriver.Chrome()
        driver.implicitly_wait(10)
        return driver

    def createAndOpenChrome(self):
        """Create a Chrome driver and open the local dev server.

        Reuses createDriverChrome instead of duplicating the driver setup.
        """
        driver = self.createDriverChrome()
        driver.get('http://localhost:3000/')
        driver.implicitly_wait(10)
        return driver
| [
"simalda83@gmail.com"
] | simalda83@gmail.com |
70a5a2a8d97d47e4470a414ce3590f34bca83b74 | 22e076588057d200c7119f87d330678e7ed7d168 | /posts/forms.py | 8094a686d31545ae191391fd805ca09373a1291f | [] | no_license | DylanMsK/Insta_clone_project | 16088926bda8f66fe412016f1764076dd46a7629 | 7921bef90aad1128021bd4e2bb60f96fd0efab01 | refs/heads/master | 2020-05-05T13:03:29.896332 | 2019-04-18T09:01:33 | 2019-04-18T09:01:33 | 180,057,187 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 621 | py | from django import forms
from .models import Post, Comment
class PostModelForm(forms.ModelForm):
    """ModelForm for creating/editing a Post.

    Overrides the content field so it renders as a fixed-size textarea with
    a placeholder prompt (field declaration order matters for rendering).
    """
    content = forms.CharField(
        label='content',
        widget=forms.Textarea(
            attrs={
                'class': '',
                'rows': 5,
                'cols': 50,
                'placeholder': '지금 뭘 하고 계신가요?'
            })
    )
    class Meta:
        model = Post
        # model columns exposed as form inputs, as a list
        fields = ['content', 'image',]
class CommentForm(forms.ModelForm):
    """ModelForm exposing only the text field of a Comment."""
    class Meta:
        model = Comment
        fields = ['comment',]
| [
"kms920612@gmail.com"
] | kms920612@gmail.com |
d7da85ffc7cbcb65f57bb197ce31ee9419b6cef7 | 806dba902649dbc8c26db0bc105b26217c9f5f45 | /src/react_ssr/mixins/default_state.py | ffabcb8706782061d655ce9672c26501e847b5c3 | [
"BSD-2-Clause"
] | permissive | urantialife/react-ssr-backend | b03ec62fd1b745e82ac8f7a7d239de5c4dd42be4 | f4d094fbd15ff1eb2178aeac0bf9bec8c70414f7 | refs/heads/master | 2020-06-09T06:54:23.345503 | 2019-06-03T16:23:55 | 2019-06-03T16:23:55 | 193,395,009 | 1 | 0 | BSD-2-Clause | 2019-06-23T20:59:54 | 2019-06-23T20:59:54 | null | UTF-8 | Python | false | false | 1,462 | py | import requests
from ..exceptions import GetDefaultStateError
from ..settings.default_state import (
DEFAULT_STATE_TIMEOUT,
DEFAULT_STATE_URL,
DEFAULT_STATE_HEADERS
)
class DefaultStateMixin(object):
    """Mixin that fetches a reducer's default state from an HTTP service."""

    # Class-level settings; override on a subclass to customize behaviour.
    default_state_timeout = DEFAULT_STATE_TIMEOUT
    default_state_url = DEFAULT_STATE_URL
    default_state_headers = DEFAULT_STATE_HEADERS

    def get_default_state_headers(self):
        """Return a fresh copy of the configured request headers."""
        return self.default_state_headers.copy()

    def get_default_state(self, reducer_name):
        """Fetch and return the default state for *reducer_name* as JSON.

        Raises GetDefaultStateError on timeout, connection failure, or a
        non-200 response from the state service.
        """
        endpoint = "/".join([self.default_state_url, reducer_name])
        try:
            response = requests.get(
                endpoint,
                timeout=self.default_state_timeout,
                headers=self.get_default_state_headers(),
            )
        except requests.ReadTimeout:
            raise GetDefaultStateError(
                "Could not get default state from {} for {} within {} seconds."
                .format(reducer_name, endpoint, self.default_state_timeout))
        except requests.ConnectionError:
            raise GetDefaultStateError("Could not connect to {}.".format(endpoint))
        if response.status_code != 200:
            raise GetDefaultStateError(
                "Could not get default state from {} for {}.\n\n{}: {}"
                .format(reducer_name, endpoint, response.status_code, response.text))
        return response.json()
| [
"16756928+alexseitsinger@users.noreply.github.com"
] | 16756928+alexseitsinger@users.noreply.github.com |
08e5a9b538fd82c4fcfea17f6b179b383278f0a9 | 3499ea55947cb8d71202a1ea795af1480ed61f32 | /venv/Scripts/pip3.7-script.py | 17fc0769f266a67dd72333fd5491f6596b371936 | [] | no_license | qianhuideng/BankCard | 67ef424940f5f059cf2d0de41a2544cb84ac899a | 9b898b2cc1d71088ad1230ca8459a65cc3e78202 | refs/heads/master | 2020-12-04T08:53:31.917798 | 2020-10-10T02:23:23 | 2020-10-10T02:23:23 | 231,702,502 | 1 | 0 | null | null | null | null | WINDOWS-1256 | Python | false | false | 432 | py | #!C:\Users\22018\Desktop\deng\بي¼±\BankCard\venv\Scripts\python.exe -x
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip3.7'
__requires__ = 'pip==19.0.3'
import re
import sys
from pkg_resources import load_entry_point
# Auto-generated setuptools console_scripts launcher for pip 19.0.3.
if __name__ == '__main__':
    # normalize argv[0]: strip the "-script.py(w)" / ".exe" wrapper suffix
    # so pip sees its own command name
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('pip==19.0.3', 'console_scripts', 'pip3.7')()
    )
| [
"2201891435@qq.com"
] | 2201891435@qq.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.